Skip to content
9 changes: 4 additions & 5 deletions augur/application/cli/github.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def update_api_key():
core_reset_header = "Core Reset Time"
graphql_request_header = "Graphql Requests Left"
graphql_reset_header = "Graphql Reset Time"
print(f"{'Key'.center(40)} {core_request_header} {core_reset_header} {graphql_request_header} {graphql_reset_header}")
print(f"{'Key'.center(40)} {core_request_header} {core_reset_header.center(24)} {graphql_request_header} {graphql_reset_header.center(24)}")
for key, core_key_data, graphql_key_data in valid_key_data:
core_requests = str(core_key_data['requests_remaining']).center(len(core_request_header))
core_reset_time = str(epoch_to_local_time_with_am_pm(core_key_data["reset_epoch"])).center(len(core_reset_header))
Expand Down Expand Up @@ -87,13 +87,12 @@ def update_api_key():

engine.dispose()


def epoch_to_local_time_with_am_pm(epoch):
    """Convert a Unix epoch timestamp to a padded local-time string.

    Args:
        epoch: Seconds since the Unix epoch (int or float).

    Returns:
        str: The local wall-clock time formatted like
        "03:45 PM EST (UTC-0500)", centered in a 24-character field so it
        lines up under the 24-wide reset-time column headers printed by
        update_api_key.
    """
    # astimezone() with no argument attaches the system's local timezone,
    # so %Z and %z render a meaningful zone name and UTC offset instead of
    # empty strings on a naive datetime.
    local_time = datetime.fromtimestamp(epoch).astimezone()
    formatted_time = local_time.strftime('%I:%M %p %Z (UTC%z)').center(24)
    return formatted_time


def find_duplicates(lst):
    """Return the items of *lst* that occur more than once, in first-seen order."""
    tally = {}
    for item in lst:
        tally[item] = tally.get(item, 0) + 1
    # dicts preserve insertion order, so duplicates come out in the order
    # they first appeared in the input.
    return [item for item, occurrences in tally.items() if occurrences > 1]
Expand Down
15 changes: 6 additions & 9 deletions augur/tasks/git/facade_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -491,7 +491,6 @@ def facade_phase(repo_git, full_collection):
#force_analysis = session.force_analysis
run_facade_contributors = facade_helper.run_facade_contributors

facade_sequence = []
facade_core_collection = []

if not limited_run or (limited_run and pull_repos):
Expand All @@ -509,14 +508,12 @@ def facade_phase(repo_git, full_collection):


#These tasks need repos to be cloned by facade before they can work.
facade_sequence.append(
group(
chain(*facade_core_collection),
process_dependency_metrics.si(repo_git),
process_libyear_dependency_metrics.si(repo_git),
process_scc_value_metrics.si(repo_git)
)
facade_sequence = group(
chain(*facade_core_collection),
process_dependency_metrics.si(repo_git),
process_libyear_dependency_metrics.si(repo_git),
process_scc_value_metrics.si(repo_git)
)

logger.info(f"Facade sequence: {facade_sequence}")
return chain(*facade_sequence)
return facade_sequence
62 changes: 29 additions & 33 deletions augur/tasks/util/collection_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -597,37 +597,33 @@ def send_messages(self):
for repo_git, full_collection in col_hook.repo_list:

repo = get_repo_by_repo_git(repo_git)
platform_name = "github"
# this needs to be here and not up a level since it should be set/reset for each repo.
# otherwise a gitlab repo would reset it and cause subsequent github repos to use gitlab phases.
phases = None
if "github" in repo.repo_git:
augur_collection_sequence = []
for job in col_hook.phases:
#Add the phase to the sequence in order as a celery task.
#The preliminary task creates the larger task chain
augur_collection_sequence.append(job(repo_git, full_collection))

#augur_collection_sequence.append(core_task_success_util.si(repo_git))
#Link all phases in a chain and send to celery
augur_collection_chain = chain(*augur_collection_sequence)
task_id = augur_collection_chain.apply_async().task_id

self.logger.info(f"Setting github repo {col_hook.name} status to collecting for repo: {repo_git}")

#yield the value of the task_id to the calling method so that the proper collectionStatus field can be updated
yield repo_git, task_id, col_hook.name
else:
if col_hook.gitlab_phases is not None:

augur_collection_sequence = []
for job in col_hook.gitlab_phases:
#Add the phase to the sequence in order as a celery task.
#The preliminary task creates the larger task chain
augur_collection_sequence.append(job(repo_git, full_collection))

#augur_collection_sequence.append(core_task_success_util.si(repo_git))
#Link all phases in a chain and send to celery
augur_collection_chain = chain(*augur_collection_sequence)
task_id = augur_collection_chain.apply_async().task_id

self.logger.info(f"Setting gitlab repo {col_hook.name} status to collecting for repo: {repo_git}")

#yield the value of the task_id to the calling method so that the proper collectionStatus field can be updated
yield repo_git, task_id, col_hook.name
phases = col_hook.phases
# use default platform name

elif "gitlab" in repo.repo_git:
platform_name = "gitlab"
if col_hook.gitlab_phases is None:
return
phases = col_hook.gitlab_phases

augur_collection_sequence = []
for job in phases:
#Add the phase to the sequence in order as a celery task.
#The preliminary task creates the larger task chain
augur_collection_sequence.append(job(repo_git, full_collection))

#augur_collection_sequence.append(core_task_success_util.si(repo_git))
#Link all phases in a chain and send to celery
augur_collection_chain = chain(*augur_collection_sequence)
task_id = augur_collection_chain.apply_async().task_id

self.logger.info(f"Setting {platform_name} repo {col_hook.name} status to collecting for repo: {repo_git}")

#yield the value of the task_id to the calling method so that the proper collectionStatus field can be updated
yield repo_git, task_id, col_hook.name

Loading