From f102209de40e96470a2d8e8df7f5758ca30ef128 Mon Sep 17 00:00:00 2001 From: Joseph Sawaya Date: Tue, 22 Apr 2025 15:11:40 -0400 Subject: [PATCH] fix: process flakes summary usage metrics aren't being gathered despite following the docs, so instead of using the decorator I will try using the context manager to see if the problem is with the way I'm emitting metrics, since every other metric being emitted using the context manager is working fine --- services/processing/flake_processing.py | 3 --- services/test_analytics/ta_process_flakes.py | 4 ++-- tasks/process_flakes.py | 4 +++- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/services/processing/flake_processing.py b/services/processing/flake_processing.py index f6b6c3ee5..b6fc40570 100644 --- a/services/processing/flake_processing.py +++ b/services/processing/flake_processing.py @@ -10,15 +10,12 @@ TestInstance, ) -from services.test_analytics.ta_metrics import process_flakes_summary - log = logging.getLogger(__name__) FLAKE_EXPIRY_COUNT = 30 -@process_flakes_summary.labels("old").time() def process_flake_for_repo_commit( repo_id: int, commit_id: str, diff --git a/services/test_analytics/ta_process_flakes.py b/services/test_analytics/ta_process_flakes.py index 7ae87564b..bd09164af 100644 --- a/services/test_analytics/ta_process_flakes.py +++ b/services/test_analytics/ta_process_flakes.py @@ -77,7 +77,6 @@ def handle_failure( curr_flakes[test_id] = new_flake -@process_flakes_summary.labels("new").time() def process_flakes_for_commit(repo_id: int, commit_id: str): uploads = get_relevant_uploads(repo_id, commit_id) @@ -116,7 +115,8 @@ def process_flakes_for_repo(repo_id: int): with redis_client.lock(lock_name, timeout=300, blocking_timeout=3): while commit_ids := redis_client.lpop(key_name, 10): for commit_id in commit_ids: - process_flakes_for_commit(repo_id, commit_id.decode()) + with process_flakes_summary.labels("new").time(): + process_flakes_for_commit(repo_id, commit_id.decode()) return True 
except LockError: log.warning("Failed to acquire lock for repo %s", repo_id) diff --git a/tasks/process_flakes.py b/tasks/process_flakes.py index 75adb1b08..e62d1d4aa 100644 --- a/tasks/process_flakes.py +++ b/tasks/process_flakes.py @@ -9,6 +9,7 @@ from app import celery_app from services.processing.flake_processing import process_flake_for_repo_commit +from services.test_analytics.ta_metrics import process_flakes_summary from services.test_analytics.ta_process_flakes import process_flakes_for_repo from tasks.base import BaseCodecovTask @@ -89,7 +90,8 @@ def run_impl( break for commit_sha in commit_shas: - process_func(repo_id, commit_sha.decode()) + with process_flakes_summary.labels("old").time(): + process_func(repo_id, commit_sha.decode()) except LockError: log.warning("Unable to acquire process flakeslock for key %s.", lock_name)