-
Notifications
You must be signed in to change notification settings - Fork 4
Fix/perf thread safe skip #161
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -903,16 +903,16 @@ def test_parallel_processor_sequential_and_thread_modes(tmp_path: Path) -> None: | |
| def process_func(p: Path): | ||
| if p.name == "bad.py": | ||
| raise ValueError("boom") | ||
| return None, None | ||
| return None, None, None | ||
|
|
||
| p_seq = ParallelProcessor(max_workers=1, use_processes=False, enabled=True) | ||
| _, errors = p_seq.process_files(files, process_func) | ||
| _, errors, skips = p_seq.process_files(files, process_func) | ||
| assert p_seq.enabled is False | ||
| assert len(errors) == 1 | ||
| assert isinstance(errors[0], FileAnalysisError) | ||
|
|
||
| p_thr = ParallelProcessor(max_workers=2, use_processes=False, enabled=True) | ||
| results, errors = p_thr.process_files(files, lambda p: (None, None)) | ||
| results, errors, skips = p_thr.process_files(files, lambda p: (None, None, None)) | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. RUF059: underscore unused unpacked return values in these tests. Several tuple-unpacked variables are never read. Rename only the unused ones to underscore-prefixed names to clear Ruff warnings. ♻️ Suggested lint-safe updates: - results, errors, skips = p_thr.process_files(files, lambda p: (None, None, None))
+ results, errors, _skips = p_thr.process_files(files, lambda p: (None, None, None))
- results, errors, skips = pp.process_files([Path("a.py")], raises_func)
+ _results, errors, _skips = pp.process_files([Path("a.py")], raises_func)
- results, errors, skips = pp.process_files(files, success_func)
+ results, _errors, skips = pp.process_files(files, success_func)
- results, errors, skips = pp.process_files([Path("a.py"), Path("b.py")], raises_func)
+ _results, errors, _skips = pp.process_files([Path("a.py"), Path("b.py")], raises_func)
- results, errors, skips = pp.process_files([Path("a.py")], success_func)
+ results, _errors, _skips = pp.process_files([Path("a.py")], success_func)
- results, errors, skips = pp.process_files([Path("a.py")], success_func)
+ results, _errors, _skips = pp.process_files([Path("a.py")], success_func)Also applies to: 1229-1229, 1247-1247, 1253-1253, 1268-1268, 1278-1278 🧰 Tools🪛 Ruff (0.15.9)[warning] 915-915: Unpacked variable Prefix it with an underscore or any other dummy variable pattern (RUF059) 🤖 Prompt for AI Agents |
||
| assert results == [] | ||
| assert errors == [] | ||
| assert p_thr.get_config()["max_workers"] == 2 | ||
|
|
@@ -1171,11 +1171,11 @@ def make_error(path): | |
|
|
||
|
|
||
| def success_func(p): | ||
| return make_metrics(p), None | ||
| return make_metrics(p), None, None | ||
|
|
||
|
|
||
| def error_func(p): | ||
| return None, make_error(p) | ||
| return None, make_error(p), None | ||
|
|
||
|
|
||
| def raises_func(p): | ||
|
|
@@ -1210,23 +1210,23 @@ def test_get_config(self): | |
| class TestSequentialProcessing: | ||
| def test_empty_files(self): | ||
| pp = ParallelProcessor(enabled=False) | ||
| results, errors = pp.process_files([], success_func) | ||
| assert results == [] and errors == [] | ||
| results, errors, skips = pp.process_files([], success_func) | ||
| assert results == [] and errors == [] and skips == [] | ||
|
|
||
| def test_single_file_success(self): | ||
| pp = ParallelProcessor(enabled=False) | ||
| files = [Path("a.py")] | ||
| results, errors = pp.process_files(files, success_func) | ||
| assert len(results) == 1 and len(errors) == 0 | ||
| results, errors, skips = pp.process_files(files, success_func) | ||
| assert len(results) == 1 and len(errors) == 0 and len(skips) == 0 | ||
|
|
||
| def test_single_file_error(self): | ||
| pp = ParallelProcessor(enabled=False) | ||
| results, errors = pp.process_files([Path("a.py")], error_func) | ||
| assert len(results) == 0 and len(errors) == 1 | ||
| results, errors, skips = pp.process_files([Path("a.py")], error_func) | ||
| assert len(results) == 0 and len(errors) == 1 and len(skips) == 0 | ||
|
|
||
| def test_single_file_exception(self): | ||
| pp = ParallelProcessor(enabled=False) | ||
| results, errors = pp.process_files([Path("a.py")], raises_func) | ||
| results, errors, skips = pp.process_files([Path("a.py")], raises_func) | ||
| assert len(errors) == 1 | ||
|
|
||
| def test_progress_callback(self): | ||
|
|
@@ -1244,12 +1244,13 @@ class TestThreadedProcessing: | |
| def test_two_files_threads(self): | ||
| pp = ParallelProcessor(max_workers=2, use_processes=False, enabled=True) | ||
| files = [Path("a.py"), Path("b.py")] | ||
| results, errors = pp.process_files(files, success_func) | ||
| results, errors, skips = pp.process_files(files, success_func) | ||
| assert len(results) == 2 | ||
| assert len(skips) == 0 | ||
|
|
||
| def test_thread_error_handling(self): | ||
| pp = ParallelProcessor(max_workers=2, use_processes=False, enabled=True) | ||
| results, errors = pp.process_files([Path("a.py"), Path("b.py")], raises_func) | ||
| results, errors, skips = pp.process_files([Path("a.py"), Path("b.py")], raises_func) | ||
| assert len(errors) == 2 | ||
|
|
||
| def test_thread_progress_callback(self): | ||
|
|
@@ -1264,7 +1265,7 @@ def test_thread_progress_callback(self): | |
|
|
||
| def test_single_file_goes_sequential(self): | ||
| pp = ParallelProcessor(max_workers=4, use_processes=False, enabled=True) | ||
| results, errors = pp.process_files([Path("a.py")], success_func) | ||
| results, errors, skips = pp.process_files([Path("a.py")], success_func) | ||
| assert len(results) == 1 | ||
|
|
||
|
|
||
|
|
@@ -1274,13 +1275,13 @@ def test_process_pool_falls_back_on_exception(self): | |
| with patch( | ||
| "refactron.core.parallel.ProcessPoolExecutor", side_effect=Exception("spawn fail") | ||
| ): | ||
| results, errors = pp.process_files([Path("a.py")], success_func) | ||
| results, errors, skips = pp.process_files([Path("a.py")], success_func) | ||
| assert len(results) == 1 | ||
|
|
||
| def test_process_pool_success(self): | ||
| pp = ParallelProcessor(max_workers=2, use_processes=True, enabled=True) | ||
| mock_future = MagicMock() | ||
| mock_future.result.return_value = (make_metrics(Path("a.py")), None) | ||
| mock_future.result.return_value = (make_metrics(Path("a.py")), None, None) | ||
| mock_exec = MagicMock() | ||
| mock_exec.__enter__ = lambda s: s | ||
| mock_exec.__exit__ = MagicMock(return_value=False) | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Commit the formatter/linter output for this file.
Pre-commit is still red here: black and isort rewrite the file, and flake8 reports an overlong line in this changed signature/doc block.
As per coding guidelines,
**/*.py: Use line length of 100 characters, enforced by black, isort, and flake8. Also applies to: 58-75
🧰 Tools
🪛 GitHub Actions: Pre-commit
[error] pre-commit black hook id failed; files were modified by this hook (reformatted refactron/core/parallel.py). Commit the formatting changes.
[error] pre-commit isort hook id failed; files were modified by this hook (Fixing /home/runner/work/Refactron_lib/Refactron_lib/refactron/core/parallel.py). Commit the import/style changes.
🤖 Prompt for AI Agents