Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .refix.yaml.sample
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ models:
ci_log_max_lines: 120

# Automatically merge PR when it reaches refix:done state (Optional)
# When merge completes, the refix:merged label is added.
# Default: false
auto_merge: false

Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@ models:
ci_log_max_lines: 120

# Automatically merge PR when it reaches refix:done state (optional, default false)
# When merge completes, the refix:merged label is applied.
auto_merge: false

# Automatically post `@coderabbitai resume` after a CodeRabbit rate-limit wait expires
Expand Down
139 changes: 137 additions & 2 deletions src/auto_fixer.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,12 +100,14 @@
CODERABBIT_BOT_LOGIN = "coderabbitai"
REFIX_RUNNING_LABEL = "refix:running"
REFIX_DONE_LABEL = "refix:done"
REFIX_MERGED_LABEL = "refix:merged"
CODERABBIT_PROCESSING_MARKER = "Currently processing new changes in this PR."
CODERABBIT_RATE_LIMIT_MARKER = "Rate limit exceeded"
CODERABBIT_RESUME_COMMENT = "@coderabbitai resume"
SUCCESSFUL_CI_STATES = {"SUCCESS", "SKIPPED", "NEUTRAL"}
REFIX_RUNNING_LABEL_COLOR = "FBCA04"
REFIX_DONE_LABEL_COLOR = "0E8A16"
REFIX_MERGED_LABEL_COLOR = "1D76DB"
FAILED_CI_CONCLUSIONS = {"FAILURE", "TIMED_OUT", "ACTION_REQUIRED", "CANCELLED", "STALE", "STARTUP_FAILURE"}
FAILED_CI_STATES = {"ERROR", "FAILURE"}
GITHUB_ACTIONS_RUN_URL_PATTERN = re.compile(r"/actions/runs/(\d+)")
Expand Down Expand Up @@ -1151,6 +1153,12 @@ def _ensure_refix_labels(repo: str) -> None:
color=REFIX_DONE_LABEL_COLOR,
description="Refix finished review checks/fixes for now.",
)
_ensure_repo_label_exists(
repo,
REFIX_MERGED_LABEL,
color=REFIX_MERGED_LABEL_COLOR,
description="PR has been merged after Refix auto-merge.",
)


def _edit_pr_label(repo: str, pr_number: int, *, add: bool, label: str) -> bool:
Expand Down Expand Up @@ -1199,6 +1207,120 @@ def _set_pr_done_label(repo: str, pr_number: int) -> None:
_edit_pr_label(repo, pr_number, add=True, label=REFIX_DONE_LABEL)


def _set_pr_merged_label(repo: str, pr_number: int) -> None:
    """Transition a PR's labels to the merged state.

    Makes sure the refix label set exists on the repo, then removes the
    running label and applies the merged label via the gh CLI helper.
    """
    _ensure_refix_labels(repo)
    # Order matters for readability of the PR timeline: drop "running" first.
    for add_flag, label_name in ((False, REFIX_RUNNING_LABEL), (True, REFIX_MERGED_LABEL)):
        _edit_pr_label(repo, pr_number, add=add_flag, label=label_name)


def _pr_has_label(pr_data: dict[str, Any], label_name: str) -> bool:
labels = pr_data.get("labels", [])
if not isinstance(labels, list):
return False
for label in labels:
if isinstance(label, dict) and str(label.get("name", "")).strip() == label_name:
return True
return False


def _mark_pr_merged_label_if_needed(repo: str, pr_number: int) -> bool:
    """Add refix:merged label when PR is merged and eligible.

    Inspects the PR via ``gh pr view`` and applies the merged label only when
    all of the following hold: the PR has a mergedAt timestamp, it carries the
    refix:done label, it does not already carry refix:merged, and an
    auto-merge request is present. Returns True when the label was applied.
    Failures to query or parse the PR are reported on stderr and treated as
    "not eligible".
    """
    view_cmd = [
        "gh", "pr", "view", str(pr_number),
        "--repo", repo,
        "--json", "mergedAt,labels,autoMergeRequest",
    ]
    result = subprocess.run(view_cmd, capture_output=True, text=True, check=False, encoding="utf-8")
    if result.returncode != 0:
        print(
            f"Warning: failed to inspect merge state for PR #{pr_number}: {(result.stderr or '').strip()}",
            file=sys.stderr,
        )
        return False
    try:
        payload = json.loads(result.stdout) if result.stdout else {}
    except json.JSONDecodeError:
        print(f"Warning: failed to parse merge state for PR #{pr_number}", file=sys.stderr)
        return False
    if not isinstance(payload, dict):
        return False

    # Eligibility guards, cheapest first.
    if not str(payload.get("mergedAt") or "").strip():
        return False  # never merged
    if not _pr_has_label(payload, REFIX_DONE_LABEL):
        return False  # refix did not finish this PR
    if _pr_has_label(payload, REFIX_MERGED_LABEL):
        return False  # already labelled
    if not payload.get("autoMergeRequest"):
        return False  # merged, but not via the auto-merge path

    print(f"PR #{pr_number} is merged; adding {REFIX_MERGED_LABEL} label.")
    _set_pr_merged_label(repo, pr_number)
    return True


def _backfill_merged_labels(repo: str, *, limit: int = 100) -> int:
    """Backfill refix:merged label for merged PRs already marked refix:done.

    Lists up to *limit* merged PRs that carry refix:done but not refix:merged,
    then runs the per-PR eligibility check on each. Returns how many PRs were
    labelled. Listing or parse failures are reported on stderr and yield 0.
    """
    list_cmd = [
        "gh", "pr", "list",
        "--repo", repo,
        "--state", "merged",
        "--search", f'label:"{REFIX_DONE_LABEL}" -label:"{REFIX_MERGED_LABEL}"',
        "--json", "number",
        "--limit", str(limit),
    ]
    result = subprocess.run(list_cmd, capture_output=True, text=True, check=False, encoding="utf-8")
    if result.returncode != 0:
        print(
            f"Warning: failed to list merged PRs for {repo}: {(result.stderr or '').strip()}",
            file=sys.stderr,
        )
        return 0
    try:
        listed = json.loads(result.stdout) if result.stdout else []
    except json.JSONDecodeError:
        print(f"Warning: failed to parse merged PR list for {repo}", file=sys.stderr)
        return 0
    if not isinstance(listed, list):
        return 0

    # Keep only well-formed entries with an integer PR number.
    pr_numbers = [
        entry.get("number")
        for entry in listed
        if isinstance(entry, dict) and isinstance(entry.get("number"), int)
    ]
    count = sum(1 for number in pr_numbers if _mark_pr_merged_label_if_needed(repo, number))
    if count:
        print(f"Backfilled {REFIX_MERGED_LABEL} on {count} merged PR(s) in {repo}.")
    return count


def _trigger_pr_auto_merge(repo: str, pr_number: int) -> bool:
cmd = ["gh", "pr", "merge", str(pr_number), "--repo", repo, "--auto", "--merge"]
result = subprocess.run(
Expand Down Expand Up @@ -1571,7 +1693,9 @@ def _update_done_label_if_completed(
print(f"PR #{pr_number} meets completion conditions; switching label to {REFIX_DONE_LABEL}.")
_set_pr_done_label(repo, pr_number)
if auto_merge_enabled:
_trigger_pr_auto_merge(repo, pr_number)
merge_requested = _trigger_pr_auto_merge(repo, pr_number)
if merge_requested:
_mark_pr_merged_label_if_needed(repo, pr_number)
return

print(f"PR #{pr_number} is not completed yet; switching label to {REFIX_RUNNING_LABEL}.")
Expand Down Expand Up @@ -1646,6 +1770,10 @@ def process_repo(
print(f"Error fetching PRs for {repo}: {e}", file=sys.stderr)
fetch_failed = True
return []
backfilled_count = 0
if auto_merge_enabled and not dry_run and not summarize_only:
backfill_limit = max_modified_prs if max_modified_prs > 0 else 100
backfilled_count = _backfill_merged_labels(repo, limit=backfill_limit)

if not prs:
print(f"No open PRs found in {repo}")
Expand All @@ -1664,7 +1792,7 @@ def process_repo(
continue

# A上限チェック: 変更PR数の上限に達した場合、PR全体をスキップ
if max_modified_prs > 0 and len(modified_prs) >= max_modified_prs:
if max_modified_prs > 0 and len(modified_prs) + backfilled_count >= max_modified_prs:
print(f"\nSkipping PR #{pr_number}: max_modified_prs_per_run limit reached ({max_modified_prs})")
continue

Expand Down Expand Up @@ -2377,6 +2505,13 @@ def process_repo(

if processed_count == 0 and not fetch_failed and not pr_fetch_failed:
print(f"No unresolved reviews or behind PRs found in {repo}")
if auto_merge_enabled and not dry_run and not summarize_only:
if max_modified_prs > 0:
remaining = max_modified_prs - len(modified_prs) - backfilled_count
if remaining > 0:
_backfill_merged_labels(repo, limit=remaining)
else:
_backfill_merged_labels(repo)
return commits_added_to


Expand Down
80 changes: 79 additions & 1 deletion tests/test_auto_fixer.py
Original file line number Diff line number Diff line change
Expand Up @@ -902,6 +902,21 @@ def test_empty_prs_returns_early(self, capsys):
mock_run.assert_not_called()
mock_popen.assert_not_called()

def test_auto_merge_enabled_backfills_merged_labels_even_without_open_prs(self):
    """Backfill still runs for the repo when there are no open PRs to process."""
    cfg = {
        "models": {"summarize": "haiku", "fix": "sonnet"},
        "ci_log_max_lines": 120,
        "auto_merge": True,
        "process_draft_prs": False,
        "repositories": [{"repo": "owner/repo", "user_name": None, "user_email": None}],
    }
    with patch("auto_fixer.fetch_open_prs", return_value=[]):
        with patch("auto_fixer._backfill_merged_labels") as mock_backfill:
            auto_fixer.process_repo({"repo": "owner/repo"}, config=cfg)
    mock_backfill.assert_called_once_with("owner/repo", limit=100)

def test_draft_pr_is_skipped_by_default(self):
prs = [{"number": 1, "title": "Draft PR", "isDraft": True}]
with (
Expand Down Expand Up @@ -1508,6 +1523,21 @@ def test_set_pr_done_label_ensures_labels_before_edit(self):
]
)

def test_set_pr_merged_label_ensures_labels_before_edit(self):
    """_set_pr_merged_label ensures labels exist, drops running, adds merged."""
    with patch("auto_fixer._ensure_refix_labels") as mock_ensure:
        with patch("auto_fixer._edit_pr_label") as mock_edit:
            auto_fixer._set_pr_merged_label("owner/repo", 12)

    mock_ensure.assert_called_once_with("owner/repo")
    expected_edits = [
        call("owner/repo", 12, add=False, label="refix:running"),
        call("owner/repo", 12, add=True, label="refix:merged"),
    ]
    mock_edit.assert_has_calls(expected_edits)

def test_trigger_pr_auto_merge_executes_gh_merge(self):
with patch("auto_fixer.subprocess.run", return_value=Mock(returncode=0, stdout="", stderr="")) as mock_run:
ok = auto_fixer._trigger_pr_auto_merge("owner/repo", 7)
Expand All @@ -1530,6 +1560,52 @@ def test_trigger_pr_auto_merge_treats_already_merged_as_success(self):

assert ok is True

def test_mark_pr_merged_label_if_needed_adds_label_for_done_merged_pr(self):
    """A merged, refix:done, auto-merged PR gets the merged label applied."""
    pr_view = {
        "mergedAt": "2026-03-11T00:00:00Z",
        "labels": [{"name": "refix:done"}],
        "autoMergeRequest": {"enabledBy": {"login": "bot"}},
    }
    gh_result = Mock(returncode=0, stdout=json.dumps(pr_view), stderr="")
    with patch("auto_fixer.subprocess.run", return_value=gh_result):
        with patch("auto_fixer._set_pr_merged_label") as mock_set_merged:
            assert auto_fixer._mark_pr_merged_label_if_needed("owner/repo", 21) is True
    mock_set_merged.assert_called_once_with("owner/repo", 21)

def test_mark_pr_merged_label_if_needed_skips_when_not_merged(self):
    """An unmerged PR must not receive the merged label."""
    pr_view = {"mergedAt": None, "labels": [{"name": "refix:done"}]}
    gh_result = Mock(returncode=0, stdout=json.dumps(pr_view), stderr="")
    with patch("auto_fixer.subprocess.run", return_value=gh_result):
        with patch("auto_fixer._set_pr_merged_label") as mock_set_merged:
            assert auto_fixer._mark_pr_merged_label_if_needed("owner/repo", 22) is False
    mock_set_merged.assert_not_called()

def test_backfill_merged_labels_applies_label_to_matching_prs(self):
    """Every PR returned by the merged-PR search is passed to the marker."""
    listing = Mock(returncode=0, stdout=json.dumps([{"number": 31}, {"number": 32}]), stderr="")
    with patch("auto_fixer.subprocess.run", return_value=listing):
        with patch("auto_fixer._mark_pr_merged_label_if_needed", return_value=True) as mock_mark:
            assert auto_fixer._backfill_merged_labels("owner/repo") == 2
    mock_mark.assert_has_calls([call("owner/repo", 31), call("owner/repo", 32)])

def test_backfill_merged_labels_returns_zero_on_list_failure(self):
    """A failed gh listing yields zero backfills and no label edits."""
    failed_listing = Mock(returncode=1, stdout="", stderr="boom")
    with patch("auto_fixer.subprocess.run", return_value=failed_listing):
        with patch("auto_fixer._set_pr_merged_label") as mock_set_merged:
            assert auto_fixer._backfill_merged_labels("owner/repo") == 0
    mock_set_merged.assert_not_called()

def test_contains_coderabbit_processing_marker(self):
pr_data = {
"reviews": [],
Expand Down Expand Up @@ -1575,7 +1651,8 @@ def test_update_done_label_triggers_auto_merge_when_enabled(self):
patch("auto_fixer._are_all_ci_checks_successful", return_value=True),
patch("auto_fixer._set_pr_done_label") as mock_set_done,
patch("auto_fixer._set_pr_running_label") as mock_set_running,
patch("auto_fixer._trigger_pr_auto_merge") as mock_auto_merge,
patch("auto_fixer._trigger_pr_auto_merge", return_value=True) as mock_auto_merge,
patch("auto_fixer._mark_pr_merged_label_if_needed") as mock_mark_merged,
):
auto_fixer._update_done_label_if_completed(
repo="owner/repo",
Expand All @@ -1596,6 +1673,7 @@ def test_update_done_label_triggers_auto_merge_when_enabled(self):
mock_set_done.assert_called_once_with("owner/repo", 3)
mock_set_running.assert_not_called()
mock_auto_merge.assert_called_once_with("owner/repo", 3)
mock_mark_merged.assert_called_once_with("owner/repo", 3)

def test_update_done_label_sets_running_when_review_fix_added_commit(self):
with (
Expand Down
Loading