diff --git a/AUTO1_PATCHES.md b/AUTO1_PATCHES.md
index f579c39166..4e11613b6d 100644
--- a/AUTO1_PATCHES.md
+++ b/AUTO1_PATCHES.md
@@ -14,10 +14,11 @@ Keep this list minimal to ease upstream rebases.
| Patch ID | Ticket | Type | Files | Why | Upstream status | Removal criteria |
| --- | --- | --- | --- | --- | --- | --- |
-| improve-score-rubric | OPS-54090 | Enhancement | pr_agent/settings/code_suggestions/pr_code_suggestions_reflect_prompts.toml | Clarify /improve scoring so merge-blocking issues score 9-10 and map to High severity. | Not upstreamed yet. | Remove once upstream clarifies the scoring rubric for merge-blocking issues. |
-| review-severity-emoji | OPS-54090 | Enhancement | pr_agent/algo/utils.py | Add a header emoji for findings severity summary in PR review output. | Not upstreamed yet. | Remove once upstream adds a default emoji for findings severity summary. |
-| improve-reflect-fallback-score | OPS-54090 | Fix | pr_agent/tools/pr_code_suggestions.py | Prevent inline suggestion spam when self-reflection fails by defaulting all suggestion scores to 0. | Not upstreamed yet. | Remove once upstream handles reflect failure without inflating scores. |
-| describe-mermaid-sanitize | OPS-54090 | Fix | pr_agent/tools/pr_description.py | Strip backticks from Mermaid diagrams to avoid GitHub render failures. | Not upstreamed yet. | Remove once upstream sanitizes Mermaid diagrams or fixes prompt output. |
+| improve-score-rubric | OPS-24090 | Enhancement | pr_agent/settings/code_suggestions/pr_code_suggestions_reflect_prompts.toml | Clarify /improve scoring so merge-blocking issues score 9-10 and map to High severity. | Not upstreamed yet. | Remove once upstream clarifies the scoring rubric for merge-blocking issues. |
+| review-severity-emoji | OPS-24090 | Enhancement | pr_agent/algo/utils.py | Add a header emoji for findings severity summary in PR review output. | Not upstreamed yet. | Remove once upstream adds a default emoji for findings severity summary. |
+| improve-reflect-fallback-score | OPS-24090 | Fix | pr_agent/tools/pr_code_suggestions.py | Prevent inline suggestion spam when self-reflection fails by defaulting all suggestion scores to 0. | Not upstreamed yet. | Remove once upstream handles reflect failure without inflating scores. |
+| describe-mermaid-sanitize | OPS-24090 | Fix | pr_agent/tools/pr_description.py | Strip backticks from Mermaid diagrams to avoid GitHub render failures. | Not upstreamed yet. | Remove once upstream sanitizes Mermaid diagrams or fixes prompt output. |
+| jira-cloud-ticket-context | OPS-24091 | Enhancement | pr_agent/algo/utils.py, pr_agent/tickets/jira_cloud.py, pr_agent/tools/ticket_pr_compliance_check.py, pr_agent/tools/pr_reviewer.py, pr_agent/tools/pr_description.py, pr_agent/tools/pr_config.py, tests/unittest/test_convert_to_markdown.py, tests/unittest/test_jira_cloud_ticket_context.py, tests/unittest/test_pr_reviewer_ticket_note.py, tests/unittest/test_ticket_pr_compliance_check.py | Fetch Jira Cloud ticket context using PR title first and branch name only as fallback, support both basic auth and scoped-token Jira Cloud API access, ignore placeholder compliance bullets, and show review notes when the Jira ticket cannot be fetched. | Not upstreamed yet. | Remove once upstream supports Jira Cloud ticket context from PR metadata with title-first precedence, scoped-token auth, placeholder-safe compliance rendering, and comment-level fetch-failure notes. |
## Rebase checklist
diff --git a/pr_agent/algo/utils.py b/pr_agent/algo/utils.py
index 00b344b6f0..7739866171 100644
--- a/pr_agent/algo/utils.py
+++ b/pr_agent/algo/utils.py
@@ -129,7 +129,8 @@ def convert_to_markdown_v2(output_data: dict,
gfm_supported: bool = True,
incremental_review=None,
git_provider=None,
- files=None) -> str:
+ files=None,
+ ticket_note: str = '') -> str:
"""
Convert a dictionary of data into markdown format.
Args:
@@ -170,8 +171,12 @@ def convert_to_markdown_v2(output_data: dict,
if gfm_supported:
         markdown_text += "<table>\n"
- todo_summary = output_data['review'].pop('todo_summary', '')
- for key, value in output_data['review'].items():
+ review_data = copy.deepcopy(output_data['review'])
+ todo_summary = review_data.pop('todo_summary', '')
+ ticket_compliance_value = review_data.pop('ticket_compliance_check', None)
+ ticket_note = (ticket_note or '').strip()
+ ticket_compliance_rendered = False
+ for key, value in review_data.items():
if value is None or value == '' or value == {} or value == []:
if key.lower() not in ['can_be_split', 'key_issues_to_review']:
continue
@@ -210,8 +215,6 @@ def convert_to_markdown_v2(output_data: dict,
markdown_text += f'### {emoji} No relevant tests\n\n'
else:
markdown_text += f"### {emoji} PR contains tests\n\n"
- elif 'ticket compliance check' in key_nice.lower():
- markdown_text = ticket_markdown_logic(emoji, markdown_text, value, gfm_supported)
elif 'contribution time cost estimate' in key_nice.lower():
if gfm_supported:
markdown_text += f"| {emoji} Contribution time estimate (best, average, worst case): "
@@ -272,7 +275,6 @@ def convert_to_markdown_v2(output_data: dict,
issues = value
if gfm_supported:
markdown_text += f" |
"
- # markdown_text += f"{emoji} {key_nice}
\n\n"
markdown_text += f"{emoji} Recommended focus areas for review
\n\n"
else:
markdown_text += f"### {emoji} Recommended focus areas for review\n\n#### \n"
@@ -320,12 +322,23 @@ def convert_to_markdown_v2(output_data: dict,
else:
markdown_text += f"### {emoji} {key_nice}: {value}\n\n"
+ if ('score' in key_nice.lower() and not ticket_compliance_rendered and
+ (ticket_compliance_value is not None or ticket_note)):
+ markdown_text = ticket_markdown_logic(
+ emojis['Ticket compliance check'], markdown_text, ticket_compliance_value, gfm_supported, ticket_note
+ )
+ ticket_compliance_rendered = True
+
+ if not ticket_compliance_rendered and (ticket_compliance_value is not None or ticket_note):
+ markdown_text = ticket_markdown_logic(
+ emojis['Ticket compliance check'], markdown_text, ticket_compliance_value, gfm_supported, ticket_note
+ )
+
if gfm_supported:
         markdown_text += "</table>\n"
return markdown_text
-
def extract_relevant_lines_str(end_line, files, relevant_file, start_line, dedent=False) -> str:
"""
Finds 'relevant_file' in 'files', and extracts the lines from 'start_line' to 'end_line' string from the file content.
@@ -366,22 +379,40 @@ def extract_relevant_lines_str(end_line, files, relevant_file, start_line, deden
return ""
-def ticket_markdown_logic(emoji, markdown_text, value, gfm_supported) -> str:
+def normalize_ticket_requirement_text(text: str) -> str:
+ text = (text or '').strip()
+ if not text:
+ return ''
+
+ normalized_lines = [line.strip() for line in text.splitlines() if line.strip()]
+ placeholder_lines = {'-', '*', 'none', '(none)', 'n/a', '(n/a)'}
+ normalized_lines = [line for line in normalized_lines if line.lower() not in placeholder_lines]
+ if not normalized_lines:
+ return ''
+
+ return '\n'.join(normalized_lines)
+
+
+def ticket_markdown_logic(emoji, markdown_text, value, gfm_supported, ticket_note: str = '') -> str:
ticket_compliance_str = ""
compliance_emoji = ''
# Track compliance levels across all tickets
all_compliance_levels = []
+ ticket_note = (ticket_note or '').strip()
if isinstance(value, list):
for ticket_analysis in value:
try:
+ if not isinstance(ticket_analysis, dict):
+ continue
ticket_url = ticket_analysis.get('ticket_url', '').strip()
explanation = ''
ticket_compliance_level = '' # Individual ticket compliance
- fully_compliant_str = ticket_analysis.get('fully_compliant_requirements', '').strip()
- not_compliant_str = ticket_analysis.get('not_compliant_requirements', '').strip()
- requires_further_human_verification = ticket_analysis.get('requires_further_human_verification',
- '').strip()
+ fully_compliant_str = normalize_ticket_requirement_text(ticket_analysis.get('fully_compliant_requirements'))
+ not_compliant_str = normalize_ticket_requirement_text(ticket_analysis.get('not_compliant_requirements'))
+ requires_further_human_verification = normalize_ticket_requirement_text(
+ ticket_analysis.get('requires_further_human_verification')
+ )
if not fully_compliant_str and not not_compliant_str:
get_logger().debug(f"Ticket compliance has no requirements",
@@ -450,18 +481,26 @@ def ticket_markdown_logic(emoji, markdown_text, value, gfm_supported) -> str:
# Set extra statistics outside the ticket loop
get_settings().set('config.extra_statistics', {'compliance_level': compliance_level})
- # editing table row for ticket compliance analysis
- if gfm_supported:
-        markdown_text += f"<tr><td>\n\n"
- markdown_text += f"**{emoji} Ticket compliance analysis {compliance_emoji}**\n\n"
- markdown_text += ticket_compliance_str
-        markdown_text += f"</td></tr>\n"
- else:
- markdown_text += f"### {emoji} Ticket compliance analysis {compliance_emoji}\n\n"
- markdown_text += ticket_compliance_str + "\n\n"
+ if not ticket_compliance_str and not ticket_note:
+ return markdown_text
- return markdown_text
+ section_body = ticket_compliance_str
+ if ticket_note:
+ section_body = f"{ticket_note}\n\n{section_body}" if section_body else f"{ticket_note}\n\n"
+
+ header_suffix = f" {compliance_emoji}" if compliance_emoji else ''
+ # editing table row for ticket compliance analysis
+ if gfm_supported:
+        markdown_text += f"<tr><td>\n\n"
+ markdown_text += f"**{emoji} Ticket compliance analysis{header_suffix}**\n\n"
+ markdown_text += section_body
+        markdown_text += f"</td></tr>\n"
+ else:
+ markdown_text += f"### {emoji} Ticket compliance analysis{header_suffix}\n\n"
+ markdown_text += section_body + "\n\n"
+
+ return markdown_text
def process_can_be_split(emoji, value):
try:
diff --git a/pr_agent/tickets/__init__.py b/pr_agent/tickets/__init__.py
new file mode 100644
index 0000000000..ed0b7ca79f
--- /dev/null
+++ b/pr_agent/tickets/__init__.py
@@ -0,0 +1,4 @@
+"""Ticket system integrations.
+
+This package contains optional adapters for external ticket systems (e.g., Jira).
+"""
diff --git a/pr_agent/tickets/jira_cloud.py b/pr_agent/tickets/jira_cloud.py
new file mode 100644
index 0000000000..21408bf999
--- /dev/null
+++ b/pr_agent/tickets/jira_cloud.py
@@ -0,0 +1,550 @@
+import json
+import os
+import re
+from dataclasses import dataclass
+from typing import Any, Dict, List, Optional
+from urllib.parse import urlparse, urlunparse
+
+import aiohttp
+import html2text
+
+
+def _get_logger():
+ # Lazy import to avoid circular imports during Dynaconf initialization.
+ try:
+ from pr_agent.log import get_logger
+
+ return get_logger()
+ except Exception:
+ import logging
+
+ return logging.getLogger(__name__)
+
+
+MAX_TICKET_CHARACTERS = 10000
+DEFAULT_TIMEOUT_SECONDS = 10
+
+
+_JIRA_KEY_RE = re.compile(r"^[A-Z][A-Z0-9]{1,9}-[1-9]\d{0,6}$")
+
+# Prefix-only extraction:
+# - PR title example: "OPS-1234 new feature", allow minor variants like "[OPS-1234] ..." or "OPS-1234: ...".
+_TITLE_PREFIX_RE = re.compile(r"^\s*[\[(]?(?P<key>[A-Z][A-Z0-9]{1,9}-[1-9]\d{0,6})[\])]?([\s:]|$)")
+_BRANCH_PREFIX_RE = re.compile(r"^(?P<key>[A-Z][A-Z0-9]{1,9}-[1-9]\d{0,6})(?:[-_/].*)?$")
+
+
+@dataclass(frozen=True)
+class JiraCloudAuth:
+ site_base_url: str
+ api_base_url: str
+ api_token: str
+ auth_mode: str
+ email: str = ""
+ cloud_id: str = ""
+
+
+@dataclass(frozen=True)
+class JiraTicketFetchResult:
+ status: str
+ key: Optional[str] = None
+ ticket: Optional[Dict[str, Any]] = None
+ note: str = ""
+
+
+def _normalize_jira_base_url(raw_url: str) -> str:
+ raw_url = (raw_url or "").strip()
+ if not raw_url:
+ return ""
+
+ if not raw_url.startswith(("http://", "https://")):
+ raw_url = f"https://{raw_url}"
+
+ parsed = urlparse(raw_url)
+ # Keep only scheme + netloc + path (drop query/fragment)
+ scheme = parsed.scheme or "https"
+ netloc = parsed.netloc
+ path = (parsed.path or "").rstrip("/")
+
+ # Cloud UI sometimes uses the /jira path. REST APIs are served from the root.
+ if netloc.endswith(".atlassian.net") and path == "/jira":
+ path = ""
+
+ normalized = urlunparse((scheme, netloc, path, "", "", "")).rstrip("/")
+ return normalized
+
+
+def _build_jira_api_base_url(cloud_id: str) -> str:
+ return f"https://api.atlassian.com/ex/jira/{cloud_id}"
+
+
+def _load_auth_from_env() -> Optional[JiraCloudAuth]:
+ base_url = _normalize_jira_base_url(os.getenv("JIRA_BASE_URL", ""))
+ email = (os.getenv("JIRA_API_EMAIL", "") or "").strip()
+ api_token = (os.getenv("JIRA_API_TOKEN", "") or "").strip()
+ cloud_id = (os.getenv("JIRA_CLOUD_ID", "") or "").strip()
+ auth_type = (os.getenv("JIRA_AUTH_TYPE", "") or "").strip().lower()
+
+ if not base_url or not api_token:
+ return None
+
+ # Scoped Atlassian API tokens use api.atlassian.com URLs with Basic auth and require a cloud id.
+ # Implicit scoped detection: if both JIRA_CLOUD_ID and JIRA_API_EMAIL are set, prefer scoped mode.
+ if auth_type == "scoped" or (not auth_type and cloud_id and email):
+ if not email or not cloud_id:
+ return None
+ return JiraCloudAuth(
+ site_base_url=base_url,
+ api_base_url=_build_jira_api_base_url(cloud_id),
+ api_token=api_token,
+ auth_mode="scoped",
+ email=email,
+ cloud_id=cloud_id,
+ )
+
+ if auth_type in {"bearer", "oauth", "oauth2", "oauth_3lo"}:
+ return JiraCloudAuth(
+ site_base_url=base_url,
+ api_base_url=_build_jira_api_base_url(cloud_id) if cloud_id else "",
+ api_token=api_token,
+ auth_mode="bearer",
+ email=email,
+ cloud_id=cloud_id,
+ )
+
+ if auth_type not in {"", "basic"}:
+ _get_logger().warning(f"Unrecognized JIRA_AUTH_TYPE '{auth_type}'")
+ return None
+
+ if not email:
+ return None
+
+ return JiraCloudAuth(
+ site_base_url=base_url,
+ api_base_url=base_url,
+ api_token=api_token,
+ auth_mode="basic",
+ email=email,
+ )
+
+
+def _has_any_jira_env() -> bool:
+ return any(
+ (
+ (os.getenv("JIRA_BASE_URL", "") or "").strip(),
+ (os.getenv("JIRA_API_EMAIL", "") or "").strip(),
+ (os.getenv("JIRA_API_TOKEN", "") or "").strip(),
+ (os.getenv("JIRA_CLOUD_ID", "") or "").strip(),
+ (os.getenv("JIRA_AUTH_TYPE", "") or "").strip(),
+ )
+ )
+
+
+def extract_jira_ticket_key_from_pr_title(pr_title: str) -> Optional[str]:
+ if not pr_title:
+ return None
+ match = _TITLE_PREFIX_RE.match(pr_title.strip())
+ if not match:
+ return None
+ return match.group("key")
+
+
+def extract_jira_ticket_key_from_branch(branch: str) -> Optional[str]:
+ if not branch:
+ return None
+ branch = branch.strip()
+ if branch.startswith("refs/heads/"):
+ branch = branch[len("refs/heads/") :]
+ match = _BRANCH_PREFIX_RE.match(branch)
+ if not match:
+ return None
+ return match.group("key")
+
+
+def extract_jira_ticket_key(pr_title: str, branch: str) -> Optional[str]:
+ # precedence: PR title first, then branch
+ return extract_jira_ticket_key_from_pr_title(pr_title) or extract_jira_ticket_key_from_branch(branch)
+
+
+def _build_ticket_mismatch_note(title_key: str, branch_key: str) -> str:
+ return (
+ f"PR title references Jira ticket `{title_key}` but branch name references `{branch_key}`. "
+ "Please verify that the correct Jira ticket is used in the PR metadata."
+ )
+
+
+def _append_note(note: str, extra_note: str) -> str:
+ note = (note or "").strip()
+ extra_note = (extra_note or "").strip()
+ if not note:
+ return extra_note
+ if not extra_note:
+ return note
+ return f"{note}\n\n{extra_note}"
+
+
+def _clip_text(text: str, limit: int = MAX_TICKET_CHARACTERS) -> str:
+ text = text or ""
+ if len(text) <= limit:
+ return text
+ return text[:limit] + "..."
+
+
+def _adf_to_text(adf: Any) -> str:
+ """Best-effort extraction of human-readable text from Atlassian Document Format (ADF)."""
+ parts: List[str] = []
+
+ def walk(node: Any, depth: int = 0):
+ if depth > 50:
+ return
+ if node is None:
+ return
+ if isinstance(node, str):
+ parts.append(node)
+ return
+ if isinstance(node, list):
+ for item in node:
+ walk(item, depth + 1)
+ return
+ if isinstance(node, dict):
+ node_type = node.get("type")
+ if node_type == "text" and "text" in node:
+ parts.append(str(node.get("text") or ""))
+ return
+ content = node.get("content")
+ if content is not None:
+ walk(content, depth + 1)
+ # Add lightweight paragraph separation when we see block-ish nodes
+ if node_type in {"paragraph", "heading", "blockquote", "listItem"}:
+ parts.append("\n")
+ return
+
+ walk(adf)
+ text = "".join(parts)
+ # Normalize excessive whitespace/newlines
+ text = re.sub(r"\n{3,}", "\n\n", text)
+ return text.strip()
+
+
+def _jira_issue_url(base_url: str, key: str) -> str:
+ return f"{base_url.rstrip('/')}/browse/{key}"
+
+
+def _extract_resource_urls(resources: Any) -> List[str]:
+ urls: List[str] = []
+ if not isinstance(resources, list):
+ return urls
+ for resource in resources:
+ if not isinstance(resource, dict):
+ continue
+ url = resource.get("url")
+ if isinstance(url, str) and url:
+ urls.append(_normalize_jira_base_url(url))
+ return urls
+
+
+def _build_ticket_compliance_note(key: str, reason: str) -> str:
+ reason_to_message = {
+ "missing_config": "Jira Cloud configuration is missing",
+ "not_found": "the Jira ticket was not found",
+ "auth_failed": "Jira authentication failed",
+ "fetch_error": "the Jira ticket could not be fetched",
+ }
+ message = reason_to_message.get(reason, "the Jira ticket could not be fetched")
+ return (
+ f"Jira ticket `{key}` was detected in the PR title or branch, but {message}, "
+ "so ticket compliance analysis was skipped."
+ )
+
+
+def _truncate_for_log(value: str, limit: int = 300) -> str:
+ value = (value or "").strip()
+ if len(value) <= limit:
+ return value
+ return value[:limit] + "..."
+
+
+async def _extract_error_message(resp: aiohttp.ClientResponse) -> str:
+ try:
+ body = await resp.text()
+ except Exception:
+ return ""
+
+ body = (body or "").replace("\n", " ").strip()
+ if not body:
+ return ""
+
+ try:
+ parsed = json.loads(body)
+ except Exception:
+ return _truncate_for_log(body)
+
+ error_messages = parsed.get("errorMessages") if isinstance(parsed, dict) else None
+ if isinstance(error_messages, list) and error_messages:
+ return _truncate_for_log("; ".join([str(message) for message in error_messages if message]))
+
+ errors = parsed.get("errors") if isinstance(parsed, dict) else None
+ if isinstance(errors, dict) and errors:
+ return _truncate_for_log("; ".join([f"{k}: {v}" for k, v in errors.items()]))
+
+ message = parsed.get("message") if isinstance(parsed, dict) else None
+ if message:
+ return _truncate_for_log(str(message))
+
+ return _truncate_for_log(body)
+
+
+async def _discover_cloud_id(session: aiohttp.ClientSession, site_base_url: str) -> Optional[str]:
+ discovery_url = "https://api.atlassian.com/oauth/token/accessible-resources"
+ normalized_site_base_url = _normalize_jira_base_url(site_base_url)
+ _get_logger().info(f"Discovering Jira cloud id for {normalized_site_base_url}")
+
+ async with session.get(discovery_url) as resp:
+ if resp.status in (401, 403):
+ error_message = await _extract_error_message(resp)
+ if error_message:
+ raise PermissionError(f"Jira cloud id discovery failed (HTTP {resp.status}): {error_message}")
+ raise PermissionError(f"Jira cloud id discovery failed (HTTP {resp.status})")
+ resp.raise_for_status()
+ resources = await resp.json()
+
+ for resource in resources if isinstance(resources, list) else []:
+ if not isinstance(resource, dict):
+ continue
+ resource_url = _normalize_jira_base_url(str(resource.get("url") or ""))
+ if resource_url == normalized_site_base_url:
+ cloud_id = str(resource.get("id") or "").strip()
+ if cloud_id:
+ _get_logger().info(f"Discovered Jira cloud id {cloud_id} for {normalized_site_base_url}")
+ return cloud_id
+
+ resource_urls = _extract_resource_urls(resources)
+ _get_logger().warning(
+ f"Could not match Jira site {normalized_site_base_url} in accessible resources: {resource_urls}"
+ )
+ return None
+
+
+async def _resolve_auth(session: aiohttp.ClientSession, auth: JiraCloudAuth) -> Optional[JiraCloudAuth]:
+ if auth.auth_mode == "scoped":
+ # api_base_url and cloud_id are validated by _load_auth_from_env for scoped tokens.
+ return auth
+
+ if auth.auth_mode != "bearer":
+ return auth
+
+ if auth.cloud_id and auth.api_base_url:
+ _get_logger().info(f"Using configured Jira cloud id {auth.cloud_id} for {auth.site_base_url}")
+ return auth
+
+ cloud_id = await _discover_cloud_id(session, auth.site_base_url)
+ if not cloud_id:
+ return None
+
+ return JiraCloudAuth(
+ site_base_url=auth.site_base_url,
+ api_base_url=_build_jira_api_base_url(cloud_id),
+ api_token=auth.api_token,
+ auth_mode=auth.auth_mode,
+ email=auth.email,
+ cloud_id=cloud_id,
+ )
+
+
+async def _fetch_issue_json(session: aiohttp.ClientSession, auth: JiraCloudAuth, key: str) -> Optional[Dict[str, Any]]:
+ url = f"{auth.api_base_url.rstrip('/')}/rest/api/3/issue/{key}"
+ params = {
+ "fields": "summary,description,labels,subtasks",
+ "expand": "renderedFields",
+ }
+
+ _get_logger().info(f"Requesting Jira ticket {key} using {auth.auth_mode} auth from {auth.api_base_url}")
+
+ async with session.get(url, params=params) as resp:
+ if resp.status == 404:
+ error_message = await _extract_error_message(resp)
+ if error_message:
+ _get_logger().warning(
+ f"Jira issue lookup for {key} at {auth.api_base_url} returned 404: {error_message}"
+ )
+ else:
+ _get_logger().warning(f"Jira issue lookup for {key} at {auth.api_base_url} returned 404")
+ return None
+ if resp.status in (401, 403):
+ # Include Jira's capped error message in logs for auth diagnostics.
+ error_message = await _extract_error_message(resp)
+ if error_message:
+ raise PermissionError(f"Jira auth failed (HTTP {resp.status}): {error_message}")
+ raise PermissionError(f"Jira auth failed (HTTP {resp.status})")
+ if resp.status >= 400:
+ error_message = await _extract_error_message(resp)
+ if error_message:
+ _get_logger().warning(
+ f"Jira issue lookup for {key} at {auth.api_base_url} returned HTTP {resp.status}: {error_message}"
+ )
+ else:
+ _get_logger().warning(f"Jira issue lookup for {key} at {auth.api_base_url} returned HTTP {resp.status}")
+ resp.raise_for_status()
+ return await resp.json()
+
+
+def _render_description_to_text(issue: Dict[str, Any]) -> str:
+ rendered_fields = issue.get("renderedFields") or {}
+ rendered_html = rendered_fields.get("description")
+ if isinstance(rendered_html, str) and rendered_html.strip():
+ try:
+ # html2text produces markdown-ish plain text, which is a good prompt format.
+ return html2text.html2text(rendered_html).strip()
+ except Exception:
+ # Fall back to raw HTML if conversion fails.
+ return rendered_html
+
+ fields = issue.get("fields") or {}
+ desc = fields.get("description")
+ if isinstance(desc, str):
+ return desc
+ if isinstance(desc, (dict, list)):
+ text = _adf_to_text(desc)
+ if text:
+ return text
+ return json.dumps(desc, ensure_ascii=True)
+ return ""
+
+
+def _extract_labels(issue: Dict[str, Any]) -> str:
+ fields = issue.get("fields") or {}
+ labels = fields.get("labels") or []
+ if not isinstance(labels, list):
+ return ""
+ return ", ".join(str(label) for label in labels if label is not None)
+
+
+def _extract_subtasks_as_sub_issues(issue: Dict[str, Any], base_url: str, limit: int = 5) -> List[Dict[str, str]]:
+ fields = issue.get("fields") or {}
+ subtasks = fields.get("subtasks") or []
+ if not isinstance(subtasks, list) or not subtasks:
+ return []
+
+ sub_issues: List[Dict[str, str]] = []
+ for subtask in subtasks[:limit]:
+ if not isinstance(subtask, dict):
+ continue
+ key = subtask.get("key")
+ if not isinstance(key, str) or not _JIRA_KEY_RE.match(key):
+ continue
+ summary = ""
+ sub_fields = subtask.get("fields")
+ if isinstance(sub_fields, dict):
+ summary_val = sub_fields.get("summary")
+ if isinstance(summary_val, str):
+ summary = summary_val
+ sub_issues.append(
+ {
+ "ticket_url": _jira_issue_url(base_url, key),
+ "title": summary or key,
+ "body": "",
+ }
+ )
+
+ return sub_issues
+
+
+async def fetch_jira_ticket_context(pr_title: str, branch: str) -> JiraTicketFetchResult:
+ title_key = extract_jira_ticket_key_from_pr_title(pr_title)
+ branch_key = extract_jira_ticket_key_from_branch(branch)
+ key = title_key or branch_key
+ mismatch_note = ""
+ if title_key and branch_key and title_key != branch_key:
+ mismatch_note = _build_ticket_mismatch_note(title_key, branch_key)
+ if not key:
+ _get_logger().info("No Jira ticket key found in PR title or branch")
+ return JiraTicketFetchResult(status="no_key")
+
+ _get_logger().info(f"Detected Jira ticket key {key} from PR metadata")
+
+ auth = _load_auth_from_env()
+ if not auth:
+ if not _has_any_jira_env():
+ _get_logger().info(f"Detected Jira ticket key {key}, but Jira integration is not configured")
+ return JiraTicketFetchResult(status="not_configured", key=key, note=mismatch_note)
+ _get_logger().warning(f"Detected Jira ticket key {key}, but Jira Cloud env configuration is missing")
+ return JiraTicketFetchResult(
+ status="missing_config",
+ key=key,
+ note=_append_note(_build_ticket_compliance_note(key, "missing_config"), mismatch_note),
+ )
+
+ timeout = aiohttp.ClientTimeout(total=DEFAULT_TIMEOUT_SECONDS)
+ session_kwargs: Dict[str, Any] = {"timeout": timeout, "headers": {"Accept": "application/json"}}
+ if auth.auth_mode in {"basic", "scoped"}:
+ session_kwargs["auth"] = aiohttp.BasicAuth(login=auth.email, password=auth.api_token)
+ else:
+ session_kwargs["headers"].update(
+ {
+ "Authorization": f"Bearer {auth.api_token}",
+ }
+ )
+ _get_logger().info(f"Using Jira auth mode {auth.auth_mode} for {auth.site_base_url}")
+
+ try:
+ async with aiohttp.ClientSession(**session_kwargs) as session:
+ resolved_auth = await _resolve_auth(session, auth)
+ if not resolved_auth:
+ return JiraTicketFetchResult(
+ status="fetch_error",
+ key=key,
+ note=_append_note(_build_ticket_compliance_note(key, "fetch_error"), mismatch_note),
+ )
+
+ issue = await _fetch_issue_json(session, resolved_auth, key)
+ if not issue:
+ _get_logger().warning(f"Jira ticket {key} was not found")
+ return JiraTicketFetchResult(
+ status="not_found",
+ key=key,
+ note=_append_note(_build_ticket_compliance_note(key, "not_found"), mismatch_note),
+ )
+
+ fields = issue.get("fields") or {}
+ summary = fields.get("summary") if isinstance(fields.get("summary"), str) else ""
+ body = _clip_text(_render_description_to_text(issue))
+ labels = _extract_labels(issue)
+ sub_issues = _extract_subtasks_as_sub_issues(issue, resolved_auth.site_base_url)
+
+ ticket = {
+ "ticket_id": key,
+ "ticket_url": _jira_issue_url(resolved_auth.site_base_url, key),
+ "title": summary or key,
+ "body": body,
+ "labels": labels,
+ }
+ if sub_issues:
+ ticket["sub_issues"] = sub_issues
+ _get_logger().info(f"Fetched Jira ticket {key} for PR ticket context")
+ return JiraTicketFetchResult(status="fetched", key=key, ticket=ticket, note=mismatch_note)
+ except PermissionError as e:
+ # Do not include raw exception details that could accidentally expose auth state.
+ _get_logger().warning(f"Jira ticket fetch skipped: {e}")
+ return JiraTicketFetchResult(
+ status="auth_failed",
+ key=key,
+ note=_append_note(_build_ticket_compliance_note(key, "auth_failed"), mismatch_note),
+ )
+ except aiohttp.ClientResponseError as e:
+ _get_logger().warning(f"Jira ticket fetch failed (HTTP {e.status}) for {key}")
+ return JiraTicketFetchResult(
+ status="fetch_error",
+ key=key,
+ note=_append_note(_build_ticket_compliance_note(key, "fetch_error"), mismatch_note),
+ )
+ except Exception as e:
+ _get_logger().warning(f"Jira ticket fetch failed for {key}: {type(e).__name__}")
+ return JiraTicketFetchResult(
+ status="fetch_error",
+ key=key,
+ note=_append_note(_build_ticket_compliance_note(key, "fetch_error"), mismatch_note),
+ )
+
+
+async def try_fetch_jira_ticket(pr_title: str, branch: str) -> Optional[Dict[str, Any]]:
+ result = await fetch_jira_ticket_context(pr_title, branch)
+ return result.ticket
diff --git a/pr_agent/tools/pr_config.py b/pr_agent/tools/pr_config.py
index 24ecaab97b..e16f2aef3d 100644
--- a/pr_agent/tools/pr_config.py
+++ b/pr_agent/tools/pr_config.py
@@ -57,7 +57,8 @@ def _prepare_pr_configs(self) -> str:
skip_keys = ['ai_disclaimer', 'ai_disclaimer_title', 'ANALYTICS_FOLDER', 'secret_provider', "skip_keys", "app_id", "redirect",
'trial_prefix_message', 'no_eligible_message', 'identity_provider', 'ALLOWED_REPOS',
'APP_NAME', 'PERSONAL_ACCESS_TOKEN', 'shared_secret', 'key', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'user_token',
- 'private_key', 'private_key_id', 'client_id', 'client_secret', 'token', 'bearer_token', 'jira_api_token','webhook_secret']
+ 'private_key', 'private_key_id', 'client_id', 'client_secret', 'token', 'bearer_token', 'jira_api_email',
+ 'jira_api_token', 'jira_base_url', 'jira_cloud_id', 'webhook_secret']
partial_skip_keys = ['key', 'secret', 'token', 'private']
extra_skip_keys = get_settings().config.get('config.skip_keys', [])
if extra_skip_keys:
diff --git a/pr_agent/tools/pr_description.py b/pr_agent/tools/pr_description.py
index c629da98fa..2db2c6ba08 100644
--- a/pr_agent/tools/pr_description.py
+++ b/pr_agent/tools/pr_description.py
@@ -25,9 +25,7 @@
from pr_agent.git_providers.git_provider import get_main_pr_language
from pr_agent.log import get_logger
from pr_agent.servers.help import HelpMessage
-from pr_agent.tools.ticket_pr_compliance_check import (
- extract_and_cache_pr_tickets, extract_ticket_links_from_pr_description,
- extract_tickets)
+from pr_agent.tools.ticket_pr_compliance_check import extract_and_cache_pr_tickets
class PRDescription:
@@ -71,7 +69,8 @@ def __init__(self, pr_url: str, args: list = None,
"enable_custom_labels": get_settings().config.enable_custom_labels,
"custom_labels_class": "", # will be filled if necessary in 'set_custom_labels' function
"enable_semantic_files_types": get_settings().pr_description.enable_semantic_files_types,
- "related_tickets": "",
+ "related_tickets": [],
+ "ticket_compliance_note": "",
"include_file_summary_changes": len(self.git_provider.get_diff_files()) <= self.COLLAPSIBLE_FILE_LIST_THRESHOLD,
"duplicate_prompt_examples": get_settings().config.get("duplicate_prompt_examples", False),
"enable_pr_diagram": enable_pr_diagram,
@@ -129,6 +128,7 @@ async def run(self):
if not self.git_provider.is_supported(
"publish_file_comments") or not get_settings().pr_description.inline_file_summary:
pr_body += "\n\n" + changes_walkthrough + "___\n\n"
+
get_logger().debug("PR output", artifact={"title": pr_title, "body": pr_body})
# Add help text if gfm_markdown is supported
diff --git a/pr_agent/tools/pr_reviewer.py b/pr_agent/tools/pr_reviewer.py
index c4917f3597..3d97ba2f3c 100644
--- a/pr_agent/tools/pr_reviewer.py
+++ b/pr_agent/tools/pr_reviewer.py
@@ -15,7 +15,8 @@
from pr_agent.algo.token_handler import TokenHandler
from pr_agent.algo.utils import (ModelType, PRReviewHeader,
convert_to_markdown_v2, github_action_output,
- load_yaml, show_relevant_configurations)
+ load_yaml, normalize_ticket_requirement_text,
+ show_relevant_configurations)
from pr_agent.config_loader import get_settings
from pr_agent.git_providers import (get_git_provider,
get_git_provider_with_context)
@@ -23,8 +24,42 @@
get_main_pr_language)
from pr_agent.log import get_logger
from pr_agent.servers.help import HelpMessage
-from pr_agent.tools.ticket_pr_compliance_check import (
- extract_and_cache_pr_tickets, extract_tickets)
+from pr_agent.tools.ticket_pr_compliance_check import extract_and_cache_pr_tickets
+
+
+def append_ticket_compliance_note(ticket_note: str, extra_note: str) -> str:
+ ticket_note = (ticket_note or "").strip()
+ extra_note = (extra_note or "").strip()
+ if not ticket_note:
+ return extra_note
+ if not extra_note:
+ return ticket_note
+ return f"{ticket_note}\n\n{extra_note}"
+
+
+def build_suspected_ticket_mismatch_note(ticket_compliance_check) -> str:
+ if not isinstance(ticket_compliance_check, list) or not ticket_compliance_check:
+ return ""
+
+ has_not_compliant_requirements = False
+ for ticket_analysis in ticket_compliance_check:
+ if not isinstance(ticket_analysis, dict):
+ continue
+
+ fully_compliant_str = normalize_ticket_requirement_text(ticket_analysis.get("fully_compliant_requirements"))
+ not_compliant_str = normalize_ticket_requirement_text(ticket_analysis.get("not_compliant_requirements"))
+ if fully_compliant_str:
+ return ""
+ if not_compliant_str:
+ has_not_compliant_requirements = True
+
+ if not has_not_compliant_requirements:
+ return ""
+
+ return (
+ "Ticket compliance analysis found no Jira requirements that are clearly implemented by this PR. "
+ "The PR may reference the wrong Jira ticket, or the ticket scope may not match the code changes."
+ )
class PRReviewer:
@@ -97,6 +132,7 @@ def __init__(self, pr_url: str, is_answer: bool = False, is_auto: bool = False,
"enable_custom_labels": get_settings().config.enable_custom_labels,
"is_ai_metadata": get_settings().get("config.enable_ai_metadata", False),
"related_tickets": get_settings().get('related_tickets', []),
+ "ticket_compliance_note": "",
'duplicate_prompt_examples': get_settings().config.get('duplicate_prompt_examples', False),
"date": datetime.datetime.now().strftime('%Y-%m-%d'),
}
@@ -248,6 +284,11 @@ def _prepare_pr_review(self) -> str:
key_issues_to_review = data['review'].pop('key_issues_to_review')
data['review']['key_issues_to_review'] = key_issues_to_review
+ ticket_note = append_ticket_compliance_note(
+ self.vars.get("ticket_compliance_note", ""),
+ build_suspected_ticket_mismatch_note(data['review'].get('ticket_compliance_check')),
+ )
+
incremental_review_markdown_text = None
# Add incremental review section
if self.incremental.is_incremental:
@@ -256,9 +297,10 @@ def _prepare_pr_review(self) -> str:
incremental_review_markdown_text = f"Starting from commit {last_commit_url}"
markdown_text = convert_to_markdown_v2(data, self.git_provider.is_supported("gfm_markdown"),
- incremental_review_markdown_text,
- git_provider=self.git_provider,
- files=self.git_provider.get_diff_files())
+ incremental_review_markdown_text,
+ git_provider=self.git_provider,
+ files=self.git_provider.get_diff_files(),
+ ticket_note=ticket_note)
# Add help text if gfm_markdown is supported
if self.git_provider.is_supported("gfm_markdown") and get_settings().pr_reviewer.enable_help_text:
diff --git a/pr_agent/tools/ticket_pr_compliance_check.py b/pr_agent/tools/ticket_pr_compliance_check.py
index 523e21f921..adbc9e4cca 100644
--- a/pr_agent/tools/ticket_pr_compliance_check.py
+++ b/pr_agent/tools/ticket_pr_compliance_check.py
@@ -5,6 +5,7 @@
from pr_agent.git_providers import GithubProvider
from pr_agent.git_providers import AzureDevopsProvider
from pr_agent.log import get_logger
+from pr_agent.tickets.jira_cloud import fetch_jira_ticket_context
# Compile the regex pattern once, outside the function
GITHUB_TICKET_PATTERN = re.compile(
@@ -64,13 +65,32 @@ def extract_ticket_links_from_pr_description(pr_description, repo_path, base_url
return list(github_tickets)
-async def extract_tickets(git_provider):
+async def extract_tickets(git_provider) -> tuple[list | None, str]:
MAX_TICKET_CHARACTERS = 10000
+ jira_ticket_compliance_note = ""
try:
+ tickets_content = []
+
+ # Jira Cloud ticket extraction (prefix-only) from PR title, then branch.
+ # Supports basic, scoped-token, and explicit bearer/OAuth env configurations.
+ try:
+ pr_title = ""
+ if hasattr(git_provider, 'get_title'):
+ pr_title = git_provider.get_title() or ""
+ elif hasattr(git_provider, 'pr') and hasattr(git_provider.pr, 'title'):
+ pr_title = git_provider.pr.title or ""
+ pr_branch = git_provider.get_pr_branch() if hasattr(git_provider, 'get_pr_branch') else ""
+ jira_result = await fetch_jira_ticket_context(pr_title, pr_branch)
+ if jira_result.note:
+ jira_ticket_compliance_note = jira_result.note
+ if jira_result.ticket:
+ tickets_content.append(jira_result.ticket)
+ except Exception as e:
+ get_logger().warning(f"Failed to extract Jira ticket: {type(e).__name__}")
+
if isinstance(git_provider, GithubProvider):
user_description = git_provider.get_user_description()
tickets = extract_ticket_links_from_pr_description(user_description, git_provider.repo, git_provider.base_url_html)
- tickets_content = []
if tickets:
@@ -130,11 +150,10 @@ async def extract_tickets(git_provider):
'sub_issues': sub_issues_content # Store sub-issues content
})
- return tickets_content
+ return tickets_content or None, jira_ticket_compliance_note
elif isinstance(git_provider, AzureDevopsProvider):
tickets_info = git_provider.get_linked_work_items()
- tickets_content = []
for ticket in tickets_info:
try:
ticket_body_str = ticket.get("body", "")
@@ -156,21 +175,32 @@ async def extract_tickets(git_provider):
f"Error processing Azure DevOps ticket: {e}",
artifact={"traceback": traceback.format_exc()},
)
- return tickets_content
+ return tickets_content, jira_ticket_compliance_note
+
+ if tickets_content:
+ return tickets_content, jira_ticket_compliance_note
except Exception as e:
get_logger().error(f"Error extracting tickets error= {e}",
artifact={"traceback": traceback.format_exc()})
+ return None, jira_ticket_compliance_note
+
async def extract_and_cache_pr_tickets(git_provider, vars):
if not get_settings().get('pr_reviewer.require_ticket_analysis_review', False):
return
+ vars['ticket_compliance_note'] = ""
+
related_tickets = get_settings().get('related_tickets', [])
+ cached_ticket_compliance_note = get_settings().get('ticket_compliance_note', '')
if not related_tickets:
- tickets_content = await extract_tickets(git_provider)
+ tickets_content, jira_ticket_compliance_note = await extract_tickets(git_provider)
+
+ vars['ticket_compliance_note'] = jira_ticket_compliance_note or ""
+ get_settings().set('ticket_compliance_note', vars['ticket_compliance_note'])
if tickets_content:
# Store sub-issues along with main issues
@@ -189,6 +219,7 @@ async def extract_and_cache_pr_tickets(git_provider, vars):
else:
get_logger().info("Using cached tickets", artifact={"tickets": related_tickets})
vars['related_tickets'] = related_tickets
+ vars['ticket_compliance_note'] = cached_ticket_compliance_note or ''
def check_tickets_relevancy():
diff --git a/tests/unittest/test_convert_to_markdown.py b/tests/unittest/test_convert_to_markdown.py
index 0d18e03cec..c19c5236da 100644
--- a/tests/unittest/test_convert_to_markdown.py
+++ b/tests/unittest/test_convert_to_markdown.py
@@ -1,4 +1,5 @@
# Generated by CodiumAI
+import copy
import textwrap
from unittest.mock import Mock
@@ -163,6 +164,110 @@ def test_ticket_compliance(self):
assert convert_to_markdown_v2(input_data).strip() == expected_output.strip()
+ def test_ticket_compliance_is_rendered_after_score_with_note(self):
+ input_data = {'review': {
+ 'estimated_effort_to_review_[1-5]': '3',
+ 'score': '78/100',
+ 'ticket_compliance_check': [
+ {
+ 'ticket_url': 'https://example.com/ticket/FAKE-1234',
+ 'ticket_requirements': '- Requirement 1\n',
+ 'fully_compliant_requirements': '',
+ 'not_compliant_requirements': '- Requirement 1\n',
+ 'requires_further_human_verification': '',
+ }
+ ],
+ 'relevant_tests': 'Yes',
+ }}
+
+ result = convert_to_markdown_v2(
+ input_data,
+ ticket_note='PR title references Jira ticket `FAKE-1234` but branch name references `MOCK-5678`.',
+ )
+
+ assert '**🎫 Ticket compliance analysis ❌**' in result
+ assert result.index('Score') < result.index('**🎫 Ticket compliance analysis ❌**')
+ assert result.index('PR title references Jira ticket `FAKE-1234`') < result.index('**[FAKE-1234]')
+
+ def test_ticket_compliance_ignores_placeholder_bullets(self):
+ input_data = {'review': {
+ 'ticket_compliance_check': [
+ {
+ 'ticket_url': 'https://example.com/ticket/FAKE-1234',
+ 'ticket_requirements': '',
+ 'fully_compliant_requirements': '-\n',
+ 'not_compliant_requirements': '- Requirement 1\n',
+ 'requires_further_human_verification': '-\n',
+ }
+ ]
+ }}
+
+ result = convert_to_markdown_v2(input_data)
+
+ assert '**🎫 Ticket compliance analysis ❌**' in result
+ assert 'Compliant requirements:' not in result
+ assert 'Non-compliant requirements:' in result
+
+ def test_ticket_compliance_filters_placeholder_lines_from_mixed_content(self):
+ input_data = {'review': {
+ 'ticket_compliance_check': [
+ {
+ 'ticket_url': 'https://example.com/ticket/FAKE-1234',
+ 'ticket_requirements': '',
+ 'fully_compliant_requirements': '- Requirement 1\n-\n',
+ 'not_compliant_requirements': '*\n- Requirement 2\n',
+ 'requires_further_human_verification': '',
+ }
+ ]
+ }}
+
+ result = convert_to_markdown_v2(input_data)
+
+ assert '- Requirement 1' in result
+ assert '- Requirement 2' in result
+ assert '\n-\n' not in result
+ assert '\n*\n' not in result
+
+ def test_ticket_compliance_ignores_none_placeholders(self):
+ input_data = {'review': {
+ 'ticket_compliance_check': [
+ {
+ 'ticket_url': 'https://example.com/ticket/FAKE-1234',
+ 'ticket_requirements': '',
+ 'fully_compliant_requirements': '(none)\n',
+ 'not_compliant_requirements': '- Requirement 1\n',
+ 'requires_further_human_verification': '(none)\n',
+ }
+ ]
+ }}
+
+ result = convert_to_markdown_v2(input_data)
+
+ assert '**🎫 Ticket compliance analysis ❌**' in result
+ assert 'Compliant requirements:' not in result
+ assert '(none)' not in result
+
+ def test_convert_to_markdown_does_not_mutate_review_input(self):
+ input_data = {'review': {
+ 'score': '78/100',
+ 'todo_summary': 'Follow up later',
+ 'ticket_compliance_check': [
+ {
+ 'ticket_url': 'https://example.com/ticket/FAKE-1234',
+ 'ticket_requirements': '- Requirement 1\n',
+ 'fully_compliant_requirements': '',
+ 'not_compliant_requirements': '- Requirement 1\n',
+ 'requires_further_human_verification': '',
+ }
+ ],
+ }}
+
+ original_input_data = copy.deepcopy(input_data)
+
+ convert_to_markdown_v2(input_data)
+
+ assert input_data == original_input_data
+
def test_can_be_split(self):
input_data = {'review': {
'can_be_split': [
diff --git a/tests/unittest/test_jira_cloud_ticket_context.py b/tests/unittest/test_jira_cloud_ticket_context.py
new file mode 100644
index 0000000000..23f69d2d17
--- /dev/null
+++ b/tests/unittest/test_jira_cloud_ticket_context.py
@@ -0,0 +1,331 @@
+import asyncio
+
+from pr_agent.tickets import jira_cloud
+
+
+class TestJiraCloudTicketKeyExtraction:
+ def test_extract_from_pr_title_prefix(self):
+ assert jira_cloud.extract_jira_ticket_key("FAKE-1234 new feature", "") == "FAKE-1234"
+
+ def test_reject_zero_issue_number(self):
+ assert jira_cloud.extract_jira_ticket_key("FAKE-0 invalid", "FAKE-0-invalid") is None
+
+ def test_extract_from_branch_prefix(self):
+ assert jira_cloud.extract_jira_ticket_key("", "FAKE-1234-new-feature") == "FAKE-1234"
+
+ def test_precedence_title_over_branch(self):
+ assert jira_cloud.extract_jira_ticket_key("FAKE-1111 something", "MOCK-2222-new") == "FAKE-1111"
+
+ def test_fetch_jira_ticket_context_returns_mismatch_note_when_title_and_branch_differ(self, monkeypatch):
+ monkeypatch.delenv("JIRA_BASE_URL", raising=False)
+ monkeypatch.delenv("JIRA_API_EMAIL", raising=False)
+ monkeypatch.delenv("JIRA_API_TOKEN", raising=False)
+ monkeypatch.delenv("JIRA_AUTH_TYPE", raising=False)
+ monkeypatch.delenv("JIRA_CLOUD_ID", raising=False)
+
+ result = asyncio.run(jira_cloud.fetch_jira_ticket_context("FAKE-1111 something", "MOCK-2222-new"))
+
+ assert result.status == "not_configured"
+ assert result.key == "FAKE-1111"
+ assert "FAKE-1111" in result.note
+ assert "MOCK-2222" in result.note
+ assert "Please verify" in result.note
+
+ def test_branch_refs_heads_prefix(self):
+ assert jira_cloud.extract_jira_ticket_key("", "refs/heads/FAKE-1234-new") == "FAKE-1234"
+
+ def test_normalize_atlassian_jira_path(self):
+ assert (
+ jira_cloud._normalize_jira_base_url("https://example.atlassian.net/jira") == "https://example.atlassian.net"
+ )
+
+
+class TestJiraCloudFetch:
+ def test_extract_error_message_from_jira_error_payload(self):
+ class FakeResponse:
+ async def text(self):
+ return '{"errorMessages": ["Issue does not exist or you do not have permission to see it."]}'
+
+ message = asyncio.run(jira_cloud._extract_error_message(FakeResponse()))
+ assert message == "Issue does not exist or you do not have permission to see it."
+
+ def test_render_description_to_text_from_adf(self):
+ issue = {
+ "renderedFields": {},
+ "fields": {
+ "description": {
+ "type": "doc",
+ "content": [
+ {
+ "type": "paragraph",
+ "content": [
+ {"type": "text", "text": "Hello"},
+ {"type": "text", "text": " Jira"},
+ ],
+ }
+ ],
+ }
+ },
+ }
+
+ assert jira_cloud._render_description_to_text(issue) == "Hello Jira"
+
+ def test_try_fetch_jira_ticket_maps_fields(self, monkeypatch):
+ monkeypatch.setenv("JIRA_BASE_URL", "https://example.atlassian.net/jira")
+ monkeypatch.setenv("JIRA_API_EMAIL", "user@example.com")
+ monkeypatch.setenv("JIRA_API_TOKEN", "token")
+
+ async def fake_fetch_issue_json(session, auth, key):
+ assert auth.site_base_url == "https://example.atlassian.net"
+ assert auth.api_base_url == "https://example.atlassian.net"
+ assert auth.auth_mode == "basic"
+ assert key == "FAKE-1234"
+ return {
+ "fields": {
+ "summary": "Implement Jira support",
+ "labels": ["backend", "review"],
+ "subtasks": [
+ {"key": "SUB-2", "fields": {"summary": "Subtask 1"}},
+ {"key": "SUB-3", "fields": {"summary": "Subtask 2"}},
+ ],
+ },
+                "renderedFields": {"description": "Hello world\n"},
+ }
+
+ monkeypatch.setattr(jira_cloud, "_fetch_issue_json", fake_fetch_issue_json)
+
+ ticket = asyncio.run(jira_cloud.try_fetch_jira_ticket("FAKE-1234 new feature", "MOCK-9999-ignored"))
+ assert ticket is not None
+ assert ticket["ticket_id"] == "FAKE-1234"
+ assert ticket["ticket_url"] == "https://example.atlassian.net/browse/FAKE-1234"
+ assert ticket["title"] == "Implement Jira support"
+ assert "Hello" in ticket["body"]
+ assert ticket["labels"] == "backend, review"
+
+ assert "sub_issues" in ticket
+ assert ticket["sub_issues"][0]["ticket_url"] == "https://example.atlassian.net/browse/SUB-2"
+ assert ticket["sub_issues"][0]["title"] == "Subtask 1"
+ assert ticket["sub_issues"][0]["body"] == ""
+
+ def test_fetch_jira_ticket_context_uses_scoped_token_with_cloud_id(self, monkeypatch):
+ monkeypatch.setenv("JIRA_BASE_URL", "https://example.atlassian.net")
+ monkeypatch.setenv("JIRA_API_EMAIL", "user@example.com")
+ monkeypatch.setenv("JIRA_API_TOKEN", "token")
+ monkeypatch.setenv("JIRA_AUTH_TYPE", "scoped")
+ monkeypatch.setenv("JIRA_CLOUD_ID", "2672de8d-bce7-4cb5-9e65-128f96b0bd9a")
+
+ async def fake_fetch_issue_json(session, auth, key):
+ assert auth.auth_mode == "scoped"
+ assert auth.site_base_url == "https://example.atlassian.net"
+ assert auth.api_base_url == "https://api.atlassian.com/ex/jira/2672de8d-bce7-4cb5-9e65-128f96b0bd9a"
+ assert key == "FAKE-1234"
+ return {
+ "fields": {
+ "summary": "Implement Jira support",
+ "labels": [],
+ "subtasks": [],
+ },
+                "renderedFields": {"description": "Hello world\n"},
+ }
+
+ monkeypatch.setattr(jira_cloud, "_fetch_issue_json", fake_fetch_issue_json)
+
+ result = asyncio.run(jira_cloud.fetch_jira_ticket_context("FAKE-1234 new feature", "MOCK-9999-ignored"))
+ assert result.status == "fetched"
+ assert result.ticket is not None
+ assert result.ticket["ticket_url"] == "https://example.atlassian.net/browse/FAKE-1234"
+
+ def test_fetch_jira_ticket_context_returns_mismatch_note_when_ticket_is_fetched(self, monkeypatch):
+ monkeypatch.setenv("JIRA_BASE_URL", "https://example.atlassian.net")
+ monkeypatch.setenv("JIRA_API_EMAIL", "user@example.com")
+ monkeypatch.setenv("JIRA_API_TOKEN", "token")
+
+ async def fake_fetch_issue_json(session, auth, key):
+ return {
+ "fields": {
+ "summary": "Implement Jira support",
+ "labels": [],
+ "subtasks": [],
+ },
+                "renderedFields": {"description": "Hello world\n"},
+ }
+
+ monkeypatch.setattr(jira_cloud, "_fetch_issue_json", fake_fetch_issue_json)
+
+ result = asyncio.run(jira_cloud.fetch_jira_ticket_context("FAKE-1234 new feature", "MOCK-9999-ignored"))
+
+ assert result.status == "fetched"
+ assert result.ticket is not None
+ assert "FAKE-1234" in result.note
+ assert "MOCK-9999" in result.note
+
+ def test_fetch_jira_ticket_context_uses_scoped_token_with_implicit_detection(self, monkeypatch):
+ monkeypatch.setenv("JIRA_BASE_URL", "https://example.atlassian.net")
+ monkeypatch.setenv("JIRA_API_EMAIL", "user@example.com")
+ monkeypatch.setenv("JIRA_API_TOKEN", "token")
+ monkeypatch.delenv("JIRA_AUTH_TYPE", raising=False)
+ monkeypatch.setenv("JIRA_CLOUD_ID", "2672de8d-bce7-4cb5-9e65-128f96b0bd9a")
+
+ async def fake_fetch_issue_json(session, auth, key):
+ assert auth.auth_mode == "scoped"
+ assert auth.api_base_url == "https://api.atlassian.com/ex/jira/2672de8d-bce7-4cb5-9e65-128f96b0bd9a"
+ return {
+ "fields": {
+ "summary": "Implement Jira support",
+ "labels": [],
+ "subtasks": [],
+ },
+                "renderedFields": {"description": "Hello world\n"},
+ }
+
+ monkeypatch.setattr(jira_cloud, "_fetch_issue_json", fake_fetch_issue_json)
+
+ result = asyncio.run(jira_cloud.fetch_jira_ticket_context("FAKE-1234 new feature", "MOCK-9999-ignored"))
+ assert result.status == "fetched"
+ assert result.ticket is not None
+
+ def test_fetch_jira_ticket_context_discovers_cloud_id_for_bearer_token(self, monkeypatch):
+ monkeypatch.setenv("JIRA_BASE_URL", "https://example.atlassian.net")
+ monkeypatch.setenv("JIRA_API_TOKEN", "token")
+ monkeypatch.setenv("JIRA_AUTH_TYPE", "oauth")
+ monkeypatch.delenv("JIRA_CLOUD_ID", raising=False)
+ monkeypatch.delenv("JIRA_API_EMAIL", raising=False)
+
+ async def fake_discover_cloud_id(session, site_base_url):
+ assert site_base_url == "https://example.atlassian.net"
+ return "2672de8d-bce7-4cb5-9e65-128f96b0bd9a"
+
+ async def fake_fetch_issue_json(session, auth, key):
+ assert auth.auth_mode == "bearer"
+ assert auth.api_base_url == "https://api.atlassian.com/ex/jira/2672de8d-bce7-4cb5-9e65-128f96b0bd9a"
+ return {
+ "fields": {
+ "summary": "Implement Jira support",
+ "labels": [],
+ "subtasks": [],
+ },
+                "renderedFields": {"description": "Hello world\n"},
+ }
+
+ monkeypatch.setattr(jira_cloud, "_discover_cloud_id", fake_discover_cloud_id)
+ monkeypatch.setattr(jira_cloud, "_fetch_issue_json", fake_fetch_issue_json)
+
+ result = asyncio.run(jira_cloud.fetch_jira_ticket_context("FAKE-1234 new feature", "MOCK-9999-ignored"))
+ assert result.status == "fetched"
+ assert result.ticket is not None
+
+ def test_explicit_bearer_auth_takes_precedence_over_implicit_scoped_detection(self, monkeypatch):
+ monkeypatch.setenv("JIRA_BASE_URL", "https://example.atlassian.net")
+ monkeypatch.setenv("JIRA_API_EMAIL", "user@example.com")
+ monkeypatch.setenv("JIRA_API_TOKEN", "token")
+ monkeypatch.setenv("JIRA_AUTH_TYPE", "bearer")
+ monkeypatch.setenv("JIRA_CLOUD_ID", "2672de8d-bce7-4cb5-9e65-128f96b0bd9a")
+
+ async def fake_fetch_issue_json(session, auth, key):
+ assert auth.auth_mode == "bearer"
+ assert auth.api_base_url == "https://api.atlassian.com/ex/jira/2672de8d-bce7-4cb5-9e65-128f96b0bd9a"
+ return {
+ "fields": {
+ "summary": "Implement Jira support",
+ "labels": [],
+ "subtasks": [],
+ },
+                "renderedFields": {"description": "Hello world\n"},
+ }
+
+ monkeypatch.setattr(jira_cloud, "_fetch_issue_json", fake_fetch_issue_json)
+
+ result = asyncio.run(jira_cloud.fetch_jira_ticket_context("FAKE-1234 new feature", "MOCK-9999-ignored"))
+ assert result.status == "fetched"
+ assert result.ticket is not None
+
+ def test_fetch_jira_ticket_context_returns_note_when_scoped_token_missing_cloud_id(self, monkeypatch):
+ monkeypatch.setenv("JIRA_BASE_URL", "https://example.atlassian.net")
+ monkeypatch.setenv("JIRA_API_EMAIL", "user@example.com")
+ monkeypatch.setenv("JIRA_API_TOKEN", "token")
+ monkeypatch.setenv("JIRA_AUTH_TYPE", "scoped")
+ monkeypatch.delenv("JIRA_CLOUD_ID", raising=False)
+
+ result = asyncio.run(jira_cloud.fetch_jira_ticket_context("FAKE-1234 new feature", "FAKE-1234-new"))
+ assert result.status == "missing_config"
+ assert result.key == "FAKE-1234"
+ assert result.ticket is None
+ assert "configuration is missing" in result.note
+
+ def test_fetch_jira_ticket_context_returns_note_when_scoped_token_missing_email(self, monkeypatch):
+ monkeypatch.setenv("JIRA_BASE_URL", "https://example.atlassian.net")
+ monkeypatch.delenv("JIRA_API_EMAIL", raising=False)
+ monkeypatch.setenv("JIRA_API_TOKEN", "token")
+ monkeypatch.setenv("JIRA_AUTH_TYPE", "scoped")
+ monkeypatch.setenv("JIRA_CLOUD_ID", "2672de8d-bce7-4cb5-9e65-128f96b0bd9a")
+
+ result = asyncio.run(jira_cloud.fetch_jira_ticket_context("FAKE-1234 new feature", "FAKE-1234-new"))
+ assert result.status == "missing_config"
+ assert result.key == "FAKE-1234"
+ assert result.ticket is None
+ assert "configuration is missing" in result.note
+
+ def test_fetch_jira_ticket_context_returns_note_when_auth_type_is_unknown(self, monkeypatch):
+ monkeypatch.setenv("JIRA_BASE_URL", "https://example.atlassian.net")
+ monkeypatch.setenv("JIRA_API_EMAIL", "user@example.com")
+ monkeypatch.setenv("JIRA_API_TOKEN", "token")
+ monkeypatch.setenv("JIRA_AUTH_TYPE", "pat")
+
+ result = asyncio.run(jira_cloud.fetch_jira_ticket_context("FAKE-1234 new feature", "FAKE-1234-new"))
+ assert result.status == "missing_config"
+ assert result.key == "FAKE-1234"
+ assert result.ticket is None
+ assert "configuration is missing" in result.note
+
+ def test_try_fetch_jira_ticket_returns_none_without_env(self, monkeypatch):
+ monkeypatch.delenv("JIRA_BASE_URL", raising=False)
+ monkeypatch.delenv("JIRA_API_EMAIL", raising=False)
+ monkeypatch.delenv("JIRA_API_TOKEN", raising=False)
+ monkeypatch.delenv("JIRA_AUTH_TYPE", raising=False)
+ monkeypatch.delenv("JIRA_CLOUD_ID", raising=False)
+
+ ticket = asyncio.run(jira_cloud.try_fetch_jira_ticket("FAKE-1234 new feature", "FAKE-1234-new"))
+ assert ticket is None
+
+ def test_fetch_jira_ticket_context_returns_note_when_ticket_missing(self, monkeypatch):
+ monkeypatch.setenv("JIRA_BASE_URL", "https://example.atlassian.net")
+ monkeypatch.setenv("JIRA_API_EMAIL", "user@example.com")
+ monkeypatch.setenv("JIRA_API_TOKEN", "token")
+
+ async def fake_fetch_issue_json(session, auth, key):
+ return None
+
+ monkeypatch.setattr(jira_cloud, "_fetch_issue_json", fake_fetch_issue_json)
+
+ result = asyncio.run(jira_cloud.fetch_jira_ticket_context("FAKE-1234 new feature", "MOCK-9999-ignored"))
+ assert result.status == "not_found"
+ assert result.key == "FAKE-1234"
+ assert result.ticket is None
+ assert "FAKE-1234" in result.note
+ assert "ticket compliance analysis was skipped" in result.note
+
+ def test_fetch_jira_ticket_context_returns_note_when_env_missing(self, monkeypatch):
+ monkeypatch.delenv("JIRA_BASE_URL", raising=False)
+ monkeypatch.delenv("JIRA_API_EMAIL", raising=False)
+ monkeypatch.setenv("JIRA_API_TOKEN", "token")
+ monkeypatch.delenv("JIRA_AUTH_TYPE", raising=False)
+ monkeypatch.delenv("JIRA_CLOUD_ID", raising=False)
+
+ result = asyncio.run(jira_cloud.fetch_jira_ticket_context("FAKE-1234 new feature", "FAKE-1234-new"))
+ assert result.status == "missing_config"
+ assert result.key == "FAKE-1234"
+ assert result.ticket is None
+ assert "configuration is missing" in result.note
+
+ def test_fetch_jira_ticket_context_returns_no_note_when_jira_not_configured(self, monkeypatch):
+ monkeypatch.delenv("JIRA_BASE_URL", raising=False)
+ monkeypatch.delenv("JIRA_API_EMAIL", raising=False)
+ monkeypatch.delenv("JIRA_API_TOKEN", raising=False)
+ monkeypatch.delenv("JIRA_AUTH_TYPE", raising=False)
+ monkeypatch.delenv("JIRA_CLOUD_ID", raising=False)
+
+ result = asyncio.run(jira_cloud.fetch_jira_ticket_context("FAKE-1234 new feature", "FAKE-1234-new"))
+ assert result.status == "not_configured"
+ assert result.key == "FAKE-1234"
+ assert result.ticket is None
+ assert result.note == ""
diff --git a/tests/unittest/test_pr_reviewer_ticket_note.py b/tests/unittest/test_pr_reviewer_ticket_note.py
new file mode 100644
index 0000000000..a9ca14d2f4
--- /dev/null
+++ b/tests/unittest/test_pr_reviewer_ticket_note.py
@@ -0,0 +1,94 @@
+from pr_agent.tools.pr_reviewer import append_ticket_compliance_note
+from pr_agent.tools.pr_reviewer import build_suspected_ticket_mismatch_note
+
+
+def test_append_ticket_compliance_note():
+ note = "First note"
+ extra_note = "Second note"
+
+ result = append_ticket_compliance_note(note, extra_note)
+
+ assert result == "First note\n\nSecond note"
+
+
+def test_build_suspected_ticket_mismatch_note_when_all_requirements_are_non_compliant():
+ result = build_suspected_ticket_mismatch_note(
+ [
+ {
+ "fully_compliant_requirements": "",
+ "not_compliant_requirements": "- Requirement 1\n",
+ }
+ ]
+ )
+
+ assert "wrong Jira ticket" in result
+
+
+def test_build_suspected_ticket_mismatch_note_skips_when_any_requirement_is_compliant():
+ result = build_suspected_ticket_mismatch_note(
+ [
+ {
+ "fully_compliant_requirements": "- Requirement 1\n",
+ "not_compliant_requirements": "",
+ }
+ ]
+ )
+
+ assert result == ""
+
+
+def test_build_suspected_ticket_mismatch_note_ignores_empty_ticket_entries():
+ result = build_suspected_ticket_mismatch_note(
+ [
+ {
+ "fully_compliant_requirements": "",
+ "not_compliant_requirements": "- Requirement 1\n",
+ },
+ {
+ "fully_compliant_requirements": "",
+ "not_compliant_requirements": "",
+ },
+ ]
+ )
+
+ assert "wrong Jira ticket" in result
+
+
+def test_build_suspected_ticket_mismatch_note_ignores_placeholder_bullets():
+ result = build_suspected_ticket_mismatch_note(
+ [
+ {
+ "fully_compliant_requirements": "-\n",
+ "not_compliant_requirements": "- Requirement 1\n",
+ }
+ ]
+ )
+
+ assert "wrong Jira ticket" in result
+
+
+def test_build_suspected_ticket_mismatch_note_ignores_none_placeholders():
+ result = build_suspected_ticket_mismatch_note(
+ [
+ {
+ "fully_compliant_requirements": "(none)\n",
+ "not_compliant_requirements": "- Requirement 1\n",
+ }
+ ]
+ )
+
+ assert "wrong Jira ticket" in result
+
+
+def test_build_suspected_ticket_mismatch_note_skips_non_dict_entries():
+ result = build_suspected_ticket_mismatch_note(
+ [
+ {
+ "fully_compliant_requirements": "",
+ "not_compliant_requirements": "- Requirement 1\n",
+ },
+ "unexpected",
+ ]
+ )
+
+ assert "wrong Jira ticket" in result
diff --git a/tests/unittest/test_ticket_pr_compliance_check.py b/tests/unittest/test_ticket_pr_compliance_check.py
new file mode 100644
index 0000000000..fe364be8a4
--- /dev/null
+++ b/tests/unittest/test_ticket_pr_compliance_check.py
@@ -0,0 +1,158 @@
+import asyncio
+
+from pr_agent.config_loader import get_settings
+from pr_agent.tools import ticket_pr_compliance_check
+
+
+class FakeGitProvider:
+ def __init__(self, title: str, branch: str):
+ self.pr = type("PR", (), {"title": title})()
+ self._branch = branch
+
+ def get_pr_branch(self):
+ return self._branch
+
+
+def test_extract_tickets_prefers_title_ticket_on_mismatch(monkeypatch):
+ async def fake_fetch_jira_ticket_context(pr_title, branch):
+ if pr_title == "FAKE-1234 add sample feature flow":
+ return type(
+ "Result",
+ (),
+ {
+ "note": (
+ "PR title references Jira ticket `FAKE-1234` but branch name references `MOCK-5678`. "
+ "Please verify that the correct Jira ticket is used in the PR metadata."
+ ),
+ "ticket": {
+ "ticket_id": "FAKE-1234",
+ "ticket_url": "https://example.com/FAKE-1234",
+ "title": "Wrong ticket",
+ "body": "",
+ "labels": "",
+ },
+ },
+ )()
+
+ raise AssertionError(f"Unexpected fetch args: {pr_title!r}, {branch!r}")
+
+ monkeypatch.setattr(ticket_pr_compliance_check, "fetch_jira_ticket_context", fake_fetch_jira_ticket_context)
+
+ tickets, note = asyncio.run(
+ ticket_pr_compliance_check.extract_tickets(
+ FakeGitProvider(
+ "FAKE-1234 add sample feature flow",
+ "MOCK-5678-sample-feature-flow",
+ )
+ )
+ )
+
+ assert [ticket["ticket_id"] for ticket in tickets] == ["FAKE-1234"]
+ assert "FAKE-1234" in note
+ assert "MOCK-5678" in note
+ assert note.count("PR title references Jira ticket") == 1
+
+
+def test_extract_and_cache_pr_tickets_preserves_cached_ticket_note(monkeypatch):
+ async def fake_extract_tickets(_git_provider):
+ return (
+ [{"ticket_id": "FAKE-5678", "ticket_url": "https://example.com/FAKE-5678", "title": "Ticket"}],
+ "cached note",
+ )
+
+ monkeypatch.setattr(ticket_pr_compliance_check, "extract_tickets", fake_extract_tickets)
+
+ settings = get_settings()
+ previous_require_ticket_analysis = settings.get("pr_reviewer.require_ticket_analysis_review", False)
+ previous_related_tickets = settings.get("related_tickets", [])
+ previous_ticket_note = settings.get("ticket_compliance_note", "")
+
+ try:
+ settings.set("pr_reviewer.require_ticket_analysis_review", True)
+ settings.set("related_tickets", [])
+ settings.set("ticket_compliance_note", "")
+
+ first_vars = {}
+ asyncio.run(ticket_pr_compliance_check.extract_and_cache_pr_tickets(object(), first_vars))
+
+ second_vars = {}
+ asyncio.run(ticket_pr_compliance_check.extract_and_cache_pr_tickets(object(), second_vars))
+
+ assert first_vars["ticket_compliance_note"] == "cached note"
+ assert second_vars["ticket_compliance_note"] == "cached note"
+ finally:
+ settings.set("pr_reviewer.require_ticket_analysis_review", previous_require_ticket_analysis)
+ settings.set("related_tickets", previous_related_tickets)
+ settings.set("ticket_compliance_note", previous_ticket_note)
+
+
+def test_extract_tickets_preserves_title_fetch_note_on_mismatch(monkeypatch):
+ async def fake_fetch_jira_ticket_context(pr_title, branch):
+ if pr_title == "FAKE-1234 add sample feature flow":
+ return type(
+ "Result",
+ (),
+ {
+ "note": "Title ticket note",
+ "ticket": {
+ "ticket_id": "FAKE-1234",
+ "ticket_url": "https://example.com/FAKE-1234",
+ "title": "Wrong ticket",
+ "body": "",
+ "labels": "",
+ },
+ },
+ )()
+
+ raise AssertionError(f"Unexpected fetch args: {pr_title!r}, {branch!r}")
+
+ monkeypatch.setattr(ticket_pr_compliance_check, "fetch_jira_ticket_context", fake_fetch_jira_ticket_context)
+
+ tickets, note = asyncio.run(
+ ticket_pr_compliance_check.extract_tickets(
+ FakeGitProvider(
+ "FAKE-1234 add sample feature flow",
+ "MOCK-5678-sample-feature-flow",
+ )
+ )
+ )
+
+ assert [ticket["ticket_id"] for ticket in tickets] == ["FAKE-1234"]
+ assert "Title ticket note" in note
+ assert "MOCK-5678" not in note
+
+
+def test_extract_tickets_preserves_title_failure_note_on_mismatch(monkeypatch):
+ async def fake_fetch_jira_ticket_context(pr_title, branch):
+ if pr_title == "FAKE-1234 add sample feature flow":
+ return type(
+ "Result",
+ (),
+ {
+ "note": (
+ "Jira ticket `FAKE-1234` was detected in the PR title but was not found.\n\n"
+ "PR title references Jira ticket `FAKE-1234` but branch name references `MOCK-5678`. "
+ "Please verify that the correct Jira ticket is used in the PR metadata."
+ ),
+ "ticket": None,
+ },
+ )()
+
+ raise AssertionError(f"Unexpected fetch args: {pr_title!r}, {branch!r}")
+
+ monkeypatch.setattr(ticket_pr_compliance_check, "fetch_jira_ticket_context", fake_fetch_jira_ticket_context)
+
+ tickets, note = asyncio.run(
+ ticket_pr_compliance_check.extract_tickets(
+ FakeGitProvider(
+ "FAKE-1234 add sample feature flow",
+ "MOCK-5678-sample-feature-flow",
+ )
+ )
+ )
+
+ assert tickets is None
+ assert "FAKE-1234" in note
+ assert "MOCK-5678" in note
+ assert "not found" in note
+ assert note.count("PR title references Jira ticket") == 1