diff --git a/charts/openab/files/project-screening/screen_once.sh b/charts/openab/files/project-screening/screen_once.sh
new file mode 100644
index 0000000..78b4193
--- /dev/null
+++ b/charts/openab/files/project-screening/screen_once.sh
@@ -0,0 +1,422 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+PROJECT_OWNER="${PROJECT_OWNER:-openabdev}"
+PROJECT_NUMBER="${PROJECT_NUMBER:-1}"
+INCOMING_STATUS_NAME="${INCOMING_STATUS_NAME:-Incoming}"
+SCREENING_STATUS_NAME="${SCREENING_STATUS_NAME:-PR-Screening}"
+REPORT_TO_STDOUT="${REPORT_TO_STDOUT:-true}"
+PROJECT_QUERY_EXTRA="${PROJECT_QUERY_EXTRA:-}"
+SENDER_CONTEXT_JSON="${SENDER_CONTEXT_JSON:-}"
+PROMPT_TEMPLATE="${PROMPT_TEMPLATE:-/opt/openab-project-screening/screening_prompt.md}"
+CODEX_AUTH_JSON_SOURCE="${CODEX_AUTH_JSON_SOURCE:-/opt/openab-project-screening-auth/auth.json}"
+DISCORD_BOT_TOKEN="${DISCORD_BOT_TOKEN:-}"
+DISCORD_REPORT_CHANNEL_ID="${DISCORD_REPORT_CHANNEL_ID:-}"
+WORK_DIR="${WORK_DIR:-/tmp/openab-project-screening}"
+HOME_DIR="${HOME:-/tmp/openab-project-screening-home}"
+
+# Print the current UTC time in ISO-8601 format (used as the log prefix).
+timestamp() {
+  date -u +"%Y-%m-%dT%H:%M:%SZ"
+}
+
+# Log a timestamped message to stdout.
+log() {
+  printf '[%s] %s\n' "$(timestamp)" "$*"
+}
+
+# Exit with an error if the given command is not available on PATH.
+# Arguments: $1 - command name.
+require_cmd() {
+  if ! command -v "$1" >/dev/null 2>&1; then
+    log "missing required command: $1"
+    exit 1
+  fi
+}
+
+# Exit with an error if the named environment variable is unset or empty.
+# Arguments: $1 - variable name (dereferenced indirectly via ${!name}).
+require_env() {
+  local name="$1"
+  if [[ -z "${!name:-}" ]]; then
+    log "missing required environment variable: $name"
+    exit 1
+  fi
+}
+
+# Build the gh item-list search query: items in the Incoming status lane,
+# optionally narrowed by PROJECT_QUERY_EXTRA.
+project_query() {
+  if [[ -n "$PROJECT_QUERY_EXTRA" ]]; then
+    printf 'status:"%s" %s' "$INCOMING_STATUS_NAME" "$PROJECT_QUERY_EXTRA"
+  else
+    printf 'status:"%s"' "$INCOMING_STATUS_NAME"
+  fi
+}
+
+# Run `gh project view` and extract a value with the given jq expression.
+project_view_jq() {
+  local jq_expr="$1"
+  gh project view "$PROJECT_NUMBER" \
+    --owner "$PROJECT_OWNER" \
+    --format json \
+    --jq "$jq_expr"
+}
+
+# Run `gh project field-list` and extract a value with the given jq expression.
+field_list_jq() {
+  local jq_expr="$1"
+  gh project field-list "$PROJECT_NUMBER" \
+    --owner "$PROJECT_OWNER" \
+    --format json \
+    --jq "$jq_expr"
+}
+
+# Fetch the first Incoming item (limit 1) and extract a field via jq.
+# NOTE(review): ordering of item-list results is whatever gh returns —
+# confirm it is stable if "first" is meant to be deterministic.
+incoming_item_jq() {
+  local jq_expr="$1"
+  gh project item-list "$PROJECT_NUMBER" \
+    --owner "$PROJECT_OWNER" \
+    --query "$(project_query)" \
+    --limit 1 \
+    --format json \
+    --jq "$jq_expr"
+}
+
+# Fetch full JSON details for the claimed item from GitHub.
+# Arguments: $1 - item type (PullRequest|Issue|other), $2 - number, $3 - repo.
+# Outputs: JSON on stdout. Unknown types get a minimal synthesized object so
+# downstream prompt building still has something to embed.
+fetch_content_json() {
+  local item_type="$1"
+  local item_number="$2"
+  local repo="$3"
+
+  case "$item_type" in
+    PullRequest)
+      gh pr view "$item_number" \
+        --repo "$repo" \
+        --json title,number,body,author,files,headRefName,baseRefName,url
+      ;;
+    Issue)
+      gh issue view "$item_number" \
+        --repo "$repo" \
+        --json title,number,body,author,labels,url
+      ;;
+    *)
+      printf '{"type":"%s","number":"%s","repository":"%s"}\n' \
+        "$item_type" "$item_number" "$repo"
+      ;;
+  esac
+}
+
+# Extract one field from the SENDER_CONTEXT_JSON blob via node.
+# Arguments: $1 - field name. Outputs: the value (no newline), or nothing
+# when the blob or field is absent.
+# NOTE(review): JSON.parse throws on malformed SENDER_CONTEXT_JSON, making
+# node exit non-zero, which aborts the whole script under `set -e` — confirm
+# callers always pass valid JSON.
+sender_context_field() {
+  local field="$1"
+  FIELD_NAME="$field" SENDER_CONTEXT_JSON="$SENDER_CONTEXT_JSON" node <<'EOF'
+const field = process.env.FIELD_NAME;
+const raw = process.env.SENDER_CONTEXT_JSON || "";
+if (!raw || !field) process.exit(0);
+const obj = JSON.parse(raw);
+const value = obj[field];
+if (value !== undefined && value !== null) process.stdout.write(String(value));
+EOF
+}
+
+# POST a JSON payload to the Discord API.
+# Arguments: $1 - full API URL, $2 - JSON payload string.
+# Outputs: response body on stdout. Returns 0 on 2xx, 1 otherwise.
+# Handles 429 rate limiting with a bounded retry loop; the original retried
+# forever and leaked the temp file when curl itself failed under set -e.
+discord_post_json() {
+  local url="$1"
+  local payload="$2"
+  local response_file status retry_after attempts
+  response_file="$(mktemp)"
+  attempts=0
+
+  while true; do
+    # `|| status=000` keeps a transport-level curl failure from aborting the
+    # script mid-function (set -e) before the temp file is cleaned up.
+    status="$(
+      curl -sS \
+        -o "$response_file" \
+        -w '%{http_code}' \
+        -X POST \
+        -H "Authorization: Bot $DISCORD_BOT_TOKEN" \
+        -H "Content-Type: application/json" \
+        --data "$payload" \
+        "$url"
+    )" || status=000
+
+    if [[ "$status" == "429" ]]; then
+      attempts=$((attempts + 1))
+      if (( attempts > 5 )); then
+        log "Discord API rate limited too many times; giving up"
+        rm -f "$response_file"
+        return 1
+      fi
+      retry_after="$(
+        RESPONSE_FILE="$response_file" node <<'EOF'
+const fs = require('fs');
+const raw = fs.readFileSync(process.env.RESPONSE_FILE, 'utf8');
+const obj = JSON.parse(raw || '{}');
+process.stdout.write(String(obj.retry_after ?? '1'));
+EOF
+      )"
+      log "Discord API rate limited; retrying after ${retry_after}s"
+      sleep "$retry_after"
+      continue
+    fi
+
+    if [[ "$status" != 2* ]]; then
+      log "Discord API request failed status=$status"
+      cat "$response_file" >&2
+      rm -f "$response_file"
+      return 1
+    fi
+
+    cat "$response_file"
+    rm -f "$response_file"
+    return 0
+  done
+}
+
+# GET a Discord API URL with bot authorization; prints the raw response body.
+discord_get_json() {
+  local url="$1"
+  curl -sS \
+    -H "Authorization: Bot $DISCORD_BOT_TOKEN" \
+    -H "Content-Type: application/json" \
+    "$url"
+}
+
+# Build a message-create payload. Mention parsing is disabled (parse: [])
+# so report text can never ping users or roles.
+discord_message_payload() {
+  local content="$1"
+  CONTENT="$content" node <<'EOF'
+const content = process.env.CONTENT || "";
+process.stdout.write(JSON.stringify({
+  content,
+  allowed_mentions: { parse: [] }
+}));
+EOF
+}
+
+# Build a thread-create payload. Names are truncated to Discord's 100-char
+# limit; 1440 = auto-archive after 24h of inactivity.
+discord_thread_payload() {
+  local name="$1"
+  THREAD_NAME="$name" node <<'EOF'
+const name = process.env.THREAD_NAME || "project-screening-report";
+process.stdout.write(JSON.stringify({
+  name: name.slice(0, 100),
+  auto_archive_duration: 1440
+}));
+EOF
+}
+
+# Extract the `id` field from a Discord API JSON response (empty if absent).
+discord_extract_id() {
+  local response="$1"
+  RESPONSE_JSON="$response" node <<'EOF'
+const raw = process.env.RESPONSE_JSON || "{}";
+const obj = JSON.parse(raw);
+if (obj.id) process.stdout.write(String(obj.id));
+EOF
+}
+
+# Resolve a channel id to a postable parent channel: if the id refers to a
+# thread (Discord channel types 10/11/12), return its parent_id, otherwise
+# return the id itself. Outputs nothing if the lookup response has no id.
+discord_resolve_parent_channel_id() {
+  local channel_id="$1"
+  local response
+  response="$(discord_get_json "https://discord.com/api/v10/channels/${channel_id}")"
+  RESPONSE_JSON="$response" node <<'EOF'
+const obj = JSON.parse(process.env.RESPONSE_JSON || "{}");
+const threadTypes = new Set([10, 11, 12]);
+if (threadTypes.has(obj.type) && obj.parent_id) {
+  process.stdout.write(String(obj.parent_id));
+} else if (obj.id) {
+  process.stdout.write(String(obj.id));
+}
+EOF
+}
+
+# Build a human-readable thread name "Screening: #<n> <title>" with
+# whitespace collapsed, truncated to Discord's 100-char thread name limit.
+discord_thread_name() {
+  local item_number="$1"
+  local item_title="$2"
+  ITEM_NUMBER="$item_number" ITEM_TITLE="$item_title" node <<'EOF'
+const number = process.env.ITEM_NUMBER || "item";
+const title = (process.env.ITEM_TITLE || "")
+  .replace(/\s+/g, " ")
+  .trim();
+const base = `Screening: #${number}${title ? ` ${title}` : ""}`.trim();
+process.stdout.write(base.slice(0, 100) || `Screening: #${number}`);
+EOF
+}
+
+# Deliver the screening report to Discord: post a starter message in the
+# parent channel, open a thread on it, then stream the report into the thread.
+# Arguments: $1 - item number, $2 - title, $3 - URL, $4 - report file path.
+# Returns 0 (soft skip) when delivery is unconfigured; 1 when a configured
+# delivery attempt fails part-way through.
+post_report_to_discord() {
+  local item_number="$1"
+  local item_title="$2"
+  local item_url="$3"
+  local report_file="$4"
+  local channel_id starter_content starter_response starter_message_id thread_name thread_response thread_id
+
+  if [[ -z "$DISCORD_BOT_TOKEN" ]]; then
+    log "Discord report delivery skipped: DISCORD_BOT_TOKEN not set"
+    return 0
+  fi
+
+  # Prefer the explicit report channel; fall back to the sender's channel.
+  channel_id="$DISCORD_REPORT_CHANNEL_ID"
+  if [[ -z "$channel_id" ]]; then
+    channel_id="$(sender_context_field channel_id)"
+  fi
+
+  if [[ -z "$channel_id" ]]; then
+    log "Discord report delivery skipped: no report channel id available"
+    return 0
+  fi
+
+  # Threads cannot be created inside threads, so climb to the parent channel.
+  channel_id="$(discord_resolve_parent_channel_id "$channel_id")"
+  if [[ -z "$channel_id" ]]; then
+    log "Discord report delivery skipped: failed to resolve parent report channel id"
+    return 0
+  fi
+
+  starter_content="🔍 **PR Screening** — [#${item_number}](${item_url})
+${item_title}
+Status: moved to ${SCREENING_STATUS_NAME}"
+  starter_response="$(
+    discord_post_json \
+      "https://discord.com/api/v10/channels/${channel_id}/messages" \
+      "$(discord_message_payload "$starter_content")"
+  )"
+  starter_message_id="$(discord_extract_id "$starter_response")"
+
+  if [[ -z "$starter_message_id" ]]; then
+    log "Discord report delivery failed: no starter message id returned"
+    return 1
+  fi
+
+  thread_name="$(discord_thread_name "$item_number" "$item_title")"
+  thread_response="$(
+    discord_post_json \
+      "https://discord.com/api/v10/channels/${channel_id}/messages/${starter_message_id}/threads" \
+      "$(discord_thread_payload "$thread_name")"
+  )"
+  thread_id="$(discord_extract_id "$thread_response")"
+
+  if [[ -z "$thread_id" ]]; then
+    log "Discord report delivery failed: no thread id returned"
+    return 1
+  fi
+
+  # Discord messages cap at 2000 chars; 1800 leaves formatting headroom.
+  # NOTE(review): fold may split markdown code fences across chunks — confirm
+  # this is acceptable for report rendering.
+  while IFS= read -r chunk || [[ -n "$chunk" ]]; do
+    [[ -z "$chunk" ]] && continue
+    discord_post_json \
+      "https://discord.com/api/v10/channels/${thread_id}/messages" \
+      "$(discord_message_payload "$chunk")" >/dev/null
+  done < <(fold -s -w 1800 "$report_file")
+
+  log "report delivered to Discord thread ${thread_id}"
+}
+
+# Assemble the Codex prompt file: sender context, the prompt template, board
+# context, an item summary, and the raw item JSON.
+# Arguments: $1 item id, $2 type, $3 number, $4 repo, $5 title, $6 url,
+#            $7 detail JSON, $8 output path.
+build_prompt() {
+  local item_id="$1"
+  local item_type="$2"
+  local item_number="$3"
+  local repo="$4"
+  local item_title="$5"
+  local item_url="$6"
+  local detail_json="$7"
+  local prompt_file="$8"
+
+  {
+    printf '\n'
+    printf '%s\n' "$SENDER_CONTEXT_JSON"
+    printf '\n\n'
+    cat "$PROMPT_TEMPLATE"
+    printf '\n## Board Context\n\n'
+    printf -- '- Claimed project item ID: `%s`\n' "$item_id"
+    printf -- '- Status transition: `%s` -> `%s`\n' "$INCOMING_STATUS_NAME" "$SCREENING_STATUS_NAME"
+    printf -- '- Project owner: `%s`\n' "$PROJECT_OWNER"
+    printf -- '- Project number: `%s`\n' "$PROJECT_NUMBER"
+    printf -- '- Current expectation: clarify intent, rewrite the implementation prompt, and prepare the item for Masami or Pahud follow-up\n'
+    printf '\n## Item Summary\n\n'
+    printf -- '- Type: `%s`\n' "$item_type"
+    printf -- '- Repository: `%s`\n' "$repo"
+    printf -- '- Number: `%s`\n' "$item_number"
+    printf -- '- Title: `%s`\n' "$item_title"
+    printf -- '- URL: %s\n' "$item_url"
+    printf '\n## Source Data\n\n'
+    printf '```json\n'
+    printf '%s\n' "$detail_json"
+    printf '```\n'
+  } >"$prompt_file"
+}
+
+# Run codex non-interactively in a read-only sandbox; the final assistant
+# message is written to the report file, everything else is discarded.
+generate_report() {
+  local prompt_file="$1"
+  local report_file="$2"
+
+  codex exec \
+    --skip-git-repo-check \
+    --cd "$WORK_DIR" \
+    --sandbox read-only \
+    --ephemeral \
+    --color never \
+    --output-last-message "$report_file" \
+    - <"$prompt_file" >/dev/null
+}
+
+# Entry point: validate prerequisites, claim one Incoming item, generate a
+# screening report, and deliver it to Discord and/or stdout.
+main() {
+  require_cmd bash
+  require_cmd gh
+  require_cmd codex
+  require_cmd curl
+  require_cmd node
+  require_env GH_TOKEN
+  require_env SENDER_CONTEXT_JSON
+  if [[ ! -f "$CODEX_AUTH_JSON_SOURCE" ]]; then
+    log "missing Codex auth source file: $CODEX_AUTH_JSON_SOURCE"
+    exit 1
+  fi
+
+  mkdir -p "$WORK_DIR" "$HOME_DIR/.codex"
+  export HOME="$HOME_DIR"
+  cp "$CODEX_AUTH_JSON_SOURCE" "$HOME/.codex/auth.json"
+
+  # Fetch the candidate item ONCE so all fields come from the same snapshot.
+  # The previous pattern issued one gh item-list call per field, which was
+  # racy (the "first" item could change between calls) and cost 6x the API
+  # requests.
+  local items_json
+  items_json="$(
+    gh project item-list "$PROJECT_NUMBER" \
+      --owner "$PROJECT_OWNER" \
+      --query "$(project_query)" \
+      --limit 1 \
+      --format json
+  )"
+
+  local item_id item_type item_number repo item_title item_url
+  {
+    IFS= read -r item_id
+    IFS= read -r item_type
+    IFS= read -r item_number
+    IFS= read -r repo
+    IFS= read -r item_title
+    IFS= read -r item_url
+  } < <(
+    ITEMS_JSON="$items_json" node <<'EOF'
+const obj = JSON.parse(process.env.ITEMS_JSON || '{}');
+const item = (obj.items && obj.items[0]) || {};
+const content = item.content || {};
+// One field per line; embedded whitespace runs (incl. newlines in titles)
+// are flattened to single spaces so the line-oriented read stays aligned.
+for (const v of [item.id, content.type, content.number,
+                 content.repository, content.title, content.url]) {
+  const s = (v === undefined || v === null) ? '' : String(v).replace(/\s+/g, ' ').trim();
+  process.stdout.write(s + '\n');
+}
+EOF
+  )
+
+  if [[ -z "$item_id" ]]; then
+    log "no Incoming items found"
+    exit 0
+  fi
+
+  if [[ -z "$item_type" || -z "$item_number" || -z "$repo" ]]; then
+    log "Incoming item is missing required metadata; refusing to claim"
+    exit 1
+  fi
+
+  local project_id status_field_id screening_option_id
+  project_id="$(project_view_jq '.id')"
+  status_field_id="$(field_list_jq '.fields[] | select(.name=="Status") | .id')"
+  screening_option_id="$(
+    field_list_jq ".fields[] | select(.name==\"Status\") | .options[] | select(.name==\"$SCREENING_STATUS_NAME\") | .id"
+  )"
+
+  if [[ -z "$project_id" || -z "$status_field_id" || -z "$screening_option_id" ]]; then
+    log "failed to resolve project metadata for claim operation"
+    exit 1
+  fi
+
+  # Claim the item by flipping its Status single-select to the screening lane.
+  gh project item-edit \
+    --id "$item_id" \
+    --project-id "$project_id" \
+    --field-id "$status_field_id" \
+    --single-select-option-id "$screening_option_id" >/dev/null
+  log "claimed item $item_id into $SCREENING_STATUS_NAME"
+
+  local detail_json
+  detail_json="$(fetch_content_json "$item_type" "$item_number" "$repo")"
+
+  local stamp prompt_file report_file
+  stamp="$(date -u +%Y%m%dT%H%M%SZ)"
+  prompt_file="$WORK_DIR/${stamp}-prompt.md"
+  report_file="$WORK_DIR/${stamp}-report.md"
+
+  build_prompt \
+    "$item_id" \
+    "$item_type" \
+    "$item_number" \
+    "$repo" \
+    "$item_title" \
+    "$item_url" \
+    "$detail_json" \
+    "$prompt_file"
+
+  generate_report "$prompt_file" "$report_file"
+  log "report generated for $repo#$item_number"
+
+  post_report_to_discord "$item_number" "$item_title" "$item_url" "$report_file"
+
+  if [[ "$REPORT_TO_STDOUT" == "true" ]]; then
+    printf '%s\n' '--- BEGIN OPENAB PROJECT SCREENING REPORT ---'
+    cat "$report_file"
+    printf '\n%s\n' '--- END OPENAB PROJECT SCREENING REPORT ---'
+  fi
+}
+
+main "$@"
diff --git a/charts/openab/files/project-screening/screening_prompt.md b/charts/openab/files/project-screening/screening_prompt.md
new file mode 100644
index 0000000..15afccc
--- /dev/null
+++ b/charts/openab/files/project-screening/screening_prompt.md
@@ -0,0 +1,101 @@
+# OpenAB PR-Screening Report Prompt
+
+You are generating a screening report for the OpenAB project board.
+
+## Workflow Context
+
+- Board flow: `Incoming` -> `PR-Screening` -> human or agent follow-up
+- After this screening pass, the Masami or Pahud agent will pick up the item for deeper review and possible merge work
+- The purpose of this report is to clarify the item's intent and rewrite the implementation prompt so the next agent has a tighter brief
+
+## Required Output Sections
+
+Produce a Markdown report with exactly these sections, in this order:
+
+1. `Intent`
+2. `Feat`
+3. `Who It Serves`
+4. `Rewritten Prompt`
+5. `Merge Pitch`
+6. `Best-Practice Comparison`
+7. `Implementation Options`
+8. `Comparison Table`
+9. `Recommendation`
+
+## Section Requirements
+
+### Intent
+
+- State what the PR or issue is trying to achieve
+- Call out the user-visible or operator-visible problem being solved
+- Be concrete, not vague
+
+### Feat
+
+- Summarize the behavioral change or feature in plain language
+- Note whether the item is a feature, fix, refactor, docs improvement, or release operation
+
+### Who It Serves
+
+- Identify the primary beneficiary
+- Examples: Discord end users, Slack users, deployers, maintainers, agent runtime operators, reviewers
+
+### Rewritten Prompt
+
+- Rewrite the item into a cleaner implementation brief for a coding agent
+- Make the prompt more specific, more testable, and more mergeable
+- Keep it concise but operational
+
+### Merge Pitch
+
+- Write a short pitch for why this item is worth moving forward
+- Include the risk profile and likely reviewer concern
+
+### Best-Practice Comparison
+
+Compare the proposed direction against these reference systems:
+
+- OpenClaw:
+ - gateway-owned scheduling
+ - durable job persistence
+ - isolated executions
+ - explicit delivery routing
+ - retry/backoff and run logs
+- Hermes Agent:
+ - gateway daemon tick model
+ - file locking to prevent overlap
+ - atomic writes for persisted state
+ - fresh session per scheduled run
+ - self-contained prompts for scheduled tasks
+
+Do not force a comparison where it does not fit. Instead, say which principles are relevant and which are not.
+
+### Implementation Options
+
+- Think of at least 3 ways to implement or evolve the item
+- Each option should be meaningfully different
+- Include one conservative option, one balanced option, and one more ambitious option where possible
+
+### Comparison Table
+
+Add a table comparing the options across:
+
+- Speed to ship
+- Complexity
+- Reliability
+- Maintainability
+- User impact
+- Fit for OpenAB right now
+
+### Recommendation
+
+- Recommend one path
+- Explain why it is the right step for future merge discussion
+- Mention any follow-up split or sequencing if needed
+
+## Tone
+
+- Direct
+- Technical
+- Pragmatic
+- Useful to a maintainer deciding whether to advance the item
diff --git a/charts/openab/templates/_helpers.tpl b/charts/openab/templates/_helpers.tpl
index 770d557..d18b0da 100644
--- a/charts/openab/templates/_helpers.tpl
+++ b/charts/openab/templates/_helpers.tpl
@@ -65,3 +65,24 @@ app.kubernetes.io/component: {{ .agent }}
{{- define "openab.persistenceEnabled" -}}
{{- if and . .persistence (eq (.persistence.enabled | toString) "false") }}false{{ else }}true{{ end }}
{{- end }}
+
+{{- /* Fully-qualified resource name for the project-screening workload, truncated to the 63-char DNS label limit. */ -}}
+{{- define "openab.screeningFullname" -}}
+{{- printf "%s-project-screening" (include "openab.fullname" .) | trunc 63 | trimSuffix "-" }}
+{{- end }}
+
+{{- /* Full label set for project-screening resources (chart, name, instance, component, version, managed-by). */ -}}
+{{- define "openab.screeningLabels" -}}
+helm.sh/chart: {{ include "openab.chart" . }}
+app.kubernetes.io/name: {{ include "openab.name" . }}
+app.kubernetes.io/instance: {{ .Release.Name }}
+app.kubernetes.io/component: project-screening
+{{- if .Chart.AppVersion }}
+app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
+{{- end }}
+app.kubernetes.io/managed-by: {{ .Release.Service }}
+{{- end }}
+
+{{- /* Selector labels only — must stay immutable across releases for pod matching. */ -}}
+{{- define "openab.screeningSelectorLabels" -}}
+app.kubernetes.io/name: {{ include "openab.name" . }}
+app.kubernetes.io/instance: {{ .Release.Name }}
+app.kubernetes.io/component: project-screening
+{{- end }}
diff --git a/charts/openab/templates/project-screening-configmap.yaml b/charts/openab/templates/project-screening-configmap.yaml
new file mode 100644
index 0000000..e939b2e
--- /dev/null
+++ b/charts/openab/templates/project-screening-configmap.yaml
@@ -0,0 +1,14 @@
+{{- if .Values.projectScreening.enabled }}
+---
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: {{ include "openab.screeningFullname" . }}
+  labels:
+    {{- include "openab.screeningLabels" . | nindent 4 }}
+data:
+  # Screening script and prompt template, mounted read-only into the CronJob
+  # pod at /opt/openab-project-screening.
+  screen_once.sh: |
+    {{- .Files.Get "files/project-screening/screen_once.sh" | nindent 4 }}
+  screening_prompt.md: |
+    {{- .Files.Get "files/project-screening/screening_prompt.md" | nindent 4 }}
+{{- end }}
diff --git a/charts/openab/templates/project-screening-cronjob.yaml b/charts/openab/templates/project-screening-cronjob.yaml
new file mode 100644
index 0000000..d412617
--- /dev/null
+++ b/charts/openab/templates/project-screening-cronjob.yaml
@@ -0,0 +1,113 @@
+{{- if .Values.projectScreening.enabled }}
+---
+apiVersion: batch/v1
+kind: CronJob
+metadata:
+  name: {{ include "openab.screeningFullname" . }}
+  labels:
+    {{- include "openab.screeningLabels" . | nindent 4 }}
+spec:
+  schedule: {{ .Values.projectScreening.schedule | quote }}
+  suspend: {{ .Values.projectScreening.suspend | default false }}
+  concurrencyPolicy: {{ .Values.projectScreening.concurrencyPolicy | default "Forbid" }}
+  successfulJobsHistoryLimit: {{ .Values.projectScreening.successfulJobsHistoryLimit | default 3 }}
+  failedJobsHistoryLimit: {{ .Values.projectScreening.failedJobsHistoryLimit | default 3 }}
+  jobTemplate:
+    spec:
+      backoffLimit: {{ .Values.projectScreening.backoffLimit | default 0 }}
+      ttlSecondsAfterFinished: {{ .Values.projectScreening.ttlSecondsAfterFinished | default 86400 }}
+      template:
+        metadata:
+          annotations:
+            # Roll job pods whenever the script, prompt, or values change.
+            checksum/config: {{ print (.Files.Get "files/project-screening/screen_once.sh") (.Files.Get "files/project-screening/screening_prompt.md") (.Values.projectScreening | toJson) | sha256sum }}
+          labels:
+            {{- include "openab.screeningSelectorLabels" . | nindent 12 }}
+        spec:
+          restartPolicy: Never
+          {{- with .Values.podSecurityContext }}
+          securityContext:
+            {{- toYaml . | nindent 12 }}
+          {{- end }}
+          containers:
+            - name: project-screening
+              image: {{ .Values.projectScreening.image | quote }}
+              imagePullPolicy: {{ .Values.projectScreening.imagePullPolicy | default "IfNotPresent" }}
+              command:
+                - bash
+                - /opt/openab-project-screening/screen_once.sh
+              env:
+                - name: GH_TOKEN
+                  valueFrom:
+                    secretKeyRef:
+                      name: {{ include "openab.screeningFullname" . }}
+                      key: gh-token
+                - name: SENDER_CONTEXT_JSON
+                  value: {{ required "projectScreening.senderContextJson is required when projectScreening.enabled=true" .Values.projectScreening.senderContextJson | quote }}
+                - name: PROJECT_OWNER
+                  value: {{ .Values.projectScreening.project.owner | default "openabdev" | quote }}
+                - name: PROJECT_NUMBER
+                  value: {{ .Values.projectScreening.project.number | default 1 | quote }}
+                - name: INCOMING_STATUS_NAME
+                  value: {{ .Values.projectScreening.project.incomingStatus | default "Incoming" | quote }}
+                - name: SCREENING_STATUS_NAME
+                  value: {{ .Values.projectScreening.project.screeningStatus | default "PR-Screening" | quote }}
+                - name: REPORT_TO_STDOUT
+                  # BUGFIX: do not pipe through `default true` — Helm's `default`
+                  # treats boolean false as empty, so `reportToStdout: false`
+                  # was silently forced back to "true". The script itself falls
+                  # back to true when this env var is empty, so a bare quote is
+                  # both safe when unset and faithful when explicitly false.
+                  value: {{ .Values.projectScreening.reportToStdout | quote }}
+                - name: PROJECT_QUERY_EXTRA
+                  value: {{ .Values.projectScreening.queryExtra | default "" | quote }}
+                - name: HOME
+                  value: /tmp/openab-project-screening-home
+                - name: WORK_DIR
+                  value: /tmp/openab-project-screening
+                - name: PROMPT_TEMPLATE
+                  value: /opt/openab-project-screening/screening_prompt.md
+                - name: CODEX_AUTH_JSON_SOURCE
+                  value: /opt/openab-project-screening-auth/auth.json
+                {{- if .Values.projectScreening.discordReport.enabled }}
+                - name: DISCORD_BOT_TOKEN
+                  valueFrom:
+                    secretKeyRef:
+                      name: {{ .Values.projectScreening.discordReport.secretName | default "openab-kiro-codex" | quote }}
+                      key: {{ .Values.projectScreening.discordReport.secretKey | default "discord-bot-token" | quote }}
+                - name: DISCORD_REPORT_CHANNEL_ID
+                  value: {{ .Values.projectScreening.discordReport.channelId | default "1494378525640097921" | quote }}
+                {{- end }}
+              {{- with $.Values.containerSecurityContext }}
+              securityContext:
+                {{- toYaml . | nindent 16 }}
+              {{- end }}
+              {{- with .Values.projectScreening.resources }}
+              resources:
+                {{- toYaml . | nindent 16 }}
+              {{- end }}
+              volumeMounts:
+                - name: project-screening
+                  mountPath: /opt/openab-project-screening
+                  readOnly: true
+                - name: project-screening-auth
+                  mountPath: /opt/openab-project-screening-auth
+                  readOnly: true
+                - name: tmp
+                  mountPath: /tmp
+          {{- with .Values.projectScreening.nodeSelector }}
+          nodeSelector:
+            {{- toYaml . | nindent 12 }}
+          {{- end }}
+          {{- with .Values.projectScreening.affinity }}
+          affinity:
+            {{- toYaml . | nindent 12 }}
+          {{- end }}
+          {{- with .Values.projectScreening.tolerations }}
+          tolerations:
+            {{- toYaml . | nindent 12 }}
+          {{- end }}
+          volumes:
+            - name: project-screening
+              configMap:
+                name: {{ include "openab.screeningFullname" . }}
+            - name: project-screening-auth
+              secret:
+                secretName: {{ include "openab.screeningFullname" . }}
+            - name: tmp
+              emptyDir: {}
+{{- end }}
diff --git a/charts/openab/templates/project-screening-secret.yaml b/charts/openab/templates/project-screening-secret.yaml
new file mode 100644
index 0000000..0976ff2
--- /dev/null
+++ b/charts/openab/templates/project-screening-secret.yaml
@@ -0,0 +1,15 @@
+{{- if .Values.projectScreening.enabled }}
+---
+apiVersion: v1
+kind: Secret
+metadata:
+  name: {{ include "openab.screeningFullname" . }}
+  labels:
+    {{- include "openab.screeningLabels" . | nindent 4 }}
+  annotations:
+    # Keep the secret on `helm uninstall` so credentials survive release churn.
+    "helm.sh/resource-policy": keep
+type: Opaque
+data:
+  gh-token: {{ required "projectScreening.githubToken is required when projectScreening.enabled=true" .Values.projectScreening.githubToken | b64enc | quote }}
+  auth.json: {{ required "projectScreening.codexAuthJson is required when projectScreening.enabled=true" .Values.projectScreening.codexAuthJson | b64enc | quote }}
+{{- end }}
diff --git a/charts/openab/values.yaml b/charts/openab/values.yaml
index 1ca8fef..4d3b8b2 100644
--- a/charts/openab/values.yaml
+++ b/charts/openab/values.yaml
@@ -19,6 +19,44 @@ containerSecurityContext:
drop:
- ALL
+projectScreening:
+  enabled: false
+  # Runs a one-shot screening pass that:
+  # 1. checks the Incoming project lane
+  # 2. moves the first item to PR-Screening
+  # 3. generates a Codex screening report to stdout/job logs
+  schedule: "*/30 * * * *"
+  suspend: false
+  concurrencyPolicy: Forbid
+  successfulJobsHistoryLimit: 3
+  failedJobsHistoryLimit: 3
+  backoffLimit: 0
+  ttlSecondsAfterFinished: 86400
+  # Image must provide bash, gh, codex, curl, and node (checked at startup).
+  image: ghcr.io/openabdev/openab-codex:latest
+  imagePullPolicy: IfNotPresent
+  project:
+    owner: openabdev
+    number: 1
+    incomingStatus: Incoming
+    screeningStatus: PR-Screening
+  reportToStdout: true
+  # Optional extra search terms appended to the project item query.
+  queryExtra: ""
+  # Required when enabled. This is passed through to the generated Codex prompt.
+  senderContextJson: ""
+  # Required when enabled. Needs project, repo, and read:org scopes.
+  githubToken: ""
+  # Required when enabled. This should be the contents of ~/.codex/auth.json
+  # from a ChatGPT-authenticated Codex session.
+  codexAuthJson: ""
+  discordReport:
+    enabled: false
+    # Existing secret holding the Discord bot token.
+    secretName: "openab-kiro-codex"
+    secretKey: "discord-bot-token"
+    # Parent channel that receives screening report threads.
+    channelId: "1494378525640097921"
+  resources: {}
+  nodeSelector: {}
+  tolerations: []
+  affinity: {}
+
agents:
kiro:
enabled: true # set to false to skip creating resources for this agent
diff --git a/docs/project-screening-cronjob.md b/docs/project-screening-cronjob.md
new file mode 100644
index 0000000..991caa7
--- /dev/null
+++ b/docs/project-screening-cronjob.md
@@ -0,0 +1,105 @@
+# Project Screening CronJob
+
+This CronJob performs a one-shot screening pass every 30 minutes:
+
+1. Query the OpenAB GitHub Project `Incoming` lane
+2. Move the first matching item into `PR-Screening`
+3. Generate a Codex screening report
+4. Emit the report to the job logs
+
+The job is intentionally stateless:
+
+- GitHub auth comes from `GH_TOKEN`
+- Codex auth comes from a mounted `auth.json` copied from `~/.codex/auth.json`
+- Discord delivery uses `DISCORD_BOT_TOKEN` from the existing `openab-kiro-codex` secret
+- scripts and prompt live in a mounted ConfigMap
+- no shared agent PVC is required
+
+## Why This Shape
+
+Do not reuse the long-lived codex pod's home directory or PVC for screening jobs.
+
+That pattern is fragile because:
+
+- the running codex agent already owns its PVC
+- CronJob pods should not depend on an interactive device-login state
+- ephemeral jobs are easier to reason about when auth is secret-driven
+
+This design follows the stronger parts of OpenClaw and Hermes:
+
+- scheduler outside the model runtime
+- isolated execution per run
+- explicit credentials and prompt construction
+- no always-on sleeper process
+
+## Required Secrets
+
+The GitHub token should include at least the scopes needed to read and update GitHub Projects:
+
+- `project`
+- `repo`
+- `read:org`
+
+The Codex auth file should be copied from a ChatGPT-authenticated Codex session:
+
+```bash
+cat ~/.codex/auth.json
+```
+
+The CronJob copies that file into its writable temp home before running `codex exec`.
+
+If you want the report posted back to Discord as a new thread, make sure the existing `openab-kiro-codex` secret already contains `discord-bot-token`. The job will:
+
+1. post a starter message in the target channel
+2. create a thread from that message
+3. send the screening report into that thread
+
+This repo now sets an explicit override to the parent review channel `1494378525640097921`.
+
+## Raw Kubernetes Apply
+
+Apply these files:
+
+```bash
+kubectl apply -f k8s/project-screening-secret.yaml
+kubectl apply -f k8s/project-screening-configmap.yaml
+kubectl apply -f k8s/project-screening-cronjob.yaml
+```
+
+Inspect the most recent run:
+
+```bash
+kubectl get jobs --sort-by=.metadata.creationTimestamp
+kubectl logs job/<job-name>
+```
+
+## Helm
+
+Enable with values like:
+
+```yaml
+projectScreening:
+ enabled: true
+ schedule: "*/30 * * * *"
+ image: ghcr.io/openabdev/openab-codex:latest
+ githubToken: ""
+ codexAuthJson: |
+ PASTE_THE_CONTENTS_OF_YOUR__HOME__CODEX__AUTH_JSON_HERE
+ discordReport:
+ enabled: true
+ secretName: "openab-kiro-codex"
+ secretKey: "discord-bot-token"
+ channelId: "1494378525640097921"
+ senderContextJson: '{"schema":"openab.sender.v1","sender_id":"196299853884686336","sender_name":"mrshroom69","display_name":"mrshroom69","channel":"discord","channel_id":"1494398610173853816","is_bot":false}'
+ project:
+ owner: openabdev
+ number: 1
+ incomingStatus: Incoming
+ screeningStatus: PR-Screening
+```
+
+## Current Limitation
+
+This app pod cannot create the CronJob directly from inside the cluster right now because its service account lacks `get/create` permissions on `batch/cronjobs`.
+
+The manifests in this repo are the recommended fix. Apply them from a cluster-admin context or through your normal Helm release pipeline.
diff --git a/k8s/project-screening-configmap.yaml b/k8s/project-screening-configmap.yaml
new file mode 100644
index 0000000..27fff96
--- /dev/null
+++ b/k8s/project-screening-configmap.yaml
@@ -0,0 +1,530 @@
+# ConfigMap bundling the screening entrypoint script and the Codex prompt
+# template. The CronJob mounts it read-only at /opt/openab-project-screening.
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: openab-project-screening
+data:
+  screen_once.sh: |
+    #!/usr/bin/env bash
+    set -euo pipefail
+
+    # All settings are overridable via environment; defaults target the
+    # OpenAB project board.
+    PROJECT_OWNER="${PROJECT_OWNER:-openabdev}"
+    PROJECT_NUMBER="${PROJECT_NUMBER:-1}"
+    # Board columns: items are claimed out of Incoming into PR-Screening.
+    INCOMING_STATUS_NAME="${INCOMING_STATUS_NAME:-Incoming}"
+    SCREENING_STATUS_NAME="${SCREENING_STATUS_NAME:-PR-Screening}"
+    REPORT_TO_STDOUT="${REPORT_TO_STDOUT:-true}"
+    # Optional extra filter appended to the item-list search query.
+    PROJECT_QUERY_EXTRA="${PROJECT_QUERY_EXTRA:-}"
+    SENDER_CONTEXT_JSON="${SENDER_CONTEXT_JSON:-}"
+    PROMPT_TEMPLATE="${PROMPT_TEMPLATE:-/opt/openab-project-screening/screening_prompt.md}"
+    CODEX_AUTH_JSON_SOURCE="${CODEX_AUTH_JSON_SOURCE:-/opt/openab-project-screening-auth/auth.json}"
+    # Discord delivery is optional; posting is skipped when the token is empty.
+    DISCORD_BOT_TOKEN="${DISCORD_BOT_TOKEN:-}"
+    DISCORD_REPORT_CHANNEL_ID="${DISCORD_REPORT_CHANNEL_ID:-}"
+    WORK_DIR="${WORK_DIR:-/tmp/openab-project-screening}"
+    HOME_DIR="${HOME:-/tmp/openab-project-screening-home}"
+
+    # UTC timestamp for log lines, e.g. 2024-01-01T00:00:00Z.
+    timestamp() {
+      date -u +"%Y-%m-%dT%H:%M:%SZ"
+    }
+
+    # Timestamped logger; writes to stdout so CronJob pod logs capture it.
+    log() {
+      printf '[%s] %s\n' "$(timestamp)" "$*"
+    }
+
+    # Exit early when a required binary is missing from the image. $1 = name.
+    require_cmd() {
+      if ! command -v "$1" >/dev/null 2>&1; then
+        log "missing required command: $1"
+        exit 1
+      fi
+    }
+
+    # Exit early when a required env var is unset or empty. $1 = var name.
+    require_env() {
+      local name="$1"
+      if [[ -z "${!name:-}" ]]; then
+        log "missing required environment variable: $name"
+        exit 1
+      fi
+    }
+
+    # Search filter for `gh project item-list`: items in the Incoming status,
+    # optionally narrowed further by PROJECT_QUERY_EXTRA.
+    project_query() {
+      if [[ -n "$PROJECT_QUERY_EXTRA" ]]; then
+        printf 'status:"%s" %s' "$INCOMING_STATUS_NAME" "$PROJECT_QUERY_EXTRA"
+      else
+        printf 'status:"%s"' "$INCOMING_STATUS_NAME"
+      fi
+    }
+
+    # Run `gh project view` and extract a value with the given jq expression.
+    project_view_jq() {
+      local jq_expr="$1"
+      gh project view "$PROJECT_NUMBER" \
+        --owner "$PROJECT_OWNER" \
+        --format json \
+        --jq "$jq_expr"
+    }
+
+    # Run `gh project field-list` and extract a value with the given jq
+    # expression (used to resolve the Status field and its option ids).
+    field_list_jq() {
+      local jq_expr="$1"
+      gh project field-list "$PROJECT_NUMBER" \
+        --owner "$PROJECT_OWNER" \
+        --format json \
+        --jq "$jq_expr"
+    }
+
+    # Fetch at most one Incoming item and extract a field with jq.
+    # NOTE(review): every call re-queries the board, so successive calls can
+    # observe different items if the Incoming column changes in between.
+    incoming_item_jq() {
+      local jq_expr="$1"
+      gh project item-list "$PROJECT_NUMBER" \
+        --owner "$PROJECT_OWNER" \
+        --query "$(project_query)" \
+        --limit 1 \
+        --format json \
+        --jq "$jq_expr"
+    }
+
+    # Pull full JSON detail for the claimed item from GitHub.
+    # $1 = item type (PullRequest | Issue | other), $2 = number, $3 = owner/repo.
+    # Unknown types fall back to a minimal synthesized JSON object so the
+    # prompt still has something to embed.
+    fetch_content_json() {
+      local item_type="$1"
+      local item_number="$2"
+      local repo="$3"
+
+      case "$item_type" in
+        PullRequest)
+          gh pr view "$item_number" \
+            --repo "$repo" \
+            --json title,number,body,author,files,headRefName,baseRefName,url
+          ;;
+        Issue)
+          gh issue view "$item_number" \
+            --repo "$repo" \
+            --json title,number,body,author,labels,url
+          ;;
+        *)
+          printf '{"type":"%s","number":"%s","repository":"%s"}\n' \
+            "$item_type" "$item_number" "$repo"
+          ;;
+      esac
+    }
+
+    # Read one field out of SENDER_CONTEXT_JSON (e.g. channel_id) using node.
+    # Prints nothing when the JSON or the field is absent.
+    sender_context_field() {
+      local field="$1"
+      FIELD_NAME="$field" SENDER_CONTEXT_JSON="$SENDER_CONTEXT_JSON" node <<'EOF'
+    const field = process.env.FIELD_NAME;
+    const raw = process.env.SENDER_CONTEXT_JSON || "";
+    if (!raw || !field) process.exit(0);
+    // NOTE(review): JSON.parse throws on malformed input, which makes this
+    // helper exit non-zero; SENDER_CONTEXT_JSON is operator-supplied config.
+    const obj = JSON.parse(raw);
+    const value = obj[field];
+    if (value !== undefined && value !== null) process.stdout.write(String(value));
+    EOF
+    }
+
+    # POST a JSON payload to the Discord API and print the response body.
+    # $1 = full URL, $2 = JSON body. On HTTP 429 the request is retried after
+    # the server-provided `retry_after` delay.
+    # NOTE(review): the 429 path has no retry cap, so a persistent rate limit
+    # loops until the Job itself is killed.
+    discord_post_json() {
+      local url="$1"
+      local payload="$2"
+      local response_file status retry_after
+      response_file="$(mktemp)"
+
+      while true; do
+        status="$(
+          curl -sS \
+            -o "$response_file" \
+            -w '%{http_code}' \
+            -X POST \
+            -H "Authorization: Bot $DISCORD_BOT_TOKEN" \
+            -H "Content-Type: application/json" \
+            --data "$payload" \
+            "$url"
+        )"
+
+        if [[ "$status" == "429" ]]; then
+          # Discord reports retry_after in (possibly fractional) seconds.
+          retry_after="$(
+            RESPONSE_FILE="$response_file" node <<'EOF'
+    const fs = require('fs');
+    const raw = fs.readFileSync(process.env.RESPONSE_FILE, 'utf8');
+    const obj = JSON.parse(raw || '{}');
+    process.stdout.write(String(obj.retry_after ?? '1'));
+    EOF
+          )"
+          log "Discord API rate limited; retrying after ${retry_after}s"
+          # assumes a sleep that accepts fractional seconds (GNU coreutils
+          # does) — TODO confirm the image does not ship busybox sleep.
+          sleep "$retry_after"
+          continue
+        fi
+
+        # Any non-2xx (glob pattern 2*) is fatal for this request.
+        if [[ "$status" != 2* ]]; then
+          log "Discord API request failed status=$status"
+          cat "$response_file" >&2
+          rm -f "$response_file"
+          return 1
+        fi
+
+        cat "$response_file"
+        rm -f "$response_file"
+        return 0
+      done
+    }
+
+    # GET a Discord API URL; prints the raw JSON response body.
+    discord_get_json() {
+      local url="$1"
+      curl -sS \
+        -H "Authorization: Bot $DISCORD_BOT_TOKEN" \
+        -H "Content-Type: application/json" \
+        "$url"
+    }
+
+    # Build a message-create payload. Mention parsing is disabled so report
+    # text can never ping users or roles.
+    discord_message_payload() {
+      local content="$1"
+      CONTENT="$content" node <<'EOF'
+    const content = process.env.CONTENT || "";
+    process.stdout.write(JSON.stringify({
+      content,
+      allowed_mentions: { parse: [] }
+    }));
+    EOF
+    }
+
+    # Build a thread-create payload: name capped at Discord's 100-char limit,
+    # auto-archive after 24h (1440 minutes).
+    discord_thread_payload() {
+      local name="$1"
+      THREAD_NAME="$name" node <<'EOF'
+    const name = process.env.THREAD_NAME || "project-screening-report";
+    process.stdout.write(JSON.stringify({
+      name: name.slice(0, 100),
+      auto_archive_duration: 1440
+    }));
+    EOF
+    }
+
+    # Extract the `id` field from a Discord API response, if present.
+    discord_extract_id() {
+      local response="$1"
+      RESPONSE_JSON="$response" node <<'EOF'
+    const raw = process.env.RESPONSE_JSON || "{}";
+    const obj = JSON.parse(raw);
+    if (obj.id) process.stdout.write(String(obj.id));
+    EOF
+    }
+
+    # Map a channel id to a thread-capable parent channel id: thread channel
+    # types resolve to their parent_id, anything else resolves to itself.
+    discord_resolve_parent_channel_id() {
+      local channel_id="$1"
+      local response
+      response="$(discord_get_json "https://discord.com/api/v10/channels/${channel_id}")"
+      RESPONSE_JSON="$response" node <<'EOF'
+    const obj = JSON.parse(process.env.RESPONSE_JSON || "{}");
+    // 10/11/12 = announcement/public/private thread channel types.
+    const threadTypes = new Set([10, 11, 12]);
+    if (threadTypes.has(obj.type) && obj.parent_id) {
+      process.stdout.write(String(obj.parent_id));
+    } else if (obj.id) {
+      process.stdout.write(String(obj.id));
+    }
+    EOF
+    }
+
+    # Thread title for the report: "Screening: #<number> <title>", with the
+    # title's whitespace collapsed and the result capped at Discord's
+    # 100-character thread-name limit.
+    discord_thread_name() {
+      local item_number="$1"
+      local item_title="$2"
+      ITEM_NUMBER="$item_number" ITEM_TITLE="$item_title" node <<'EOF'
+    const number = process.env.ITEM_NUMBER || "item";
+    // Collapse runs of whitespace (including newlines) to single spaces.
+    // Fix: this regex was written as /\\s+/g; because the heredoc delimiter
+    // is quoted ('EOF'), node received the backslashes verbatim and matched
+    // a literal backslash + "s" instead of whitespace, so titles were never
+    // normalized and could carry newlines into the thread name.
+    const title = (process.env.ITEM_TITLE || "")
+      .replace(/\s+/g, " ")
+      .trim();
+    const base = `Screening: #${number}${title ? ` ${title}` : ""}`.trim();
+    process.stdout.write(base.slice(0, 100) || `Screening: #${number}`);
+    EOF
+    }
+
+    # Deliver the report to Discord: post a starter message in the parent
+    # channel, open a thread on it, then send the report into the thread in
+    # <=1800-char chunks. Missing token/channel configuration downgrades to
+    # a logged no-op (returns 0) so board processing still succeeds.
+    # $1 = item number, $2 = title, $3 = URL, $4 = report file path.
+    post_report_to_discord() {
+      local item_number="$1"
+      local item_title="$2"
+      local item_url="$3"
+      local report_file="$4"
+      local channel_id starter_content starter_response starter_message_id thread_name thread_response thread_id
+
+      if [[ -z "$DISCORD_BOT_TOKEN" ]]; then
+        log "Discord report delivery skipped: DISCORD_BOT_TOKEN not set"
+        return 0
+      fi
+
+      # Prefer the explicit override channel; fall back to the channel_id
+      # embedded in SENDER_CONTEXT_JSON.
+      channel_id="$DISCORD_REPORT_CHANNEL_ID"
+      if [[ -z "$channel_id" ]]; then
+        channel_id="$(sender_context_field channel_id)"
+      fi
+
+      if [[ -z "$channel_id" ]]; then
+        log "Discord report delivery skipped: no report channel id available"
+        return 0
+      fi
+
+      # If the configured id is itself a thread, climb to its parent channel
+      # (threads cannot be created inside threads).
+      channel_id="$(discord_resolve_parent_channel_id "$channel_id")"
+      if [[ -z "$channel_id" ]]; then
+        log "Discord report delivery skipped: failed to resolve parent report channel id"
+        return 0
+      fi
+
+      # The continuation lines of this string stay at column 0 of the script
+      # on purpose: any leading indent would appear verbatim in the message.
+      starter_content="🔍 **PR Screening** — [#${item_number}](${item_url})
+    ${item_title}
+    Status: moved to ${SCREENING_STATUS_NAME}"
+      starter_response="$(
+        discord_post_json \
+          "https://discord.com/api/v10/channels/${channel_id}/messages" \
+          "$(discord_message_payload "$starter_content")"
+      )"
+      starter_message_id="$(discord_extract_id "$starter_response")"
+
+      if [[ -z "$starter_message_id" ]]; then
+        log "Discord report delivery failed: no starter message id returned"
+        return 1
+      fi
+
+      thread_name="$(discord_thread_name "$item_number" "$item_title")"
+      thread_response="$(
+        discord_post_json \
+          "https://discord.com/api/v10/channels/${channel_id}/messages/${starter_message_id}/threads" \
+          "$(discord_thread_payload "$thread_name")"
+      )"
+      thread_id="$(discord_extract_id "$thread_response")"
+
+      if [[ -z "$thread_id" ]]; then
+        log "Discord report delivery failed: no thread id returned"
+        return 1
+      fi
+
+      # fold keeps each chunk under Discord's message size cap (1800 leaves
+      # headroom below 2000). Blank chunks are skipped, which drops empty
+      # lines from the delivered copy of the report.
+      while IFS= read -r chunk || [[ -n "$chunk" ]]; do
+        [[ -z "$chunk" ]] && continue
+        discord_post_json \
+          "https://discord.com/api/v10/channels/${thread_id}/messages" \
+          "$(discord_message_payload "$chunk")" >/dev/null
+      done < <(fold -s -w 1800 "$report_file")
+
+      log "report delivered to Discord thread ${thread_id}"
+    }
+
+    # Assemble the Codex prompt file: sender context JSON first, then the
+    # shared prompt template, then board context, an item summary, and the
+    # raw item JSON fenced as a code block.
+    # $1 item id, $2 type, $3 number, $4 repo, $5 title, $6 url,
+    # $7 detail JSON, $8 = output prompt path.
+    build_prompt() {
+      local item_id="$1"
+      local item_type="$2"
+      local item_number="$3"
+      local repo="$4"
+      local item_title="$5"
+      local item_url="$6"
+      local detail_json="$7"
+      local prompt_file="$8"
+
+      {
+        printf '\n'
+        printf '%s\n' "$SENDER_CONTEXT_JSON"
+        printf '\n\n'
+        cat "$PROMPT_TEMPLATE"
+        printf '\n## Board Context\n\n'
+        printf -- '- Claimed project item ID: `%s`\n' "$item_id"
+        printf -- '- Status transition: `%s` -> `%s`\n' "$INCOMING_STATUS_NAME" "$SCREENING_STATUS_NAME"
+        printf -- '- Project owner: `%s`\n' "$PROJECT_OWNER"
+        printf -- '- Project number: `%s`\n' "$PROJECT_NUMBER"
+        printf -- '- Current expectation: clarify intent, rewrite the implementation prompt, and prepare the item for Masami or Pahud follow-up\n'
+        printf '\n## Item Summary\n\n'
+        printf -- '- Type: `%s`\n' "$item_type"
+        printf -- '- Repository: `%s`\n' "$repo"
+        printf -- '- Number: `%s`\n' "$item_number"
+        printf -- '- Title: `%s`\n' "$item_title"
+        printf -- '- URL: %s\n' "$item_url"
+        printf '\n## Source Data\n\n'
+        printf '```json\n'
+        printf '%s\n' "$detail_json"
+        printf '```\n'
+      } >"$prompt_file"
+    }
+
+    # Run codex non-interactively over the prompt file; only the final model
+    # message is kept (written to $2). The read-only sandbox prevents the
+    # model from writing to disk.
+    generate_report() {
+      local prompt_file="$1"
+      local report_file="$2"
+
+      codex exec \
+        --skip-git-repo-check \
+        --cd "$WORK_DIR" \
+        --sandbox read-only \
+        --ephemeral \
+        --color never \
+        --output-last-message "$report_file" \
+        - <"$prompt_file" >/dev/null
+    }
+
+    # Entry point: claim exactly one Incoming board item, move it to
+    # PR-Screening, generate a Codex screening report for it, and deliver
+    # the report (Discord thread and/or stdout).
+    main() {
+      # Fail fast if the image or environment is missing anything required.
+      require_cmd bash
+      require_cmd gh
+      require_cmd codex
+      require_cmd curl
+      require_cmd node
+      require_env GH_TOKEN
+      require_env SENDER_CONTEXT_JSON
+      if [[ ! -f "$CODEX_AUTH_JSON_SOURCE" ]]; then
+        log "missing Codex auth source file: $CODEX_AUTH_JSON_SOURCE"
+        exit 1
+      fi
+
+      # Copy Codex credentials into a writable HOME: the secret mount is
+      # read-only, so codex cannot use it in place.
+      mkdir -p "$WORK_DIR" "$HOME_DIR/.codex"
+      export HOME="$HOME_DIR"
+      cp "$CODEX_AUTH_JSON_SOURCE" "$HOME/.codex/auth.json"
+
+      local item_id
+      item_id="$(incoming_item_jq '.items[0].id // empty')"
+
+      # An empty board is a successful run; the CronJob simply fires again.
+      if [[ -z "$item_id" ]]; then
+        log "no Incoming items found"
+        exit 0
+      fi
+
+      # NOTE(review): each field below re-queries the board independently;
+      # if the Incoming column changes between calls these values could come
+      # from different items. Consider fetching the item JSON once and
+      # extracting all fields locally.
+      local item_type item_number repo item_title item_url
+      item_type="$(incoming_item_jq '.items[0].content.type // empty')"
+      item_number="$(incoming_item_jq '.items[0].content.number // empty')"
+      repo="$(incoming_item_jq '.items[0].content.repository // empty')"
+      item_title="$(incoming_item_jq '.items[0].content.title // empty')"
+      item_url="$(incoming_item_jq '.items[0].content.url // empty')"
+
+      if [[ -z "$item_type" || -z "$item_number" || -z "$repo" ]]; then
+        log "Incoming item is missing required metadata; refusing to claim"
+        exit 1
+      fi
+
+      # Resolve the ids needed to flip the single-select Status field.
+      local project_id status_field_id screening_option_id
+      project_id="$(project_view_jq '.id')"
+      status_field_id="$(field_list_jq '.fields[] | select(.name=="Status") | .id')"
+      screening_option_id="$(
+        field_list_jq ".fields[] | select(.name==\"Status\") | .options[] | select(.name==\"$SCREENING_STATUS_NAME\") | .id"
+      )"
+
+      if [[ -z "$project_id" || -z "$status_field_id" || -z "$screening_option_id" ]]; then
+        log "failed to resolve project metadata for claim operation"
+        exit 1
+      fi
+
+      # Claim first, then work: moving the item out of Incoming is what
+      # stops the next scheduled run from picking up the same item.
+      gh project item-edit \
+        --id "$item_id" \
+        --project-id "$project_id" \
+        --field-id "$status_field_id" \
+        --single-select-option-id "$screening_option_id" >/dev/null
+      log "claimed item $item_id into $SCREENING_STATUS_NAME"
+
+      local detail_json
+      detail_json="$(fetch_content_json "$item_type" "$item_number" "$repo")"
+
+      local stamp prompt_file report_file
+      stamp="$(date -u +%Y%m%dT%H%M%SZ)"
+      prompt_file="$WORK_DIR/${stamp}-prompt.md"
+      report_file="$WORK_DIR/${stamp}-report.md"
+
+      build_prompt \
+        "$item_id" \
+        "$item_type" \
+        "$item_number" \
+        "$repo" \
+        "$item_title" \
+        "$item_url" \
+        "$detail_json" \
+        "$prompt_file"
+
+      generate_report "$prompt_file" "$report_file"
+      log "report generated for $repo#$item_number"
+
+      post_report_to_discord "$item_number" "$item_title" "$item_url" "$report_file"
+
+      # Also emit the report into the pod log for kubectl-based inspection.
+      if [[ "$REPORT_TO_STDOUT" == "true" ]]; then
+        printf '%s\n' '--- BEGIN OPENAB PROJECT SCREENING REPORT ---'
+        cat "$report_file"
+        printf '\n%s\n' '--- END OPENAB PROJECT SCREENING REPORT ---'
+      fi
+    }
+
+    main "$@"
+ screening_prompt.md: |
+ # OpenAB PR-Screening Report Prompt
+
+ You are generating a screening report for the OpenAB project board.
+
+ ## Workflow Context
+
+ - Board flow: `Incoming` -> `PR-Screening` -> human or agent follow-up
+    - After this screening pass, the Masami or Pahud agent will pick up the item for deeper review and possible merge work
+ - The purpose of this report is to clarify the item's intent and rewrite the implementation prompt so the next agent has a tighter brief
+
+ ## Required Output Sections
+
+ Produce a Markdown report with exactly these sections, in this order:
+
+ 1. `Intent`
+ 2. `Feat`
+ 3. `Who It Serves`
+ 4. `Rewritten Prompt`
+ 5. `Merge Pitch`
+ 6. `Best-Practice Comparison`
+ 7. `Implementation Options`
+ 8. `Comparison Table`
+ 9. `Recommendation`
+
+ ## Section Requirements
+
+ ### Intent
+
+ - State what the PR or issue is trying to achieve
+ - Call out the user-visible or operator-visible problem being solved
+ - Be concrete, not vague
+
+ ### Feat
+
+ - Summarize the behavioral change or feature in plain language
+ - Note whether the item is a feature, fix, refactor, docs improvement, or release operation
+
+ ### Who It Serves
+
+ - Identify the primary beneficiary
+ - Examples: Discord end users, Slack users, deployers, maintainers, agent runtime operators, reviewers
+
+ ### Rewritten Prompt
+
+ - Rewrite the item into a cleaner implementation brief for a coding agent
+ - Make the prompt more specific, more testable, and more mergeable
+ - Keep it concise but operational
+
+ ### Merge Pitch
+
+ - Write a short pitch for why this item is worth moving forward
+ - Include the risk profile and likely reviewer concern
+
+ ### Best-Practice Comparison
+
+ Compare the proposed direction against these reference systems:
+
+ - OpenClaw:
+ - gateway-owned scheduling
+ - durable job persistence
+ - isolated executions
+ - explicit delivery routing
+ - retry/backoff and run logs
+ - Hermes Agent:
+ - gateway daemon tick model
+ - file locking to prevent overlap
+ - atomic writes for persisted state
+ - fresh session per scheduled run
+ - self-contained prompts for scheduled tasks
+
+ Do not force a comparison where it does not fit. Instead, say which principles are relevant and which are not.
+
+ ### Implementation Options
+
+ - Think of at least 3 ways to implement or evolve the item
+ - Each option should be meaningfully different
+ - Include one conservative option, one balanced option, and one more ambitious option where possible
+
+ ### Comparison Table
+
+ Add a table comparing the options across:
+
+ - Speed to ship
+ - Complexity
+ - Reliability
+ - Maintainability
+ - User impact
+ - Fit for OpenAB right now
+
+ ### Recommendation
+
+ - Recommend one path
+ - Explain why it is the right step for future merge discussion
+ - Mention any follow-up split or sequencing if needed
+
+ ## Tone
+
+ - Direct
+ - Technical
+ - Pragmatic
+ - Useful to a maintainer deciding whether to advance the item
diff --git a/k8s/project-screening-cronjob.yaml b/k8s/project-screening-cronjob.yaml
new file mode 100644
index 0000000..ed99c96
--- /dev/null
+++ b/k8s/project-screening-cronjob.yaml
@@ -0,0 +1,90 @@
+# CronJob that runs one screening pass every 30 minutes via screen_once.sh.
+apiVersion: batch/v1
+kind: CronJob
+metadata:
+  name: openab-project-screening
+spec:
+  schedule: "*/30 * * * *"
+  suspend: false
+  # One run at a time: overlapping runs could claim board items concurrently.
+  concurrencyPolicy: Forbid
+  successfulJobsHistoryLimit: 3
+  failedJobsHistoryLimit: 3
+  jobTemplate:
+    spec:
+      # No pod retries: a failed run may already have claimed an item, so it
+      # is left for the next scheduled run instead of being re-executed.
+      backoffLimit: 0
+      ttlSecondsAfterFinished: 86400
+      template:
+        spec:
+          restartPolicy: Never
+          securityContext:
+            runAsNonRoot: true
+            runAsUser: 1000
+            runAsGroup: 1000
+            fsGroup: 1000
+            seccompProfile:
+              type: RuntimeDefault
+          containers:
+            - name: project-screening
+              image: ghcr.io/openabdev/openab-codex:latest
+              imagePullPolicy: IfNotPresent
+              command:
+                - bash
+                - /opt/openab-project-screening/screen_once.sh
+              env:
+                - name: GH_TOKEN
+                  valueFrom:
+                    secretKeyRef:
+                      name: openab-project-screening
+                      key: gh-token
+                - name: SENDER_CONTEXT_JSON
+                  value: '{"schema":"openab.sender.v1","sender_id":"196299853884686336","sender_name":"mrshroom69","display_name":"mrshroom69","channel":"discord","channel_id":"1494398610173853816","is_bot":false}'
+                - name: PROJECT_OWNER
+                  value: "openabdev"
+                - name: PROJECT_NUMBER
+                  value: "1"
+                - name: INCOMING_STATUS_NAME
+                  value: "Incoming"
+                - name: SCREENING_STATUS_NAME
+                  value: "PR-Screening"
+                - name: REPORT_TO_STDOUT
+                  value: "true"
+                - name: PROJECT_QUERY_EXTRA
+                  value: ""
+                # Script needs a writable HOME/WORK_DIR; both live under the
+                # /tmp emptyDir because the root filesystem is read-only.
+                - name: HOME
+                  value: /tmp/openab-project-screening-home
+                - name: WORK_DIR
+                  value: /tmp/openab-project-screening
+                - name: PROMPT_TEMPLATE
+                  value: /opt/openab-project-screening/screening_prompt.md
+                - name: CODEX_AUTH_JSON_SOURCE
+                  value: /opt/openab-project-screening-auth/auth.json
+                - name: DISCORD_BOT_TOKEN
+                  valueFrom:
+                    secretKeyRef:
+                      name: openab-kiro-codex
+                      key: discord-bot-token
+                # Explicit override: parent channel for the report thread.
+                - name: DISCORD_REPORT_CHANNEL_ID
+                  value: "1494378525640097921"
+              securityContext:
+                allowPrivilegeEscalation: false
+                readOnlyRootFilesystem: true
+                capabilities:
+                  drop:
+                    - ALL
+              volumeMounts:
+                - name: project-screening
+                  mountPath: /opt/openab-project-screening
+                  readOnly: true
+                - name: project-screening-auth
+                  mountPath: /opt/openab-project-screening-auth
+                  readOnly: true
+                - name: tmp
+                  mountPath: /tmp
+          volumes:
+            - name: project-screening
+              configMap:
+                name: openab-project-screening
+            # The same secret provides GH_TOKEN (env) and auth.json (file).
+            - name: project-screening-auth
+              secret:
+                secretName: openab-project-screening
+            - name: tmp
+              emptyDir: {}
diff --git a/k8s/project-screening-secret.yaml b/k8s/project-screening-secret.yaml
new file mode 100644
index 0000000..552d794
--- /dev/null
+++ b/k8s/project-screening-secret.yaml
@@ -0,0 +1,9 @@
+# Credentials for the screening CronJob. `gh-token` is injected as GH_TOKEN;
+# the whole secret is also mounted as a volume so `auth.json` appears at
+# /opt/openab-project-screening-auth/auth.json. Replace both placeholders
+# before applying — never commit real values.
+apiVersion: v1
+kind: Secret
+metadata:
+  name: openab-project-screening
+type: Opaque
+stringData:
+  # GitHub token with `project` scope; it must also be able to read the
+  # PRs/issues the script screens — confirm against the target repos.
+  gh-token: "REPLACE_WITH_GITHUB_TOKEN_WITH_PROJECT_SCOPE"
+  # Contents of your local ~/.codex/auth.json.
+  auth.json: |
+    REPLACE_WITH_CONTENTS_OF_CODEX_AUTH_JSON