diff --git a/.github/email-templates/validate-samples-failure.html b/.github/email-templates/validate-samples-failure.html
new file mode 100644
index 0000000..2482e00
--- /dev/null
+++ b/.github/email-templates/validate-samples-failure.html
@@ -0,0 +1,332 @@
+
+
+
+
+
+
+
+
+
+
+
+ |
+
+ |
+
+
+
+
+ |
+
+ GitHub Actions
+
+
+ Hosted Agent Sample Validation
+
+
+ ${EMAIL_OVERALL}
+ ${EMAIL_PASSED} / ${EMAIL_TOTAL} samples
+ passed
+ |
+
+
+
+
+ |
+
+ |
+
+
+
+
+ |
+
+ Sample Results
+
+
+
+
+ |
+ Branch
+ |
+
+ Sample
+ |
+
+ Result
+ |
+
+
+
+ ${EMAIL_ROWS}
+
+
+ |
+
+
+
+
+ |
+
+ |
+
+
+
+
+ |
+
+ Run Details
+
+
+
+ |
+ Workflow
+ |
+
+ ${EMAIL_WORKFLOW}
+ |
+
+
+ |
+ Branch
+ |
+
+ ${EMAIL_BRANCH}
+ |
+
+
+ |
+ Commit
+ |
+
+ ${EMAIL_COMMIT}
+ |
+
+
+ |
+ Triggered by
+ |
+
+ ${EMAIL_TRIGGERED_BY} · ${EMAIL_ACTOR}
+ |
+
+
+ |
+ Run
+ |
+
+ #${EMAIL_RUN_NUMBER} ·
+ ${EMAIL_RUN_ID}
+ |
+
+
+ |
+
+
+
+
+ |
+ View Run Log →
+ |
+
+
+
+
+ |
+
+ Sent by GitHub Actions ·
+ ${EMAIL_REPOSITORY}
+
+ |
+
+
+
+ |
+
+
+
+
+
diff --git a/.github/scripts/sync_agent_framework_samples.py b/.github/scripts/sync_agent_framework_samples.py
new file mode 100644
index 0000000..e26eb37
--- /dev/null
+++ b/.github/scripts/sync_agent_framework_samples.py
@@ -0,0 +1,237 @@
+from __future__ import annotations
+
+from copy import deepcopy
+import json
+import os
+import xml.etree.ElementTree as ET
+from dataclasses import dataclass
+from pathlib import Path
+from urllib.request import Request, urlopen
+
+
# Repository root: taken from the REPO_ROOT env var when set (CI passes the
# workspace path), otherwise derived from this file's location
# (.github/scripts/<file> -> two parents up).
REPO_ROOT = Path(os.environ["REPO_ROOT"]) if "REPO_ROOT" in os.environ else Path(__file__).resolve().parents[2]
# Template placeholder used locally in place of the concrete upstream project name.
PLACEHOLDER = "{{SafeProjectName}}"
# GitHub contents API base for the upstream microsoft/agent-framework repository.
UPSTREAM_REPO_API = "https://api.github.com/repos/microsoft/agent-framework/contents"
# Only these file types are synced; everything else upstream is ignored.
ALLOWED_SUFFIXES = {".cs", ".csproj"}
+
+
@dataclass(frozen=True)
class SampleConfig:
    """Pairing of an upstream sample folder with the local directory it syncs into."""

    # Folder name under the upstream HostedAgents samples directory.
    upstream_name: str
    # Local directory the sample files are written to.
    target_dir: Path

    @property
    def upstream_path(self) -> str:
        """Repo-relative path of this sample inside microsoft/agent-framework."""
        return "/".join(("dotnet/samples/05-end-to-end/HostedAgents", self.upstream_name))
+
+
# Samples tracked by this sync: each entry maps an upstream agent-framework
# sample folder to the local template directory it is mirrored into.
SAMPLES = (
    SampleConfig(
        upstream_name="FoundrySingleAgent",
        target_dir=REPO_ROOT / "samples" / "hosted-agent" / "dotnet" / "agent",
    ),
    SampleConfig(
        upstream_name="FoundryMultiAgent",
        target_dir=REPO_ROOT / "samples" / "hosted-agent" / "dotnet" / "workflow",
    ),
)
+
+
+GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN", "")
+
+
+def _auth_headers() -> dict[str, str]:
+ headers: dict[str, str] = {"User-Agent": "microsoft-foundry-for-vscode-sync"}
+ if GITHUB_TOKEN:
+ headers["Authorization"] = f"Bearer {GITHUB_TOKEN}"
+ return headers
+
+
def fetch_json(url: str) -> list[dict[str, object]]:
    """GET *url* from the GitHub REST API and decode the JSON response body."""
    request_headers = _auth_headers() | {"Accept": "application/vnd.github+json"}
    with urlopen(Request(url, headers=request_headers)) as response:
        return json.load(response)
+
+
def fetch_text(url: str) -> str:
    """Download *url* and return the response body decoded as UTF-8."""
    with urlopen(Request(url, headers=_auth_headers())) as response:
        raw = response.read()
    return raw.decode("utf-8")
+
+
def normalize_content(content: str, upstream_name: str) -> str:
    """Replace every occurrence of the concrete upstream project name in
    *content* with the local template placeholder (``{{SafeProjectName}}``)."""
    return content.replace(upstream_name, PLACEHOLDER)
+
+
def normalize_relative_path(relative_path: Path, upstream_name: str) -> Path:
    """Apply the placeholder substitution to every component of *relative_path*,
    so file/directory names carrying the upstream project name are templated too."""
    return Path(*(part.replace(upstream_name, PLACEHOLDER) for part in relative_path.parts))
+
+
def list_relevant_files(upstream_path: str) -> list[dict[str, object]]:
    """Recursively enumerate upstream files under *upstream_path* (at ref=main)
    whose suffix is one of ALLOWED_SUFFIXES, via the GitHub contents API."""
    collected: list[dict[str, object]] = []

    for entry in fetch_json(f"{UPSTREAM_REPO_API}/{upstream_path}?ref=main"):
        entry_type = entry.get("type")
        entry_path = str(entry["path"])

        if entry_type == "dir":
            # Depth-first descent into subdirectories.
            collected.extend(list_relevant_files(entry_path))
        elif entry_type == "file" and Path(str(entry["name"])).suffix in ALLOWED_SUFFIXES:
            collected.append(entry)

    return collected
+
+
def replace_element_contents(target: ET.Element, source: ET.Element) -> None:
    """Make *target* mirror *source* in place.

    Attributes, text, and tail are overwritten; existing children are removed
    and replaced with deep copies of *source*'s children (so *source* keeps
    sole ownership of its subtree).
    """
    target.attrib.clear()
    target.attrib.update(source.attrib)
    target.text, target.tail = source.text, source.tail

    # Drop current children before appending copies of the source's children.
    for old_child in list(target):
        target.remove(old_child)
    target.extend(deepcopy(child) for child in source)
+
+
def element_attribute_key(element: ET.Element) -> tuple[tuple[str, str], ...]:
    """Return a hashable, order-independent key describing an element's attributes."""
    sorted_items = sorted(element.attrib.items())
    return tuple(sorted_items)
+
+
def find_or_create_matching_group(root: ET.Element, source_group: ET.Element, tag: str) -> ET.Element:
    """Return the *tag* child of *root* whose attribute set matches
    *source_group*'s, creating and inserting a new one when no match exists.

    A newly created group is inserted at an index equal to the count of
    existing *tag* children, which keeps same-tag groups adjacent when they
    lead the document.
    """
    wanted = tuple(sorted(source_group.attrib.items()))
    for candidate in root.findall(tag):
        if tuple(sorted(candidate.attrib.items())) == wanted:
            return candidate

    created = ET.Element(tag, source_group.attrib)
    position = sum(1 for child in root if child.tag == tag)
    root.insert(position, created)
    return created
+
+
def merge_csproj_content(upstream_content: str, local_path: Path) -> str:
    """Merge upstream .csproj changes into the local file, preserving local additions.

    Only PropertyGroup properties and ItemGroup PackageReferences from upstream
    are compared and updated. Local-only properties and packages are kept as-is.
    If the local file does not exist or cannot be parsed, the full upstream
    content is returned unchanged.

    NOTE(review): ElementTree drops XML comments during parsing, so any
    comments in the local .csproj are lost when a merge happens — confirm this
    is acceptable for the sample projects.
    """
    if not local_path.exists():
        return upstream_content

    local_content = local_path.read_text(encoding="utf-8")

    try:
        upstream_root = ET.fromstring(upstream_content)
        local_root = ET.fromstring(local_content)
    except ET.ParseError:
        # Unparseable local file: fall back to a full overwrite rather than failing.
        return upstream_content

    # --- Merge PropertyGroup ---
    # For each upstream group, locate (or create) the local group with the same
    # attribute set, then overwrite properties that exist locally and append new ones.
    for upstream_pg in upstream_root.findall("PropertyGroup"):
        local_pg = find_or_create_matching_group(local_root, upstream_pg, "PropertyGroup")
        local_props = {prop.tag: prop for prop in local_pg}

        for upstream_prop in upstream_pg:
            if upstream_prop.tag in local_props:
                replace_element_contents(local_props[upstream_prop.tag], upstream_prop)
            else:
                local_pg.append(deepcopy(upstream_prop))

    # --- Merge PackageReference items ---
    # Collect upstream packages keyed by their Include attribute (entries
    # without Include are ignored).
    upstream_packages: dict[str, ET.Element] = {}
    for ig in upstream_root.findall("ItemGroup"):
        for pr in ig.findall("PackageReference"):
            include = pr.get("Include")
            if include:
                upstream_packages[include] = deepcopy(pr)

    if upstream_packages:
        # Find the first ItemGroup that already contains PackageReferences
        local_pkg_ig: ET.Element | None = None
        for ig in local_root.findall("ItemGroup"):
            if ig.findall("PackageReference"):
                local_pkg_ig = ig
                break

        if local_pkg_ig is None:
            local_pkg_ig = ET.SubElement(local_root, "ItemGroup")

        # Index every local PackageReference (across all ItemGroups) by Include.
        local_pkg_map: dict[str, ET.Element] = {}
        for ig in local_root.findall("ItemGroup"):
            for pr in ig.findall("PackageReference"):
                include = pr.get("Include")
                if include:
                    local_pkg_map[include] = pr

        # Update existing packages in place; append genuinely new ones to the
        # chosen (or freshly created) ItemGroup.
        for include, upstream_package in upstream_packages.items():
            if include in local_pkg_map:
                replace_element_contents(local_pkg_map[include], upstream_package)
            else:
                local_pkg_ig.append(deepcopy(upstream_package))

    # Re-indent the merged tree and serialize; ET.indent requires Python 3.9+.
    ET.indent(local_root, space=" ")
    return ET.tostring(local_root, encoding="unicode", xml_declaration=False) + "\n"
+
+
def sync_sample(config: SampleConfig) -> list[Path]:
    """Download one upstream sample into its local target directory.

    Fetches every tracked (.cs/.csproj) upstream file, normalizes the concrete
    project name back to the ``{{SafeProjectName}}`` placeholder in both file
    contents and file names, writes the results under ``config.target_dir``,
    then prunes tracked-suffix files (and empty directories) that no longer
    exist upstream.

    Returns the list of local paths that were written.
    """
    items = list_relevant_files(config.upstream_path)
    expected_paths: list[Path] = []

    for item in items:
        download_url = str(item["download_url"])
        relative_upstream_path = Path(str(item["path"])).relative_to(config.upstream_path)
        target_relative_path = normalize_relative_path(relative_upstream_path, config.upstream_name)
        target_path = config.target_dir / target_relative_path
        normalized_content = normalize_content(fetch_text(download_url), config.upstream_name)

        target_path.parent.mkdir(parents=True, exist_ok=True)

        if target_path.suffix == ".csproj":
            # Project files are merged so local-only properties/packages survive.
            merged_content = merge_csproj_content(normalized_content, target_path)
            target_path.write_text(merged_content, encoding="utf-8", newline="\n")
        else:
            target_path.write_text(normalized_content, encoding="utf-8", newline="\n")

        expected_paths.append(target_path)

    # Delete tracked-suffix files upstream no longer provides; other local
    # files (README, Dockerfile, ...) are intentionally left untouched.
    expected_path_set = set(expected_paths)
    for existing_path in config.target_dir.rglob("*"):
        if existing_path.suffix not in ALLOWED_SUFFIXES:
            continue
        if existing_path not in expected_path_set:
            existing_path.unlink()

    # Remove now-empty directories; reverse-sorted paths put deeper dirs first.
    for existing_dir in sorted((path for path in config.target_dir.rglob("*") if path.is_dir()), reverse=True):
        if not any(existing_dir.iterdir()):
            existing_dir.rmdir()

    return expected_paths
+
+
def main() -> None:
    """Sync every configured sample and print each synced path relative to the repo root."""
    collected = [path for sample in SAMPLES for path in sync_sample(sample)]
    for synced_path in sorted(collected):
        print(synced_path.relative_to(REPO_ROOT).as_posix())


if __name__ == "__main__":
    main()
diff --git a/.github/workflows/sync-agent-framework-samples.yml b/.github/workflows/sync-agent-framework-samples.yml
new file mode 100644
index 0000000..7b55e99
--- /dev/null
+++ b/.github/workflows/sync-agent-framework-samples.yml
@@ -0,0 +1,210 @@
+name: Sync Agent Framework Samples
+
+on:
+ workflow_dispatch:
+ schedule:
+ - cron: "0 6 * * 1"
+
+permissions:
+ contents: write
+ pull-requests: write
+
+jobs:
+ sync:
+ runs-on: ubuntu-latest
+ env:
+ PR_LABEL_CANDIDATES: |
+ automated-pr
+ area:samples
+ area:dotnet
+ area:hosted-agent
+ strategy:
+ fail-fast: false
+ matrix:
+ base_branch:
+ - stable
+ - pre-release
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Generate GitHub App token
+ id: app-token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.SYNC_APP_ID }}
+ private-key: ${{ secrets.SYNC_APP_PRIVATE_KEY }}
+
+ - name: Stage sync script
+ shell: bash
+ run: |
+ set -euo pipefail
+ cp .github/scripts/sync_agent_framework_samples.py "$RUNNER_TEMP/sync_agent_framework_samples.py"
+
+ - name: Prepare target branch
+ id: prepare
+ shell: bash
+ run: |
+ set -euo pipefail
+ target_branch="${{ matrix.base_branch }}"
+
+ if ! git ls-remote --exit-code --heads origin "$target_branch" >/dev/null 2>&1; then
+ echo "Branch $target_branch does not exist. Skipping."
+ echo "branch_exists=false" >> "$GITHUB_OUTPUT"
+ exit 0
+ fi
+
+ git fetch origin "$target_branch"
+ git checkout -B "$target_branch" "origin/$target_branch"
+
+ echo "branch_exists=true" >> "$GITHUB_OUTPUT"
+
+ - name: Set up Python
+ if: steps.prepare.outputs.branch_exists == 'true'
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.x"
+
+ - name: Sync tracked sample files
+ if: steps.prepare.outputs.branch_exists == 'true'
+ env:
+ REPO_ROOT: ${{ github.workspace }}
+ GITHUB_TOKEN: ${{ github.token }}
+ run: python "$RUNNER_TEMP/sync_agent_framework_samples.py"
+
+ - name: Detect sample changes
+ if: steps.prepare.outputs.branch_exists == 'true'
+ id: diff
+ shell: bash
+ run: |
+ set -euo pipefail
+
+ checked=$(find samples/hosted-agent/dotnet/agent samples/hosted-agent/dotnet/workflow \
+ -type f \( -name '*.cs' -o -name '*.csproj' \) | wc -l)
+ mapfile -t changed_files < <(
+ git diff --name-only -- \
+ samples/hosted-agent/dotnet/agent \
+ samples/hosted-agent/dotnet/workflow | sort
+ )
+ unchanged=$(( checked - ${#changed_files[@]} ))
+
+ echo "Checked $checked files, ${#changed_files[@]} changed, $unchanged unchanged."
+ if [[ ${#changed_files[@]} -gt 0 ]]; then
+ echo "Changed files:"
+ printf ' - %s\n' "${changed_files[@]}"
+ fi
+
+ {
+ echo "## Sample Sync Diff"
+ echo
+ echo "- Checked: $checked"
+ echo "- Changed: ${#changed_files[@]}"
+ echo "- Unchanged: $unchanged"
+ echo
+ if [[ ${#changed_files[@]} -gt 0 ]]; then
+ echo "### Changed files"
+ printf -- '- \`%s\`\n' "${changed_files[@]}"
+ echo
+ fi
+ } >> "$GITHUB_STEP_SUMMARY"
+
+ if [[ ${#changed_files[@]} -eq 0 ]]; then
+ echo "has_changes=false" >> "$GITHUB_OUTPUT"
+ exit 0
+ fi
+
+ git diff -- samples/hosted-agent/dotnet/agent samples/hosted-agent/dotnet/workflow
+ echo "has_changes=true" >> "$GITHUB_OUTPUT"
+
+ - name: Resolve pull request labels
+ if: steps.prepare.outputs.branch_exists == 'true' && steps.diff.outputs.has_changes == 'true'
+ id: labels
+ shell: bash
+ env:
+ GH_TOKEN: ${{ steps.app-token.outputs.token }}
+ run: |
+ set -euo pipefail
+
+ existing_labels=$(gh api --paginate repos/${{ github.repository }}/labels --jq '.[].name')
+ matched_labels=""
+
+ while IFS= read -r candidate; do
+ if [[ -z "$candidate" ]]; then
+ continue
+ fi
+
+ if grep -Fqx "$candidate" <<< "$existing_labels"; then
+ matched_labels+="$candidate"
+ matched_labels+=$'\n'
+ fi
+ done <<< "$PR_LABEL_CANDIDATES"
+
+ {
+ echo "labels<<EOF"
+ echo "$matched_labels"
+ echo "EOF"
+ } >> "$GITHUB_OUTPUT"
+
+ - name: Generate PR branch name
+ if: steps.prepare.outputs.branch_exists == 'true' && steps.diff.outputs.has_changes == 'true'
+ id: branch
+ shell: bash
+ run: |
+ date_suffix=$(date -u +%Y%m%d)
+ echo "name=ci/sync-af_hosted-agent-samples-${{ matrix.base_branch }}-${date_suffix}" >> "$GITHUB_OUTPUT"
+
+ - name: Create draft pull request
+ if: steps.prepare.outputs.branch_exists == 'true' && steps.diff.outputs.has_changes == 'true'
+ id: cpr
+ uses: peter-evans/create-pull-request@v8
+ with:
+ token: ${{ steps.app-token.outputs.token }}
+ branch: ${{ steps.branch.outputs.name }}
+ base: ${{ matrix.base_branch }}
+ delete-branch: true
+ draft: always-true
+ commit-message: |
+ ci: sync Agent Framework hosted-agent samples (${{ matrix.base_branch }})
+ title: "ci: sync Agent Framework hosted-agent samples (${{ matrix.base_branch }})"
+ body: |
+ ## Summary
+ This automated draft PR syncs the hosted-agent sample sources from `microsoft/agent-framework` `main` into `${{ matrix.base_branch }}`.
+
+ ## Upstream Sources
+ - `samples/hosted-agent/dotnet/agent` <- `dotnet/samples/05-end-to-end/HostedAgents/FoundrySingleAgent`
+ - `samples/hosted-agent/dotnet/workflow` <- `dotnet/samples/05-end-to-end/HostedAgents/FoundryMultiAgent`
+
+ ## Sync Rules
+ - Only `.cs` and `.csproj` files are compared and updated.
+ - Upstream concrete project names are converted back to the local `{{SafeProjectName}}` placeholder.
+ - Placeholder replacement also applies to file names when needed.
+ - The sync scope is limited to the local `agent` and `workflow` sample folders.
+
+ ## Reviewer Checks
+ - Confirm the diff only contains `.cs` and `.csproj` changes under the expected sample folders.
+ - Confirm any namespace, type name, or project file rename still maps correctly to `{{SafeProjectName}}`.
+ - Confirm package reference or SDK changes are acceptable for this repo's release branch.
+ - Confirm no repo-specific customization was unintentionally overwritten.
+
+ ## Labels
+ Preferred labels for this automation are:
+ - `automated-pr`
+
+ This workflow only applies labels that already exist in the repository.
+ labels: ${{ steps.labels.outputs.labels }}
+ add-paths: |
+ samples/hosted-agent/dotnet/agent/*.cs
+ samples/hosted-agent/dotnet/agent/*.csproj
+ samples/hosted-agent/dotnet/workflow/*.cs
+ samples/hosted-agent/dotnet/workflow/*.csproj
+
+ - name: Report PR result
+ if: steps.prepare.outputs.branch_exists == 'true' && steps.diff.outputs.has_changes == 'true'
+ shell: bash
+ run: |
+ echo "PR operation: ${{ steps.cpr.outputs.pull-request-operation }}"
+ echo "PR URL: ${{ steps.cpr.outputs.pull-request-url }}"
diff --git a/.github/workflows/sync-dev-to-main.yaml b/.github/workflows/sync-dev-to-main.yaml
new file mode 100644
index 0000000..5c32043
--- /dev/null
+++ b/.github/workflows/sync-dev-to-main.yaml
@@ -0,0 +1,96 @@
+name: Sync Dev to Main
+
+on:
+ workflow_dispatch:
+ schedule:
+ - cron: "0 1 * * *"
+
+permissions:
+ contents: write
+ pull-requests: write
+
+jobs:
+ sync:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Generate GitHub App token
+ id: app-token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.SYNC_APP_ID }}
+ private-key: ${{ secrets.SYNC_APP_PRIVATE_KEY }}
+
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ token: ${{ steps.app-token.outputs.token }}
+
+ - name: Configure git
+ run: |
+ git config user.name "github-actions[bot]"
+ git config user.email "github-actions[bot]@users.noreply.github.com"
+
+ - name: Sync dev to main (preserve version-manifest.json)
+ id: sync
+ shell: bash
+ run: |
+ set -euo pipefail
+
+ EXCLUDED_FILE="samples/hosted-agent/version-manifest.json"
+
+ git fetch origin dev main
+
+ # Save the current version-manifest.json from main
+ git show origin/main:"$EXCLUDED_FILE" > /tmp/version-manifest.json
+
+ SYNC_BRANCH="sync/dev-to-main-$(date +%Y%m%d%H%M%S)"
+ git checkout -b "$SYNC_BRANCH" origin/main
+
+ # Merge dev, automatically resolving conflicts by preferring dev
+ git merge origin/dev --no-edit -X theirs || true
+
+ # Restore the excluded file from main
+ cp /tmp/version-manifest.json "$EXCLUDED_FILE"
+ git add "$EXCLUDED_FILE"
+
+ # Check if there are any staged changes to the excluded file
+ if git diff --cached --quiet "$EXCLUDED_FILE"; then
+ echo "version-manifest.json unchanged after restore, no extra commit needed"
+ else
+ git commit --amend --no-edit
+ fi
+
+ # Check if there is any diff between the sync branch and main
+ if git diff --quiet origin/main; then
+ echo "No differences between dev and main (after exclusions). Skipping PR creation."
+ echo "has_changes=false" >> "$GITHUB_OUTPUT"
+ exit 0
+ fi
+
+ echo "has_changes=true" >> "$GITHUB_OUTPUT"
+ git push origin "$SYNC_BRANCH"
+ echo "sync_branch=$SYNC_BRANCH" >> "$GITHUB_OUTPUT"
+
+ - name: Close previous sync PRs and clean up branches
+ if: steps.sync.outputs.has_changes == 'true'
+ env:
+ GH_TOKEN: ${{ steps.app-token.outputs.token }}
+ run: |
+ gh pr list --base main --state open --json number,headRefName --jq '.[] | select(.headRefName | startswith("sync/dev-to-main")) | (.number | tostring)' | while read -r pr_number; do
+ echo "Closing PR #$pr_number"
+ gh pr close "$pr_number" --comment "Superseded by a newer sync PR." --delete-branch
+ done
+
+ - name: Create Pull Request
+ if: steps.sync.outputs.has_changes == 'true'
+ env:
+ GH_TOKEN: ${{ steps.app-token.outputs.token }}
+ run: |
+ gh pr create \
+ --base main \
+ --head "${{ steps.sync.outputs.sync_branch }}" \
+ --title "ci: sync dev to main" \
+ --body "Automated sync from \`dev\` to \`main\`.
+
+ This PR is created by an automated script. Please review carefully before merging."
diff --git a/.github/workflows/sync-main-to-dev.yaml b/.github/workflows/sync-main-to-dev.yaml
new file mode 100644
index 0000000..8f84ec3
--- /dev/null
+++ b/.github/workflows/sync-main-to-dev.yaml
@@ -0,0 +1,96 @@
+name: Sync Main to Dev
+
+on:
+ workflow_dispatch:
+ schedule:
+ - cron: "0 2 * * *"
+
+permissions:
+ contents: write
+ pull-requests: write
+
+jobs:
+ sync:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Generate GitHub App token
+ id: app-token
+ uses: actions/create-github-app-token@v1
+ with:
+ app-id: ${{ secrets.SYNC_APP_ID }}
+ private-key: ${{ secrets.SYNC_APP_PRIVATE_KEY }}
+
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ token: ${{ steps.app-token.outputs.token }}
+
+ - name: Configure git
+ run: |
+ git config user.name "github-actions[bot]"
+ git config user.email "github-actions[bot]@users.noreply.github.com"
+
+ - name: Sync main to dev (preserve version-manifest.json)
+ id: sync
+ shell: bash
+ run: |
+ set -euo pipefail
+
+ EXCLUDED_FILE="samples/hosted-agent/version-manifest.json"
+
+ git fetch origin main dev
+
+ # Save the current version-manifest.json from dev
+ git show origin/dev:"$EXCLUDED_FILE" > /tmp/version-manifest.json
+
+ SYNC_BRANCH="sync/main-to-dev-$(date +%Y%m%d%H%M%S)"
+ git checkout -b "$SYNC_BRANCH" origin/dev
+
+ # Merge main, automatically resolving conflicts by preferring main
+ git merge origin/main --no-edit -X theirs || true
+
+ # Restore the excluded file from dev
+ cp /tmp/version-manifest.json "$EXCLUDED_FILE"
+ git add "$EXCLUDED_FILE"
+
+ # Check if there are any staged changes to the excluded file
+ if git diff --cached --quiet "$EXCLUDED_FILE"; then
+ echo "version-manifest.json unchanged after restore, no extra commit needed"
+ else
+ git commit --amend --no-edit
+ fi
+
+ # Check if there is any diff between the sync branch and dev
+ if git diff --quiet origin/dev; then
+ echo "No differences between main and dev (after exclusions). Skipping PR creation."
+ echo "has_changes=false" >> "$GITHUB_OUTPUT"
+ exit 0
+ fi
+
+ echo "has_changes=true" >> "$GITHUB_OUTPUT"
+ git push origin "$SYNC_BRANCH"
+ echo "sync_branch=$SYNC_BRANCH" >> "$GITHUB_OUTPUT"
+
+ - name: Close previous sync PRs and clean up branches
+ if: steps.sync.outputs.has_changes == 'true'
+ env:
+ GH_TOKEN: ${{ steps.app-token.outputs.token }}
+ run: |
+ gh pr list --base dev --state open --json number,headRefName --jq '.[] | select(.headRefName | startswith("sync/main-to-dev")) | (.number | tostring)' | while read -r pr_number; do
+ echo "Closing PR #$pr_number"
+ gh pr close "$pr_number" --comment "Superseded by a newer sync PR." --delete-branch
+ done
+
+ - name: Create Pull Request
+ if: steps.sync.outputs.has_changes == 'true'
+ env:
+ GH_TOKEN: ${{ steps.app-token.outputs.token }}
+ run: |
+ gh pr create \
+ --base dev \
+ --head "${{ steps.sync.outputs.sync_branch }}" \
+ --title "ci: sync main to dev" \
+ --body "Automated sync from \`main\` to \`dev\`.
+
+ This PR is created by an automated script. Please review carefully before merging."
diff --git a/.github/workflows/validate-hosted-agent-samples.yml b/.github/workflows/validate-hosted-agent-samples.yml
new file mode 100644
index 0000000..9b93cbb
--- /dev/null
+++ b/.github/workflows/validate-hosted-agent-samples.yml
@@ -0,0 +1,276 @@
+name: Validate Hosted Agent Samples
+
+on:
+ schedule:
+ - cron: "0 1 * * *" # Daily at 9:00 AM Beijing time (UTC+8 → 01:00 UTC)
+ workflow_dispatch: # Allow manual trigger
+
+permissions:
+ contents: read
+
+jobs:
+ validate:
+ name: "[${{ matrix.branch }}] ${{ matrix.language }}/${{ matrix.type }}"
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false # Run all matrix jobs even if one fails
+ matrix:
+ branch: [dev, main, pre-release, stable]
+ language: [dotnet, python]
+ type: [agent, workflow]
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ matrix.branch }}
+
+ - name: Prepare sample directory
+ id: prepare
+ env:
+ SAFE_PROJECT_NAME: SampleAgent
+ AGENT_NAME: sample-agent
+ run: |
+ WORK_DIR=$(mktemp -d)
+ cp -r "samples/hosted-agent/${{ matrix.language }}/${{ matrix.type }}/." "$WORK_DIR/"
+
+ # Rename any files whose names contain the SafeProjectName placeholder
+ find "$WORK_DIR" -name '*{{SafeProjectName}}*' | while read -r f; do
+ mv "$f" "$(echo "$f" | sed "s/{{SafeProjectName}}/$SAFE_PROJECT_NAME/g")"
+ done
+
+ # Replace placeholder values inside all text files
+ find "$WORK_DIR" -type f -print0 \
+ | xargs -0 sed -i \
+ -e "s/{{SafeProjectName}}/$SAFE_PROJECT_NAME/g" \
+ -e "s/{{AgentName}}/$AGENT_NAME/g"
+
+ echo "work_dir=$WORK_DIR" >> "$GITHUB_OUTPUT"
+
+ - name: Build Docker image
+ run: |
+ docker build "${{ steps.prepare.outputs.work_dir }}" \
+ -t "sample-${{ matrix.language }}-${{ matrix.type }}:ci-test"
+
+ - name: Run container smoke test
+ run: |
+ CONTAINER="test-${{ matrix.language }}-${{ matrix.type }}"
+
+ docker run -d \
+ --name "$CONTAINER" \
+ -p 8088:8088 \
+ -e AZURE_AI_PROJECT_ENDPOINT="https://fake.services.ai.azure.com" \
+ -e PROJECT_ENDPOINT="https://fake.services.ai.azure.com" \
+ -e MODEL_DEPLOYMENT_NAME="gpt-4o" \
+ "sample-${{ matrix.language }}-${{ matrix.type }}:ci-test"
+
+ # Poll port 8088 for up to 30 seconds
+ echo "Waiting for server to start..."
+ for i in $(seq 1 10); do
+ http_code=$(curl -s -o /dev/null -w "%{http_code}" --max-time 2 http://localhost:8088/ || echo "000")
+ if [ "$http_code" != "000" ]; then
+ echo "✅ Server responded with HTTP $http_code — container is healthy"
+ break
+ fi
+ if [ "$i" -eq 10 ]; then
+ echo "❌ Server did not start within 30 seconds"
+ docker logs "$CONTAINER"
+ docker rm -f "$CONTAINER" || true
+ exit 1
+ fi
+ echo " attempt $i/10 — not yet ready, retrying in 3s..."
+ sleep 3
+ done
+
+ # Confirm the container is still running (didn't crash after responding)
+ running=$(docker inspect -f '{{.State.Running}}' "$CONTAINER")
+ if [ "$running" != "true" ]; then
+ echo "❌ Container exited unexpectedly"
+ docker logs "$CONTAINER"
+ docker rm -f "$CONTAINER" || true
+ exit 1
+ fi
+
+ docker rm -f "$CONTAINER"
+
+ - name: Save result artifact
+ if: always()
+ run: |
+ mkdir -p /tmp/ci-result
+ echo "${{ job.status }}" > /tmp/ci-result/status.txt
+ echo "${{ matrix.branch }}/${{ matrix.language }}/${{ matrix.type }}" > /tmp/ci-result/sample.txt
+
+ - name: Upload result artifact
+ if: always()
+ uses: actions/upload-artifact@v4
+ with:
+ name: result-${{ matrix.branch }}-${{ matrix.language }}-${{ matrix.type }}
+ path: /tmp/ci-result/
+
+ notify:
+ name: Send notification
+ needs: [validate]
+ if: always()
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+
+ - name: Download all results
+ uses: actions/download-artifact@v4
+ with:
+ pattern: result-*
+ path: results/
+
+ - name: Compute summary and render email
+ id: email
+ env:
+ EMAIL_WORKFLOW: "${{ github.workflow }}"
+ EMAIL_BRANCH: "${{ github.ref_name }}"
+ EMAIL_COMMIT: "${{ github.sha }}"
+ EMAIL_TRIGGERED_BY: "${{ github.event_name }}"
+ EMAIL_ACTOR: "${{ github.actor }}"
+ EMAIL_RUN_NUMBER: "${{ github.run_number }}"
+ EMAIL_RUN_ID: "${{ github.run_id }}"
+ EMAIL_RUN_URL: "${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
+ EMAIL_REPOSITORY: "${{ github.repository }}"
+ EMAIL_SERVER_URL: "${{ github.server_url }}"
+ run: |
+ total=0
+ passed=0
+
+ # Collect results into a temp file: "sample|status"
+ tmpdata=$(mktemp)
+ for dir in results/result-*/; do
+ status=$(cat "$dir/status.txt" 2>/dev/null || echo "unknown")
+ sample=$(cat "$dir/sample.txt" 2>/dev/null || echo "unknown")
+ total=$((total + 1))
+ [ "$status" = "success" ] && passed=$((passed + 1))
+ printf '%s|%s\n' "$sample" "$status" >> "$tmpdata"
+ done
+
+ # Sort by branch (alphabetical on full "branch/language/type" line)
+ tmpsorted=$(mktemp)
+ sort "$tmpdata" > "$tmpsorted"
+ rm -f "$tmpdata"
+
+ # Count rows per branch
+ declare -A branch_count=()
+ while IFS='|' read -r sample _; do
+ b="${sample%%/*}"
+ branch_count["$b"]=$((${branch_count["$b"]:-0} + 1))
+ done < "$tmpsorted"
+
+ # Build rows with rowspan on the Branch column
+ rows=""
+ prev_branch=""
+ cell_base="padding:8px 12px;border-bottom:1px solid #eeeeee;font-size:13px;color:#111111;"
+ while IFS='|' read -r sample status; do
+ branch="${sample%%/*}"
+ rest="${sample#*/}"
+ if [ "$status" = "success" ]; then
+ icon="✅"
+ bg=""
+ else
+ icon="❌"
+ bg="background-color:#fff8f8;"
+ fi
+ if [ "$branch" != "$prev_branch" ]; then
+ count=${branch_count["$branch"]}
+ branch_cell="<td rowspan=\"$count\" style=\"$cell_base\">$branch</td>"
+ prev_branch="$branch"
+ else
+ branch_cell=""
+ fi
+ rows="${rows}<tr style=\"$bg\">${branch_cell}<td style=\"$cell_base\">$rest</td><td style=\"$cell_base\">$icon $status</td></tr>"
+ done < "$tmpsorted"
+ rm -f "$tmpsorted"
+ failed=$((total - passed))
+ if [ "$failed" -eq 0 ]; then
+ overall="PASSED"
+ header_color="#137333"
+ else
+ overall="FAILED"
+ header_color="#d93025"
+ fi
+ export EMAIL_OVERALL="$overall"
+ export EMAIL_PASSED="$passed"
+ export EMAIL_TOTAL="$total"
+ export EMAIL_ROWS="$rows"
+ export EMAIL_HEADER_COLOR="$header_color"
+ body=$(envsubst \
+ '${EMAIL_OVERALL} ${EMAIL_PASSED} ${EMAIL_TOTAL} ${EMAIL_ROWS} ${EMAIL_HEADER_COLOR}
+ ${EMAIL_WORKFLOW} ${EMAIL_BRANCH} ${EMAIL_COMMIT}
+ ${EMAIL_TRIGGERED_BY} ${EMAIL_ACTOR} ${EMAIL_RUN_NUMBER} ${EMAIL_RUN_ID}
+ ${EMAIL_RUN_URL} ${EMAIL_REPOSITORY} ${EMAIL_SERVER_URL}' \
+ < .github/email-templates/validate-samples-failure.html)
+ body=$(printf '%s' "$body" | tr -d '\n' | sed 's/>[[:space:]]\+</></g')
+ echo "overall=$overall" >> "$GITHUB_OUTPUT"
+ echo "passed=$passed" >> "$GITHUB_OUTPUT"
+ echo "total=$total" >> "$GITHUB_OUTPUT"
+ echo "body<> "$GITHUB_OUTPUT"
+ echo "$body" >> "$GITHUB_OUTPUT"
+ echo "DELIM" >> "$GITHUB_OUTPUT"
+
+ - name: Send notification
+ env:
+ TO: ${{ secrets.MAIL_TO }}
+ SUBJECT: "[${{ steps.email.outputs.overall }}] Validate Hosted Agent Samples (${{ steps.email.outputs.passed }}/${{ steps.email.outputs.total }} Passed)"
+ BODY: ${{ steps.email.outputs.body }}
+ LOGIC_APP_URL: ${{ secrets.LOGIC_APP_URL }}
+ MAIL_CLIENT_ID: ${{ secrets.MAIL_CLIENT_ID }}
+ MAIL_CLIENT_SECRET: ${{ secrets.MAIL_CLIENT_SECRET }}
+ MAIL_TENANT_ID: ${{ secrets.MAIL_TENANT_ID }}
+ run: |
+ if [ -z "$TO" ] || [ -z "$LOGIC_APP_URL" ]; then
+ echo "⚠️ MAIL_TO or LOGIC_APP_URL not configured. Skipping email send."
+ exit 0
+ fi
+
+ auth_header=""
+ if [ -n "$MAIL_CLIENT_ID" ] && [ -n "$MAIL_CLIENT_SECRET" ] && [ -n "$MAIL_TENANT_ID" ]; then
+ echo "🔐 Getting Azure AD access token..."
+ response=$(curl -s \
+ --request POST \
+ --header "Content-Type: application/x-www-form-urlencoded" \
+ --data "grant_type=client_credentials&client_id=${MAIL_CLIENT_ID}&client_secret=${MAIL_CLIENT_SECRET}&resource=https://management.core.windows.net" \
+ "https://login.microsoftonline.com/${MAIL_TENANT_ID}/oauth2/token")
+ access_token=$(echo "$response" | jq -r '. | select(.access_token) | .access_token')
+ if [ -z "$access_token" ] || [ "$access_token" = "null" ]; then
+ echo "⚠️ Failed to get access token. Skipping email send."
+ exit 0
+ fi
+ echo "✅ Got access token"
+ auth_header="Authorization: Bearer $access_token"
+ fi
+
+ body_file=$(mktemp)
+ printf '%s' "$BODY" > "$body_file"
+ payload=$(jq -n \
+ --arg to "$TO" \
+ --arg subject "$SUBJECT" \
+ --rawfile body "$body_file" \
+ '{to: $to, subject: $subject, body: $body, bodyHtml: $body, contentType: "text/html"}')
+ rm -f "$body_file"
+
+ if [ -n "$auth_header" ]; then
+ http_code=$(curl -s -o /tmp/email_response.txt -w "%{http_code}" \
+ --request POST \
+ --header "Content-Type: application/json" \
+ --header "$auth_header" \
+ --data "$payload" \
+ "$LOGIC_APP_URL")
+ else
+ http_code=$(curl -s -o /tmp/email_response.txt -w "%{http_code}" \
+ --request POST \
+ --header "Content-Type: application/json" \
+ --data "$payload" \
+ "$LOGIC_APP_URL")
+ fi
+
+ if [ "$http_code" -ge 200 ] && [ "$http_code" -lt 300 ]; then
+ echo "✅ Email sent successfully! (HTTP $http_code)"
+ else
+ echo "⚠️ Email send failed with HTTP $http_code"
+ cat /tmp/email_response.txt 2>/dev/null || true
+ fi