diff --git a/infrastructure/infrastructure-setup-bicep/32-customer-managed-keys-user-assigned-identity/azuredeploy.json b/infrastructure/infrastructure-setup-bicep/32-customer-managed-keys-user-assigned-identity/azuredeploy.json new file mode 100644 index 00000000..cf143128 --- /dev/null +++ b/infrastructure/infrastructure-setup-bicep/32-customer-managed-keys-user-assigned-identity/azuredeploy.json @@ -0,0 +1,428 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "_generator": { + "name": "bicep", + "version": "0.39.26.7824", + "templateHash": "9619608194674690722" + } + }, + "parameters": { + "aiFoundryName": { + "type": "string", + "defaultValue": "ai-foundry-complete-cmk", + "metadata": { + "description": "That name is the name of our application. It has to be unique." + } + }, + "aiProjectName": { + "type": "string", + "defaultValue": "[format('{0}-proj', parameters('aiFoundryName'))]", + "metadata": { + "description": "Name of the AI Foundry project" + } + }, + "location": { + "type": "string", + "defaultValue": "eastus2", + "metadata": { + "description": "Location for all resources." 
+ } + }, + "keyVaultName": { + "type": "string", + "metadata": { + "description": "Name of the Azure Key Vault target" + } + }, + "keyName": { + "type": "string", + "metadata": { + "description": "Name of the Azure Key Vault key" + } + }, + "keyVersion": { + "type": "string", + "metadata": { + "description": "Version of the Azure Key Vault key" + } + }, + "userAssignedIdentityId": { + "type": "string", + "metadata": { + "description": "Resource ID of the user-assigned managed identity to use for CMK encryption" + } + }, + "userAssignedIdentityClientId": { + "type": "string", + "metadata": { + "description": "Client ID of the user-assigned managed identity" + } + } + }, + "resources": [ + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2025-04-01", + "name": "foundryAccount", + "properties": { + "expressionEvaluationOptions": { + "scope": "inner" + }, + "mode": "Incremental", + "parameters": { + "aiFoundryName": { + "value": "[parameters('aiFoundryName')]" + }, + "location": { + "value": "[parameters('location')]" + }, + "userAssignedIdentityId": { + "value": "[parameters('userAssignedIdentityId')]" + } + }, + "template": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "_generator": { + "name": "bicep", + "version": "0.39.26.7824", + "templateHash": "2042207162176060264" + } + }, + "parameters": { + "aiFoundryName": { + "type": "string", + "metadata": { + "description": "Name of the AI Foundry account" + } + }, + "location": { + "type": "string", + "metadata": { + "description": "Location for the resource" + } + }, + "userAssignedIdentityId": { + "type": "string", + "metadata": { + "description": "Resource ID of the user-assigned managed identity" + } + } + }, + "variables": { + "identityConfig": { + "type": "UserAssigned", + "userAssignedIdentities": { + "[format('{0}', parameters('userAssignedIdentityId'))]": {} + } + } + }, + "resources": [ + { + 
"type": "Microsoft.CognitiveServices/accounts", + "apiVersion": "2025-04-01-preview", + "name": "[parameters('aiFoundryName')]", + "location": "[parameters('location')]", + "identity": "[variables('identityConfig')]", + "kind": "AIServices", + "sku": { + "name": "S0" + }, + "properties": { + "allowProjectManagement": true, + "publicNetworkAccess": "Enabled", + "customSubDomainName": "[parameters('aiFoundryName')]", + "disableLocalAuth": true + } + } + ], + "outputs": { + "accountId": { + "type": "string", + "value": "[resourceId('Microsoft.CognitiveServices/accounts', parameters('aiFoundryName'))]" + }, + "accountName": { + "type": "string", + "value": "[parameters('aiFoundryName')]" + } + } + } + } + }, + { + "type": "Microsoft.Resources/deployments", + "apiVersion": "2025-04-01", + "name": "cmkEncryption", + "properties": { + "expressionEvaluationOptions": { + "scope": "inner" + }, + "mode": "Incremental", + "parameters": { + "aiFoundryName": { + "value": "[reference(resourceId('Microsoft.Resources/deployments', 'foundryAccount'), '2025-04-01').outputs.accountName.value]" + }, + "location": { + "value": "[parameters('location')]" + }, + "keyVaultName": { + "value": "[parameters('keyVaultName')]" + }, + "keyName": { + "value": "[parameters('keyName')]" + }, + "keyVersion": { + "value": "[parameters('keyVersion')]" + }, + "userAssignedIdentityId": { + "value": "[parameters('userAssignedIdentityId')]" + }, + "userAssignedIdentityClientId": { + "value": "[parameters('userAssignedIdentityClientId')]" + } + }, + "template": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "_generator": { + "name": "bicep", + "version": "0.39.26.7824", + "templateHash": "1200570362967678125" + } + }, + "parameters": { + "aiFoundryName": { + "type": "string", + "metadata": { + "description": "Name of the AI Foundry account" + } + }, + "location": { + "type": "string", + "metadata": { + 
"description": "Location for the resource" + } + }, + "keyVaultName": { + "type": "string", + "metadata": { + "description": "Name of the Azure Key Vault" + } + }, + "keyName": { + "type": "string", + "metadata": { + "description": "Name of the Azure Key Vault key" + } + }, + "keyVersion": { + "type": "string", + "metadata": { + "description": "Version of the Azure Key Vault key" + } + }, + "userAssignedIdentityId": { + "type": "string", + "metadata": { + "description": "Resource ID of the user-assigned managed identity" + } + }, + "userAssignedIdentityClientId": { + "type": "string", + "metadata": { + "description": "Client ID of the user-assigned managed identity" + } + } + }, + "variables": { + "keyVaultUri": "[format('https://{0}{1}/', parameters('keyVaultName'), environment().suffixes.keyvaultDns)]" + }, + "resources": [ + { + "type": "Microsoft.CognitiveServices/accounts", + "apiVersion": "2025-04-01-preview", + "name": "[parameters('aiFoundryName')]", + "location": "[parameters('location')]", + "identity": { + "type": "UserAssigned", + "userAssignedIdentities": { + "[format('{0}', parameters('userAssignedIdentityId'))]": {} + } + }, + "kind": "AIServices", + "sku": { + "name": "S0" + }, + "properties": { + "encryption": { + "keySource": "Microsoft.KeyVault", + "keyVaultProperties": { + "keyVaultUri": "[variables('keyVaultUri')]", + "keyName": "[parameters('keyName')]", + "keyVersion": "[parameters('keyVersion')]", + "identityClientId": "[parameters('userAssignedIdentityClientId')]" + } + }, + "allowProjectManagement": true, + "publicNetworkAccess": "Enabled", + "customSubDomainName": "[parameters('aiFoundryName')]", + "disableLocalAuth": true + } + } + ], + "outputs": { + "encryptionStatus": { + "type": "string", + "value": "CMK encryption enabled" + }, + "keyVaultUri": { + "type": "string", + "value": "[variables('keyVaultUri')]" + } + } + } + }, + "dependsOn": [ + "[resourceId('Microsoft.Resources/deployments', 'foundryAccount')]" + ] + }, + { + "type": 
"Microsoft.Resources/deployments", + "apiVersion": "2025-04-01", + "name": "foundryProject", + "properties": { + "expressionEvaluationOptions": { + "scope": "inner" + }, + "mode": "Incremental", + "parameters": { + "aiFoundryName": { + "value": "[reference(resourceId('Microsoft.Resources/deployments', 'foundryAccount'), '2025-04-01').outputs.accountName.value]" + }, + "projectName": { + "value": "[parameters('aiProjectName')]" + }, + "projectDisplayName": { + "value": "[parameters('aiProjectName')]" + }, + "projectDescription": { + "value": "AI Foundry project with customer-managed keys" + }, + "location": { + "value": "[parameters('location')]" + }, + "userAssignedIdentityId": { + "value": "[parameters('userAssignedIdentityId')]" + } + }, + "template": { + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "metadata": { + "_generator": { + "name": "bicep", + "version": "0.39.26.7824", + "templateHash": "14690874522422526659" + } + }, + "parameters": { + "aiFoundryName": { + "type": "string", + "metadata": { + "description": "Name of the AI Foundry account (parent)" + } + }, + "location": { + "type": "string", + "metadata": { + "description": "Location for the resource" + } + }, + "projectName": { + "type": "string", + "metadata": { + "description": "Name of the project" + } + }, + "projectDisplayName": { + "type": "string", + "metadata": { + "description": "Display name for the project" + } + }, + "projectDescription": { + "type": "string", + "metadata": { + "description": "Description for the project" + } + }, + "userAssignedIdentityId": { + "type": "string", + "metadata": { + "description": "Resource ID of the user-assigned managed identity" + } + } + }, + "resources": [ + { + "type": "Microsoft.CognitiveServices/accounts/projects", + "apiVersion": "2025-04-01-preview", + "name": "[format('{0}/{1}', parameters('aiFoundryName'), parameters('projectName'))]", + "location": 
"[parameters('location')]", + "identity": { + "type": "UserAssigned", + "userAssignedIdentities": { + "[format('{0}', parameters('userAssignedIdentityId'))]": {} + } + }, + "properties": { + "displayName": "[parameters('projectDisplayName')]", + "description": "[parameters('projectDescription')]" + } + } + ], + "outputs": { + "projectId": { + "type": "string", + "value": "[resourceId('Microsoft.CognitiveServices/accounts/projects', parameters('aiFoundryName'), parameters('projectName'))]" + }, + "projectName": { + "type": "string", + "value": "[parameters('projectName')]" + } + } + } + }, + "dependsOn": [ + "[resourceId('Microsoft.Resources/deployments', 'foundryAccount')]", + "[resourceId('Microsoft.Resources/deployments', 'cmkEncryption')]" + ] + } + ], + "outputs": { + "accountId": { + "type": "string", + "value": "[reference(resourceId('Microsoft.Resources/deployments', 'foundryAccount'), '2025-04-01').outputs.accountId.value]" + }, + "accountName": { + "type": "string", + "value": "[reference(resourceId('Microsoft.Resources/deployments', 'foundryAccount'), '2025-04-01').outputs.accountName.value]" + }, + "projectId": { + "type": "string", + "value": "[reference(resourceId('Microsoft.Resources/deployments', 'foundryProject'), '2025-04-01').outputs.projectId.value]" + }, + "projectName": { + "type": "string", + "value": "[reference(resourceId('Microsoft.Resources/deployments', 'foundryProject'), '2025-04-01').outputs.projectName.value]" + }, + "keyVaultUri": { + "type": "string", + "value": "[reference(resourceId('Microsoft.Resources/deployments', 'cmkEncryption'), '2025-04-01').outputs.keyVaultUri.value]" + } + } +} \ No newline at end of file diff --git a/samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/Dockerfile b/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/Dockerfile similarity index 100% rename from samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/Dockerfile rename to 
samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/Dockerfile diff --git a/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/README.md b/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/README.md new file mode 100644 index 00000000..6eecdc90 --- /dev/null +++ b/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/README.md @@ -0,0 +1,146 @@ +**IMPORTANT!** All samples and other resources made available in this GitHub repository ("samples") are designed to assist in accelerating development of agents, solutions, and agent workflows for various scenarios. Review all provided resources and carefully test output behavior in the context of your use case. AI responses may be inaccurate and AI actions should be monitored with human oversight. Learn more in the transparency documents for [Agent Service](https://learn.microsoft.com/en-us/azure/ai-foundry/responsible-ai/agents/transparency-note) and [Agent Framework](https://github.com/microsoft/agent-framework/blob/main/TRANSPARENCY_FAQ.md). + +Agents, solutions, or other output you create may be subject to legal and regulatory requirements, may require licenses, or may not be suitable for all industries, scenarios, or use cases. By using any sample, you are acknowledging that any output created using those samples are solely your responsibility, and that you will comply with all applicable laws, regulations, and relevant safety standards, terms of service, and codes of conduct. + +Third-party samples contained in this folder are subject to their own designated terms, and they have not been tested or verified by Microsoft or its affiliates. + +Microsoft has no responsibility to you or others with respect to any of these samples or any resulting output. 
+ +# What this sample demonstrates + +This sample demonstrates how to build a Microsoft Agent Framework chat agent that can use **Foundry tools** +(for example, web search and MCP tools), host it using the +[Azure AI AgentServer SDK](https://pypi.org/project/azure-ai-agentserver-agentframework/), +and deploy it to Microsoft Foundry using the Azure Developer CLI [ai agent](https://learn.microsoft.com/en-us/azure/ai-foundry/agents/concepts/hosted-agents?view=foundry&tabs=cli#create-a-hosted-agent) extension. + +## How It Works + +### Foundry tools integration + +In [main.py](main.py), the agent is created using `AzureOpenAIChatClient` and is configured with +`FoundryToolsChatMiddleware`. The middleware enables tool usage via Foundry-supported tool types: + +- `web_search_preview` (foundry configured tools) +- `mcp` (connected mcp tool, configured with a Foundry project connection id) + +### Agent Hosting + +The agent is hosted using the [Azure AI AgentServer SDK](https://pypi.org/project/azure-ai-agentserver-agentframework/), +which provisions a REST API endpoint compatible with the OpenAI Responses protocol. This allows interaction with the agent using OpenAI Responses compatible clients. + +### Agent Deployment + +The hosted agent can be seamlessly deployed to Microsoft Foundry using the Azure Developer CLI [ai agent](https://learn.microsoft.com/en-us/azure/ai-foundry/agents/concepts/hosted-agents?view=foundry&tabs=cli#create-a-hosted-agent) extension. +The extension builds a container image into Azure Container Registry (ACR), and creates a hosted agent version and deployment on Microsoft Foundry. + +## Running the Agent Locally + +### Prerequisites + +Before running this sample, ensure you have: + +1. **Azure OpenAI Service** + - Endpoint configured + - Chat model deployed (e.g., `gpt-4o-mini` or `gpt-4`) + - Note your endpoint URL and deployment name + +2. 
**Azure AI Foundry Project** + - Project created in [Azure AI Foundry](https://learn.microsoft.com/en-us/azure/ai-foundry/what-is-foundry?view=foundry#microsoft-foundry-portals) + - Add 'Microsoft Learn' MCP from foundry tool catalog. + ![microsoft_learn](microsoft_learn.png) + +3. **Azure CLI** + - Installed and authenticated + - Run `az login` and verify with `az account show` + +4. **Python 3.10 or higher** + - Verify your version: `python --version` + - If you have Python 3.9 or older, install a newer version: + - Windows: `winget install Python.Python.3.12` + - macOS: `brew install python@3.12` + - Linux: Use your package manager + +### Environment Variables + +Set the following environment variables: + +- `AZURE_OPENAI_ENDPOINT` - Your Azure OpenAI endpoint URL (required) +- `AZURE_OPENAI_CHAT_DEPLOYMENT_NAME` - The deployment name for your chat model (required) +- `AZURE_AI_PROJECT_ENDPOINT` - Your Azure AI Foundry project endpoint (required) +- `AZURE_AI_PROJECT_TOOL_CONNECTION_ID` - Foundry project connection id used to configure the `mcp` tool (optional) + +This sample loads environment variables from a local `.env` file if present. + +**Finding your tool connection id** (portal names may vary): +1. Go to [Azure AI Foundry portal](https://ai.azure.com) +2. Navigate to your project -> Build -> Tools +3. Find your connected MCP tool (e.g., "Microsoft Learn") +4. 
Copy your tool's name and set it as `AZURE_AI_PROJECT_TOOL_CONNECTION_ID` + +```powershell +# Replace with your actual values +$env:AZURE_OPENAI_ENDPOINT="https://your-openai-resource.openai.azure.com/" +$env:AZURE_OPENAI_CHAT_DEPLOYMENT_NAME="gpt-4o-mini" +$env:AZURE_AI_PROJECT_ENDPOINT="https://{resource}.services.ai.azure.com/api/projects/{project-name}" +$env:AZURE_AI_PROJECT_TOOL_CONNECTION_ID="" +``` + +### Installing Dependencies + +Install the required Python dependencies using pip: + +```powershell +pip install -r requirements.txt +``` + +### Running the Sample + +To run the agent, execute the following command in your terminal: + +```powershell +python main.py +``` + +This will start the hosted agent locally on `http://localhost:8088/`. + +### Interacting with the Agent + +**PowerShell (Windows):** +```powershell +$body = @{ + input = "How to deploy foundry hosted agents?" + stream = $false +} | ConvertTo-Json + +Invoke-RestMethod -Uri http://localhost:8088/responses -Method Post -Body $body -ContentType "application/json" +``` + +**Bash/curl (Linux/macOS):** +```bash +curl -sS -H "Content-Type: application/json" -X POST http://localhost:8088/responses \ + -d '{"input": "How to deploy foundry hosted agents?","stream":false}' +``` + +The agent may use Foundry tools (for example `web_search_preview` and/or `mcp`) as needed to answer. + +### Deploying the Agent to Microsoft Foundry + +To deploy your agent to Microsoft Foundry, follow the comprehensive deployment guide at https://learn.microsoft.com/en-us/azure/ai-foundry/agents/concepts/hosted-agents?view=foundry&tabs=cli + +## Troubleshooting + +### Images built on Apple Silicon or other ARM64 machines do not work on our service + +We **recommend using `azd` cloud build**, which always builds images with the correct architecture. 
+ +If you choose to **build locally**, and your machine is **not `linux/amd64`** (for example, an Apple Silicon Mac), the image will **not be compatible with our service**, causing runtime failures. + +**Fix for local builds** + +Use this command to build the image locally: + +```shell +docker build --platform=linux/amd64 -t image . +``` + +This forces the image to be built for the required `amd64` architecture. \ No newline at end of file diff --git a/samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/agent.yaml b/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/agent.yaml similarity index 82% rename from samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/agent.yaml rename to samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/agent.yaml index 5a0f5855..9ad611eb 100644 --- a/samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/agent.yaml +++ b/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/agent.yaml @@ -1,5 +1,5 @@ # Unique identifier/name for this agent -name: agent-with-hosted-mcp +name: af-agent-with-foundry-tools # Brief description of what this agent does description: > An AI agent that uses Azure OpenAI with a Hosted Model Context Protocol (MCP) server. 
@@ -7,15 +7,14 @@ description: > metadata: # Categorization tags for organizing and discovering agents authors: - - Microsoft Agent Framework Team + - Microsoft tags: - Azure AI AgentServer - Microsoft Agent Framework - Model Context Protocol - MCP template: - name: agent-with-hosted-mcp - # The type of agent - "hosted" for HOBO, "container" for COBO + name: af-agent-with-foundry-tools kind: hosted protocols: - protocol: responses @@ -24,6 +23,8 @@ template: value: ${AZURE_OPENAI_ENDPOINT} - name: AZURE_OPENAI_CHAT_DEPLOYMENT_NAME value: "{{chat}}" + - name: AZURE_AI_PROJECT_TOOL_CONNECTION_ID + value: "" resources: - kind: model id: gpt-4o-mini diff --git a/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/main.py b/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/main.py new file mode 100644 index 00000000..4f3f6954 --- /dev/null +++ b/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/main.py @@ -0,0 +1,36 @@ +import os +from dotenv import load_dotenv +from agent_framework.azure import AzureOpenAIChatClient + +from azure.ai.agentserver.agentframework import from_agent_framework, FoundryToolsChatMiddleware +from azure.identity import DefaultAzureCredential + +# Load environment variables from .env file for local development +load_dotenv() + +def main(): + required_env_vars = [ + "AZURE_OPENAI_ENDPOINT", + "AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", + "AZURE_AI_PROJECT_ENDPOINT", + ] + for env_var in required_env_vars: + assert env_var in os.environ and os.environ[env_var], ( + f"{env_var} environment variable must be set."
+    ) + + tools=[{"type": "web_search_preview"}] + if project_tool_connection_id := os.environ.get("AZURE_AI_PROJECT_TOOL_CONNECTION_ID"): + tools.append({"type": "mcp", "project_connection_id": project_tool_connection_id}) + + chat_client = AzureOpenAIChatClient(credential=DefaultAzureCredential(), + middleware=FoundryToolsChatMiddleware(tools)) + agent = chat_client.create_agent( + name="FoundryToolAgent", + instructions="You are a helpful assistant with access to various tools." + ) + + from_agent_framework(agent).run() + +if __name__ == "__main__": + main() diff --git a/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/microsoft_learn.png b/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/microsoft_learn.png new file mode 100644 index 00000000..0a8ebaaf Binary files /dev/null and b/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/microsoft_learn.png differ diff --git a/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/requirements.txt b/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/requirements.txt new file mode 100644 index 00000000..bb03ffea --- /dev/null +++ b/samples/python/hosted-agents/agent-framework/agent-with-foundry-tools/requirements.txt @@ -0,0 +1 @@ +azure-ai-agentserver-agentframework==1.0.0b9 \ No newline at end of file diff --git a/samples/python/hosted-agents/agent_framework/agent_with_text_search_rag/Dockerfile b/samples/python/hosted-agents/agent-framework/agent-with-text-search-rag/Dockerfile similarity index 100% rename from samples/python/hosted-agents/agent_framework/agent_with_text_search_rag/Dockerfile rename to samples/python/hosted-agents/agent-framework/agent-with-text-search-rag/Dockerfile diff --git a/samples/python/hosted-agents/agent_framework/agent_with_text_search_rag/README.md b/samples/python/hosted-agents/agent-framework/agent-with-text-search-rag/README.md similarity index 100% rename from
samples/python/hosted-agents/agent_framework/agent_with_text_search_rag/README.md rename to samples/python/hosted-agents/agent-framework/agent-with-text-search-rag/README.md diff --git a/samples/python/hosted-agents/agent_framework/agent_with_text_search_rag/agent.yaml b/samples/python/hosted-agents/agent-framework/agent-with-text-search-rag/agent.yaml similarity index 100% rename from samples/python/hosted-agents/agent_framework/agent_with_text_search_rag/agent.yaml rename to samples/python/hosted-agents/agent-framework/agent-with-text-search-rag/agent.yaml diff --git a/samples/python/hosted-agents/agent_framework/agent_with_text_search_rag/main.py b/samples/python/hosted-agents/agent-framework/agent-with-text-search-rag/main.py similarity index 98% rename from samples/python/hosted-agents/agent_framework/agent_with_text_search_rag/main.py rename to samples/python/hosted-agents/agent-framework/agent-with-text-search-rag/main.py index 768dd1c7..9a0cfa91 100644 --- a/samples/python/hosted-agents/agent_framework/agent_with_text_search_rag/main.py +++ b/samples/python/hosted-agents/agent-framework/agent-with-text-search-rag/main.py @@ -106,7 +106,7 @@ def create_agent(): def main(): # Run the agent as a hosted agent - from_agent_framework(lambda _: create_agent()).run() + from_agent_framework(create_agent()).run() if __name__ == "__main__": diff --git a/samples/python/hosted-agents/agent-framework/agent-with-text-search-rag/requirements.txt b/samples/python/hosted-agents/agent-framework/agent-with-text-search-rag/requirements.txt new file mode 100644 index 00000000..adac06de --- /dev/null +++ b/samples/python/hosted-agents/agent-framework/agent-with-text-search-rag/requirements.txt @@ -0,0 +1 @@ +azure-ai-agentserver-agentframework==1.0.0b9 \ No newline at end of file diff --git a/samples/python/hosted-agents/agent_framework/agents_in_workflow/Dockerfile b/samples/python/hosted-agents/agent-framework/agents-in-workflow/Dockerfile similarity index 100% rename from 
samples/python/hosted-agents/agent_framework/agents_in_workflow/Dockerfile rename to samples/python/hosted-agents/agent-framework/agents-in-workflow/Dockerfile diff --git a/samples/python/hosted-agents/agent_framework/agents_in_workflow/README.md b/samples/python/hosted-agents/agent-framework/agents-in-workflow/README.md similarity index 100% rename from samples/python/hosted-agents/agent_framework/agents_in_workflow/README.md rename to samples/python/hosted-agents/agent-framework/agents-in-workflow/README.md diff --git a/samples/python/hosted-agents/agent_framework/agents_in_workflow/agent.yaml b/samples/python/hosted-agents/agent-framework/agents-in-workflow/agent.yaml similarity index 100% rename from samples/python/hosted-agents/agent_framework/agents_in_workflow/agent.yaml rename to samples/python/hosted-agents/agent-framework/agents-in-workflow/agent.yaml diff --git a/samples/python/hosted-agents/agent_framework/agents_in_workflow/main.py b/samples/python/hosted-agents/agent-framework/agents-in-workflow/main.py similarity index 96% rename from samples/python/hosted-agents/agent_framework/agents_in_workflow/main.py rename to samples/python/hosted-agents/agent-framework/agents-in-workflow/main.py index c4a667c2..06025334 100644 --- a/samples/python/hosted-agents/agent_framework/agents_in_workflow/main.py +++ b/samples/python/hosted-agents/agent-framework/agents-in-workflow/main.py @@ -40,7 +40,7 @@ def create_agent(): def main(): # Run the agent as a hosted agent - from_agent_framework(lambda _: create_agent()).run() + from_agent_framework(create_agent()).run() if __name__ == "__main__": diff --git a/samples/python/hosted-agents/agent-framework/agents-in-workflow/requirements.txt b/samples/python/hosted-agents/agent-framework/agents-in-workflow/requirements.txt new file mode 100644 index 00000000..adac06de --- /dev/null +++ b/samples/python/hosted-agents/agent-framework/agents-in-workflow/requirements.txt @@ -0,0 +1 @@ 
+azure-ai-agentserver-agentframework==1.0.0b9 \ No newline at end of file diff --git a/samples/python/hosted-agents/agent_framework/echo-agent/Dockerfile b/samples/python/hosted-agents/agent-framework/echo-agent/Dockerfile similarity index 100% rename from samples/python/hosted-agents/agent_framework/echo-agent/Dockerfile rename to samples/python/hosted-agents/agent-framework/echo-agent/Dockerfile diff --git a/samples/python/hosted-agents/agent_framework/web-search-agent/README.md b/samples/python/hosted-agents/agent-framework/echo-agent/README.md similarity index 100% rename from samples/python/hosted-agents/agent_framework/web-search-agent/README.md rename to samples/python/hosted-agents/agent-framework/echo-agent/README.md diff --git a/samples/python/hosted-agents/agent_framework/echo-agent/agent.yaml b/samples/python/hosted-agents/agent-framework/echo-agent/agent.yaml similarity index 100% rename from samples/python/hosted-agents/agent_framework/echo-agent/agent.yaml rename to samples/python/hosted-agents/agent-framework/echo-agent/agent.yaml diff --git a/samples/python/hosted-agents/agent_framework/echo-agent/main.py b/samples/python/hosted-agents/agent-framework/echo-agent/main.py similarity index 98% rename from samples/python/hosted-agents/agent_framework/echo-agent/main.py rename to samples/python/hosted-agents/agent-framework/echo-agent/main.py index 899ae44d..8a556e83 100644 --- a/samples/python/hosted-agents/agent_framework/echo-agent/main.py +++ b/samples/python/hosted-agents/agent-framework/echo-agent/main.py @@ -152,4 +152,4 @@ def create_agent() -> EchoAgent: return agent if __name__ == "__main__": - from_agent_framework(lambda _: create_agent()).run() + from_agent_framework(create_agent()).run() diff --git a/samples/python/hosted-agents/agent_framework/web-search-agent/requirements.txt b/samples/python/hosted-agents/agent-framework/echo-agent/requirements.txt similarity index 60% rename from 
samples/python/hosted-agents/agent_framework/web-search-agent/requirements.txt rename to samples/python/hosted-agents/agent-framework/echo-agent/requirements.txt index e4e6e05f..b33c027a 100644 --- a/samples/python/hosted-agents/agent_framework/web-search-agent/requirements.txt +++ b/samples/python/hosted-agents/agent-framework/echo-agent/requirements.txt @@ -1,4 +1,4 @@ -azure-ai-agentserver-agentframework==1.0.0b8 +azure-ai-agentserver-agentframework==1.0.0b9 pytest==8.4.2 python-dotenv==1.1.1 azure-monitor-opentelemetry==1.8.1 diff --git a/samples/python/hosted-agents/agent_framework/web-search-agent/Dockerfile b/samples/python/hosted-agents/agent-framework/web-search-agent/Dockerfile similarity index 100% rename from samples/python/hosted-agents/agent_framework/web-search-agent/Dockerfile rename to samples/python/hosted-agents/agent-framework/web-search-agent/Dockerfile diff --git a/samples/python/hosted-agents/agent_framework/echo-agent/README.md b/samples/python/hosted-agents/agent-framework/web-search-agent/README.md similarity index 100% rename from samples/python/hosted-agents/agent_framework/echo-agent/README.md rename to samples/python/hosted-agents/agent-framework/web-search-agent/README.md diff --git a/samples/python/hosted-agents/agent_framework/web-search-agent/agent.yaml b/samples/python/hosted-agents/agent-framework/web-search-agent/agent.yaml similarity index 100% rename from samples/python/hosted-agents/agent_framework/web-search-agent/agent.yaml rename to samples/python/hosted-agents/agent-framework/web-search-agent/agent.yaml diff --git a/samples/python/hosted-agents/agent_framework/web-search-agent/main.py b/samples/python/hosted-agents/agent-framework/web-search-agent/main.py similarity index 96% rename from samples/python/hosted-agents/agent_framework/web-search-agent/main.py rename to samples/python/hosted-agents/agent-framework/web-search-agent/main.py index 8556f98a..eb5a8fcc 100644 --- 
a/samples/python/hosted-agents/agent_framework/web-search-agent/main.py +++ b/samples/python/hosted-agents/agent-framework/web-search-agent/main.py @@ -44,4 +44,4 @@ def create_agent() -> ChatAgent: return agent if __name__ == "__main__": - from_agent_framework(lambda _: create_agent()).run() + from_agent_framework(create_agent()).run() diff --git a/samples/python/hosted-agents/agent_framework/echo-agent/requirements.txt b/samples/python/hosted-agents/agent-framework/web-search-agent/requirements.txt similarity index 60% rename from samples/python/hosted-agents/agent_framework/echo-agent/requirements.txt rename to samples/python/hosted-agents/agent-framework/web-search-agent/requirements.txt index e4e6e05f..b33c027a 100644 --- a/samples/python/hosted-agents/agent_framework/echo-agent/requirements.txt +++ b/samples/python/hosted-agents/agent-framework/web-search-agent/requirements.txt @@ -1,4 +1,4 @@ -azure-ai-agentserver-agentframework==1.0.0b8 +azure-ai-agentserver-agentframework==1.0.0b9 pytest==8.4.2 python-dotenv==1.1.1 azure-monitor-opentelemetry==1.8.1 diff --git a/samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/README.md b/samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/README.md deleted file mode 100644 index 7a334b41..00000000 --- a/samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/README.md +++ /dev/null @@ -1,112 +0,0 @@ -**IMPORTANT!** All samples and other resources made available in this GitHub repository ("samples") are designed to assist in accelerating development of agents, solutions, and agent workflows for various scenarios. Review all provided resources and carefully test output behavior in the context of your use case. AI responses may be inaccurate and AI actions should be monitored with human oversight. 
Learn more in the transparency documents for [Agent Service](https://learn.microsoft.com/en-us/azure/ai-foundry/responsible-ai/agents/transparency-note) and [Agent Framework](https://github.com/microsoft/agent-framework/blob/main/TRANSPARENCY_FAQ.md). - -Agents, solutions, or other output you create may be subject to legal and regulatory requirements, may require licenses, or may not be suitable for all industries, scenarios, or use cases. By using any sample, you are acknowledging that any output created using those samples are solely your responsibility, and that you will comply with all applicable laws, regulations, and relevant safety standards, terms of service, and codes of conduct. - -Third-party samples contained in this folder are subject to their own designated terms, and they have not been tested or verified by Microsoft or its affiliates. - -Microsoft has no responsibility to you or others with respect to any of these samples or any resulting output. - -# What this sample demonstrates - -This sample demonstrates how to use a Hosted Model Context Protocol (MCP) server with a -[Microsoft Agent Framework](https://learn.microsoft.com/en-us/agent-framework/overview/agent-framework-overview#ai-agents) AI agent and -host it using [Azure AI AgentServer SDK](https://learn.microsoft.com/en-us/dotnet/api/overview/azure/ai.agentserver.agentframework-readme) and -deploy it to Microsoft Foundry using the Azure Developer CLI [ai agent](https://aka.ms/azdaiagent/docs) extension. - -## How It Works - -### MCP Integration - -This sample uses a Hosted Model Context Protocol (MCP) server to provide external tools to the agent. The MCP workflow operates as follows: - -1. The agent is configured with a `HostedMCPTool` pointing to `https://learn.microsoft.com/api/mcp` -2. When you ask questions, the Azure OpenAI Responses service automatically invokes the MCP tool to search Microsoft Learn documentation -3. 
The agent returns answers based on the retrieved Microsoft Learn content - -**Note**: In this configuration, the Azure OpenAI Responses service manages tool invocation directly - the Agent Framework does not handle MCP tool calls. - -### Agent Hosting - -The agent is hosted using the [Azure AI AgentServer SDK](https://learn.microsoft.com/en-us/dotnet/api/overview/azure/ai.agentserver.agentframework-readme), -which provisions a REST API endpoint compatible with the OpenAI Responses protocol. This allows interaction with the agent using OpenAI Responses compatible clients. - -### Agent Deployment - -The hosted agent can be seamlessly deployed to Microsoft Foundry using the Azure Developer CLI [ai agent](https://aka.ms/azdaiagent/docs) extension. -The extension builds a container image for the agent, deploys it to Azure Container Instances (ACI), and creates a hosted agent version and deployment on Foundry Agent Service. - -## Running the Agent Locally - -### Prerequisites - -Before running this sample, ensure you have: - -1. An Azure OpenAI endpoint configured -2. A deployment of a chat model (e.g., `gpt-4o-mini`) -3. Azure CLI installed and authenticated (`az login`) -4. Python 3.10+ installed - -### Environment Variables - -Set the following environment variables: - -- `AZURE_OPENAI_ENDPOINT` - Your Azure OpenAI endpoint URL (required) -- `AZURE_OPENAI_CHAT_DEPLOYMENT_NAME` - The deployment name for your chat model (required) - -```powershell -# Replace with your Azure OpenAI endpoint -$env:AZURE_OPENAI_ENDPOINT="https://your-openai-resource.openai.azure.com/" - -# Optional, defaults to gpt-4o-mini -$env:AZURE_OPENAI_CHAT_DEPLOYMENT_NAME="gpt-4o-mini" -``` - -### Installing Dependencies - -Install the required Python dependencies using pip: - -```powershell -pip install -r requirements.txt -``` - -> It is always a good practice to use a virtual environment for Python projects. 
- -### Running the Sample - -To run the agent, execute the following command in your terminal: - -```powershell -python main.py -``` - -This will start the hosted agent locally on `http://localhost:8088/`. - -### Interacting with the Agent - -You can interact with the agent using: - -```powershell -curl -sS -H "Content-Type: application/json" -X POST http://localhost:8088/responses -d '{"input": "How to create an Azure storage account using az cli?","stream":false}' -``` - -### Deploying the Agent to Microsoft Foundry - -To deploy your agent to Microsoft Foundry, follow the comprehensive deployment guide at https://aka.ms/azdaiagent/docs - -## Troubleshooting - -### Images built on Apple Silicon or other ARM64 machines do not work on our service - -We **recommend using `azd` cloud build**, which always builds images with the correct architecture. - -If you choose to **build locally**, and your machine is **not `linux/amd64`** (for example, an Apple Silicon Mac), the image will **not be compatible with our service**, causing runtime failures. - -**Fix for local builds** - -Use this command to build the image locally: - -```shell -docker build --platform=linux/amd64 -t image . -``` - -This forces the image to be built for the required `amd64` architecture. diff --git a/samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/main.py b/samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/main.py deleted file mode 100644 index 4df3362c..00000000 --- a/samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/main.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
- -from agent_framework import HostedMCPTool -from agent_framework.azure import AzureOpenAIChatClient -from azure.ai.agentserver.agentframework import from_agent_framework # pyright: ignore[reportUnknownVariableType] -from azure.identity import DefaultAzureCredential - - -def create_agent(): - # Create an Agent using the Azure OpenAI Chat Client with a MCP Tool that connects to Microsoft Learn MCP - agent = AzureOpenAIChatClient(credential=DefaultAzureCredential()).create_agent( - name="DocsAgent", - instructions="You are a helpful assistant that can help with microsoft documentation questions.", - tools=HostedMCPTool( - name="Microsoft Learn MCP", - url="https://learn.microsoft.com/api/mcp", - ), - ) - return agent - - -def main(): - # Run the agent as a hosted agent - from_agent_framework(lambda _: create_agent()).run() - - -if __name__ == "__main__": - main() diff --git a/samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/requirements.txt b/samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/requirements.txt deleted file mode 100644 index dc8ac271..00000000 --- a/samples/python/hosted-agents/agent_framework/agent_with_hosted_mcp/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -azure-ai-agentserver-agentframework==1.0.0b8 \ No newline at end of file diff --git a/samples/python/hosted-agents/agent_framework/agent_with_text_search_rag/requirements.txt b/samples/python/hosted-agents/agent_framework/agent_with_text_search_rag/requirements.txt deleted file mode 100644 index dc8ac271..00000000 --- a/samples/python/hosted-agents/agent_framework/agent_with_text_search_rag/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -azure-ai-agentserver-agentframework==1.0.0b8 \ No newline at end of file diff --git a/samples/python/hosted-agents/agent_framework/agents_in_workflow/requirements.txt b/samples/python/hosted-agents/agent_framework/agents_in_workflow/requirements.txt deleted file mode 100644 index dc8ac271..00000000 --- 
a/samples/python/hosted-agents/agent_framework/agents_in_workflow/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -azure-ai-agentserver-agentframework==1.0.0b8 \ No newline at end of file diff --git a/samples/python/hosted-agents/custom/system-utility-agent/requirements.txt b/samples/python/hosted-agents/custom/system-utility-agent/requirements.txt index 38bd356b..591d558e 100644 --- a/samples/python/hosted-agents/custom/system-utility-agent/requirements.txt +++ b/samples/python/hosted-agents/custom/system-utility-agent/requirements.txt @@ -1,6 +1,6 @@ azure-identity==1.25.1 azure-ai-projects==2.0.0b2 -azure-ai-agentserver-core==1.0.0b8 +azure-ai-agentserver-core==1.0.0b9 openai==2.14.0 python-dotenv==1.0.0 psutil==5.9.4 diff --git a/samples/python/hosted-agents/langgraph/calculator-agent/agent.yaml b/samples/python/hosted-agents/langgraph/calculator-agent/agent.yaml index 4d58b442..f63e6a99 100644 --- a/samples/python/hosted-agents/langgraph/calculator-agent/agent.yaml +++ b/samples/python/hosted-agents/langgraph/calculator-agent/agent.yaml @@ -21,8 +21,6 @@ template: value: ${AZURE_OPENAI_ENDPOINT} - name: OPENAI_API_VERSION value: 2025-03-01-preview - - name: APPLICATIONINSIGHTS_CONNECTION_STRING - value: ${APPLICATIONINSIGHTS_CONNECTION_STRING} - name: AZURE_AI_MODEL_DEPLOYMENT_NAME value: "{{chat}}" resources: diff --git a/samples/python/hosted-agents/langgraph/calculator-agent/main.py b/samples/python/hosted-agents/langgraph/calculator-agent/main.py index eb3d235c..140788ac 100644 --- a/samples/python/hosted-agents/langgraph/calculator-agent/main.py +++ b/samples/python/hosted-agents/langgraph/calculator-agent/main.py @@ -1,7 +1,6 @@ import os import logging -from dotenv import load_dotenv from langchain.chat_models import init_chat_model from langchain_core.messages import SystemMessage, ToolMessage from langchain_core.tools import tool @@ -15,15 +14,9 @@ from azure.identity import DefaultAzureCredential, get_bearer_token_provider from 
azure.ai.agentserver.langgraph import from_langgraph -from azure.monitor.opentelemetry import configure_azure_monitor logger = logging.getLogger(__name__) -load_dotenv() - -if os.getenv("APPLICATIONINSIGHTS_CONNECTION_STRING"): - configure_azure_monitor(enable_live_metrics=True, logger_name="__main__") - # Define tools @tool diff --git a/samples/python/hosted-agents/langgraph/calculator-agent/requirements.txt b/samples/python/hosted-agents/langgraph/calculator-agent/requirements.txt index b23a4861..a95dd751 100644 --- a/samples/python/hosted-agents/langgraph/calculator-agent/requirements.txt +++ b/samples/python/hosted-agents/langgraph/calculator-agent/requirements.txt @@ -1,5 +1 @@ -azure-ai-agentserver-langgraph==1.0.0b8 - -pytest==8.4.2 -python-dotenv==1.1.1 -azure-monitor-opentelemetry==1.8.1 +azure-ai-agentserver-langgraph==1.0.0b9 diff --git a/samples/python/hosted-agents/langgraph/human-in-the-loop/Dockerfile b/samples/python/hosted-agents/langgraph/human-in-the-loop/Dockerfile new file mode 100644 index 00000000..0cc939d9 --- /dev/null +++ b/samples/python/hosted-agents/langgraph/human-in-the-loop/Dockerfile @@ -0,0 +1,16 @@ +FROM python:3.12-slim + +WORKDIR /app + +COPY . user_agent/ +WORKDIR /app/user_agent + +RUN if [ -f requirements.txt ]; then \ + pip install -r requirements.txt; \ + else \ + echo "No requirements.txt found"; \ + fi + +EXPOSE 8088 + +CMD ["python", "main.py"] diff --git a/samples/python/hosted-agents/langgraph/human-in-the-loop/README.md b/samples/python/hosted-agents/langgraph/human-in-the-loop/README.md new file mode 100644 index 00000000..f50b13e4 --- /dev/null +++ b/samples/python/hosted-agents/langgraph/human-in-the-loop/README.md @@ -0,0 +1,212 @@ +**IMPORTANT!** All samples and other resources made available in this GitHub repository ("samples") are designed to assist in accelerating development of agents, solutions, and agent workflows for various scenarios. 
Review all provided resources and carefully test output behavior in the context of your use case. AI responses may be inaccurate and AI actions should be monitored with human oversight. Learn more in the transparency documents for [Agent Service](https://learn.microsoft.com/en-us/azure/ai-foundry/responsible-ai/agents/transparency-note) and [LangGraph](https://docs.langchain.com/oss/python/langgraph/workflows-agents). + +Agents, solutions, or other output you create may be subject to legal and regulatory requirements, may require licenses, or may not be suitable for all industries, scenarios, or use cases. By using any sample, you are acknowledging that any output created using those samples are solely your responsibility, and that you will comply with all applicable laws, regulations, and relevant safety standards, terms of service, and codes of conduct. + +Third-party samples contained in this folder are subject to their own designated terms, and they have not been tested or verified by Microsoft or its affiliates. + +Microsoft has no responsibility to you or others with respect to any of these samples or any resulting output. + +# What this sample demonstrates + +This sample demonstrates how to build a LangGraph agent with **human-in-the-loop capabilities** that can interrupt execution to ask for human input when needed, host it using the +[Azure AI AgentServer SDK](https://pypi.org/project/azure-ai-agentserver-langgraph/), +and deploy it to Microsoft Foundry using the Azure Developer CLI [ai agent](https://learn.microsoft.com/en-us/azure/ai-foundry/agents/concepts/hosted-agents?view=foundry&tabs=cli#create-a-hosted-agent) extension. + +## How It Works + +### Human-in-the-Loop Integration + +In [main.py](main.py), the agent is created using LangGraph's `StateGraph` and includes a custom `AskHuman` tool that uses the `interrupt()` function to pause execution and wait for human feedback. 
The key components are: + +- **LangGraph Agent**: An AI agent that can intelligently decide when to ask humans for input during task execution +- **Human Interrupt Mechanism**: Uses LangGraph's `interrupt()` function to pause execution and wait for human feedback +- **Conditional Routing**: The agent determines whether to execute tools, ask for human input, or complete the task + +### Agent Hosting + +The agent is hosted using the [Azure AI AgentServer SDK](https://pypi.org/project/azure-ai-agentserver-langgraph/), +which provisions a REST API endpoint compatible with the OpenAI Responses protocol. This allows interaction with the agent using OpenAI Responses compatible clients. + +### Agent Deployment + +The hosted agent can be seamlessly deployed to Microsoft Foundry using the Azure Developer CLI [ai agent](https://learn.microsoft.com/en-us/azure/ai-foundry/agents/concepts/hosted-agents?view=foundry&tabs=cli#create-a-hosted-agent) extension. +The extension builds a container image into Azure Container Registry (ACR), and creates a hosted agent version and deployment on Microsoft Foundry. + +## Running the Agent Locally + +### Prerequisites + +Before running this sample, ensure you have: + +1. **Azure OpenAI Service** + - Endpoint configured + - Chat model deployed (e.g., `gpt-4o-mini` or `gpt-4`) + - Note your endpoint URL and deployment name + +2. **Azure CLI** + - Installed and authenticated + - Run `az login` and verify with `az account show` + +3. 
**Python 3.10 or higher** + - Verify your version: `python --version` + - If you have Python 3.9 or older, install a newer version: + - Windows: `winget install Python.Python.3.12` + - macOS: `brew install python@3.12` + - Linux: Use your package manager + +### Environment Variables + +Set the following environment variables: + +- `AZURE_OPENAI_ENDPOINT` - Your Azure OpenAI endpoint URL (required) +- `AZURE_AI_MODEL_DEPLOYMENT_NAME` - The deployment name for your chat model (required) + +This sample loads environment variables from a local `.env` file if present. + +```powershell +# Replace with your actual values +$env:AZURE_OPENAI_ENDPOINT="https://your-openai-resource.openai.azure.com/" +$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" +``` + +### Installing Dependencies + +Install the required Python dependencies using pip: + +```powershell +pip install -r requirements.txt +``` + +### Running the Sample + +To run the agent, execute the following command in your terminal: + +```powershell +python main.py +``` + +This will start the hosted agent locally on `http://localhost:8088/`. + +### Interacting with the Agent + +#### Initial Request (Triggering Human Input) + +Send a request that will cause the agent to ask for human input: + +**PowerShell (Windows):** +```powershell +$body = @{ + input = "Ask the user where they are, then look up the weather there." + stream = $false +} | ConvertTo-Json + +Invoke-RestMethod -Uri http://localhost:8088/responses -Method Post -Body $body -ContentType "application/json" +``` + +**Bash/curl (Linux/macOS):** +```bash +curl -sS -H "Content-Type: application/json" -X POST http://localhost:8088/responses \ + -d '{"input": "Ask the user where they are, then look up the weather there.", "stream": false}' +``` + +**Response Structure:** + +The agent will respond with an interrupt request: + +```json +{ + "conversation": { + "id": "conv_abc123..." 
+ }, + "output": [ + { + "type": "function_call", + "name": "__hosted_agent_adapter_interrupt__", + "call_id": "call_xyz789...", + "arguments": "{\"question\": \"Where are you located?\"}" + } + ] +} +``` + +#### Providing Human Feedback + +Resume the conversation by providing the human's response: + +**PowerShell (Windows):** +```powershell +$body = @{ + input = @( + @{ + type = "function_call_output" + call_id = "call_xyz789..." + output = '{"resume": "San Francisco"}' + } + ) + stream = $false + conversation = @{ + id = "conv_abc123..." + } +} | ConvertTo-Json -Depth 4 + +Invoke-RestMethod -Uri http://localhost:8088/responses -Method Post -Body $body -ContentType "application/json" +``` + +**Bash/curl (Linux/macOS):** +```bash +curl -sS -H "Content-Type: application/json" -X POST http://localhost:8088/responses \ + -d '{ + "input": [ + { + "type": "function_call_output", + "call_id": "call_xyz789...", + "output": "{\"resume\": \"San Francisco\"}" + } + ], + "stream": false, + "conversation": { + "id": "conv_abc123..." + } + }' +``` + +**Final Response:** + +The agent will continue execution and provide the final result: + +```json +{ + "conversation": { + "id": "conv_abc123..." + }, + "output": [ + { + "type": "message", + "role": "assistant", + "content": "I looked up the weather in San Francisco. Result: It's sunny in San Francisco." + } + ] +} +``` + +### Deploying the Agent to Microsoft Foundry + +To deploy your agent to Microsoft Foundry, follow the comprehensive deployment guide at https://learn.microsoft.com/en-us/azure/ai-foundry/agents/concepts/hosted-agents?view=foundry&tabs=cli + +## Troubleshooting + +### Images built on Apple Silicon or other ARM64 machines do not work on our service + +We **recommend using `azd` cloud build**, which always builds images with the correct architecture. 
+ +If you choose to **build locally**, and your machine is **not `linux/amd64`** (for example, an Apple Silicon Mac), the image will **not be compatible with our service**, causing runtime failures. + +**Fix for local builds** + +Use this command to build the image locally: + +```shell +docker build --platform=linux/amd64 -t image . +``` + +This forces the image to be built for the required `amd64` architecture. diff --git a/samples/python/hosted-agents/langgraph/human-in-the-loop/agent.yaml b/samples/python/hosted-agents/langgraph/human-in-the-loop/agent.yaml new file mode 100644 index 00000000..25135bb5 --- /dev/null +++ b/samples/python/hosted-agents/langgraph/human-in-the-loop/agent.yaml @@ -0,0 +1,29 @@ +name: HumanInTheLoopAgent +description: This LangGraph agent demonstrates human-in-the-loop capabilities. +metadata: + example: + - role: user + content: |- + Ask the user where they are, then look up the weather there. + tags: + - example + - learning + authors: + - junanchen +template: + name: HumanInTheLoopAgentLG + kind: hosted + protocols: + - protocol: responses + version: v1 + environment_variables: + - name: AZURE_OPENAI_ENDPOINT + value: ${AZURE_OPENAI_ENDPOINT} + - name: OPENAI_API_VERSION + value: 2025-03-01-preview + - name: AZURE_AI_MODEL_DEPLOYMENT_NAME + value: "{{chat}}" +resources: + - kind: model + id: gpt-4o-mini + name: chat diff --git a/samples/python/hosted-agents/langgraph/human-in-the-loop/main.py b/samples/python/hosted-agents/langgraph/human-in-the-loop/main.py new file mode 100644 index 00000000..ddbea4cc --- /dev/null +++ b/samples/python/hosted-agents/langgraph/human-in-the-loop/main.py @@ -0,0 +1,191 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +""" +Human-in-the-Loop Agent Example + +This sample demonstrates how to create a LangGraph agent that can interrupt +execution to ask for human input when needed. The agent uses Azure OpenAI +and includes a custom tool for asking human questions. +""" + +import os + +from pydantic import BaseModel + +from azure.identity import DefaultAzureCredential, get_bearer_token_provider +from langchain.chat_models import init_chat_model +from langchain_core.messages import ToolMessage +from langchain_core.tools import tool +from langgraph.checkpoint.memory import InMemorySaver +from langgraph.graph import END, START, MessagesState, StateGraph +from langgraph.prebuilt import ToolNode +from langgraph.types import interrupt + +from azure.ai.agentserver.langgraph import from_langgraph + + +# ============================================================================= +# Model Initialization +# ============================================================================= + +def initialize_llm(): + """Initialize the language model with Azure OpenAI credentials.""" + deployment_name = os.getenv("AZURE_AI_MODEL_DEPLOYMENT_NAME", "gpt-4o-mini") + return init_chat_model( + f"azure_openai:{deployment_name}", + azure_ad_token_provider=get_bearer_token_provider( + DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default" + ) + ) + + +llm = initialize_llm() + +# ============================================================================= +# Tools and Models +# ============================================================================= + +@tool +def search(query: str) -> str: + """ + Call to search the web for information. + + Args: + query: The search query string + + Returns: + Search results as a string + """ + # This is a placeholder for the actual implementation + return f"I looked up: {query}. Result: It's sunny in San Francisco." 
+ + +class AskHuman(BaseModel): + """Schema for asking the human a question.""" + question: str + + +# Initialize tools and bind to model +tools = [search] +tool_node = ToolNode(tools) +model = llm.bind_tools(tools + [AskHuman]) + + +# ============================================================================= +# Graph Nodes +# ============================================================================= + +def call_model(state: MessagesState) -> dict: + """ + Call the language model with the current conversation state. + + Args: + state: The current messages state + + Returns: + Dictionary with the model's response message + """ + messages = state["messages"] + response = model.invoke(messages) + return {"messages": [response]} + + +def ask_human(state: MessagesState) -> dict: + """ + Interrupt execution to ask the human for input. + + Args: + state: The current messages state + + Returns: + Dictionary with the human's response as a tool message + """ + last_message = state["messages"][-1] + tool_call_id = last_message.tool_calls[0]["id"] + ask = AskHuman.model_validate(last_message.tool_calls[0]["args"]) + + # Interrupt and wait for human input + location = interrupt(ask.question) + + tool_message = ToolMessage(tool_call_id=tool_call_id, content=location) + return {"messages": [tool_message]} + + +# ============================================================================= +# Graph Logic +# ============================================================================= + +def should_continue(state: MessagesState) -> str: + """ + Determine the next step in the graph based on the last message. 
+ + Args: + state: The current messages state + + Returns: + The name of the next node to execute, or END to finish + """ + messages = state["messages"] + last_message = messages[-1] + + # If there's no function call, we're done + if not last_message.tool_calls: + return END + + # If asking for human input, route to ask_human node + if last_message.tool_calls[0]["name"] == "AskHuman": + return "ask_human" + + # Otherwise, execute the tool call + return "action" + + +# ============================================================================= +# Graph Construction +# ============================================================================= + +def build_graph() -> StateGraph: + """ + Build and compile the LangGraph workflow. + + Returns: + Compiled StateGraph with checkpointing enabled + """ + workflow = StateGraph(MessagesState) + + # Add nodes + workflow.add_node("agent", call_model) + workflow.add_node("action", tool_node) + workflow.add_node("ask_human", ask_human) + + # Set entry point + workflow.add_edge(START, "agent") + + # Add conditional routing from agent + workflow.add_conditional_edges( + "agent", + should_continue, + path_map=["ask_human", "action", END], + ) + + # Add edges back to agent + workflow.add_edge("action", "agent") + workflow.add_edge("ask_human", "agent") + + # Compile with memory checkpointer + memory = InMemorySaver() + return workflow.compile(checkpointer=memory) + + +app = build_graph() + + +# ============================================================================= +# Main Entry Point +# ============================================================================= + +if __name__ == "__main__": + adapter = from_langgraph(app) + adapter.run() + diff --git a/samples/python/hosted-agents/langgraph/human-in-the-loop/requirements.txt b/samples/python/hosted-agents/langgraph/human-in-the-loop/requirements.txt new file mode 100644 index 00000000..a95dd751 --- /dev/null +++ 
b/samples/python/hosted-agents/langgraph/human-in-the-loop/requirements.txt @@ -0,0 +1 @@ +azure-ai-agentserver-langgraph==1.0.0b9 diff --git a/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/Dockerfile b/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/Dockerfile new file mode 100644 index 00000000..0cc939d9 --- /dev/null +++ b/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/Dockerfile @@ -0,0 +1,16 @@ +FROM python:3.12-slim + +WORKDIR /app + +COPY . user_agent/ +WORKDIR /app/user_agent + +RUN if [ -f requirements.txt ]; then \ + pip install -r requirements.txt; \ + else \ + echo "No requirements.txt found"; \ + fi + +EXPOSE 8088 + +CMD ["python", "main.py"] diff --git a/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/README.md b/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/README.md new file mode 100644 index 00000000..cc3898d9 --- /dev/null +++ b/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/README.md @@ -0,0 +1,146 @@ +**IMPORTANT!** All samples and other resources made available in this GitHub repository ("samples") are designed to assist in accelerating development of agents, solutions, and agent workflows for various scenarios. Review all provided resources and carefully test output behavior in the context of your use case. AI responses may be inaccurate and AI actions should be monitored with human oversight. Learn more in the transparency documents for [Agent Service](https://learn.microsoft.com/en-us/azure/ai-foundry/responsible-ai/agents/transparency-note) and [LangGraph](https://docs.langchain.com/oss/python/langgraph/workflows-agents). + +Agents, solutions, or other output you create may be subject to legal and regulatory requirements, may require licenses, or may not be suitable for all industries, scenarios, or use cases. 
By using any sample, you are acknowledging that any output created using those samples are solely your responsibility, and that you will comply with all applicable laws, regulations, and relevant safety standards, terms of service, and codes of conduct. + +Third-party samples contained in this folder are subject to their own designated terms, and they have not been tested or verified by Microsoft or its affiliates. + +Microsoft has no responsibility to you or others with respect to any of these samples or any resulting output. + +# What this sample demonstrates + +This sample demonstrates how to build a LangGraph react agent that can use **Foundry tools** +(for example, code interpreter and MCP tools), host it using the +[Azure AI AgentServer SDK](https://pypi.org/project/azure-ai-agentserver-langgraph/), +and deploy it to Microsoft Foundry using the Azure Developer CLI [ai agent](https://learn.microsoft.com/en-us/azure/ai-foundry/agents/concepts/hosted-agents?view=foundry&tabs=cli#create-a-hosted-agent) extension. + +## How It Works + +### Foundry tools integration + +In [main.py](main.py), the agent is created using `langchain.agents.create_agent` and is configured with +`use_foundry_tools`. The middleware enables tool usage via Foundry-supported tool types: + +- `code_interpreter` (foundry configured tools) +- `mcp` (connected mcp tool, configured with a Foundry project connection id) + +### Agent Hosting + +The agent is hosted using the [Azure AI AgentServer SDK](https://pypi.org/project/azure-ai-agentserver-langgraph/), +which provisions a REST API endpoint compatible with the OpenAI Responses protocol. This allows interaction with the agent using OpenAI Responses compatible clients. + +### Agent Deployment + +The hosted agent can be seamlessly deployed to Microsoft Foundry using the Azure Developer CLI [ai agent](https://learn.microsoft.com/en-us/azure/ai-foundry/agents/concepts/hosted-agents?view=foundry&tabs=cli#create-a-hosted-agent) extension. 
+The extension builds a container image into Azure Container Registry (ACR), and creates a hosted agent version and deployment on Microsoft Foundry. + +## Running the Agent Locally + +### Prerequisites + +Before running this sample, ensure you have: + +1. **Azure OpenAI Service** + - Endpoint configured + - Chat model deployed (e.g., `gpt-4o-mini` or `gpt-4`) + - Note your endpoint URL and deployment name + +2. **Azure AI Foundry Project** + - Project created in [Azure AI Foundry](https://learn.microsoft.com/en-us/azure/ai-foundry/what-is-foundry?view=foundry#microsoft-foundry-portals) + - Add 'Microsoft Learn' MCP from foundry tool catalog. + ![microsoft_learn](microsoft_learn.png) + +3. **Azure CLI** + - Installed and authenticated + - Run `az login` and verify with `az account show` + +4. **Python 3.10 or higher** + - Verify your version: `python --version` + - If you have Python 3.9 or older, install a newer version: + - Windows: `winget install Python.Python.3.12` + - macOS: `brew install python@3.12` + - Linux: Use your package manager + +### Environment Variables + +Set the following environment variables: + +- `AZURE_OPENAI_ENDPOINT` - Your Azure OpenAI endpoint URL (required) +- `AZURE_AI_MODEL_DEPLOYMENT_NAME` - The deployment name for your chat model (required) +- `AZURE_AI_PROJECT_ENDPOINT` - Your Azure AI Foundry project endpoint (required) +- `AZURE_AI_PROJECT_TOOL_CONNECTION_ID` - Foundry project connection id used to configure the `mcp` tool (required) + +This sample loads environment variables from a local `.env` file if present. + +**Finding your tool connection id** (portal names may vary): +1. Go to [Azure AI Foundry portal](https://ai.azure.com) +2. Navigate to your project -> Build -> Tools +3. Find your connected MCP tool (e.g., "Microsoft Learn") +4. 
Copy your tool's name and set it as `AZURE_AI_PROJECT_TOOL_CONNECTION_ID` + +```powershell +# Replace with your actual values +$env:AZURE_OPENAI_ENDPOINT="https://your-openai-resource.openai.azure.com/" +$env:AZURE_AI_MODEL_DEPLOYMENT_NAME="gpt-4o-mini" +$env:AZURE_AI_PROJECT_ENDPOINT="https://{resource}.services.ai.azure.com/api/projects/{project-name}" +$env:AZURE_AI_PROJECT_TOOL_CONNECTION_ID="" +``` + +### Installing Dependencies + +Install the required Python dependencies using pip: + +```powershell +pip install -r requirements.txt +``` + +### Running the Sample + +To run the agent, execute the following command in your terminal: + +```powershell +python main.py +``` + +This will start the hosted agent locally on `http://localhost:8088/`. + +### Interacting with the Agent + +**PowerShell (Windows):** +```powershell +$body = @{ + input = "use the python tool to calculate what is 4 * 3.82. and then find its square root and then find the square root of that result" + stream = $false +} | ConvertTo-Json + +Invoke-RestMethod -Uri http://localhost:8088/responses -Method Post -Body $body -ContentType "application/json" +``` + +**Bash/curl (Linux/macOS):** +```bash +curl -sS -H "Content-Type: application/json" -X POST http://localhost:8088/responses \ + -d '{"input": "use the python tool to calculate what is 4 * 3.82. and then find its square root and then find the square root of that result","stream":false}' +``` + +The agent may use Foundry tools (for example `web_search_preview` and/or `mcp`) as needed to answer. + +### Deploying the Agent to Microsoft Foundry + +To deploy your agent to Microsoft Foundry, follow the comprehensive deployment guide at https://learn.microsoft.com/en-us/azure/ai-foundry/agents/concepts/hosted-agents?view=foundry&tabs=cli + +## Troubleshooting + +### Images built on Apple Silicon or other ARM64 machines do not work on our service + +We **recommend using `azd` cloud build**, which always builds images with the correct architecture. 
+ +If you choose to **build locally**, and your machine is **not `linux/amd64`** (for example, an Apple Silicon Mac), the image will **not be compatible with our service**, causing runtime failures. + +**Fix for local builds** + +Use this command to build the image locally: + +```shell +docker build --platform=linux/amd64 -t image . +``` + +This forces the image to be built for the required `amd64` architecture. \ No newline at end of file diff --git a/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/agent.yaml b/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/agent.yaml new file mode 100644 index 00000000..425dcf5b --- /dev/null +++ b/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/agent.yaml @@ -0,0 +1,32 @@ +name: FoundryToolsReactAgent +description: This LangGraph agent uses Foundry tools to perform tasks such as interpreting python code. +metadata: + example: + - role: user + content: |- + use the python tool to calculate what is 4 * 3.82. 
+ and then find its square root and then find the square root of that result + tags: + - example + - learning + authors: + - junanchen +template: + name: FoundryToolsReactAgentLG + kind: hosted + protocols: + - protocol: responses + version: v1 + environment_variables: + - name: AZURE_OPENAI_ENDPOINT + value: ${AZURE_OPENAI_ENDPOINT} + - name: OPENAI_API_VERSION + value: 2025-03-01-preview + - name: AZURE_AI_MODEL_DEPLOYMENT_NAME + value: "{{chat}}" + - name: AZURE_AI_PROJECT_TOOL_CONNECTION_ID + value: "" +resources: + - kind: model + id: gpt-4o-mini + name: chat diff --git a/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/main.py b/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/main.py new file mode 100644 index 00000000..8493d32b --- /dev/null +++ b/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/main.py @@ -0,0 +1,35 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +import os + +from azure.ai.agentserver.langgraph import from_langgraph +from azure.ai.agentserver.langgraph.tools import use_foundry_tools +from azure.identity import DefaultAzureCredential, get_bearer_token_provider +from langchain.agents import create_agent +from langchain.chat_models import init_chat_model +from langgraph.checkpoint.memory import MemorySaver + +deployment_name = os.getenv("AZURE_AI_MODEL_DEPLOYMENT_NAME", "gpt-4o-mini") +model = init_chat_model( + f"azure_openai:{deployment_name}", + azure_ad_token_provider=get_bearer_token_provider( + DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default" + ) +) + +foundry_tools = [ + { + # test prompt: + # use the python tool to calculate what is 4 * 3.82. 
and then find its square root and then find the square root of that result + "type": "code_interpreter" + } +] +if project_tool_connection_id := os.environ.get("AZURE_AI_PROJECT_TOOL_CONNECTION_ID"): + foundry_tools.append({"type": "mcp", "project_connection_id": project_tool_connection_id}) + +agent = create_agent(model, checkpointer=MemorySaver(), middleware=[use_foundry_tools(foundry_tools)]) + +if __name__ == "__main__": + # host the langgraph agent + from_langgraph(agent).run() diff --git a/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/microsoft_learn.png b/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/microsoft_learn.png new file mode 100644 index 00000000..0a8ebaaf Binary files /dev/null and b/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/microsoft_learn.png differ diff --git a/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/requirements.txt b/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/requirements.txt new file mode 100644 index 00000000..dd1df162 --- /dev/null +++ b/samples/python/hosted-agents/langgraph/react-agent-with-foundry-tools/requirements.txt @@ -0,0 +1 @@ +azure-ai-agentserver-langgraph==1.0.0b9 \ No newline at end of file