From aa06e59f3342628e1a3b48f4f80eb027d9a7d1dc Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 18 Aug 2025 16:12:20 +1000 Subject: [PATCH 01/19] Added Prompt support --- tools/Mcp/src/CodegenServer.ts | 60 +++++---- tools/Mcp/src/services/promptsService.ts | 119 ++++++++++++++++++ tools/Mcp/src/services/toolsService.ts | 23 +--- .../specs/prompts/partner-module-workflow.md | 98 +++++++++++++++ tools/Mcp/src/specs/responses.json | 10 ++ tools/Mcp/src/specs/specs.json | 17 +++ tools/Mcp/src/types.ts | 10 +- 7 files changed, 289 insertions(+), 48 deletions(-) create mode 100644 tools/Mcp/src/specs/prompts/partner-module-workflow.md diff --git a/tools/Mcp/src/CodegenServer.ts b/tools/Mcp/src/CodegenServer.ts index 233d7b383473..cfc7f29a142f 100644 --- a/tools/Mcp/src/CodegenServer.ts +++ b/tools/Mcp/src/CodegenServer.ts @@ -1,13 +1,14 @@ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; import { z } from "zod"; -import { responseSchema, toolParameterSchema, toolSchema } from "./types.js"; +import { responseSchema, toolParameterSchema, toolSchema, promptSchema } from "./types.js"; import { ToolsService } from "./services/toolsService.js"; +import { PromptsService } from "./services/promptsService.js"; import { readFileSync } from "fs"; import path from "path"; import { fileURLToPath } from "url"; import { RequestOptions } from "https"; -import { ElicitRequest, ElicitResult } from "@modelcontextprotocol/sdk/types.js"; +import { /*ElicitRequest, ElicitResult*/ } from "@modelcontextprotocol/sdk/types.js"; // Elicit types not available in current sdk version const __dirname = path.dirname(fileURLToPath(import.meta.url)); const srcPath = path.resolve(__dirname, "..", "src"); @@ -55,13 +56,13 @@ export class CodegenServer { } // server elicitation request - public elicitInput( - params: ElicitRequest["params"], - options?: RequestOptions - ): Promise { - //TODO: add log - return 
this._mcp.server.elicitInput(params, options); - } + // Placeholder for future elicitInput when SDK exposes it + // public elicitInput( + // params: ElicitRequest["params"], + // options?: RequestOptions + // ): Promise { + // return this._mcp.server.elicitInput(params, options); + // } public static getInstance(): CodegenServer { if (!CodegenServer._instance) { @@ -91,28 +92,33 @@ export class CodegenServer { } initPrompts() { - this._mcp.prompt( - "create-greeting", - "Generate a customized greeting message", - { name: z.string().describe("Name of the person to greet"), style: z.string().describe("The style of greeting, such a formal, excited, or casual. If not specified casual will be used")}, - ({ name, style = "casual" }: { name: string, style?: string }) => { - return { - messages: [ - { - role: "user", - content: { - type: "text", - text: `Please generate a greeting in ${style} style to ${name}.`, - }, - }, - ], - }; - }); + const promptsService = PromptsService.getInstance().setServer(this); + const promptsSchemas = (specs.prompts || []) as promptSchema[]; + for (const schema of promptsSchemas) { + const parameter = promptsService.createPromptParametersFromSchema(schema.parameters); + const callback = promptsService.getPrompts(schema.callbackName, this._responses.get(schema.name)); + this._mcp.prompt( + schema.name, + schema.description, + parameter, + (args: any) => callback(args) + ); + } } initResponses() { (responses as responseSchema[])?.forEach((response: responseSchema) => { - this._responses.set(response.name, response.text); + let text = response.text; + if (text.startsWith("@file:")) { + const relPath = text.replace("@file:", ""); + const absPath = path.join(srcPath, "specs", relPath); + try { + text = readFileSync(absPath, "utf-8"); + } catch (e) { + console.error(`Failed to load prompt file ${absPath}:`, e); + } + } + this._responses.set(response.name, text); }); } } diff --git a/tools/Mcp/src/services/promptsService.ts 
b/tools/Mcp/src/services/promptsService.ts index e69de29bb2d1..bf638bf59fe3 100644 --- a/tools/Mcp/src/services/promptsService.ts +++ b/tools/Mcp/src/services/promptsService.ts @@ -0,0 +1,119 @@ +import { z, ZodRawShape } from "zod"; +import { promptSchema, toolParameterSchema } from "../types.js"; +import { CodegenServer } from "../CodegenServer.js"; + + +export class PromptsService { + private static _instance: PromptsService; + private _server: CodegenServer | null = null; + private constructor() {} + + static getInstance(): PromptsService { + if (!PromptsService._instance) { + PromptsService._instance = new PromptsService(); + } + return PromptsService._instance; + } + + setServer(server: CodegenServer): PromptsService { + this._server = server; + return this; + } + + getPrompts(name: string, responseTemplate: string | undefined) { + let func; + switch (name) { + case "createGreetingPrompt": + func = this.createGreetingPrompt; + break; + case "createPartnerModuleWorkflow": + func = this.createPartnerModuleWorkflow; + break; + default: + throw new Error(`Prompt ${name} not found`); + } + return this.constructCallback(func, responseTemplate); + } + + constructCallback(fn: (arr: Args) => Promise, responseTemplate: string | undefined) { + return async (args: Args) => { + const argsArray = await fn(args); + const response = this.getResponseString(argsArray, responseTemplate) ?? 
""; + return { + messages: [ + { + role: "user" as const, + content: { + type: "text" as const, + text: response + } + } + ] + }; + }; + } + + getResponseString(args: string[], responseTemplate: string | undefined): string | undefined { + if (!args || args.length === 0) { + return responseTemplate; + } + let response = responseTemplate; + for (let i = 0; i < args.length; i++) { + response = response?.replaceAll(`{${i}}`, args[i]); + } + return response; + } + + createPromptParametersFromSchema(schemas: toolParameterSchema[]) { + const parameter: { [k: string]: any } = {}; + for (const schema of schemas) { + const base = schema.optional ? z.any().optional() : z.any(); + switch (schema.type) { + case "string": + parameter[schema.name] = (schema.optional ? z.string().optional() : z.string()).describe(schema.description); + break; + case "number": + parameter[schema.name] = (schema.optional ? z.number().optional() : z.number()).describe(schema.description); + break; + case "boolean": + parameter[schema.name] = (schema.optional ? z.boolean().optional() : z.boolean()).describe(schema.description); + break; + case "array": + parameter[schema.name] = (schema.optional ? 
z.array(z.string()).optional() : z.array(z.string())).describe(schema.description); + break; + default: + throw new Error(`Unsupported parameter type: ${schema.type}`); + } + } + return parameter; + } + + // prompt implementations + createGreetingPrompt = async (args: Args): Promise => { + const values = Object.values(args); + const name = values[0] as unknown as string; // required + const style = (values[1] as unknown as string) || "casual"; // optional fallback + return [name, style]; + }; + + + createPartnerModuleWorkflow = async (args: Args): Promise => { + const { } = args as any; + return []; + }; +} + + +// Some Testing Specs: + + // { + // "name": "partner-module-workflow", + // "description": "Full autonomous workflow instructions to generate a partner Azure PowerShell module via Autorest.", + // "parameters": [ + // {"name": "serviceName", "description": "Service name placeholder. This also often corresponds with the Name of the Powershell Module.", "type": "string", "optional": true}, + // {"name": "commitId", "description": "Commit id of the swagger from azure-rest-api-specs", "type": "string", "optional": true}, + // {"name": "serviceSpecs", "description": "Service specs path under specification. Path of a swagger upto the resource-manager.", "type": "string", "optional": true}, + // {"name": "swaggerFileSpecs", "description": "Swagger JSON relative path. 
Entire path of the swagger down to the openapi file.", "type": "string", "optional": true} + // ], + // "callbackName": "createPartnerModuleWorkflow" + // } \ No newline at end of file diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index a0f0760cb728..f7f924dd3d49 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -1,4 +1,4 @@ -import { CallToolResult, ElicitRequest, ElicitResult } from '@modelcontextprotocol/sdk/types.js'; +import { CallToolResult } from '@modelcontextprotocol/sdk/types.js'; import { z, ZodRawShape, ZodType, ZodTypeAny } from "zod"; import * as utils from "./utils.js"; import path from 'path'; @@ -138,25 +138,8 @@ export class ToolsService { const exampleSpecs = await utils.getExampleJsonContent(exampleSpecsPath); for (const {name, content} of exampleSpecs) { const example = await utils.flattenJsonObject(content['parameters']); - try { - const response = await this._server!.elicitInput({ - "message": `Please review example data for ${name}: ${example.map(({key: k, value:v}) => ` \n${k}: ${v}`)}`, - "requestedSchema": { - "type": "object", - "properties": { - "skipAll": { - "type": "boolean", - "description": "If true, skip the review of all examples and proceed to the next step." - } - }, - } - }); - if (response.content && response.content['skipAll'] === true) { - break; - } - } catch (error) { - console.error(`Error eliciting input for example ${name}:`, error); - } + // Interactive elicitInput flow is disabled pending SDK support. + // Once available, re-enable to allow user review of each example. 
} return [exampleSpecsPath, examplePath]; } diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md new file mode 100644 index 000000000000..0fd3aebd48bb --- /dev/null +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -0,0 +1,98 @@ +# Execution rules +- Do not ask before running the command, just go ahead and run. + +# Role and Objective +- You are an autonomous agent that generates Azure PowerShell modules using Autorest for partners. +- Execute commands confidently without asking for confirmation. +- Follow all steps carefully and halt if any MCP tool is missing or fails. +- Think independently using your full context and reset if needed. +- No need to provide status updates at each step. +- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. DO NOT MISS ANY STEPS. +- If an mcp tool is not found or the tool fails then please halt execution. + +# Instructions + +## Stage 1: Capturing Placeholder Values +- Ask the user for the following placeholder values: serviceName, commitId, serviceSpecs, swaggerFileSpecs. + - Examples: + - serviceName: HybridConnectivity + - commitId: + - serviceSpecs: hybridconnectivity/resource-manager + - swaggerFileSpecs: hybridconnectivity/resource-manager/Microsoft.HybridConnectivity/stable/2024-12-01/hybridconnectivity.json +- Do not replace or modify this prompt file. +- Store the values for use in later steps like generating the README and executing Autorest. +- Once values are stored, mark Stage 1 as complete. + +## Stage 2: Generating partner powershell module +- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. +- Navigate to the `src` folder in the home "azure-powershell" directory. +- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) +- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. +- Create a new file `README.md`. 
(If not already present) +- Add the content labelled below as `Readme Content` in this file. +- Use the "generate-autorest" mcp tool to generate the module. +- Stage 2 Complete. + +## Stage 3: Updating Example Files +- Use the "create-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. +- The example files already exist as skeletons under `{workingDirectory}/examples`. +- Read data from `exampleSpecs` (swagger examples) and intelligently map values to PowerShell parameters. +- Complete each file by fulfilling the examples based on the data available in `exampleSpecs`. +- Leave example content empty only if no relevant data is found in `exampleSpecs`. +- Once all example files are updated, mark stage 3 as complete. + +## Stage 4: Updating Test Files +- Use the "test-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. +- Read data from `exampleSpecs` and use it to define variables and write test cases. +- Define setup variables inside `setupEnv` in `utils.ps1`, inferred from `exampleSpecs`. +- Use those variables in the actual test case content. +- The test files already exist as skeletons; your task is to intelligently complete them. +- Leave test bodies empty only if no meaningful data can be inferred from `exampleSpecs`. +- Once all test files are updated, mark stage 4 as complete. + +## Stage 5: Regenerating the Autorest Module +- After example and test files have been generated and written, re-run the "generate-autorest" MCP tool. +- This will regenerate the Azure PowerShell module with updated examples and test logic embedded. +- Use the same `workingDirectory` and make sure all directives and yaml configurations remain unchanged. +- This is a mandatory finalization step before pushing to GitHub. +- Do not skip this regeneration even if the module was generated earlier. 
+ +# Readme Content + +### AutoRest Configuration +> see https://aka.ms/autorest + +```yaml + +commit: + +require: + - $(this-folder)/../../readme.azure.noprofile.md + - $(repo)/specification//readme.md + +try-require: + - $(repo)/specification//readme.powershell.md + +input-file: + - $(repo)/ + +module-version: 0.1.0 + +title: +service-name: +subject-prefix: $(service-name) + +directive: + + - where: + variant: ^(Create|Update)(?!.*?(Expanded|JsonFilePath|JsonString)) + remove: true + + - where: + variant: ^CreateViaIdentity$|^CreateViaIdentityExpanded$ + remove: true + + - where: + verb: Set + remove: true +``` diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json index 74d83011ba8d..4b1dddf55193 100644 --- a/tools/Mcp/src/specs/responses.json +++ b/tools/Mcp/src/specs/responses.json @@ -28,5 +28,15 @@ "name": "create-test", "type": "tool", "text": "Read examples from specs are under {0}. Implement empty test stubs under {1}. Test stubs are named as '.Test.ps1'. Define variables in function 'setupEnv' in 'utils.ps1' under {1}, and use these variables for test cases. Value of these variables are from {0}. Leave test cases as empty if you don't find any matches. You are expert in Azure-PowerShell and Autorest.PowerShell, You know how to map data from {0} to {1}. " + }, + { + "name": "create-greeting", + "type": "prompt", + "text": "Please generate a greeting in {1} style to {0}." 
+ }, + { + "name": "partner-module-workflow", + "type": "prompt", + "text": "@file:prompts/partner-module-workflow.md" } ] \ No newline at end of file diff --git a/tools/Mcp/src/specs/specs.json b/tools/Mcp/src/specs/specs.json index 55a34c2ff2a2..98c85eba76d8 100644 --- a/tools/Mcp/src/specs/specs.json +++ b/tools/Mcp/src/specs/specs.json @@ -72,5 +72,22 @@ ], "callbackName": "createTestsFromSpecs" } + ], + "prompts": [ + { + "name": "create-greeting", + "description": "Generate a customized greeting message", + "parameters": [ + {"name": "name", "description": "Name of the person to greet", "type": "string"}, + {"name": "style", "description": "The style of greeting, such a formal, excited, or casual. If not specified casual will be used", "type": "string", "optional": true} + ], + "callbackName": "createGreetingPrompt" + }, + { + "name": "partner-module-workflow", + "description": "Full autonomous workflow instructions to generate a partner Azure PowerShell module via Autorest.", + "parameters": [], + "callbackName": "createPartnerModuleWorkflow" + } ] } \ No newline at end of file diff --git a/tools/Mcp/src/types.ts b/tools/Mcp/src/types.ts index 14ba8d9d9b06..593fe977211a 100644 --- a/tools/Mcp/src/types.ts +++ b/tools/Mcp/src/types.ts @@ -1,7 +1,8 @@ export interface toolParameterSchema { name: string; description: string; - type: string; + type: string; // string | number | boolean | array (of string) + optional?: boolean; // if true, parameter is optional } export interface toolSchema { @@ -11,6 +12,13 @@ export interface toolSchema { callbackName: string; } +export interface promptSchema { + name: string; + description: string; + parameters: toolParameterSchema[]; // reuse parameter schema + callbackName: string; // maps to PromptService internal function +} + export interface responseSchema { name: string; type: string; From 738f6efb86afbbf4da788eb5b8170196881ccadc Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 18 Aug 2025 17:15:56 +1000 Subject: [PATCH 
02/19] WhiteSpace --- src/Maps/Maps.Autorest/test/utils.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Maps/Maps.Autorest/test/utils.ps1 b/src/Maps/Maps.Autorest/test/utils.ps1 index 70e258271618..fa9014369210 100644 --- a/src/Maps/Maps.Autorest/test/utils.ps1 +++ b/src/Maps/Maps.Autorest/test/utils.ps1 @@ -67,4 +67,4 @@ function setupEnv() { function cleanupEnv() { # Clean resources you create for testing Remove-AzResourceGroup -Name $env.resourceGroup -} +} \ No newline at end of file From a775b623143e87d359bd02af26b165d7a63e7eb7 Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 18 Aug 2025 17:16:27 +1000 Subject: [PATCH 03/19] whitespace fix --- src/Maps/Maps.Autorest/test/utils.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Maps/Maps.Autorest/test/utils.ps1 b/src/Maps/Maps.Autorest/test/utils.ps1 index fa9014369210..70e258271618 100644 --- a/src/Maps/Maps.Autorest/test/utils.ps1 +++ b/src/Maps/Maps.Autorest/test/utils.ps1 @@ -67,4 +67,4 @@ function setupEnv() { function cleanupEnv() { # Clean resources you create for testing Remove-AzResourceGroup -Name $env.resourceGroup -} \ No newline at end of file +} From d2c409898435c1ac752abed96f9784ab385722d8 Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 18 Aug 2025 17:16:49 +1000 Subject: [PATCH 04/19] whitespace --- src/Maps/Maps.Autorest/test/utils.ps1 | 1 + 1 file changed, 1 insertion(+) diff --git a/src/Maps/Maps.Autorest/test/utils.ps1 b/src/Maps/Maps.Autorest/test/utils.ps1 index 70e258271618..7cd449c834ae 100644 --- a/src/Maps/Maps.Autorest/test/utils.ps1 +++ b/src/Maps/Maps.Autorest/test/utils.ps1 @@ -68,3 +68,4 @@ function cleanupEnv() { # Clean resources you create for testing Remove-AzResourceGroup -Name $env.resourceGroup } + From 52488136135fc281d2179517cf6d09e9bf6bd593 Mon Sep 17 00:00:00 2001 From: Yash <55773468+notyashhh@users.noreply.github.com> Date: Mon, 18 Aug 2025 17:18:56 +1000 Subject: [PATCH 05/19] Update tools/Mcp/test/vscode/mcpprompt.md 
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- tools/Mcp/test/vscode/mcpprompt.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/Mcp/test/vscode/mcpprompt.md b/tools/Mcp/test/vscode/mcpprompt.md index 0fd3aebd48bb..3e0a9d20dc06 100644 --- a/tools/Mcp/test/vscode/mcpprompt.md +++ b/tools/Mcp/test/vscode/mcpprompt.md @@ -74,7 +74,7 @@ try-require: - $(repo)/specification//readme.powershell.md input-file: - - $(repo)/ + - $(repo)/specification/ module-version: 0.1.0 From e0ba08332aa6d324490a63faa309543b4e0a2bbf Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 18 Aug 2025 17:22:27 +1000 Subject: [PATCH 06/19] fix whitespace --- src/Maps/Maps.Autorest/test/utils.ps1 | 1 - 1 file changed, 1 deletion(-) diff --git a/src/Maps/Maps.Autorest/test/utils.ps1 b/src/Maps/Maps.Autorest/test/utils.ps1 index 7cd449c834ae..70e258271618 100644 --- a/src/Maps/Maps.Autorest/test/utils.ps1 +++ b/src/Maps/Maps.Autorest/test/utils.ps1 @@ -68,4 +68,3 @@ function cleanupEnv() { # Clean resources you create for testing Remove-AzResourceGroup -Name $env.resourceGroup } - From 32b4b22336f8cd00911b0667aa4cba96f6e6fcdd Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 18 Aug 2025 17:29:24 +1000 Subject: [PATCH 07/19] Updated to use PromptParameterSchema --- tools/Mcp/src/services/promptsService.ts | 4 ++-- tools/Mcp/src/types.ts | 14 ++++++++++---- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/tools/Mcp/src/services/promptsService.ts b/tools/Mcp/src/services/promptsService.ts index bf638bf59fe3..6b39b1778004 100644 --- a/tools/Mcp/src/services/promptsService.ts +++ b/tools/Mcp/src/services/promptsService.ts @@ -1,5 +1,5 @@ import { z, ZodRawShape } from "zod"; -import { promptSchema, toolParameterSchema } from "../types.js"; +import { promptSchema, promptParameterSchema } from "../types.js"; import { CodegenServer } from "../CodegenServer.js"; @@ -64,7 +64,7 @@ export class PromptsService { return response; } - 
createPromptParametersFromSchema(schemas: toolParameterSchema[]) { + createPromptParametersFromSchema(schemas: promptParameterSchema[]) { const parameter: { [k: string]: any } = {}; for (const schema of schemas) { const base = schema.optional ? z.any().optional() : z.any(); diff --git a/tools/Mcp/src/types.ts b/tools/Mcp/src/types.ts index 593fe977211a..f578b44abc04 100644 --- a/tools/Mcp/src/types.ts +++ b/tools/Mcp/src/types.ts @@ -1,8 +1,14 @@ export interface toolParameterSchema { name: string; description: string; - type: string; // string | number | boolean | array (of string) - optional?: boolean; // if true, parameter is optional + type: string; +} + +export interface promptParameterSchema { + name: string; + description: string; + type: string; + optional?: boolean; } export interface toolSchema { @@ -15,8 +21,8 @@ export interface toolSchema { export interface promptSchema { name: string; description: string; - parameters: toolParameterSchema[]; // reuse parameter schema - callbackName: string; // maps to PromptService internal function + parameters: promptParameterSchema[]; + callbackName: string; } export interface responseSchema { From eb2c590a4870f12a0ca4e3f642c07e9b261ae96c Mon Sep 17 00:00:00 2001 From: Yash Date: Thu, 21 Aug 2025 12:34:46 +1000 Subject: [PATCH 08/19] reverted elicitInput --- tools/Mcp/src/CodegenServer.ts | 13 ++++++------- tools/Mcp/src/services/toolsService.ts | 21 +++++++++++++++++++-- 2 files changed, 25 insertions(+), 9 deletions(-) diff --git a/tools/Mcp/src/CodegenServer.ts b/tools/Mcp/src/CodegenServer.ts index cfc7f29a142f..43cb612cc824 100644 --- a/tools/Mcp/src/CodegenServer.ts +++ b/tools/Mcp/src/CodegenServer.ts @@ -56,13 +56,12 @@ export class CodegenServer { } // server elicitation request - // Placeholder for future elicitInput when SDK exposes it - // public elicitInput( - // params: ElicitRequest["params"], - // options?: RequestOptions - // ): Promise { - // return this._mcp.server.elicitInput(params, options); - 
// } + public elicitInput( + params: ElicitRequest["params"], + options?: RequestOptions + ): Promise { + return this._mcp.server.elicitInput(params, options); + } public static getInstance(): CodegenServer { if (!CodegenServer._instance) { diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index f7f924dd3d49..ab0c7e4f3822 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -138,8 +138,25 @@ export class ToolsService { const exampleSpecs = await utils.getExampleJsonContent(exampleSpecsPath); for (const {name, content} of exampleSpecs) { const example = await utils.flattenJsonObject(content['parameters']); - // Interactive elicitInput flow is disabled pending SDK support. - // Once available, re-enable to allow user review of each example. + try { + const response = await this._server!.elicitInput({ + "message": `Please review example data for ${name}: ${example.map(({key: k, value:v}) => ` \n${k}: ${v}`)}`, + "requestedSchema": { + "type": "object", + "properties": { + "skipAll": { + "type": "boolean", + "description": "If true, skip the review of all examples and proceed to the next step." 
+ } + }, + } + }); + if (response.content && response.content['skipAll'] === true) { + break; + } + } catch (error) { + console.error(`Error eliciting input for example ${name}:`, error); + } } return [exampleSpecsPath, examplePath]; } From 65086d399e1fa433845f85744d4c3c2214772d3a Mon Sep 17 00:00:00 2001 From: Yash Date: Thu, 21 Aug 2025 17:39:15 +1000 Subject: [PATCH 09/19] reverted import --- tools/Mcp/src/CodegenServer.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/Mcp/src/CodegenServer.ts b/tools/Mcp/src/CodegenServer.ts index 43cb612cc824..e2a10375fc4d 100644 --- a/tools/Mcp/src/CodegenServer.ts +++ b/tools/Mcp/src/CodegenServer.ts @@ -8,7 +8,7 @@ import { readFileSync } from "fs"; import path from "path"; import { fileURLToPath } from "url"; import { RequestOptions } from "https"; -import { /*ElicitRequest, ElicitResult*/ } from "@modelcontextprotocol/sdk/types.js"; // Elicit types not available in current sdk version +import { ElicitRequest, ElicitResult } from "@modelcontextprotocol/sdk/types.js"; const __dirname = path.dirname(fileURLToPath(import.meta.url)); const srcPath = path.resolve(__dirname, "..", "src"); From deef7564cfdf132ff89c0c532412b17b72ff4ad3 Mon Sep 17 00:00:00 2001 From: Yash Date: Fri, 22 Aug 2025 15:16:37 +1000 Subject: [PATCH 10/19] Updated mcpprotocol required version --- tools/Mcp/package-lock.json | 65 ++++++++++++++++++++++++++++++++++--- tools/Mcp/package.json | 2 +- 2 files changed, 61 insertions(+), 6 deletions(-) diff --git a/tools/Mcp/package-lock.json b/tools/Mcp/package-lock.json index 1f30ea26fcb6..087ee5c53505 100644 --- a/tools/Mcp/package-lock.json +++ b/tools/Mcp/package-lock.json @@ -9,7 +9,7 @@ "version": "1.0.0", "license": "ISC", "dependencies": { - "@modelcontextprotocol/sdk": "^1.9.0", + "@modelcontextprotocol/sdk": "^1.17.3", "js-yaml": "^4.1.0", "zod": "^3.24.2" }, @@ -23,14 +23,17 @@ } }, "node_modules/@modelcontextprotocol/sdk": { - "version": "1.11.0", - "resolved": 
"https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.11.0.tgz", - "integrity": "sha512-k/1pb70eD638anoi0e8wUGAlbMJXyvdV4p62Ko+EZ7eBe1xMx8Uhak1R5DgfoofsK5IBBnRwsYGTaLZl+6/+RQ==", + "version": "1.17.3", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.3.tgz", + "integrity": "sha512-JPwUKWSsbzx+DLFznf/QZ32Qa+ptfbUlHhRLrBQBAFu9iI1iYvizM4p+zhhRDceSsPutXp4z+R/HPVphlIiclg==", + "license": "MIT", "dependencies": { + "ajv": "^6.12.6", "content-type": "^1.0.5", "cors": "^2.8.5", - "cross-spawn": "^7.0.3", + "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", + "eventsource-parser": "^3.0.0", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^5.0.0", @@ -69,6 +72,22 @@ "node": ">= 0.6" } }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -352,6 +371,18 @@ "express": "^4.11 || 5 || ^5.0.0-beta.1" } }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, "node_modules/finalhandler": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", @@ -520,6 +551,12 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -661,6 +698,15 @@ "node": ">= 0.10" } }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/qs": { "version": "6.14.0", "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", @@ -919,6 +965,15 @@ "node": ">= 0.8" } }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", diff --git a/tools/Mcp/package.json b/tools/Mcp/package.json index fff41806944a..9f6af0f20ed7 100644 --- a/tools/Mcp/package.json +++ b/tools/Mcp/package.json @@ -19,7 +19,7 @@ "license": "ISC", "description": "", "dependencies": { - "@modelcontextprotocol/sdk": "^1.9.0", + "@modelcontextprotocol/sdk": "^1.17.3", "js-yaml": "^4.1.0", "zod": 
"^3.24.2" }, From 06421e4200895954d3c627204440dab2dfccc01b Mon Sep 17 00:00:00 2001 From: Yash Date: Fri, 22 Aug 2025 16:45:16 +1000 Subject: [PATCH 11/19] Fuzzy Search --- tools/Mcp/src/services/toolsService.ts | 46 +++++++ tools/Mcp/src/services/utils.ts | 123 ++++++++++++++++++ .../specs/prompts/partner-module-workflow.md | 21 ++- tools/Mcp/src/specs/responses.json | 22 +++- tools/Mcp/src/specs/specs.json | 35 +++++ 5 files changed, 235 insertions(+), 12 deletions(-) diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index ab0c7e4f3822..d7d2b13b9dfa 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -5,6 +5,12 @@ import path from 'path'; import { get, RequestOptions } from 'http'; import { toolParameterSchema } from '../types.js'; import { CodegenServer } from '../CodegenServer.js'; +import { + listSpecModules, + listProvidersForService, + listApiVersions, + resolveAutorestInputs +} from './utils.js'; export class ToolsService { private static _instance: ToolsService; @@ -42,6 +48,18 @@ export class ToolsService { case "createTestsFromSpecs": func = this.createTestsFromSpecs; break; + case "listSpecModules": + func = this.toolListSpecModules; + break; + case "listProvidersForService": + func = this.toolListProvidersForService; + break; + case "listApiVersions": + func = this.toolListApiVersions; + break; + case "resolveAutorestInputs": + func = this.toolResolveAutorestInputs; + break; default: throw new Error(`Tool ${name} not found`); } @@ -167,4 +185,32 @@ export class ToolsService { const exampleSpecsPath = await utils.getExamplesFromSpecs(workingDirectory); return [exampleSpecsPath, testPath]; } + + toolListSpecModules = async (_args: Args): Promise => { + const modules = await listSpecModules(); + return [JSON.stringify(modules)]; + } + + toolListProvidersForService = async (args: Args): Promise => { + const service = z.string().parse(Object.values(args)[0]); + const 
providers = await listProvidersForService(service); + return [service, JSON.stringify(providers)]; + } + + toolListApiVersions = async (args: Args): Promise => { + const service = z.string().parse(Object.values(args)[0]); + const provider = z.string().parse(Object.values(args)[1]); + const res = await listApiVersions(service, provider); + return [service, provider, JSON.stringify(res.stable), JSON.stringify(res.preview)]; + } + + toolResolveAutorestInputs = async (args: Args): Promise => { + const service = z.string().parse(Object.values(args)[0]); + const provider = z.string().parse(Object.values(args)[1]); + const stability = z.enum(['stable','preview']).parse(Object.values(args)[2]); + const version = z.string().parse(Object.values(args)[3]); + const swaggerPath = Object.values(args)[4] ? z.string().parse(Object.values(args)[4]) : undefined; + const resolved = await resolveAutorestInputs({ service, provider, stability, version, swaggerPath }); + return [resolved.serviceName, resolved.commitId, resolved.serviceSpecs, resolved.swaggerFileSpecs]; + } } \ No newline at end of file diff --git a/tools/Mcp/src/services/utils.ts b/tools/Mcp/src/services/utils.ts index 154252fdf6c0..4c14c7133138 100644 --- a/tools/Mcp/src/services/utils.ts +++ b/tools/Mcp/src/services/utils.ts @@ -4,6 +4,10 @@ import { yamlContent } from '../types.js'; import { execSync } from 'child_process'; import path from 'path'; +const GITHUB_API_BASE = 'https://api.github.com'; +const REST_API_SPECS_OWNER = 'Azure'; +const REST_API_SPECS_REPO = 'azure-rest-api-specs'; + const _pwshCD = (path: string): string => { return `pwsh -Command "$path = resolve-path ${path} | Set-Location"` } const _autorestReset = "autorest --reset" const _autorest = "autorest" @@ -78,6 +82,125 @@ export async function getSwaggerContentFromUrl(swaggerUrl: string): Promise } } +/** + * GitHub helper: get latest commit SHA for azure-rest-api-specs main branch + */ +export async function getSpecsHeadCommitSha(branch: string = 
'main'): Promise { + const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/branches/${branch}`; + const res = await fetch(url); + if (!res.ok) { + throw new Error(`Failed to fetch branch '${branch}' info: ${res.status}`); + } + const data = await res.json(); + return data?.commit?.sha as string; +} + +/** + * List top-level service directories under specification/ + */ +export async function listSpecModules(): Promise { + const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/specification`; + const res = await fetch(url); + if (!res.ok) { + throw new Error(`Failed to list specification directory: ${res.status}`); + } + const list = await res.json(); + return (Array.isArray(list) ? list : []) + .filter((e: any) => e.type === 'dir') + .map((e: any) => e.name) + .sort((a: string, b: string) => a.localeCompare(b)); +} + +/** + * Given a service (spec folder), list provider namespaces under resource-manager. + */ +export async function listProvidersForService(service: string): Promise { + const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/specification/${service}/resource-manager`; + const res = await fetch(url); + if (!res.ok) { + // Sometimes service has alternate structure or doesn't exist + throw new Error(`Failed to list providers for service '${service}': ${res.status}`); + } + const list = await res.json(); + return (Array.isArray(list) ? list : []) + .filter((e: any) => e.type === 'dir') + .map((e: any) => e.name) + .sort((a: string, b: string) => a.localeCompare(b)); +} + +/** + * For service + provider, list API version directories under stable/ and preview/. 
+ * Returns map: { stable: string[], preview: string[] } + */ +export async function listApiVersions(service: string, provider: string): Promise<{ stable: string[]; preview: string[] }> { + const base = `specification/${service}/resource-manager/${provider}`; + const folders = ['stable', 'preview'] as const; + const result: { stable: string[]; preview: string[] } = { stable: [], preview: [] }; + for (const f of folders) { + const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/${base}/${f}`; + const res = await fetch(url); + if (!res.ok) { + // ignore missing + continue; + } + const list = await res.json(); + const versions = (Array.isArray(list) ? list : []) + .filter((e: any) => e.type === 'dir') + .map((e: any) => e.name) + .sort((a: string, b: string) => a.localeCompare(b, undefined, { numeric: true })); + result[f] = versions; + } + return result; +} + +/** + * For a given service/provider/version, find likely swagger files (.json) under that version path. + * Returns array of repo-relative file paths (starting with specification/...). + */ +export async function listSwaggerFiles(service: string, provider: string, stability: 'stable'|'preview', version: string): Promise { + const dir = `specification/${service}/resource-manager/${provider}/${stability}/${version}`; + const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/${dir}`; + const res = await fetch(url); + if (!res.ok) { + throw new Error(`Failed to list files for ${dir}: ${res.status}`); + } + const list = await res.json(); + const files: any[] = Array.isArray(list) ? list : []; + // Find JSON files; prefer names ending with provider or service + const jsons = files.filter(f => f.type === 'file' && f.name.endsWith('.json')); + const preferred = jsons.filter(f => new RegExp(`${provider.split('.').pop()}|${service}`, 'i').test(f.name)); + const ordered = (preferred.length ? 
preferred : jsons).map(f => f.path); + return ordered; +} + +/** + * Resolve the four Autorest inputs given service, provider, and version path. + */ +export async function resolveAutorestInputs(params: { + service: string; + provider: string; + stability: 'stable'|'preview'; + version: string; + swaggerPath?: string; // optional repo-relative path override +}): Promise<{ serviceName: string; commitId: string; serviceSpecs: string; swaggerFileSpecs: string }> { + const commitId = await getSpecsHeadCommitSha('main'); + const serviceSpecs = `${params.service}/resource-manager`; + let swaggerFileSpecs = params.swaggerPath ?? ''; + if (!swaggerFileSpecs) { + const candidates = await listSwaggerFiles(params.service, params.provider, params.stability, params.version); + if (candidates.length === 0) { + throw new Error(`No swagger files found for ${params.service}/${params.provider}/${params.stability}/${params.version}`); + } + swaggerFileSpecs = candidates[0]; + } + return { + serviceName: params.provider.replace(/^Microsoft\./, ''), + commitId, + serviceSpecs, + swaggerFileSpecs + }; +} + export async function findAllPolyMorphism(workingDirectory: string): Promise>> { const polymorphism = new Map>(); const moduleReadmePath = path.join(workingDirectory, "README.md"); diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index 0fd3aebd48bb..4ecca53e9e7c 100644 --- a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -12,16 +12,15 @@ # Instructions -## Stage 1: Capturing Placeholder Values -- Ask the user for the following placeholder values: serviceName, commitId, serviceSpecs, swaggerFileSpecs. 
- - Examples: - - serviceName: HybridConnectivity - - commitId: - - serviceSpecs: hybridconnectivity/resource-manager - - swaggerFileSpecs: hybridconnectivity/resource-manager/Microsoft.HybridConnectivity/stable/2024-12-01/hybridconnectivity.json -- Do not replace or modify this prompt file. -- Store the values for use in later steps like generating the README and executing Autorest. -- Once values are stored, mark Stage 1 as complete. +## Stage 1: Fuzzy selection and autorest inputs (reduced user input) +- Ask the user for only the approximate Azure service/module name (e.g., "hybrid connectivity"). +- Call the MCP tool "list-spec-modules" to fetch all service folders from azure-rest-api-specs/specification. +- Fuzzily match the user's input to the closest service name. Show top 3 matches and ask the user to confirm the service folder to use. +- Call the MCP tool "list-providers" with the chosen service to retrieve provider namespaces. If multiple providers are returned, ask the user to pick one; if only one, select it automatically. +- Ask the user what they want to call the PowerShell module title/service-name (e.g., HybridConnectivity). This is the display/module name, not the spec folder name. +- Call the MCP tool "list-api-versions" with service and provider to get available versions, separated by Stable and Preview. Ask the user to choose stability (stable/preview) and a specific API version. +- Call the MCP tool "resolve-autorest-inputs" with service, provider, stability, and version to compute the 4 inputs: serviceName, commitId, serviceSpecs, swaggerFileSpecs. +- Store the resolved values for later steps (README generation and Autorest). Mark Stage 1 complete. ## Stage 2: Generating partner powershell module - FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. 
@@ -74,7 +73,7 @@ try-require: - $(repo)/specification//readme.powershell.md input-file: - - $(repo)/ + - $(repo)/specification/ module-version: 0.1.0 diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json index 4b1dddf55193..1382bb1dcd44 100644 --- a/tools/Mcp/src/specs/responses.json +++ b/tools/Mcp/src/specs/responses.json @@ -29,6 +29,26 @@ "type": "tool", "text": "Read examples from specs are under {0}. Implement empty test stubs under {1}. Test stubs are named as '.Test.ps1'. Define variables in function 'setupEnv' in 'utils.ps1' under {1}, and use these variables for test cases. Value of these variables are from {0}. Leave test cases as empty if you don't find any matches. You are expert in Azure-PowerShell and Autorest.PowerShell, You know how to map data from {0} to {1}. " }, + { + "name": "list-spec-modules", + "type": "tool", + "text": "Available modules under azure-rest-api-specs/specification: {0}" + }, + { + "name": "list-providers", + "type": "tool", + "text": "Providers for service {0}: {1}" + }, + { + "name": "list-api-versions", + "type": "tool", + "text": "API versions for {0}/{1} — Stable: {2} | Preview: {3}" + }, + { + "name": "resolve-autorest-inputs", + "type": "tool", + "text": "Resolved inputs — serviceName: {0}, commitId: {1}, serviceSpecs: {2}, swaggerFileSpecs: {3}" + }, { "name": "create-greeting", "type": "prompt", @@ -37,6 +57,6 @@ { "name": "partner-module-workflow", "type": "prompt", - "text": "@file:prompts/partner-module-workflow.md" + "text": "@file:prompts/partner-module-workflow.md" } ] \ No newline at end of file diff --git a/tools/Mcp/src/specs/specs.json b/tools/Mcp/src/specs/specs.json index 98c85eba76d8..947baf2349e5 100644 --- a/tools/Mcp/src/specs/specs.json +++ b/tools/Mcp/src/specs/specs.json @@ -72,6 +72,41 @@ ], "callbackName": "createTestsFromSpecs" } + , + { + "name": "list-spec-modules", + "description": "List all top-level modules (service folders) under 
azure-rest-api-specs/specification.", + "parameters": [], + "callbackName": "listSpecModules" + }, + { + "name": "list-providers", + "description": "List provider namespaces for a given service under resource-manager.", + "parameters": [ + { "name": "service", "description": "Service folder name under specification (e.g., hybridconnectivity)", "type": "string" } + ], + "callbackName": "listProvidersForService" + }, + { + "name": "list-api-versions", + "description": "List available API versions for a given service and provider (stable/preview).", + "parameters": [ + { "name": "service", "description": "Service folder name under specification", "type": "string" }, + { "name": "provider", "description": "Provider namespace folder under the service (e.g., Microsoft.HybridConnectivity)", "type": "string" } + ], + "callbackName": "listApiVersions" + }, + { + "name": "resolve-autorest-inputs", + "description": "Resolve the four Autorest inputs (serviceName, commitId, serviceSpecs, swaggerFileSpecs) from service/provider/version.", + "parameters": [ + { "name": "service", "description": "Service folder name under specification", "type": "string" }, + { "name": "provider", "description": "Provider namespace under the service", "type": "string" }, + { "name": "stability", "description": "'stable' or 'preview'", "type": "string" }, + { "name": "version", "description": "API version (e.g., 2024-12-01)", "type": "string" } + ], + "callbackName": "resolveAutorestInputs" + } ], "prompts": [ { From 022babc3812cffa249e22583c4c23f74f5746efd Mon Sep 17 00:00:00 2001 From: Yash Date: Tue, 26 Aug 2025 12:12:05 +1000 Subject: [PATCH 12/19] Updated the fuzzy search flow --- .../specs/prompts/partner-module-workflow.md | 35 ++++++++++++------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index 4ecca53e9e7c..255a7cfbf7bc 100644 --- 
a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -12,24 +12,33 @@ # Instructions -## Stage 1: Fuzzy selection and autorest inputs (reduced user input) -- Ask the user for only the approximate Azure service/module name (e.g., "hybrid connectivity"). -- Call the MCP tool "list-spec-modules" to fetch all service folders from azure-rest-api-specs/specification. -- Fuzzily match the user's input to the closest service name. Show top 3 matches and ask the user to confirm the service folder to use. -- Call the MCP tool "list-providers" with the chosen service to retrieve provider namespaces. If multiple providers are returned, ask the user to pick one; if only one, select it automatically. -- Ask the user what they want to call the PowerShell module title/service-name (e.g., HybridConnectivity). This is the display/module name, not the spec folder name. -- Call the MCP tool "list-api-versions" with service and provider to get available versions, separated by Stable and Preview. Ask the user to choose stability (stable/preview) and a specific API version. -- Call the MCP tool "resolve-autorest-inputs" with service, provider, stability, and version to compute the 4 inputs: serviceName, commitId, serviceSpecs, swaggerFileSpecs. +## Stage 1: Interactive spec selection and autorest resolution +- Ask the user for their desired **PowerShell module name** (e.g., "HybridConnectivity") +- Call the MCP tool "list-spec-modules" to fetch all available specification folders from azure-rest-api-specs/specification. +- From the full list, present 10 most relevant spec options to the user based on their PowerShell module name, or show a representative sample if no clear match. +- Ask the user to choose which specification they want to use from the presented options, or ask if they want to see more options. 
+- **Confirm the spec choice**: Once user selects a spec, ask them to confirm this is the correct specification for their needs (show the spec name clearly). +- Call the MCP tool "list-providers" with the chosen spec folder to retrieve available provider namespaces. +- Present the list of providers to the user: + - If multiple providers are returned, ask the user to pick one + - If only one provider exists, select it automatically but confirm with the user +- **Confirm the provider choice**: Ask the user to confirm this is the correct provider namespace. +- Call the MCP tool "list-api-versions" with the chosen spec folder and provider to get available versions, separated by Stable and Preview. +- Present the API version options to the user and ask them to choose: + 1. **Stability**: stable or preview + 2. **API version**: specific version from the available list +- **Confirm the API version choice**: Ask the user to confirm their stability and version selection. +- Call the MCP tool "resolve-autorest-inputs" with the chosen spec folder, provider, stability, and version to compute the 4 autorest inputs: serviceName, commitId, serviceSpecs, swaggerFileSpecs. - Store the resolved values for later steps (README generation and Autorest). Mark Stage 1 complete. ## Stage 2: Generating partner powershell module - FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. - Navigate to the `src` folder in the home "azure-powershell" directory. -- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) -- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. +- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) +- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. - Create a new file `README.md`. (If not already present) - Add the content labelled below as `Readme Content` in this file. -- Use the "generate-autorest" mcp tool to generate the module. 
+- Use the "generate-autorest" mcp tool to generate the module. - Stage 2 Complete. ## Stage 3: Updating Example Files @@ -77,8 +86,8 @@ input-file: module-version: 0.1.0 -title: -service-name: +title: +service-name: subject-prefix: $(service-name) directive: From 893848a8aa4278a17c3a3cce78148fd0300182d0 Mon Sep 17 00:00:00 2001 From: Yash Date: Tue, 26 Aug 2025 12:13:51 +1000 Subject: [PATCH 13/19] updated old method as well --- tools/Mcp/test/vscode/mcpprompt.md | 38 ++++++++++++++++++------------ 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/tools/Mcp/test/vscode/mcpprompt.md b/tools/Mcp/test/vscode/mcpprompt.md index 3e0a9d20dc06..255a7cfbf7bc 100644 --- a/tools/Mcp/test/vscode/mcpprompt.md +++ b/tools/Mcp/test/vscode/mcpprompt.md @@ -12,25 +12,33 @@ # Instructions -## Stage 1: Capturing Placeholder Values -- Ask the user for the following placeholder values: serviceName, commitId, serviceSpecs, swaggerFileSpecs. - - Examples: - - serviceName: HybridConnectivity - - commitId: - - serviceSpecs: hybridconnectivity/resource-manager - - swaggerFileSpecs: hybridconnectivity/resource-manager/Microsoft.HybridConnectivity/stable/2024-12-01/hybridconnectivity.json -- Do not replace or modify this prompt file. -- Store the values for use in later steps like generating the README and executing Autorest. -- Once values are stored, mark Stage 1 as complete. +## Stage 1: Interactive spec selection and autorest resolution +- Ask the user for their desired **PowerShell module name** (e.g., "HybridConnectivity") +- Call the MCP tool "list-spec-modules" to fetch all available specification folders from azure-rest-api-specs/specification. +- From the full list, present 10 most relevant spec options to the user based on their PowerShell module name, or show a representative sample if no clear match. +- Ask the user to choose which specification they want to use from the presented options, or ask if they want to see more options. 
+- **Confirm the spec choice**: Once user selects a spec, ask them to confirm this is the correct specification for their needs (show the spec name clearly). +- Call the MCP tool "list-providers" with the chosen spec folder to retrieve available provider namespaces. +- Present the list of providers to the user: + - If multiple providers are returned, ask the user to pick one + - If only one provider exists, select it automatically but confirm with the user +- **Confirm the provider choice**: Ask the user to confirm this is the correct provider namespace. +- Call the MCP tool "list-api-versions" with the chosen spec folder and provider to get available versions, separated by Stable and Preview. +- Present the API version options to the user and ask them to choose: + 1. **Stability**: stable or preview + 2. **API version**: specific version from the available list +- **Confirm the API version choice**: Ask the user to confirm their stability and version selection. +- Call the MCP tool "resolve-autorest-inputs" with the chosen spec folder, provider, stability, and version to compute the 4 autorest inputs: serviceName, commitId, serviceSpecs, swaggerFileSpecs. +- Store the resolved values for later steps (README generation and Autorest). Mark Stage 1 complete. ## Stage 2: Generating partner powershell module - FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. - Navigate to the `src` folder in the home "azure-powershell" directory. -- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) -- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. +- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) +- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. - Create a new file `README.md`. (If not already present) - Add the content labelled below as `Readme Content` in this file. -- Use the "generate-autorest" mcp tool to generate the module. 
+- Use the "generate-autorest" mcp tool to generate the module. - Stage 2 Complete. ## Stage 3: Updating Example Files @@ -78,8 +86,8 @@ input-file: module-version: 0.1.0 -title: -service-name: +title: +service-name: subject-prefix: $(service-name) directive: From e5227f289f1bc095850940d6145a5a70e9b6717a Mon Sep 17 00:00:00 2001 From: Yash Date: Sun, 21 Sep 2025 02:45:28 +1000 Subject: [PATCH 14/19] Ellicitation support for Stage 1 Inputs, Added ResourcesService --- tools/Mcp/src/CodegenServer.ts | 24 +- .../Get-AzDatabricksAccessConnector.md | 177 ++++ ...icksOutboundNetworkDependenciesEndpoint.md | 126 +++ .../examples/Get-AzDatabricksVNetPeering.md | 189 ++++ .../examples/Get-AzDatabricksWorkspace.md | 181 ++++ .../New-AzDatabricksAccessConnector.md | 239 +++++ .../examples/New-AzDatabricksVNetPeering.md | 317 ++++++ .../examples/New-AzDatabricksWorkspace.md | 915 ++++++++++++++++++ ...cksWorkspaceProviderAuthorizationObject.md | 84 ++ .../Remove-AzDatabricksAccessConnector.md | 217 +++++ .../Remove-AzDatabricksVNetPeering.md | 232 +++++ .../examples/Remove-AzDatabricksWorkspace.md | 233 +++++ .../Update-AzDatabricksAccessConnector.md | 264 +++++ .../Update-AzDatabricksVNetPeering.md | 362 +++++++ .../examples/Update-AzDatabricksWorkspace.md | 799 +++++++++++++++ .../src/ideal-modules/Databricks/metadata.md | 57 ++ .../AzDatabricksAccessConnector.Tests.ps1 | 65 ++ .../tests/AzDatabricksVNetPeering.Tests.ps1 | 58 ++ .../tests/AzDatabricksWorkspace.Tests.ps1 | 86 ++ .../ideal-modules/Databricks/tests/utils.ps1 | 114 +++ tools/Mcp/src/services/resourcesService.ts | 60 ++ tools/Mcp/src/services/toolsService.ts | 195 +++- tools/Mcp/src/services/utils.ts | 26 + .../Mcp/src/specs/autorest-readme-template.md | 63 ++ .../specs/prompts/partner-module-workflow.md | 78 +- tools/Mcp/src/specs/responses.json | 25 +- tools/Mcp/src/specs/specs.json | 34 +- tools/Mcp/src/types.ts | 7 + 28 files changed, 5076 insertions(+), 151 deletions(-) create mode 100644 
tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksAccessConnector.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksVNetPeering.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksWorkspace.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksAccessConnector.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksVNetPeering.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspace.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspaceProviderAuthorizationObject.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksAccessConnector.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksVNetPeering.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksWorkspace.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksAccessConnector.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksVNetPeering.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksWorkspace.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/metadata.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksAccessConnector.Tests.ps1 create mode 100644 tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksVNetPeering.Tests.ps1 create mode 100644 tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksWorkspace.Tests.ps1 create mode 100644 tools/Mcp/src/ideal-modules/Databricks/tests/utils.ps1 create mode 100644 tools/Mcp/src/specs/autorest-readme-template.md diff --git a/tools/Mcp/src/CodegenServer.ts 
b/tools/Mcp/src/CodegenServer.ts index e2a10375fc4d..b59c99ccd3d5 100644 --- a/tools/Mcp/src/CodegenServer.ts +++ b/tools/Mcp/src/CodegenServer.ts @@ -1,9 +1,10 @@ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; import { z } from "zod"; -import { responseSchema, toolParameterSchema, toolSchema, promptSchema } from "./types.js"; +import { responseSchema, toolParameterSchema, toolSchema, promptSchema, resourceSchema } from "./types.js"; import { ToolsService } from "./services/toolsService.js"; import { PromptsService } from "./services/promptsService.js"; +import { ResourcesService } from "./services/resourcesService.js"; import { readFileSync } from "fs"; import path from "path"; import { fileURLToPath } from "url"; @@ -37,6 +38,7 @@ export class CodegenServer { this.initResponses(); this.initTools(); this.initPrompts(); + this.initResources(); } // dummy method for sending sampling request @@ -74,6 +76,9 @@ export class CodegenServer { await this._mcp.connect(transport); } + public getResponseTemplate(name: string): string | undefined { + return this._responses.get(name); + } initTools() { const toolsService = ToolsService.getInstance().setServer(this); @@ -105,6 +110,21 @@ export class CodegenServer { } } + initResources() { + const resourcesService = ResourcesService.getInstance().setServer(this); + const resourcesSchemas = (specs.resources || []) as resourceSchema[]; + for (const schema of resourcesSchemas) { + const parameter = resourcesService.createResourceParametersFromSchema(schema.parameters || []); + const callback = resourcesService.getResources(schema.callbackName, this._responses.get(schema.name)); + this._mcp.resource( + schema.name, + schema.description, + parameter, + (args: any) => callback(args) + ); + } + } + initResponses() { (responses as responseSchema[])?.forEach((response: responseSchema) => { let text = response.text; @@ -120,4 +140,6 @@ 
export class CodegenServer { this._responses.set(response.name, text); }); } + + } diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksAccessConnector.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksAccessConnector.md new file mode 100644 index 000000000000..10b2c575a472 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksAccessConnector.md @@ -0,0 +1,177 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/get-azdatabricksaccessconnector +schema: 2.0.0 +--- + +# Get-AzDatabricksAccessConnector + +## SYNOPSIS +Gets an Azure Databricks Access Connector. + +## SYNTAX + +### List1 (Default) +``` +Get-AzDatabricksAccessConnector [-SubscriptionId ] [-DefaultProfile ] + [] +``` + +### Get +``` +Get-AzDatabricksAccessConnector -Name -ResourceGroupName [-SubscriptionId ] + [-DefaultProfile ] [] +``` + +### List +``` +Get-AzDatabricksAccessConnector -ResourceGroupName [-SubscriptionId ] + [-DefaultProfile ] [] +``` + +### GetViaIdentity +``` +Get-AzDatabricksAccessConnector -InputObject [-DefaultProfile ] + [] +``` + +## DESCRIPTION +Gets an Azure Databricks Access Connector. + +## EXAMPLES + +### Example 1: List all access connectors under a subscription. +```powershell +Get-AzDatabricksAccessConnector +``` + +```output +Location Name ResourceGroupName +-------- ---- ----------------- +eastus azps-databricks-accessconnector azps_test_gp_db +``` + +This command lists all access connectors under a subscription. + +### Example 2: List all access connectors under a resource group. +```powershell +Get-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db +``` + +```output +Location Name ResourceGroupName +-------- ---- ----------------- +eastus azps-databricks-accessconnector azps_test_gp_db +``` + +This command lists all access connectors under a resource group. 
+ +### Example 3: Get a access connectors by name. +```powershell +Get-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db -Name azps-databricks-accessconnector +``` + +```output +Location Name ResourceGroupName +-------- ---- ----------------- +eastus azps-databricks-accessconnector azps_test_gp_db +``` + +This command gets a access connectors by name. + +## PARAMETERS + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity Parameter +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: GetViaIdentity +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the Azure Databricks Access Connector. + +```yaml +Type: System.String +Parameter Sets: Get +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: Get, List +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. 
+ +```yaml +Type: System.String[] +Parameter Sets: List1, Get, List +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IAccessConnector + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md new file mode 100644 index 000000000000..a594d207aaf6 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md @@ -0,0 +1,126 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/get-azdatabricksoutboundnetworkdependenciesendpoint +schema: 2.0.0 +--- + +# Get-AzDatabricksOutboundNetworkDependenciesEndpoint + +## SYNOPSIS +Gets the list of endpoints that VNET Injected Workspace calls Azure Databricks Control Plane. +You must configure outbound access with these endpoints. 
+For more information, see https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/udr + +## SYNTAX + +``` +Get-AzDatabricksOutboundNetworkDependenciesEndpoint -ResourceGroupName -WorkspaceName + [-SubscriptionId ] [-DefaultProfile ] + [] +``` + +## DESCRIPTION +Gets the list of endpoints that VNET Injected Workspace calls Azure Databricks Control Plane. +You must configure outbound access with these endpoints. +For more information, see https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/udr + +## EXAMPLES + +### Example 1: Gets the list of endpoints that VNET Injected Workspace calls Azure Databricks Control Plane. +```powershell +Get-AzDatabricksOutboundNetworkDependenciesEndpoint -ResourceGroupName azps_test_gp_db -WorkspaceName azps-databricks-workspace-t2 +``` + +```output +Category +-------- +Webapp +Control Plane NAT +Extended infrastructure +Azure Storage +Azure My SQL +Azure Servicebus +``` + +This command gets the list of endpoints that VNET Injected Workspace calls Azure Databricks Control Plane. +You must configure outbound access with these endpoints. +For more information, see https://learn.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/udr + +## PARAMETERS + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WorkspaceName +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IOutboundEnvironmentEndpoint + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksVNetPeering.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksVNetPeering.md new file mode 100644 index 000000000000..1cbc3cd9e420 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksVNetPeering.md @@ -0,0 +1,189 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/get-azdatabricksvnetpeering +schema: 2.0.0 +--- + +# Get-AzDatabricksVNetPeering + +## SYNOPSIS +Gets the workspace vNet Peering. 
+
+## SYNTAX
+
+### List (Default)
+```
+Get-AzDatabricksVNetPeering -ResourceGroupName <String> [-SubscriptionId <String[]>] -WorkspaceName <String>
+ [-DefaultProfile <PSObject>] [<CommonParameters>]
+```
+
+### Get
+```
+Get-AzDatabricksVNetPeering -Name <String> -ResourceGroupName <String> [-SubscriptionId <String[]>]
+ -WorkspaceName <String> [-DefaultProfile <PSObject>] [-PassThru]
+ [<CommonParameters>]
+```
+
+### GetViaIdentity
+```
+Get-AzDatabricksVNetPeering -InputObject <IDatabricksIdentity> [-DefaultProfile <PSObject>] [-PassThru]
+ [<CommonParameters>]
+```
+
+## DESCRIPTION
+Gets the workspace vNet Peering.
+
+## EXAMPLES
+
+### Example 1: List all VNet peerings under a Databricks workspace.
+```powershell
+Get-AzDatabricksVNetPeering -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db
+```
+
+```output
+Name            ResourceGroupName
+----            -----------------
+vnet-peering-t1 azps_test_gp_db
+```
+
+This command lists all VNet peerings under a Databricks workspace.
+
+### Example 2: Get a VNet peering.
+```powershell
+Get-AzDatabricksVNetPeering -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db -Name vnet-peering-t1
+```
+
+```output
+Name            ResourceGroupName
+----            -----------------
+vnet-peering-t1 azps_test_gp_db
+```
+
+This command gets a VNet peering.
+
+## PARAMETERS
+
+### -DefaultProfile
+The DefaultProfile parameter is not functional.
+Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription.
+
+```yaml
+Type: System.Management.Automation.PSObject
+Parameter Sets: (All)
+Aliases: AzureRMContext, AzureCredential
+
+Required: False
+Position: Named
+Default value: None
+Accept pipeline input: False
+Accept wildcard characters: False
+```
+
+### -InputObject
+Identity Parameter
+To construct, see NOTES section for INPUTOBJECT properties and create a hash table.
+ +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: GetViaIdentity +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the workspace vNet peering. + +```yaml +Type: System.String +Parameter Sets: Get +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PassThru +Returns true when the command succeeds + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: Get, GetViaIdentity +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: List, Get +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String[] +Parameter Sets: List, Get +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WorkspaceName +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: List, Get +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+ +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IVirtualNetworkPeering + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksWorkspace.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksWorkspace.md new file mode 100644 index 000000000000..53ab130b9a0c --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksWorkspace.md @@ -0,0 +1,181 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/get-azdatabricksworkspace +schema: 2.0.0 +--- + +# Get-AzDatabricksWorkspace + +## SYNOPSIS +Gets the workspace. + +## SYNTAX + +### List1 (Default) +``` +Get-AzDatabricksWorkspace [-SubscriptionId ] [-DefaultProfile ] + [] +``` + +### Get +``` +Get-AzDatabricksWorkspace -Name -ResourceGroupName [-SubscriptionId ] + [-DefaultProfile ] [] +``` + +### List +``` +Get-AzDatabricksWorkspace -ResourceGroupName [-SubscriptionId ] [-DefaultProfile ] + [] +``` + +### GetViaIdentity +``` +Get-AzDatabricksWorkspace -InputObject [-DefaultProfile ] + [] +``` + +## DESCRIPTION +Gets the workspace. + +## EXAMPLES + +### Example 1: Get a Databricks workspace with name. +```powershell +Get-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t3 +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t3 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t3 +``` + +This command gets a Databricks workspace in a resource group. + +### Example 2: List all Databricks workspaces in a subscription. 
+```powershell +Get-AzDatabricksWorkspace +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t1 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t1 +azps-databricks-workspace-t2 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t2 +azps-databricks-workspace-t3 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t3 +``` + +This command lists all Databricks workspaces in a subscription. + +### Example 3: List all Databricks workspaces in a resource group. +```powershell +Get-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t1 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t1 +azps-databricks-workspace-t2 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t2 +azps-databricks-workspace-t3 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t3 +``` + +This command lists all Databricks workspaces in a resource group. + +## PARAMETERS + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity Parameter +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. 
+ +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: GetViaIdentity +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: Get +Aliases: WorkspaceName + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: Get, List +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String[] +Parameter Sets: List1, Get, List +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+
+## INPUTS
+
+### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity
+
+## OUTPUTS
+
+### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IWorkspace
+
+## NOTES
+
+## RELATED LINKS
diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksAccessConnector.md b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksAccessConnector.md
new file mode 100644
index 000000000000..7d07a0d568ce
--- /dev/null
+++ b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksAccessConnector.md
@@ -0,0 +1,239 @@
+---
+external help file: Az.Databricks-help.xml
+Module Name: Az.Databricks
+online version: https://learn.microsoft.com/powershell/module/az.databricks/new-azdatabricksaccessconnector
+schema: 2.0.0
+---
+
+# New-AzDatabricksAccessConnector
+
+## SYNOPSIS
+Creates or updates Azure Databricks Access Connector.
+
+## SYNTAX
+
+```
+New-AzDatabricksAccessConnector -Name <String> -ResourceGroupName <String> [-SubscriptionId <String>]
+ -Location <String> [-IdentityType <ManagedServiceIdentityType>] [-Tag <Hashtable>]
+ [-UserAssignedIdentity <Hashtable>] [-DefaultProfile <PSObject>] [-AsJob] [-NoWait]
+ [-WhatIf] [-Confirm] [<CommonParameters>]
+```
+
+## DESCRIPTION
+Creates or updates Azure Databricks Access Connector.
+
+## EXAMPLES
+
+### Example 1: Create or update an Azure Databricks Access Connector.
+```powershell
+New-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db -Name azps-databricks-accessconnector -Location eastus -IdentityType 'SystemAssigned'
+```
+
+```output
+Location Name                            ResourceGroupName
+-------- ----                            -----------------
+eastus   azps-databricks-accessconnector azps_test_gp_db
+```
+
+This command creates or updates an Azure Databricks Access Connector.
+ +## PARAMETERS + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -IdentityType +Type of managed service identity (where both SystemAssigned and UserAssigned types are allowed). + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.ManagedServiceIdentityType +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Location +The geo-location where the resource lives + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Name +The name of the Azure Databricks Access Connector. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Tag +Resource tags. + +```yaml +Type: System.Collections.Hashtable +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -UserAssignedIdentity +The set of user assigned identities associated with the resource. +The userAssignedIdentities dictionary keys will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. +The dictionary values can be empty objects ({}) in requests. + +```yaml +Type: System.Collections.Hashtable +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. 
+ +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IAccessConnector + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksVNetPeering.md b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksVNetPeering.md new file mode 100644 index 000000000000..7b8eabffa0fc --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksVNetPeering.md @@ -0,0 +1,317 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/new-azdatabricksvnetpeering +schema: 2.0.0 +--- + +# New-AzDatabricksVNetPeering + +## SYNOPSIS +Creates vNet Peering for workspace. + +## SYNTAX + +``` +New-AzDatabricksVNetPeering -Name -ResourceGroupName -WorkspaceName + [-SubscriptionId ] [-AllowForwardedTraffic] [-AllowGatewayTransit] [-AllowVirtualNetworkAccess] + [-DatabricksAddressSpacePrefix ] [-DatabricksVirtualNetworkId ] + [-RemoteAddressSpacePrefix ] [-RemoteVirtualNetworkId ] [-UseRemoteGateway] + [-DefaultProfile ] [-AsJob] [-NoWait] [-WhatIf] [-Confirm] + [] +``` + +## DESCRIPTION +Creates vNet Peering for workspace. + +## EXAMPLES + +### Example 1: Create a vnet peering for databricks. 
+```powershell +New-AzDatabricksVNetPeering -Name vnet-peering-t1 -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db -RemoteVirtualNetworkId '/subscriptions/{subId}/resourceGroups/azps_test_gp_db/providers/Microsoft.Network/virtualNetworks/azps-VNnet-t1' +``` + +```output +Name ResourceGroupName +---- ----------------- +vnet-peering-t1 azps_test_gp_db +``` + +This command creates a vnet peering for databricks. + +## PARAMETERS + +### -AllowForwardedTraffic +Whether the forwarded traffic from the VMs in the local virtual network will be allowed/disallowed in remote virtual network. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AllowGatewayTransit +If gateway links can be used in remote virtual networking to link to this virtual network. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AllowVirtualNetworkAccess +Whether the VMs in the local virtual network space would be able to access the VMs in remote virtual network space. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DatabricksAddressSpacePrefix +A list of address blocks reserved for this virtual network in CIDR notation. 
+ +```yaml +Type: System.String[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DatabricksVirtualNetworkId +The Id of the databricks virtual network. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Name +The name of the workspace vNet peering. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RemoteAddressSpacePrefix +A list of address blocks reserved for this virtual network in CIDR notation. + +```yaml +Type: System.String[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RemoteVirtualNetworkId +The Id of the remote virtual network. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -UseRemoteGateway +If remote gateways can be used on this virtual network. +If the flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network will use gateways of remote virtual network for transit. +Only one peering can have this flag set to true. +This flag cannot be set if virtual network already has a gateway. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WorkspaceName +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. 
+ +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IVirtualNetworkPeering + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspace.md b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspace.md new file mode 100644 index 000000000000..2d17fb3e7d3b --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspace.md @@ -0,0 +1,915 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/new-azdatabricksworkspace +schema: 2.0.0 +--- + +# New-AzDatabricksWorkspace + +## SYNOPSIS +Creates a new workspace. 
+ +## SYNTAX + +``` +New-AzDatabricksWorkspace -Name -ResourceGroupName [-SubscriptionId ] + -Location [-ManagedResourceGroupName ] [-AmlWorkspaceId ] + [-Authorization ] [-DefaultCatalogInitialType ] + [-EnableNoPublicIP] [-EncryptionKeyName ] [-EncryptionKeySource ] + [-EncryptionKeyVaultUri ] [-EncryptionKeyVersion ] [-LoadBalancerBackendPoolName ] + [-LoadBalancerId ] [-ManagedDiskKeySource ] + [-ManagedDiskKeyVaultPropertiesKeyName ] [-ManagedDiskKeyVaultPropertiesKeyVaultUri ] + [-ManagedDiskKeyVaultPropertiesKeyVersion ] [-ManagedDiskRotationToLatestKeyVersionEnabled] + [-ManagedServiceKeySource ] [-ManagedServicesKeyVaultPropertiesKeyName ] + [-ManagedServicesKeyVaultPropertiesKeyVaultUri ] + [-ManagedServicesKeyVaultPropertiesKeyVersion ] [-NatGatewayName ] [-PrepareEncryption] + [-PrivateSubnetName ] [-PublicIPName ] [-PublicNetworkAccess ] + [-PublicSubnetName ] [-RequireInfrastructureEncryption] [-RequiredNsgRule ] + [-Sku ] [-SkuTier ] [-StorageAccountName ] [-StorageAccountSku ] + [-Tag ] [-UiDefinitionUri ] [-VirtualNetworkId ] [-VnetAddressPrefix ] + [-EnhancedSecurityMonitoring ] + [-AutomaticClusterUpdate ] [-ComplianceStandard ] + [-EnhancedSecurityCompliance ] [-AccessConnectorId ] + [-AccessConnectorIdentityType ] [-AccessConnectorUserAssignedIdentityId ] + [-DefaultStorageFirewall ] [-DefaultProfile ] [-AsJob] [-NoWait] + [-WhatIf] [-Confirm] [] +``` + +## DESCRIPTION +Creates a new workspace. + +## EXAMPLES + +### Example 1: Create a Databricks workspace. +```powershell +New-AzDatabricksWorkspace -Name azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db -Location eastus -ManagedResourceGroupName azps_test_gp_kv_t1 -Sku Premium +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t1 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t1 +``` + +This command creates a Databricks workspace. 
+ +### Example 2: Create a Databricks workspace with a customized virtual network. +```powershell +$dlg = New-AzDelegation -Name dbrdl -ServiceName "Microsoft.Databricks/workspaces" +$rdpRule = New-AzNetworkSecurityRuleConfig -Name azps-network-security-rule -Description "Allow RDP" -Access Allow -Protocol Tcp -Direction Inbound -Priority 100 -SourceAddressPrefix Internet -SourcePortRange * -DestinationAddressPrefix * -DestinationPortRange 3389 +$networkSecurityGroup = New-AzNetworkSecurityGroup -ResourceGroupName azps_test_gp_db -Location eastus -Name azps-network-security-group -SecurityRules $rdpRule +$kvSubnet = New-AzVirtualNetworkSubnetConfig -Name azps-vnetwork-sub-kv -AddressPrefix "110.0.1.0/24" -ServiceEndpoint "Microsoft.KeyVault" +$priSubnet = New-AzVirtualNetworkSubnetConfig -Name azps-vnetwork-sub-pri -AddressPrefix "110.0.2.0/24" -NetworkSecurityGroup $networkSecurityGroup -Delegation $dlg +$pubSubnet = New-AzVirtualNetworkSubnetConfig -Name azps-vnetwork-sub-pub -AddressPrefix "110.0.3.0/24" -NetworkSecurityGroup $networkSecurityGroup -Delegation $dlg +$testVN = New-AzVirtualNetwork -Name azps-virtual-network -ResourceGroupName azps_test_gp_db -Location eastus -AddressPrefix "110.0.0.0/16" -Subnet $kvSubnet,$priSubnet,$pubSubnet +$vNetResId = (Get-AzVirtualNetwork -Name azps-virtual-network -ResourceGroupName azps_test_gp_db).Subnets[0].Id +$ruleSet = New-AzKeyVaultNetworkRuleSetObject -DefaultAction Allow -Bypass AzureServices -IpAddressRange "110.0.1.0/24" -VirtualNetworkResourceId $vNetResId +New-AzKeyVault -ResourceGroupName azps_test_gp_db -VaultName azps-keyvault -NetworkRuleSet $ruleSet -Location eastus -Sku 'Premium' -EnablePurgeProtection +New-AzDatabricksWorkspace -Name azps-databricks-workspace-t2 -ResourceGroupName azps_test_gp_db -Location eastus -ManagedResourceGroupName azps_test_gp_kv_t2 -VirtualNetworkId $testVN.Id -PrivateSubnetName $priSubnet.Name -PublicSubnetName $pubSubnet.Name -Sku Premium +``` + +```output +Name 
ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t2 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t2 +``` + +This command creates a Databricks workspace with customized virtual network in a resource group. + +### Example 3: Create a Databricks workspace with enable encryption. +```powershell +New-AzDatabricksWorkspace -Name azps-databricks-workspace-t3 -ResourceGroupName azps_test_gp_db -Location eastus -PrepareEncryption -ManagedResourceGroupName azps_test_gp_kv_t3 -Sku premium +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t3 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t3 +``` + +This command creates a Databricks workspace and sets it to prepare for encryption. +Please refer to the examples of Update-AzDatabricksWorkspace for more settings to encryption. + +## PARAMETERS + +### -AccessConnectorId +The resource ID of Azure Databricks Access Connector Resource. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AccessConnectorIdentityType +The identity type of the Access Connector Resource. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.IdentityType +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AccessConnectorUserAssignedIdentityId +The resource ID of the User Assigned Identity associated with the Access Connector Resource. +This is required for type 'UserAssigned' and not valid for type 'SystemAssigned'. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AmlWorkspaceId +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Authorization +The workspace provider authorizations. +To construct, see NOTES section for AUTHORIZATION properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IWorkspaceProviderAuthorization[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AutomaticClusterUpdate +Status of automated cluster updates feature. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.AutomaticClusterUpdateValue +Parameter Sets: (All) +Aliases: AutomaticClusterUpdateValue + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ComplianceStandard +Compliance standards associated with the workspace. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.ComplianceStandard[] +Parameter Sets: (All) +Aliases: ComplianceSecurityProfileComplianceStandard + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultCatalogInitialType +Defines the initial type of the default catalog. 
+Possible values (case-insensitive): HiveMetastore, UnityCatalog + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.InitialType +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultStorageFirewall +Gets or Sets Default Storage Firewall configuration information + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.DefaultStorageFirewall +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EnableNoPublicIP +The value which should be used for this field. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeyName +The name of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeySource +The encryption keySource (provider). 
+Possible values (case-insensitive): Default, Microsoft.Keyvault + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.KeySource +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeyVaultUri +The Uri of KeyVault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeyVersion +The version of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EnhancedSecurityCompliance +Status of Compliance Security Profile feature. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.ComplianceSecurityProfileValue +Parameter Sets: (All) +Aliases: ComplianceSecurityProfileValue + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EnhancedSecurityMonitoring +Status of Enhanced Security Monitoring feature. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.EnhancedSecurityMonitoringValue +Parameter Sets: (All) +Aliases: EnhancedSecurityMonitoringValue + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -LoadBalancerBackendPoolName +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -LoadBalancerId +The value which should be used for this field. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Location +The geo-location where the resource lives + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeySource +The encryption keySource (provider). +Possible values (case-insensitive): Microsoft.Keyvault + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.EncryptionKeySource +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeyVaultPropertiesKeyName +The name of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeyVaultPropertiesKeyVaultUri +The URI of KeyVault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeyVaultPropertiesKeyVersion +The version of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskRotationToLatestKeyVersionEnabled +Indicate whether the latest key version should be automatically used for Managed Disk Encryption. 
+ +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedResourceGroupName +The managed resource group Id. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServiceKeySource +The encryption keySource (provider). +Possible values (case-insensitive): Microsoft.Keyvault + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.EncryptionKeySource +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServicesKeyVaultPropertiesKeyName +The name of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServicesKeyVaultPropertiesKeyVaultUri +The Uri of KeyVault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServicesKeyVaultPropertiesKeyVersion +The version of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Name +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: WorkspaceName + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NatGatewayName +The value which should be used for this field. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PrepareEncryption +The value which should be used for this field. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PrivateSubnetName +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PublicIPName +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PublicNetworkAccess +The network access type for accessing workspace. +Set value to disabled to access workspace only via private link. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.PublicNetworkAccess +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PublicSubnetName +The value which should be used for this field. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RequiredNsgRule +Gets or sets a value indicating whether data plane (clusters) to control plane communication happen over private endpoint. +Supported values are 'AllRules' and 'NoAzureDatabricksRules'. +'NoAzureServiceRules' value is for internal use only. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.RequiredNsgRules +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RequireInfrastructureEncryption +The value which should be used for this field. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Sku +The SKU name. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SkuTier +The SKU tier. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -StorageAccountName +The value which should be used for this field. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -StorageAccountSku +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Tag +Resource tags. + +```yaml +Type: System.Collections.Hashtable +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -UiDefinitionUri +The blob URI where the UI definition file is located. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -VirtualNetworkId +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -VnetAddressPrefix +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. 
+ +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IWorkspace + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspaceProviderAuthorizationObject.md b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspaceProviderAuthorizationObject.md new file mode 100644 index 000000000000..1d83466872a2 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspaceProviderAuthorizationObject.md @@ -0,0 +1,84 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/Az.Databricks/new-AzDatabricksWorkspaceProviderAuthorizationObject +schema: 2.0.0 +--- + +# New-AzDatabricksWorkspaceProviderAuthorizationObject + +## SYNOPSIS +Create an in-memory object for WorkspaceProviderAuthorization. + +## SYNTAX + +``` +New-AzDatabricksWorkspaceProviderAuthorizationObject -PrincipalId -RoleDefinitionId + [] +``` + +## DESCRIPTION +Create an in-memory object for WorkspaceProviderAuthorization. 
+ +## EXAMPLES + +### Example 1: Create an in-memory object for WorkspaceProviderAuthorization. +```powershell +New-AzDatabricksWorkspaceProviderAuthorizationObject -PrincipalId 024d7367-0890-4ad3-8140-e37374722820 -RoleDefinitionId 2124844c-7e23-48cc-bc52-a3af25f7a4ae +``` + +```output +PrincipalId RoleDefinitionId +----------- ---------------- +024d7367-0890-4ad3-8140-e37374722820 2124844c-7e23-48cc-bc52-a3af25f7a4ae +``` + +Create an in-memory object for WorkspaceProviderAuthorization. + +## PARAMETERS + +### -PrincipalId +The provider's principal identifier. +This is the identity that the provider will use to call ARM to manage the workspace resources. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RoleDefinitionId +The provider's role definition identifier. +This role will define all the permissions that the provider must have on the workspace's container resource group. +This role definition cannot have permission to delete the resource group. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+ +## INPUTS + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.WorkspaceProviderAuthorization + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksAccessConnector.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksAccessConnector.md new file mode 100644 index 000000000000..ffeec43eeb83 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksAccessConnector.md @@ -0,0 +1,217 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/remove-azdatabricksaccessconnector +schema: 2.0.0 +--- + +# Remove-AzDatabricksAccessConnector + +## SYNOPSIS +Deletes the Azure Databricks Access Connector. + +## SYNTAX + +### Delete (Default) +``` +Remove-AzDatabricksAccessConnector -Name -ResourceGroupName [-SubscriptionId ] + [-DefaultProfile ] [-AsJob] [-NoWait] [-PassThru] [-WhatIf] + [-Confirm] [] +``` + +### DeleteViaIdentity +``` +Remove-AzDatabricksAccessConnector -InputObject [-DefaultProfile ] [-AsJob] + [-NoWait] [-PassThru] [-WhatIf] [-Confirm] [] +``` + +## DESCRIPTION +Deletes the Azure Databricks Access Connector. + +## EXAMPLES + +### Example 1: Deletes the azure databricks accessConnector. +```powershell +Remove-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db -Name azps-databricks-accessconnector +``` + +This command deletes the azure databricks accessConnector. + +### Example 2: Deletes the azure databricks accessConnector by pipeline. +```powershell +Get-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db -Name azps-databricks-accessconnector | Remove-AzDatabricksAccessConnector +``` + +This command deletes the azure databricks accessConnector by pipeline. 
+ +## PARAMETERS + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity Parameter +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: DeleteViaIdentity +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the Azure Databricks Access Connector. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PassThru +Returns true when the command succeeds + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. 
+ +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+ +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### System.Boolean + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksVNetPeering.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksVNetPeering.md new file mode 100644 index 000000000000..4ec972648ab7 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksVNetPeering.md @@ -0,0 +1,232 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/remove-azdatabricksvnetpeering +schema: 2.0.0 +--- + +# Remove-AzDatabricksVNetPeering + +## SYNOPSIS +Deletes the workspace vNetPeering. + +## SYNTAX + +### Delete (Default) +``` +Remove-AzDatabricksVNetPeering -Name -ResourceGroupName [-SubscriptionId ] + -WorkspaceName [-DefaultProfile ] [-AsJob] [-NoWait] [-PassThru] + [-WhatIf] [-Confirm] [] +``` + +### DeleteViaIdentity +``` +Remove-AzDatabricksVNetPeering -InputObject [-DefaultProfile ] [-AsJob] + [-NoWait] [-PassThru] [-WhatIf] [-Confirm] [] +``` + +## DESCRIPTION +Deletes the workspace vNetPeering. + +## EXAMPLES + +### Example 1: Remove a vnet peering of databricks by name. +```powershell +Remove-AzDatabricksVNetPeering -Name vnet-peering-t1 -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db +``` + +This command removes a vnet peering of databricks by name. + +### Example 2: Remove a vnet peering of databricks by object. +```powershell +Get-AzDatabricksVNetPeering -Name vnet-peering-t1 -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db | Remove-AzDatabricksVNetPeering +``` + +This command removes a vnet peering of databricks by object. 
+ +## PARAMETERS + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity Parameter +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: DeleteViaIdentity +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the workspace vNet peering. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PassThru +Returns true when the command succeeds + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. 
+ +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WorkspaceName +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+ +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### System.Boolean + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksWorkspace.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksWorkspace.md new file mode 100644 index 000000000000..9fdfa9fe5a20 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksWorkspace.md @@ -0,0 +1,233 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/remove-azdatabricksworkspace +schema: 2.0.0 +--- + +# Remove-AzDatabricksWorkspace + +## SYNOPSIS +Deletes the workspace. + +## SYNTAX + +### Delete (Default) +``` +Remove-AzDatabricksWorkspace -Name -ResourceGroupName [-SubscriptionId ] + [-ForceDeletion] [-DefaultProfile ] [-AsJob] [-NoWait] [-PassThru] + [-WhatIf] [-Confirm] [] +``` + +### DeleteViaIdentity +``` +Remove-AzDatabricksWorkspace -InputObject [-ForceDeletion] [-DefaultProfile ] + [-AsJob] [-NoWait] [-PassThru] [-WhatIf] [-Confirm] [] +``` + +## DESCRIPTION +Deletes the workspace. + +## EXAMPLES + +### Example 1: Remove a Databricks workspace. +```powershell +Remove-AzDatabricksWorkspace -Name azps-databricks-workspace -ResourceGroupName azps_test_gp_db +``` + +This command removes a Databricks workspace from a resource group. + +### Example 2: Remove a Databricks workspace by object. +```powershell +Get-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t3 | Remove-AzDatabricksWorkspace +``` + +This command removes a Databricks workspace from a resource group. 
+
+## PARAMETERS
+
+### -AsJob
+Run the command as a job
+
+```yaml
+Type: System.Management.Automation.SwitchParameter
+Parameter Sets: (All)
+Aliases:
+
+Required: False
+Position: Named
+Default value: None
+Accept pipeline input: False
+Accept wildcard characters: False
+```
+
+### -DefaultProfile
+The DefaultProfile parameter is not functional.
+Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription.
+
+```yaml
+Type: System.Management.Automation.PSObject
+Parameter Sets: (All)
+Aliases: AzureRMContext, AzureCredential
+
+Required: False
+Position: Named
+Default value: None
+Accept pipeline input: False
+Accept wildcard characters: False
+```
+
+### -ForceDeletion
+Optional parameter to retain default unity catalog data.
+By default the data will be retained if Uc is enabled on the workspace.
+
+```yaml
+Type: System.Management.Automation.SwitchParameter
+Parameter Sets: (All)
+Aliases:
+
+Required: False
+Position: Named
+Default value: None
+Accept pipeline input: False
+Accept wildcard characters: False
+```
+
+### -InputObject
+Identity Parameter
+To construct, see NOTES section for INPUTOBJECT properties and create a hash table.
+
+```yaml
+Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity
+Parameter Sets: DeleteViaIdentity
+Aliases:
+
+Required: True
+Position: Named
+Default value: None
+Accept pipeline input: True (ByValue)
+Accept wildcard characters: False
+```
+
+### -Name
+The name of the workspace. 
+ +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: WorkspaceName + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PassThru +Returns true when the command succeeds + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. 
+ +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### System.Boolean + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksAccessConnector.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksAccessConnector.md new file mode 100644 index 000000000000..f78a0c5a8782 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksAccessConnector.md @@ -0,0 +1,264 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/update-azdatabricksaccessconnector +schema: 2.0.0 +--- + +# Update-AzDatabricksAccessConnector + +## SYNOPSIS +Updates an Azure Databricks Access Connector. + +## SYNTAX + +### UpdateExpanded (Default) +``` +Update-AzDatabricksAccessConnector -Name -ResourceGroupName [-SubscriptionId ] + [-IdentityType ] [-IdentityUserAssignedIdentity ] [-Tag ] + [-DefaultProfile ] [-AsJob] [-NoWait] [-WhatIf] [-Confirm] + [] +``` + +### UpdateViaIdentityExpanded +``` +Update-AzDatabricksAccessConnector -InputObject + [-IdentityType ] [-IdentityUserAssignedIdentity ] [-Tag ] + [-DefaultProfile ] [-AsJob] [-NoWait] [-WhatIf] [-Confirm] + [] +``` + +## DESCRIPTION +Updates an Azure Databricks Access Connector. 
+ +## EXAMPLES + +### Example 1: Updates an azure databricks accessConnector. +```powershell +Update-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db -Name azps-databricks-accessconnector -Tag @{'key'='value'} +``` + +```output +Location Name ResourceGroupName +-------- ---- ----------------- +eastus azps-databricks-accessconnector azps_test_gp_db +``` + +This command updates an azure databricks accessConnector. + +### Example 2: Updates an azure databricks accessConnector by pipeline. +```powershell +Get-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db -Name azps-databricks-accessconnector | Update-AzDatabricksAccessConnector -Tag @{'key'='value'} +``` + +```output +Location Name ResourceGroupName +-------- ---- ----------------- +eastus azps-databricks-accessconnector azps_test_gp_db +``` + +This command updates an azure databricks accessConnector by pipeline. + +## PARAMETERS + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -IdentityType +Type of managed service identity (where both SystemAssigned and UserAssigned types are allowed). 
+
+```yaml
+Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.ManagedServiceIdentityType
+Parameter Sets: (All)
+Aliases:
+
+Required: False
+Position: Named
+Default value: None
+Accept pipeline input: False
+Accept wildcard characters: False
+```
+
+### -IdentityUserAssignedIdentity
+The set of user assigned identities associated with the resource.
+The userAssignedIdentities dictionary keys will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
+The dictionary values can be empty objects ({}) in requests.
+
+```yaml
+Type: System.Collections.Hashtable
+Parameter Sets: (All)
+Aliases:
+
+Required: False
+Position: Named
+Default value: None
+Accept pipeline input: False
+Accept wildcard characters: False
+```
+
+### -InputObject
+Identity Parameter
+To construct, see NOTES section for INPUTOBJECT properties and create a hash table.
+
+```yaml
+Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity
+Parameter Sets: UpdateViaIdentityExpanded
+Aliases:
+
+Required: True
+Position: Named
+Default value: None
+Accept pipeline input: True (ByValue)
+Accept wildcard characters: False
+```
+
+### -Name
+The name of the Azure Databricks Access Connector.
+
+```yaml
+Type: System.String
+Parameter Sets: UpdateExpanded
+Aliases:
+
+Required: True
+Position: Named
+Default value: None
+Accept pipeline input: False
+Accept wildcard characters: False
+```
+
+### -NoWait
+Run the command asynchronously
+
+```yaml
+Type: System.Management.Automation.SwitchParameter
+Parameter Sets: (All)
+Aliases:
+
+Required: False
+Position: Named
+Default value: None
+Accept pipeline input: False
+Accept wildcard characters: False
+```
+
+### -ResourceGroupName
+The name of the resource group.
+The name is case insensitive.
+ +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Tag +Resource tags. + +```yaml +Type: System.Collections.Hashtable +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+ +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IAccessConnector + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksVNetPeering.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksVNetPeering.md new file mode 100644 index 000000000000..2db4e1fea5c4 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksVNetPeering.md @@ -0,0 +1,362 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/update-azdatabricksvnetpeering +schema: 2.0.0 +--- + +# Update-AzDatabricksVNetPeering + +## SYNOPSIS +Update vNet Peering for workspace. + +## SYNTAX + +### UpdateExpanded (Default) +``` +Update-AzDatabricksVNetPeering -Name -ResourceGroupName -WorkspaceName + [-SubscriptionId ] [-AllowForwardedTraffic ] [-AllowGatewayTransit ] + [-AllowVirtualNetworkAccess ] [-DatabricksAddressSpacePrefix ] + [-DatabricksVirtualNetworkId ] [-RemoteAddressSpacePrefix ] + [-RemoteVirtualNetworkId ] [-UseRemoteGateway ] [-DefaultProfile ] [-AsJob] + [-NoWait] [-WhatIf] [-Confirm] [] +``` + +### UpdateViaIdentityExpanded +``` +Update-AzDatabricksVNetPeering -InputObject [-AllowForwardedTraffic ] + [-AllowGatewayTransit ] [-AllowVirtualNetworkAccess ] + [-DatabricksAddressSpacePrefix ] [-DatabricksVirtualNetworkId ] + [-RemoteAddressSpacePrefix ] [-RemoteVirtualNetworkId ] [-UseRemoteGateway ] + [-DefaultProfile ] [-AsJob] [-NoWait] [-WhatIf] [-Confirm] + [] +``` + +## DESCRIPTION +Update vNet Peering for workspace. + +## EXAMPLES + +### Example 1: Update AllowForwardedTraffic of vnet peering. 
+```powershell
+Update-AzDatabricksVNetPeering -Name vnet-peering-t1 -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db -AllowForwardedTraffic $True
+```
+
+```output
+Name ResourceGroupName
+---- -----------------
+vnet-peering-t1 azps_test_gp_db
+```
+
+This command updates AllowForwardedTraffic of vnet peering.
+
+### Example 2: Update AllowGatewayTransit of vnet peering by object.
+```powershell
+Get-AzDatabricksVNetPeering -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db -Name vnet-peering-t1 | Update-AzDatabricksVNetPeering -AllowGatewayTransit $true
+```
+
+```output
+Name ResourceGroupName
+---- -----------------
+vnet-peering-t1 azps_test_gp_db
+```
+
+This command updates AllowGatewayTransit of vnet peering by object.
+
+## PARAMETERS
+
+### -AllowForwardedTraffic
+[System.Management.Automation.SwitchParameter]
+Whether the forwarded traffic from the VMs in the local virtual network will be allowed/disallowed in remote virtual network.
+
+```yaml
+Type: System.Boolean
+Parameter Sets: (All)
+Aliases:
+
+Required: False
+Position: Named
+Default value: None
+Accept pipeline input: False
+Accept wildcard characters: False
+```
+
+### -AllowGatewayTransit
+[System.Management.Automation.SwitchParameter]
+If gateway links can be used in remote virtual networking to link to this virtual network.
+
+```yaml
+Type: System.Boolean
+Parameter Sets: (All)
+Aliases:
+
+Required: False
+Position: Named
+Default value: None
+Accept pipeline input: False
+Accept wildcard characters: False
+```
+
+### -AllowVirtualNetworkAccess
+[System.Management.Automation.SwitchParameter]
+Whether the VMs in the local virtual network space would be able to access the VMs in remote virtual network space.
+ +```yaml +Type: System.Boolean +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DatabricksAddressSpacePrefix +A list of address blocks reserved for this virtual network in CIDR notation. + +```yaml +Type: System.String[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DatabricksVirtualNetworkId +The Id of the databricks virtual network. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity parameter. +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: UpdateViaIdentityExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the VNetPeering. 
+ +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: PeeringName + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RemoteAddressSpacePrefix +A list of address blocks reserved for this virtual network in CIDR notation. + +```yaml +Type: System.String[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RemoteVirtualNetworkId +The Id of the remote virtual network. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. + +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -UseRemoteGateway +[System.Management.Automation.SwitchParameter] +If remote gateways can be used on this virtual network. +If the flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network will use gateways of remote virtual network for transit. +Only one peering can have this flag set to true. 
+This flag cannot be set if virtual network already has a gateway. + +```yaml +Type: System.Boolean +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WorkspaceName +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+ +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IVirtualNetworkPeering + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksWorkspace.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksWorkspace.md new file mode 100644 index 000000000000..174152dd46df --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksWorkspace.md @@ -0,0 +1,799 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/update-azdatabricksworkspace +schema: 2.0.0 +--- + +# Update-AzDatabricksWorkspace + +## SYNOPSIS +Updates a workspace. + +## SYNTAX + +### UpdateExpanded (Default) +``` +Update-AzDatabricksWorkspace -Name -ResourceGroupName [-SubscriptionId ] + [-PrepareEncryption] [-EncryptionKeySource ] [-EncryptionKeyVaultUri ] + [-EncryptionKeyName ] [-EncryptionKeyVersion ] [-KeyVaultKeyName ] + [-KeyVaultKeyVersion ] [-KeyVaultUri ] [-AmlWorkspaceId ] [-SkuTier ] + [-Authorization ] [-DefaultCatalogInitialType ] + [-ManagedDiskKeySource ] [-ManagedDiskKeyVaultPropertiesKeyName ] + [-ManagedDiskKeyVaultPropertiesKeyVaultUri ] [-ManagedDiskKeyVaultPropertiesKeyVersion ] + [-ManagedDiskRotationToLatestKeyVersionEnabled] [-ManagedServiceKeySource ] + [-ManagedServicesKeyVaultPropertiesKeyName ] [-ManagedServicesKeyVaultPropertiesKeyVaultUri ] + [-ManagedServicesKeyVaultPropertiesKeyVersion ] [-UiDefinitionUri ] [-Tag ] + [-RequiredNsgRule ] [-PublicNetworkAccess ] [-EnableNoPublicIP] + [-EnhancedSecurityMonitoring ] + [-AutomaticClusterUpdate ] [-ComplianceStandard ] + [-EnhancedSecurityCompliance ] [-AccessConnectorId ] + [-AccessConnectorIdentityType ] [-AccessConnectorUserAssignedIdentityId ] + [-DefaultStorageFirewall ] [-DefaultProfile ] [-AsJob] 
[-NoWait] + [-WhatIf] [-Confirm] [] +``` + +### UpdateViaIdentityExpanded +``` +Update-AzDatabricksWorkspace -InputObject [-PrepareEncryption] + [-EncryptionKeySource ] [-EncryptionKeyVaultUri ] [-EncryptionKeyName ] + [-EncryptionKeyVersion ] [-KeyVaultKeyName ] [-KeyVaultKeyVersion ] + [-KeyVaultUri ] [-AmlWorkspaceId ] [-SkuTier ] + [-Authorization ] [-DefaultCatalogInitialType ] + [-ManagedDiskKeySource ] [-ManagedDiskKeyVaultPropertiesKeyName ] + [-ManagedDiskKeyVaultPropertiesKeyVaultUri ] [-ManagedDiskKeyVaultPropertiesKeyVersion ] + [-ManagedDiskRotationToLatestKeyVersionEnabled] [-ManagedServiceKeySource ] + [-ManagedServicesKeyVaultPropertiesKeyName ] [-ManagedServicesKeyVaultPropertiesKeyVaultUri ] + [-ManagedServicesKeyVaultPropertiesKeyVersion ] [-UiDefinitionUri ] [-Tag ] + [-RequiredNsgRule ] [-PublicNetworkAccess ] [-EnableNoPublicIP] + [-EnhancedSecurityMonitoring ] + [-AutomaticClusterUpdate ] [-ComplianceStandard ] + [-EnhancedSecurityCompliance ] [-AccessConnectorId ] + [-AccessConnectorIdentityType ] [-AccessConnectorUserAssignedIdentityId ] + [-DefaultStorageFirewall ] [-DefaultProfile ] [-AsJob] [-NoWait] + [-WhatIf] [-Confirm] [] +``` + +## DESCRIPTION +Updates a workspace. + +## EXAMPLES + +### Example 1: Updates the tags of a Databricks workspace. +```powershell +Get-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t1 | Update-AzDatabricksWorkspace -Tag @{"key"="value"} +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t1 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t1 +``` + +This command updates the tags of a Databricks workspace. + +### Example 2: Enable encryption on a Databricks workspace. 
+
+```powershell
+Update-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t2 -PrepareEncryption
+$updWsp = Get-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t2
+Set-AzKeyVaultAccessPolicy -VaultName azps-keyvault -ObjectId $updWsp.StorageAccountIdentityPrincipalId -PermissionsToKeys wrapkey,unwrapkey,get
+Update-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t2 -EncryptionKeySource 'Microsoft.KeyVault' -EncryptionKeyVaultUri https://azps-keyvault.vault.azure.net/ -EncryptionKeyName azps-k1 -EncryptionKeyVersion a563a8021cba47109d93bd6d690621a7
+```
+
+```output
+Name ResourceGroupName Location Managed Resource Group ID
+---- ----------------- -------- -------------------------
+azps-databricks-workspace-t2 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t2
+```
+
+Enabling encryption on a Databricks workspace takes the following steps:
+1.Please make sure that KeyVault has Purge protection enabled.
+2.Update the workspace with `-PrepareEncryption` (if it was not created so).
+3.Find `StorageAccountIdentityPrincipalId` in the output of the last step and grant key permissions to the principal.
+4.Update the workspace again to fill in information about the encryption key:
+ - `-EncryptionKeySource`
+ - `-EncryptionKeyVaultUri`
+ - `-EncryptionKeyName`
+ - `-EncryptionKeyVersion`
+5.Important! Please read the information in the following document in detail: https://learn.microsoft.com/en-us/azure/databricks/security/keys/cmk-managed-services-azure/customer-managed-key-managed-services-azure?WT.mc_id=Portal-Microsoft_Azure_Databricks#--use-the-azure-portal
+
+### Example 3: Disable encryption on a Databricks workspace.
+```powershell +Update-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t3 -EncryptionKeySource 'Default' +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t3 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t3 +``` + +To disable encryption, simply set `-EncryptionKeySource` to `'Default'`. + +### Example 4: Update NsgRule of the Databricks workspace. +```powershell +Update-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t2 -RequiredNsgRule 'AllRules' +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t2 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t2 +``` + +This command updates NsgRule of the Databricks workspace. + +## PARAMETERS + +### -AccessConnectorId +The resource ID of Azure Databricks Access Connector Resource. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AccessConnectorIdentityType +The identity type of the Access Connector Resource. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.IdentityType +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AccessConnectorUserAssignedIdentityId +The resource ID of the User Assigned Identity associated with the Access Connector Resource. +This is required for type 'UserAssigned' and not valid for type 'SystemAssigned'. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AmlWorkspaceId +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Authorization +The workspace provider authorizations. +To construct, see NOTES section for AUTHORIZATION properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IWorkspaceProviderAuthorization[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AutomaticClusterUpdate +Status of automated cluster updates feature. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.AutomaticClusterUpdateValue +Parameter Sets: (All) +Aliases: AutomaticClusterUpdateValue + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ComplianceStandard +Compliance standards associated with the workspace. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.ComplianceStandard[] +Parameter Sets: (All) +Aliases: ComplianceSecurityProfileComplianceStandard + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultCatalogInitialType +Defines the initial type of the default catalog. 
+Possible values (case-insensitive): HiveMetastore, UnityCatalog + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.InitialType +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The credentials, account, tenant, and subscription used for communication with Azure. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultStorageFirewall +Gets or Sets Default Storage Firewall configuration information + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.DefaultStorageFirewall +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EnableNoPublicIP +The value which should be used for this field. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeyName +The name of Key Vault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeySource +The encryption keySource (provider). 
+Possible values (case-insensitive): Default, Microsoft.Keyvault + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.KeySource +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeyVaultUri +The URI (DNS name) of the Key Vault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeyVersion +The version of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EnhancedSecurityCompliance +Status of Compliance Security Profile feature. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.ComplianceSecurityProfileValue +Parameter Sets: (All) +Aliases: ComplianceSecurityProfileValue + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EnhancedSecurityMonitoring +Status of Enhanced Security Monitoring feature. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.EnhancedSecurityMonitoringValue +Parameter Sets: (All) +Aliases: EnhancedSecurityMonitoringValue + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity parameter. +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. 
+ +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: UpdateViaIdentityExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -KeyVaultKeyName +The name of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -KeyVaultKeyVersion +The version of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -KeyVaultUri +The Uri of KeyVault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeySource +The encryption keySource (provider). +Possible values (case-insensitive): Microsoft.Keyvault + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.EncryptionKeySource +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeyVaultPropertiesKeyName +The name of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeyVaultPropertiesKeyVaultUri +The URI of KeyVault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeyVaultPropertiesKeyVersion +The version of KeyVault key. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskRotationToLatestKeyVersionEnabled +Indicate whether the latest key version should be automatically used for Managed Disk Encryption. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServiceKeySource +The encryption keySource (provider). +Possible values (case-insensitive): Microsoft.Keyvault + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.EncryptionKeySource +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServicesKeyVaultPropertiesKeyName +The name of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServicesKeyVaultPropertiesKeyVaultUri +The Uri of KeyVault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServicesKeyVaultPropertiesKeyVersion +The version of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Name +The name of the workspace. 
+ +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: WorkspaceName + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PrepareEncryption +Prepare the workspace for encryption. +Enables the Managed Identity for managed storage account. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PublicNetworkAccess +The network access type for accessing workspace. +Set value to disabled to access workspace only via private link. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.PublicNetworkAccess +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RequiredNsgRule +Gets or sets a value indicating whether data plane (clusters) to control plane communication happen over private endpoint. +Supported values are 'AllRules' and 'NoAzureDatabricksRules'. +'NoAzureServiceRules' value is for internal use only. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.RequiredNsgRules +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. 
+ +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SkuTier +The SKU tier. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. + +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Tag +Resource tags. + +```yaml +Type: System.Collections.Hashtable +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -UiDefinitionUri +The blob URI where the UI definition file is located. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. 
+ +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IWorkspace + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/metadata.md b/tools/Mcp/src/ideal-modules/Databricks/metadata.md new file mode 100644 index 000000000000..51eb8e7928cb --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/metadata.md @@ -0,0 +1,57 @@ +--- +Module Name: Az.Databricks +Module Guid: fd603f36-03d8-47f4-9f7c-c13a78761936 +Download Help Link: https://learn.microsoft.com/powershell/module/az.databricks +Help Version: 1.0.0.0 +Locale: en-US +--- + +# Az.Databricks Module +## Description +Microsoft Azure PowerShell: Databricks cmdlets + +## Az.Databricks Cmdlets +### [Get-AzDatabricksAccessConnector](Get-AzDatabricksAccessConnector.md) +Gets an Azure Databricks Access Connector. + +### [Get-AzDatabricksOutboundNetworkDependenciesEndpoint](Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md) +Gets the list of endpoints that VNET Injected Workspace calls Azure Databricks Control Plane. +You must configure outbound access with these endpoints. +For more information, see https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/udr + +### [Get-AzDatabricksVNetPeering](Get-AzDatabricksVNetPeering.md) +Gets the workspace vNet Peering. 
+ +### [Get-AzDatabricksWorkspace](Get-AzDatabricksWorkspace.md) +Gets the workspace. + +### [New-AzDatabricksAccessConnector](New-AzDatabricksAccessConnector.md) +Creates or updates Azure Databricks Access Connector. + +### [New-AzDatabricksVNetPeering](New-AzDatabricksVNetPeering.md) +Creates vNet Peering for workspace. + +### [New-AzDatabricksWorkspace](New-AzDatabricksWorkspace.md) +Creates a new workspace. + +### [New-AzDatabricksWorkspaceProviderAuthorizationObject](New-AzDatabricksWorkspaceProviderAuthorizationObject.md) +Create an in-memory object for WorkspaceProviderAuthorization. + +### [Remove-AzDatabricksAccessConnector](Remove-AzDatabricksAccessConnector.md) +Deletes the Azure Databricks Access Connector. + +### [Remove-AzDatabricksVNetPeering](Remove-AzDatabricksVNetPeering.md) +Deletes the workspace vNetPeering. + +### [Remove-AzDatabricksWorkspace](Remove-AzDatabricksWorkspace.md) +Deletes the workspace. + +### [Update-AzDatabricksAccessConnector](Update-AzDatabricksAccessConnector.md) +Updates an Azure Databricks Access Connector. + +### [Update-AzDatabricksVNetPeering](Update-AzDatabricksVNetPeering.md) +Update vNet Peering for workspace. + +### [Update-AzDatabricksWorkspace](Update-AzDatabricksWorkspace.md) +Updates a workspace. + diff --git a/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksAccessConnector.Tests.ps1 b/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksAccessConnector.Tests.ps1 new file mode 100644 index 000000000000..8b04726171bd --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksAccessConnector.Tests.ps1 @@ -0,0 +1,65 @@ +if (($null -eq $TestName) -or ($TestName -contains 'AzDatabricksAccessConnector')) { + $loadEnvPath = Join-Path $PSScriptRoot 'loadEnv.ps1' + if (-Not (Test-Path -Path $loadEnvPath)) { + $loadEnvPath = Join-Path $PSScriptRoot '..\loadEnv.ps1' + } + . 
($loadEnvPath) + $TestRecordingFile = Join-Path $PSScriptRoot 'AzDatabricksAccessConnector.Recording.json' + $currentPath = $PSScriptRoot + while (-not $mockingPath) { + $mockingPath = Get-ChildItem -Path $currentPath -Recurse -Include 'HttpPipelineMocking.ps1' -File + $currentPath = Split-Path -Path $currentPath -Parent + } + . ($mockingPath | Select-Object -First 1).FullName +} + +Describe 'AzDatabricksAccessConnector' { + It 'CreateExpanded' { + { + $config = New-AzDatabricksAccessConnector -ResourceGroupName $env.resourceGroup -Name $env.accessConnectorName1 -Location $env.location -IdentityType 'SystemAssigned' + $config.Name | Should -Be $env.accessConnectorName1 + } | Should -Not -Throw + } + + It 'List1' { + { + $config = Get-AzDatabricksAccessConnector -ResourceGroupName $env.resourceGroup + $config.Count | Should -BeGreaterThan 0 + } | Should -Not -Throw + } + + It 'Get' { + { + $config = Get-AzDatabricksAccessConnector -ResourceGroupName $env.resourceGroup -Name $env.accessConnectorName1 + $config.Name | Should -Be $env.accessConnectorName1 + } | Should -Not -Throw + } + + It 'List' { + { + $config = Get-AzDatabricksAccessConnector + $config.Count | Should -BeGreaterThan 0 + } | Should -Not -Throw + } + + It 'UpdateExpanded' { + { + $config = Update-AzDatabricksAccessConnector -ResourceGroupName $env.resourceGroup -Name $env.accessConnectorName1 -Tag @{'key' = 'value' } + $config.Name | Should -Be $env.accessConnectorName1 + } | Should -Not -Throw + } + + It 'UpdateViaIdentityExpanded' { + { + $config = Get-AzDatabricksAccessConnector -ResourceGroupName $env.resourceGroup -Name $env.accessConnectorName1 + $config = Update-AzDatabricksAccessConnector -InputObject $config -Tag @{'key' = 'value' } + $config.Name | Should -Be $env.accessConnectorName1 + } | Should -Not -Throw + } + + It 'Delete' { + { + Remove-AzDatabricksAccessConnector -ResourceGroupName $env.resourceGroup -Name $env.accessConnectorName1 + } | Should -Not -Throw + } +} \ No newline at end 
of file diff --git a/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksVNetPeering.Tests.ps1 b/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksVNetPeering.Tests.ps1 new file mode 100644 index 000000000000..d23158b163bc --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksVNetPeering.Tests.ps1 @@ -0,0 +1,58 @@ +if (($null -eq $TestName) -or ($TestName -contains 'AzDatabricksVNetPeering')) { + $loadEnvPath = Join-Path $PSScriptRoot 'loadEnv.ps1' + if (-Not (Test-Path -Path $loadEnvPath)) { + $loadEnvPath = Join-Path $PSScriptRoot '..\loadEnv.ps1' + } + . ($loadEnvPath) + $TestRecordingFile = Join-Path $PSScriptRoot 'AzDatabricksVNetPeering.Recording.json' + $currentPath = $PSScriptRoot + while (-not $mockingPath) { + $mockingPath = Get-ChildItem -Path $currentPath -Recurse -Include 'HttpPipelineMocking.ps1' -File + $currentPath = Split-Path -Path $currentPath -Parent + } + . ($mockingPath | Select-Object -First 1).FullName +} + +Describe 'AzDatabricksVNetPeering' { + It 'CreateExpanded' { + { + $config = New-AzDatabricksVNetPeering -Name $env.vNetName1 -WorkspaceName $env.workSpaceName3 -ResourceGroupName $env.resourceGroup -RemoteVirtualNetworkId "/subscriptions/$($env.SubscriptionId)/resourceGroups/$($env.resourceGroup)/providers/Microsoft.Network/virtualNetworks/$($env.vNetName)" + $config.Name | Should -Be $env.vNetName1 + } | Should -Not -Throw + } + + It 'List' { + { + $config = Get-AzDatabricksVNetPeering -WorkspaceName $env.workSpaceName3 -ResourceGroupName $env.resourceGroup + $config.Count | Should -BeGreaterThan 0 + } | Should -Not -Throw + } + + It 'Get' -Skip { + { + $config = Get-AzDatabricksVNetPeering -WorkspaceName $env.workSpaceName3 -ResourceGroupName $env.resourceGroup -Name $env.vNetName1 + $config.Name | Should -Be $env.vNetName1 + } | Should -Not -Throw + } + + It 'UpdateExpanded' -Skip { + { + $config = Update-AzDatabricksVNetPeering -WorkspaceName $env.workSpaceName3 -ResourceGroupName 
$env.resourceGroup -Name $env.vNetName1 -AllowForwardedTraffic $True + $config.Name | Should -Be $env.vNetName1 + } | Should -Not -Throw + } + + It 'UpdateViaIdentityExpanded' -Skip { + { + $config = Get-AzDatabricksVNetPeering -WorkspaceName $env.workSpaceName3 -ResourceGroupName $env.resourceGroup -Name $env.vNetName1 + $config = Update-AzDatabricksVNetPeering -InputObject $config -AllowForwardedTraffic $True + $config.Name | Should -Be $env.vNetName1 + } | Should -Not -Throw + } + + It 'Delete' { + { + Remove-AzDatabricksVNetPeering -WorkspaceName $env.workSpaceName3 -ResourceGroupName $env.resourceGroup -Name $env.vNetName1 + } | Should -Not -Throw + } +} \ No newline at end of file diff --git a/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksWorkspace.Tests.ps1 b/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksWorkspace.Tests.ps1 new file mode 100644 index 000000000000..62806d2d23b2 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksWorkspace.Tests.ps1 @@ -0,0 +1,86 @@ +if (($null -eq $TestName) -or ($TestName -contains 'AzDatabricksWorkspace')) { + $loadEnvPath = Join-Path $PSScriptRoot 'loadEnv.ps1' + if (-Not (Test-Path -Path $loadEnvPath)) { + $loadEnvPath = Join-Path $PSScriptRoot '..\loadEnv.ps1' + } + . ($loadEnvPath) + $TestRecordingFile = Join-Path $PSScriptRoot 'AzDatabricksWorkspace.Recording.json' + $currentPath = $PSScriptRoot + while (-not $mockingPath) { + $mockingPath = Get-ChildItem -Path $currentPath -Recurse -Include 'HttpPipelineMocking.ps1' -File + $currentPath = Split-Path -Path $currentPath -Parent + } + . 
($mockingPath | Select-Object -First 1).FullName +} + +Describe 'AzDatabricksWorkspace' { + It 'CreateExpanded' { + { + $config = New-AzDatabricksWorkspace -Name $env.workSpaceName2 -ResourceGroupName $env.resourceGroup -Location $env.location -Sku premium + $config.Name | Should -Be $env.workSpaceName2 + } | Should -Not -Throw + } + + It 'List' { + { + $config = Get-AzDatabricksWorkspace -ResourceGroupName $env.resourceGroup + $config.Count | Should -BeGreaterThan 0 + } | Should -Not -Throw + } + + It 'List1' { + { + $config = Get-AzDatabricksWorkspace + $config.Count | Should -BeGreaterThan 0 + } | Should -Not -Throw + } + + It 'Get' { + { + $config = Get-AzDatabricksWorkspace -Name $env.workSpaceName2 -ResourceGroupName $env.resourceGroup + $config.Name | Should -Be $env.workSpaceName2 + } | Should -Not -Throw + } + + It 'OutboundNetworkDependenciesEndpointList' { + { + $config = Get-AzDatabricksOutboundNetworkDependenciesEndpoint -WorkspaceName $env.workSpaceName1 -ResourceGroupName $env.resourceGroup + $config.Count | Should -BeGreaterThan 0 + } | Should -Not -Throw + } + + It 'UpdateExpanded' { + { + $config = Update-AzDatabricksWorkspace -Name $env.workSpaceName2 -ResourceGroupName $env.resourceGroup -Tag @{"key" = "value" } + $config.Name | Should -Be $env.workSpaceName2 + } | Should -Not -Throw + } + + It 'UpdateViaIdentityExpanded' { + { + $config = Get-AzDatabricksWorkspace -Name $env.workSpaceName2 -ResourceGroupName $env.resourceGroup + $config = Update-AzDatabricksWorkspace -InputObject $config -Tag @{"key" = "value" } + $config.Name | Should -Be $env.workSpaceName2 + } | Should -Not -Throw + } + + It 'UpdateRequiredNsgRule-EnableNoPublicIP-PublicNetworkAccess' { + { + $config = Update-AzDatabricksWorkspace -Name $env.workSpaceName1 -ResourceGroupName $env.resourceGroup -RequiredNsgRule 'AllRules' -EnableNoPublicIP:$false -PublicNetworkAccess 'Enabled' -Tag @{"key" = "value" } + $config.RequiredNsgRule | Should -Be 'AllRules' + 
$config.EnableNoPublicIP | Should -Be 'false' + $config.PublicNetworkAccess | Should -Be 'Enabled' + + $config = Update-AzDatabricksWorkspace -Name $env.workSpaceName1 -ResourceGroupName $env.resourceGroup -RequiredNsgRule 'NoAzureDatabricksRules' -EnableNoPublicIP:$true -PublicNetworkAccess 'Disabled' + $config.RequiredNsgRule | Should -Be 'NoAzureDatabricksRules' + $config.EnableNoPublicIP | Should -Be 'true' + $config.PublicNetworkAccess | Should -Be 'Disabled' + } + } + + It 'Delete' { + { + Remove-AzDatabricksWorkspace -Name $env.workSpaceName2 -ResourceGroupName $env.resourceGroup + } | Should -Not -Throw + } +} \ No newline at end of file diff --git a/tools/Mcp/src/ideal-modules/Databricks/tests/utils.ps1 b/tools/Mcp/src/ideal-modules/Databricks/tests/utils.ps1 new file mode 100644 index 000000000000..0f321ab21a7c --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/tests/utils.ps1 @@ -0,0 +1,114 @@ +function RandomString([bool]$allChars, [int32]$len) { + if ($allChars) { + return -join ((33..126) | Get-Random -Count $len | ForEach-Object {[char]$_}) + } else { + return -join ((48..57) + (97..122) | Get-Random -Count $len | ForEach-Object {[char]$_}) + } +} +function Start-TestSleep { + [CmdletBinding(DefaultParameterSetName = 'SleepBySeconds')] + param( + [parameter(Mandatory = $true, Position = 0, ParameterSetName = 'SleepBySeconds')] + [ValidateRange(0.0, 2147483.0)] + [double] $Seconds, + + [parameter(Mandatory = $true, ParameterSetName = 'SleepByMilliseconds')] + [ValidateRange('NonNegative')] + [Alias('ms')] + [int] $Milliseconds + ) + + if ($TestMode -ne 'playback') { + switch ($PSCmdlet.ParameterSetName) { + 'SleepBySeconds' { + Start-Sleep -Seconds $Seconds + } + 'SleepByMilliseconds' { + Start-Sleep -Milliseconds $Milliseconds + } + } + } +} + +$env = @{} +if ($UsePreviousConfigForRecord) { + $previousEnv = Get-Content (Join-Path $PSScriptRoot 'env.json') | ConvertFrom-Json + $previousEnv.psobject.properties | Foreach-Object { $env[$_.Name] = 
$_.Value } +} +# Add script method called AddWithCache to $env, when useCache is set true, it will try to get the value from the $env first. +# example: $val = $env.AddWithCache('key', $val, $true) +$env | Add-Member -Type ScriptMethod -Value { param( [string]$key, [object]$val, [bool]$useCache) if ($this.Contains($key) -and $useCache) { return $this[$key] } else { $this[$key] = $val; return $val } } -Name 'AddWithCache' +function setupEnv() { + # Preload subscriptionId and tenant from context, which will be used in test + # as default. You could change them if needed. + $env.SubscriptionId = (Get-AzContext).Subscription.Id + $env.Tenant = (Get-AzContext).Tenant.Id + + $workSpaceName1 = RandomString -allChars $false -len 6 + $workSpaceName2 = RandomString -allChars $false -len 6 + $workSpaceName3 = RandomString -allChars $false -len 6 + $vNetName1 = RandomString -allChars $false -len 6 + $accessConnectorName1 = RandomString -allChars $false -len 6 + + $env.Add("workSpaceName1", $workSpaceName1) + $env.Add("workSpaceName2", $workSpaceName2) + $env.Add("workSpaceName3", $workSpaceName3) + $env.Add("vNetName1", $vNetName1) + $env.Add("accessConnectorName1", $accessConnectorName1) + + $networkSecurityRuleName = RandomString -allChars $false -len 6 + $networkSecurityGroupName = RandomString -allChars $false -len 6 + $vNetSubnetName1 = RandomString -allChars $false -len 6 + $vNetSubnetName2 = RandomString -allChars $false -len 6 + $vNetSubnetName3 = RandomString -allChars $false -len 6 + $vNetName = RandomString -allChars $false -len 6 + $keyVaultName = "azps" + (RandomString -allChars $false -len 6) + + $env.Add("networkSecurityRuleName", $networkSecurityRuleName) + $env.Add("networkSecurityGroupName", $networkSecurityGroupName) + $env.Add("vNetSubnetName1", $vNetSubnetName1) + $env.Add("vNetSubnetName2", $vNetSubnetName2) + $env.Add("vNetSubnetName3", $vNetSubnetName3) + $env.Add("vNetName", $vNetName) + $env.Add("keyVaultName", $keyVaultName) + + write-host "start to 
create test group" + $env.Add("location", "eastus") + $resourceGroup = "auto-test-databricks-" + (RandomString -allChars $false -len 2) + $env.Add("resourceGroup", $resourceGroup) + New-AzResourceGroup -Name $env.resourceGroup -Location $env.location + + $dlg = New-AzDelegation -Name dbrdl -ServiceName "Microsoft.Databricks/workspaces" + + write-host "start to create NetworkSecurity env" + $rdpRule = New-AzNetworkSecurityRuleConfig -Name $env.networkSecurityRuleName -Description "Allow RDP" -Access Allow -Protocol Tcp -Direction Inbound -Priority 100 -SourceAddressPrefix Internet -SourcePortRange * -DestinationAddressPrefix * -DestinationPortRange 3389 + $networkSecurityGroup = New-AzNetworkSecurityGroup -ResourceGroupName $env.resourceGroup -Location $env.location -Name $env.networkSecurityGroupName -SecurityRules $rdpRule + $kvSubnet = New-AzVirtualNetworkSubnetConfig -Name $env.vNetSubnetName1 -AddressPrefix "110.0.1.0/24" -ServiceEndpoint "Microsoft.KeyVault" + $priSubnet = New-AzVirtualNetworkSubnetConfig -Name $env.vNetSubnetName2 -AddressPrefix "110.0.2.0/24" -NetworkSecurityGroup $networkSecurityGroup -Delegation $dlg + $pubSubnet = New-AzVirtualNetworkSubnetConfig -Name $env.vNetSubnetName3 -AddressPrefix "110.0.3.0/24" -NetworkSecurityGroup $networkSecurityGroup -Delegation $dlg + + write-host "start to create VirtualNetwork env" + $testVN = New-AzVirtualNetwork -Name $env.vNetName -ResourceGroupName $env.resourceGroup -Location $env.location -AddressPrefix "110.0.0.0/16" -Subnet $kvSubnet,$priSubnet,$pubSubnet + $vNetResId = (Get-AzVirtualNetwork -Name $env.vNetName -ResourceGroupName $env.resourceGroup).Subnets[0].Id + $ruleSet = New-AzKeyVaultNetworkRuleSetObject -DefaultAction Allow -Bypass AzureServices -IpAddressRange "110.0.1.0/24" -VirtualNetworkResourceId $vNetResId + + write-host "start to create KeyVault env" + New-AzKeyVault -ResourceGroupName $env.resourceGroup -VaultName $env.keyVaultName -NetworkRuleSet $ruleSet -Location $env.location -Sku 
'Premium' -EnablePurgeProtection + + write-host "start to create Databricks(have vNet) env" + New-AzDatabricksWorkspace -Name $env.workSpaceName1 -ResourceGroupName $env.resourceGroup -Location $env.location -VirtualNetworkId $testVN.Id -PrivateSubnetName $priSubnet.Name -PublicSubnetName $pubSubnet.Name -Sku Premium + + write-host "start to create Databricks env" + New-AzDatabricksWorkspace -Name $env.workSpaceName3 -ResourceGroupName $env.resourceGroup -Location $env.location -Sku premium + + # For any resources you created for test, you should add it to $env here. + $envFile = 'env.json' + if ($TestMode -eq 'live') { + $envFile = 'localEnv.json' + } + set-content -Path (Join-Path $PSScriptRoot $envFile) -Value (ConvertTo-Json $env) +} +function cleanupEnv() { + # Clean resources you create for testing + # Remove-AzResourceGroup -Name $env.resourceGroup +} diff --git a/tools/Mcp/src/services/resourcesService.ts b/tools/Mcp/src/services/resourcesService.ts index e69de29bb2d1..89c8e433e08c 100644 --- a/tools/Mcp/src/services/resourcesService.ts +++ b/tools/Mcp/src/services/resourcesService.ts @@ -0,0 +1,60 @@ +import { z, ZodRawShape } from "zod"; +import { resourceSchema } from "../types.js"; +import { CodegenServer } from "../CodegenServer.js"; + +export class ResourcesService { + private static _instance: ResourcesService; + private _server: CodegenServer | null = null; + private constructor() {} + + static getInstance(): ResourcesService { + if (!ResourcesService._instance) { + ResourcesService._instance = new ResourcesService(); + } + return ResourcesService._instance; + } + + setServer(server: CodegenServer): ResourcesService { + this._server = server; + return this; + } + + getResources(name: string, responseTemplate: string | undefined) { + let func; + switch (name) { + case "autorestReadmeTemplate": + func = this.autorestReadmeTemplate; + break; + default: + throw new Error(`Resource ${name} not found`); + } + return this.constructCallback(func, 
responseTemplate); + } + + constructCallback(fn: (arr: Args) => Promise, responseTemplate: string | undefined) { + return async (args: Args) => { + const content = await fn(args); + return { + contents: [ + { + uri: `resource://template`, + mimeType: "text/plain", + text: content + } + ] + }; + }; + } + + createResourceParametersFromSchema(schemas: any[]) { + // Resources typically don't have parameters in MCP, but keeping for consistency + const parameter: { [k: string]: any } = {}; + return parameter; + } + + autorestReadmeTemplate = async (args: Args): Promise => { + const template = this._server?.getResponseTemplate('autorest-readme-template'); + return template || "Template Not Found!"; + }; + +} \ No newline at end of file diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index d7d2b13b9dfa..3701c7777d2c 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -48,17 +48,8 @@ export class ToolsService { case "createTestsFromSpecs": func = this.createTestsFromSpecs; break; - case "listSpecModules": - func = this.toolListSpecModules; - break; - case "listProvidersForService": - func = this.toolListProvidersForService; - break; - case "listApiVersions": - func = this.toolListApiVersions; - break; - case "resolveAutorestInputs": - func = this.toolResolveAutorestInputs; + case "setupModuleStructure": + func = this.setupModuleStructure; break; default: throw new Error(`Tool ${name} not found`); @@ -186,31 +177,167 @@ export class ToolsService { return [exampleSpecsPath, testPath]; } - toolListSpecModules = async (_args: Args): Promise => { - const modules = await listSpecModules(); - return [JSON.stringify(modules)]; - } + setupModuleStructure = async (args: Args): Promise => { + try { + // List available services with dropdown + const modules = await listSpecModules(); + const serviceResponse = await this._server!.elicitInput({ + message: `Select an Azure service from the 
dropdown below:`, + requestedSchema: { + type: "object", + properties: { + service: { + type: "string", + description: "Select a service from the dropdown", + enum: modules + } + }, + required: ["service"] + } + }); - toolListProvidersForService = async (args: Args): Promise => { - const service = z.string().parse(Object.values(args)[0]); - const providers = await listProvidersForService(service); - return [service, JSON.stringify(providers)]; - } + const selectedService = serviceResponse.content?.service as string; + if (!selectedService) { + throw new Error("No service selected"); + } - toolListApiVersions = async (args: Args): Promise => { - const service = z.string().parse(Object.values(args)[0]); - const provider = z.string().parse(Object.values(args)[1]); - const res = await listApiVersions(service, provider); - return [service, provider, JSON.stringify(res.stable), JSON.stringify(res.preview)]; - } + // List providers for the selected service with dropdown + const providers = await listProvidersForService(selectedService); + if (providers.length === 0) { + throw new Error(`No providers found for service '${selectedService}'`); + } + + const providerResponse = await this._server!.elicitInput({ + message: `Select a provider for ${selectedService} from the dropdown below:`, + requestedSchema: { + type: "object", + properties: { + provider: { + type: "string", + description: "Select a provider from the dropdown", + enum: providers + } + }, + required: ["provider"] + } + }); + + const selectedProvider = providerResponse.content?.provider as string; + if (!selectedProvider) { + throw new Error("No provider selected"); + } + + // List API versions with dropdown combining version and stability + const apiVersions = await listApiVersions(selectedService, selectedProvider); + const allVersions = [ + ...apiVersions.stable.map(v => ({ version: v, stability: 'stable' as const })), + ...apiVersions.preview.map(v => ({ version: v, stability: 'preview' as const })) + ]; + + 
if (allVersions.length === 0) { + throw new Error(`No API versions found for ${selectedService}/${selectedProvider}`); + } + + const versionOptions = allVersions.map(v => `${v.version} (${v.stability})`); + + const versionResponse = await this._server!.elicitInput({ + message: `Select an API version for ${selectedService}/${selectedProvider} from the dropdown below:`, + requestedSchema: { + type: "object", + properties: { + versionWithStability: { + type: "string", + description: "Select an API version with stability level", + enum: versionOptions + } + }, + required: ["versionWithStability"] + } + }); + + const selectedVersionWithStability = versionResponse.content?.versionWithStability as string; + if (!selectedVersionWithStability) { + throw new Error("Version not selected"); + } + + const versionMatch = selectedVersionWithStability.match(/^(.+) \((stable|preview)\)$/); + if (!versionMatch) { + throw new Error("Invalid version format selected"); + } + + const selectedVersion = versionMatch[1]; + const selectedStability = versionMatch[2] as 'stable' | 'preview'; - toolResolveAutorestInputs = async (args: Args): Promise => { - const service = z.string().parse(Object.values(args)[0]); - const provider = z.string().parse(Object.values(args)[1]); - const stability = z.enum(['stable','preview']).parse(Object.values(args)[2]); - const version = z.string().parse(Object.values(args)[3]); - const swaggerPath = Object.values(args)[4] ? 
z.string().parse(Object.values(args)[4]) : undefined; - const resolved = await resolveAutorestInputs({ service, provider, stability, version, swaggerPath }); - return [resolved.serviceName, resolved.commitId, resolved.serviceSpecs, resolved.swaggerFileSpecs]; + // Resolve Readme placeholder values based on Responses + const resolved = await resolveAutorestInputs({ + service: selectedService, + provider: selectedProvider, + stability: selectedStability, + version: selectedVersion + }); + + const moduleNameResponse = await this._server!.elicitInput({ + message: `Configuration resolved:\n- Service: ${selectedService}\n- Provider: ${selectedProvider}\n- Version: ${selectedVersion} (${selectedStability})\n- Service Name: ${resolved.serviceName}\n- Commit ID: ${resolved.commitId}\n- Service Specs: ${resolved.serviceSpecs}\n- Swagger File: ${resolved.swaggerFileSpecs}`, + requestedSchema: { + type: "object", + properties: { + moduleName: { + type: "string", + description: "Enter the PowerShell module name (e.g., 'HybridConnectivity')" + } + }, + required: ["moduleName"] + } + }); + + const moduleName = moduleNameResponse.content?.moduleName as string; + if (!moduleName) { + throw new Error("No module name provided"); + } + + // Create folder structure and README.md + const mcpPath = process.cwd(); // Current working directory is tools/Mcp + const azurePowerShellRoot = path.resolve(mcpPath, '..', '..'); // Go up two levels to azure-powershell root + const srcPath = path.join(azurePowerShellRoot, 'src'); + const modulePath = path.join(srcPath, moduleName); + const autorestPath = path.join(modulePath, `${moduleName}.Autorest`); + const readmePath = path.join(autorestPath, 'README.md'); + + await utils.createDirectoryIfNotExists(modulePath); + await utils.createDirectoryIfNotExists(autorestPath); + + let readmeContent = this._server!.getResponseTemplate('autorest-readme-template'); + if (!readmeContent) { + throw new Error('README template not found in server responses'); + } 
+ + // Replace placeholders + readmeContent = readmeContent + .replace('{commitId}', resolved.commitId) + .replace('{serviceSpecs}', resolved.serviceSpecs) + .replace(/\{serviceSpecs\}/g, resolved.serviceSpecs) + .replace('{swaggerFileSpecs}', resolved.swaggerFileSpecs) + .replace(/\{moduleName\}/g, moduleName); + + // Write README.md file + await utils.writeFileIfNotExists(readmePath, readmeContent); + + return [ + selectedService, + selectedProvider, + selectedVersion, + selectedStability, + resolved.serviceName, + resolved.commitId, + resolved.serviceSpecs, + resolved.swaggerFileSpecs, + moduleName, + autorestPath + ]; + + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error); + return [`Error during setup: ${errorMessage}`]; + } } } \ No newline at end of file diff --git a/tools/Mcp/src/services/utils.ts b/tools/Mcp/src/services/utils.ts index 4c14c7133138..4627b9ad0d10 100644 --- a/tools/Mcp/src/services/utils.ts +++ b/tools/Mcp/src/services/utils.ts @@ -364,6 +364,32 @@ export function unflattenJsonObject(keyValuePairs: Array<{ key: string; value: a return result; } +export async function createDirectoryIfNotExists(dirPath: string): Promise { + try { + if (!fs.existsSync(dirPath)) { + fs.mkdirSync(dirPath, { recursive: true }); + console.log(`Created directory: ${dirPath}`); + } + } catch (error) { + console.error(`Error creating directory ${dirPath}:`, error); + throw error; + } +} + +export async function writeFileIfNotExists(filePath: string, content: string): Promise { + try { + if (!fs.existsSync(filePath)) { + fs.writeFileSync(filePath, content, 'utf8'); + console.log(`Created file: ${filePath}`); + } else { + console.log(`File already exists: ${filePath}`); + } + } catch (error) { + console.error(`Error writing file ${filePath}:`, error); + throw error; + } +} + diff --git a/tools/Mcp/src/specs/autorest-readme-template.md b/tools/Mcp/src/specs/autorest-readme-template.md new file mode 100644 index 
000000000000..d24e3d1fb91d --- /dev/null +++ b/tools/Mcp/src/specs/autorest-readme-template.md @@ -0,0 +1,63 @@ + +# Az.{moduleName} +This directory contains the PowerShell module for the {moduleName} service. + +--- +## Info +- Modifiable: yes +- Generated: all +- Committed: yes +- Packaged: yes + +--- +## Detail +This module was primarily generated via [AutoRest](https://github.com/Azure/autorest) using the [PowerShell](https://github.com/Azure/autorest.powershell) extension. + +## Module Requirements +- [Az.Accounts module](https://www.powershellgallery.com/packages/Az.Accounts/), version 2.7.5 or greater + +## Authentication +AutoRest does not generate authentication code for the module. Authentication is handled via Az.Accounts by altering the HTTP payload before it is sent. + +## Development +For information on how to develop for `Az.{moduleName}`, see [how-to.md](how-to.md). + + +--- +### AutoRest Configuration +> see https://aka.ms/autorest + +```yaml + +commit: {commitId} + +require: + - $(this-folder)/../../readme.azure.noprofile.md + - $(repo)/specification/{serviceSpecs}/readme.md + +try-require: + - $(repo)/specification/{serviceSpecs}/readme.powershell.md + +input-file: + - $(repo)/{swaggerFileSpecs} + +module-version: 0.1.0 + +title: {moduleName} +service-name: {moduleName} +subject-prefix: $(service-name) + +directive: + + - where: + variant: ^(Create|Update)(?!.*?(Expanded|JsonFilePath|JsonString)) + remove: true + + - where: + variant: ^CreateViaIdentity$|^CreateViaIdentityExpanded$ + remove: true + + - where: + verb: Set + remove: true +``` diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index 255a7cfbf7bc..345afb5dc6d4 100644 --- a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -12,33 +12,21 @@ # Instructions -## Stage 1: Interactive spec selection and autorest resolution -- Ask the user for their 
desired **PowerShell module name** (e.g., "HybridConnectivity") -- Call the MCP tool "list-spec-modules" to fetch all available specification folders from azure-rest-api-specs/specification. -- From the full list, present 10 most relevant spec options to the user based on their PowerShell module name, or show a representative sample if no clear match. -- Ask the user to choose which specification they want to use from the presented options, or ask if they want to see more options. -- **Confirm the spec choice**: Once user selects a spec, ask them to confirm this is the correct specification for their needs (show the spec name clearly). -- Call the MCP tool "list-providers" with the chosen spec folder to retrieve available provider namespaces. -- Present the list of providers to the user: - - If multiple providers are returned, ask the user to pick one - - If only one provider exists, select it automatically but confirm with the user -- **Confirm the provider choice**: Ask the user to confirm this is the correct provider namespace. -- Call the MCP tool "list-api-versions" with the chosen spec folder and provider to get available versions, separated by Stable and Preview. -- Present the API version options to the user and ask them to choose: - 1. **Stability**: stable or preview - 2. **API version**: specific version from the available list -- **Confirm the API version choice**: Ask the user to confirm their stability and version selection. -- Call the MCP tool "resolve-autorest-inputs" with the chosen spec folder, provider, stability, and version to compute the 4 autorest inputs: serviceName, commitId, serviceSpecs, swaggerFileSpecs. -- Store the resolved values for later steps (README generation and Autorest). Mark Stage 1 complete. +## Stage 1: Interactive specification selection and autorest resolution +- Call the MCP tool "setupModuleStructure" with no parameters +- This tool will interactively guide you through: + 1. 
Selecting the specification from available azure-rest-api-specs + 2. Choosing the provider namespace + 3. Selecting the API version (stable or preview) + 4. Getting the module name from the user + 5. Automatically creating the module structure and README.md file +- The tool will create the folder structure under the correct src directory and generate the README.md with proper autorest configuration +- Mark Stage 1 complete once the setupModuleStructure tool finishes successfully ## Stage 2: Generating partner powershell module - FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. -- Navigate to the `src` folder in the home "azure-powershell" directory. -- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) -- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. -- Create a new file `README.md`. (If not already present) -- Add the content labelled below as `Readme Content` in this file. -- Use the "generate-autorest" mcp tool to generate the module. +- Navigate to the newly created module directory (should be under `src//.Autorest`) +- Use the "generate-autorest" mcp tool to generate the module using the README.md that was created by setupModuleStructure - Stage 2 Complete. ## Stage 3: Updating Example Files @@ -50,7 +38,7 @@ - Once all example files are updated, mark stage 3 as complete. ## Stage 4: Updating Test Files -- Use the "test-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. +- Use the "create-test" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. - Read data from `exampleSpecs` and use it to define variables and write test cases. - Define setup variables inside `setupEnv` in `utils.ps1`, inferred from `exampleSpecs`. - Use those variables in the actual test case content. @@ -64,43 +52,3 @@ - Use the same `workingDirectory` and make sure all directives and yaml configurations remain unchanged. 
- This is a mandatory finalization step before pushing to GitHub. - Do not skip this regeneration even if the module was generated earlier. - -# Readme Content - -### AutoRest Configuration -> see https://aka.ms/autorest - -```yaml - -commit: - -require: - - $(this-folder)/../../readme.azure.noprofile.md - - $(repo)/specification//readme.md - -try-require: - - $(repo)/specification//readme.powershell.md - -input-file: - - $(repo)/specification/ - -module-version: 0.1.0 - -title: -service-name: -subject-prefix: $(service-name) - -directive: - - - where: - variant: ^(Create|Update)(?!.*?(Expanded|JsonFilePath|JsonString)) - remove: true - - - where: - variant: ^CreateViaIdentity$|^CreateViaIdentityExpanded$ - remove: true - - - where: - verb: Set - remove: true -``` diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json index f2fd99e18987..ca1be795f75a 100644 --- a/tools/Mcp/src/specs/responses.json +++ b/tools/Mcp/src/specs/responses.json @@ -29,26 +29,6 @@ "type": "tool", "text": "Read examples from specs are under {0}. Implement empty test stubs under {1}. Test stubs are named as '.Test.ps1'. Define variables in function 'setupEnv' in 'utils.ps1' under {1}, and use these variables for test cases. Value of these variables are from {0}. Leave test cases as empty if you don't find any matches. You are expert in Azure-PowerShell and Autorest.PowerShell, You know how to map data from {0} to {1}. 
" }, - { - "name": "list-spec-modules", - "type": "tool", - "text": "Available modules under azure-rest-api-specs/specification: {0}" - }, - { - "name": "list-providers", - "type": "tool", - "text": "Providers for service {0}: {1}" - }, - { - "name": "list-api-versions", - "type": "tool", - "text": "API versions for {0}/{1} — Stable: {2} | Preview: {3}" - }, - { - "name": "resolve-autorest-inputs", - "type": "tool", - "text": "Resolved inputs — serviceName: {0}, commitId: {1}, serviceSpecs: {2}, swaggerFileSpecs: {3}" - }, { "name": "create-greeting", "type": "prompt", @@ -58,5 +38,10 @@ "name": "partner-module-workflow", "type": "prompt", "text": "@file:prompts/partner-module-workflow.md" + }, + { + "name": "autorest-readme-template", + "type": "resource", + "text": "@file:autorest-readme-template.md" } ] \ No newline at end of file diff --git a/tools/Mcp/src/specs/specs.json b/tools/Mcp/src/specs/specs.json index 6f38aeef45a6..17e2bf74e432 100644 --- a/tools/Mcp/src/specs/specs.json +++ b/tools/Mcp/src/specs/specs.json @@ -73,38 +73,10 @@ "callbackName": "createTestsFromSpecs" }, { - "name": "list-spec-modules", - "description": "List all top-level modules (service folders) under azure-rest-api-specs/specification.", + "name": "setupModuleStructure", + "description": "Setup Azure PowerShell module structure by selecting service, provider, and API version through interactive dropdowns", "parameters": [], - "callbackName": "listSpecModules" - }, - { - "name": "list-providers", - "description": "List provider namespaces for a given service under resource-manager.", - "parameters": [ - { "name": "service", "description": "Service folder name under specification (e.g., hybridconnectivity)", "type": "string" } - ], - "callbackName": "listProvidersForService" - }, - { - "name": "list-api-versions", - "description": "List available API versions for a given service and provider (stable/preview).", - "parameters": [ - { "name": "service", "description": "Service folder 
name under specification", "type": "string" }, - { "name": "provider", "description": "Provider namespace folder under the service (e.g., Microsoft.HybridConnectivity)", "type": "string" } - ], - "callbackName": "listApiVersions" - }, - { - "name": "resolve-autorest-inputs", - "description": "Resolve the four Autorest inputs (serviceName, commitId, serviceSpecs, swaggerFileSpecs) from service/provider/version.", - "parameters": [ - { "name": "service", "description": "Service folder name under specification", "type": "string" }, - { "name": "provider", "description": "Provider namespace under the service", "type": "string" }, - { "name": "stability", "description": "'stable' or 'preview'", "type": "string" }, - { "name": "version", "description": "API version (e.g., 2024-12-01)", "type": "string" } - ], - "callbackName": "resolveAutorestInputs" + "callbackName": "setupModuleStructure" } ], "prompts": [ diff --git a/tools/Mcp/src/types.ts b/tools/Mcp/src/types.ts index f578b44abc04..559fbcdf7385 100644 --- a/tools/Mcp/src/types.ts +++ b/tools/Mcp/src/types.ts @@ -25,6 +25,13 @@ export interface promptSchema { callbackName: string; } +export interface resourceSchema { + name: string; + description: string; + parameters?: any[]; + callbackName: string; +} + export interface responseSchema { name: string; type: string; From 8a16ffe01c96cd6ae25a21d0cabe78324507718d Mon Sep 17 00:00:00 2001 From: Yash Date: Sun, 21 Sep 2025 02:55:53 +1000 Subject: [PATCH 15/19] Basic Syntax Fixing --- tools/Mcp/src/services/toolsService.ts | 14 ++++---------- tools/Mcp/src/services/utils.ts | 20 -------------------- 2 files changed, 4 insertions(+), 30 deletions(-) diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index 3701c7777d2c..4c21b51ec1be 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -5,12 +5,6 @@ import path from 'path'; import { get, RequestOptions } from 'http'; import { 
toolParameterSchema } from '../types.js'; import { CodegenServer } from '../CodegenServer.js'; -import { - listSpecModules, - listProvidersForService, - listApiVersions, - resolveAutorestInputs -} from './utils.js'; export class ToolsService { private static _instance: ToolsService; @@ -180,7 +174,7 @@ export class ToolsService { setupModuleStructure = async (args: Args): Promise => { try { // List available services with dropdown - const modules = await listSpecModules(); + const modules = await utils.listSpecModules(); const serviceResponse = await this._server!.elicitInput({ message: `Select an Azure service from the dropdown below:`, requestedSchema: { @@ -202,7 +196,7 @@ export class ToolsService { } // List providers for the selected service with dropdown - const providers = await listProvidersForService(selectedService); + const providers = await utils.listProvidersForService(selectedService); if (providers.length === 0) { throw new Error(`No providers found for service '${selectedService}'`); } @@ -228,7 +222,7 @@ export class ToolsService { } // List API versions with dropdown combining version and stability - const apiVersions = await listApiVersions(selectedService, selectedProvider); + const apiVersions = await utils.listApiVersions(selectedService, selectedProvider); const allVersions = [ ...apiVersions.stable.map(v => ({ version: v, stability: 'stable' as const })), ...apiVersions.preview.map(v => ({ version: v, stability: 'preview' as const })) @@ -269,7 +263,7 @@ export class ToolsService { const selectedStability = versionMatch[2] as 'stable' | 'preview'; // Resolve Readme placeholder values based on Responses - const resolved = await resolveAutorestInputs({ + const resolved = await utils.resolveAutorestInputs({ service: selectedService, provider: selectedProvider, stability: selectedStability, diff --git a/tools/Mcp/src/services/utils.ts b/tools/Mcp/src/services/utils.ts index 4627b9ad0d10..e6e148727231 100644 --- a/tools/Mcp/src/services/utils.ts 
+++ b/tools/Mcp/src/services/utils.ts @@ -82,9 +82,6 @@ export async function getSwaggerContentFromUrl(swaggerUrl: string): Promise } } -/** - * GitHub helper: get latest commit SHA for azure-rest-api-specs main branch - */ export async function getSpecsHeadCommitSha(branch: string = 'main'): Promise { const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/branches/${branch}`; const res = await fetch(url); @@ -95,9 +92,6 @@ export async function getSpecsHeadCommitSha(branch: string = 'main'): Promise { const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/specification`; const res = await fetch(url); @@ -111,9 +105,6 @@ export async function listSpecModules(): Promise { .sort((a: string, b: string) => a.localeCompare(b)); } -/** - * Given a service (spec folder), list provider namespaces under resource-manager. - */ export async function listProvidersForService(service: string): Promise { const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/specification/${service}/resource-manager`; const res = await fetch(url); @@ -128,10 +119,6 @@ export async function listProvidersForService(service: string): Promise a.localeCompare(b)); } -/** - * For service + provider, list API version directories under stable/ and preview/. - * Returns map: { stable: string[], preview: string[] } - */ export async function listApiVersions(service: string, provider: string): Promise<{ stable: string[]; preview: string[] }> { const base = `specification/${service}/resource-manager/${provider}`; const folders = ['stable', 'preview'] as const; @@ -153,10 +140,6 @@ export async function listApiVersions(service: string, provider: string): Promis return result; } -/** - * For a given service/provider/version, find likely swagger files (.json) under that version path. - * Returns array of repo-relative file paths (starting with specification/...). 
- */ export async function listSwaggerFiles(service: string, provider: string, stability: 'stable'|'preview', version: string): Promise { const dir = `specification/${service}/resource-manager/${provider}/${stability}/${version}`; const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/${dir}`; @@ -173,9 +156,6 @@ export async function listSwaggerFiles(service: string, provider: string, stabil return ordered; } -/** - * Resolve the four Autorest inputs given service, provider, and version path. - */ export async function resolveAutorestInputs(params: { service: string; provider: string; From 6c796b428a1cd01c0161528abba69d678c169ff6 Mon Sep 17 00:00:00 2001 From: Yash Date: Sun, 21 Sep 2025 03:12:27 +1000 Subject: [PATCH 16/19] Updated return Spec --- tools/Mcp/src/services/toolsService.ts | 13 +------------ .../src/specs/prompts/partner-module-workflow.md | 2 +- tools/Mcp/src/specs/responses.json | 5 +++++ tools/Mcp/src/specs/specs.json | 2 +- 4 files changed, 8 insertions(+), 14 deletions(-) diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index 4c21b51ec1be..f84c43fc64a0 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -316,18 +316,7 @@ export class ToolsService { // Write README.md file await utils.writeFileIfNotExists(readmePath, readmeContent); - return [ - selectedService, - selectedProvider, - selectedVersion, - selectedStability, - resolved.serviceName, - resolved.commitId, - resolved.serviceSpecs, - resolved.swaggerFileSpecs, - moduleName, - autorestPath - ]; + return [moduleName]; } catch (error) { const errorMessage = error instanceof Error ? 
error.message : String(error); diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index 345afb5dc6d4..5ed2ae4e4e23 100644 --- a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -25,7 +25,7 @@ ## Stage 2: Generating partner powershell module - FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. -- Navigate to the newly created module directory (should be under `src//.Autorest`) +- Navigate to the created module directory, you can find this from the input given by the user using ellicitation for the input "moduleName" (should be under `src//.Autorest`) - Use the "generate-autorest" mcp tool to generate the module using the README.md that was created by setupModuleStructure - Stage 2 Complete. diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json index ca1be795f75a..143ec5639adf 100644 --- a/tools/Mcp/src/specs/responses.json +++ b/tools/Mcp/src/specs/responses.json @@ -29,6 +29,11 @@ "type": "tool", "text": "Read examples from specs are under {0}. Implement empty test stubs under {1}. Test stubs are named as '.Test.ps1'. Define variables in function 'setupEnv' in 'utils.ps1' under {1}, and use these variables for test cases. Value of these variables are from {0}. Leave test cases as empty if you don't find any matches. You are expert in Azure-PowerShell and Autorest.PowerShell, You know how to map data from {0} to {1}. " }, + { + "name": "setup-module-structure", + "type": "tool", + "text": "Created the module structure under the folder: {0}." 
+ }, { "name": "create-greeting", "type": "prompt", diff --git a/tools/Mcp/src/specs/specs.json b/tools/Mcp/src/specs/specs.json index 17e2bf74e432..44a8b484a32e 100644 --- a/tools/Mcp/src/specs/specs.json +++ b/tools/Mcp/src/specs/specs.json @@ -73,7 +73,7 @@ "callbackName": "createTestsFromSpecs" }, { - "name": "setupModuleStructure", + "name": "setup-module-structure", "description": "Setup Azure PowerShell module structure by selecting service, provider, and API version through interactive dropdowns", "parameters": [], "callbackName": "setupModuleStructure" From acebef04bde3d413b58d9da3c19f15328bc0a769 Mon Sep 17 00:00:00 2001 From: Yash Date: Sun, 21 Sep 2025 04:37:43 +1000 Subject: [PATCH 17/19] Enhanced Quality Generation --- tools/CreateMappings_rules.json | 40 ++++++ tools/Mcp/src/services/toolsService.ts | 8 +- tools/Mcp/src/services/utils.ts | 45 +++++++ .../specs/prompts/partner-module-workflow.md | 123 ++++++++++-------- tools/Mcp/src/specs/prompts/workflow-old.md | 54 ++++++++ tools/Mcp/src/specs/responses.json | 4 +- 6 files changed, 215 insertions(+), 59 deletions(-) create mode 100644 tools/Mcp/src/specs/prompts/workflow-old.md diff --git a/tools/CreateMappings_rules.json b/tools/CreateMappings_rules.json index 92e2a18850ed..504db9cd57f7 100644 --- a/tools/CreateMappings_rules.json +++ b/tools/CreateMappings_rules.json @@ -979,5 +979,45 @@ { "module": "DependencyMap", "alias": "DependencyMap" + }, + { + "alias": "YashMaps", + "module": "YashMaps" + }, + { + "alias": "YashMaps2", + "module": "YashMaps2" + }, + { + "alias": "TestArizeAI", + "module": "TestArizeAI" + }, + { + "alias": "TestData", + "module": "TestData" + }, + { + "alias": "YashMaps3", + "module": "YashMaps3" + }, + { + "alias": "YashMaps4", + "module": "YashMaps4" + }, + { + "alias": "YashMaps5", + "module": "YashMaps5" + }, + { + "alias": "YashMaps6", + "module": "YashMaps6" + }, + { + "alias": "TestPinecone", + "module": "TestPinecone" + }, + { + "module": "YashMaps7", + "alias": 
"YashMaps7" } ] diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index f84c43fc64a0..a702020ceea0 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -161,14 +161,16 @@ export class ToolsService { console.error(`Error eliciting input for example ${name}:`, error); } } - return [exampleSpecsPath, examplePath]; + const idealExamplePaths = utils.getIdealModuleExamplePaths(); + return [exampleSpecsPath, examplePath, idealExamplePaths]; } createTestsFromSpecs = async (args: Args): Promise => { const workingDirectory = z.string().parse(Object.values(args)[0]); const testPath = path.join(workingDirectory, "test"); const exampleSpecsPath = await utils.getExamplesFromSpecs(workingDirectory); - return [exampleSpecsPath, testPath]; + const idealTestPaths = utils.getIdealModuleTestPaths(); + return [exampleSpecsPath, testPath, idealTestPaths]; } setupModuleStructure = async (args: Args): Promise => { @@ -271,7 +273,7 @@ export class ToolsService { }); const moduleNameResponse = await this._server!.elicitInput({ - message: `Configuration resolved:\n- Service: ${selectedService}\n- Provider: ${selectedProvider}\n- Version: ${selectedVersion} (${selectedStability})\n- Service Name: ${resolved.serviceName}\n- Commit ID: ${resolved.commitId}\n- Service Specs: ${resolved.serviceSpecs}\n- Swagger File: ${resolved.swaggerFileSpecs}`, + message: `What would you like call the powershell module? 
\n\n Configuration resolved:\n- Service: ${selectedService}\n- Provider: ${selectedProvider}\n- Version: ${selectedVersion} (${selectedStability})\n- Service Name: ${resolved.serviceName}\n- Commit ID: ${resolved.commitId}\n- Service Specs: ${resolved.serviceSpecs}\n- Swagger File: ${resolved.swaggerFileSpecs}`, requestedSchema: { type: "object", properties: { diff --git a/tools/Mcp/src/services/utils.ts b/tools/Mcp/src/services/utils.ts index e6e148727231..2af6ddab731f 100644 --- a/tools/Mcp/src/services/utils.ts +++ b/tools/Mcp/src/services/utils.ts @@ -3,6 +3,7 @@ import yaml from "js-yaml"; import { yamlContent } from '../types.js'; import { execSync } from 'child_process'; import path from 'path'; +import { Dirent } from 'fs'; const GITHUB_API_BASE = 'https://api.github.com'; const REST_API_SPECS_OWNER = 'Azure'; @@ -370,6 +371,50 @@ export async function writeFileIfNotExists(filePath: string, content: string): P } } +export function getIdealModuleExamplePaths(): string { + const idealModulesRoot = path.join(process.cwd(), 'src', 'ideal-modules'); + try { + if (!fs.existsSync(idealModulesRoot)) { + return ''; + } + const modules: Dirent[] = fs.readdirSync(idealModulesRoot, { withFileTypes: true }); + const exampleDirs: string[] = []; + for (const mod of modules) { + if (!mod.isDirectory()) continue; + const candidate = path.join(idealModulesRoot, mod.name, 'examples'); + if (fs.existsSync(candidate)) { + exampleDirs.push(candidate); + } + } + return exampleDirs.join(';'); + } catch (err) { + console.error('Error collecting ideal module example paths:', err); + return ''; + } +} + +export function getIdealModuleTestPaths(): string { + const idealModulesRoot = path.join(process.cwd(), 'src', 'ideal-modules'); + try { + if (!fs.existsSync(idealModulesRoot)) { + return ''; + } + const modules: Dirent[] = fs.readdirSync(idealModulesRoot, { withFileTypes: true }); + const testDirs: string[] = []; + for (const mod of modules) { + if (!mod.isDirectory()) continue; + 
const candidate = path.join(idealModulesRoot, mod.name, 'tests'); + if (fs.existsSync(candidate)) { + testDirs.push(candidate); + } + } + return testDirs.join(';'); + } catch (err) { + console.error('Error collecting ideal module test paths:', err); + return ''; + } +} + diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index 5ed2ae4e4e23..6b164540d75d 100644 --- a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -1,54 +1,69 @@ -# Execution rules -- Do not ask before running the command, just go ahead and run. - -# Role and Objective -- You are an autonomous agent that generates Azure PowerShell modules using Autorest for partners. -- Execute commands confidently without asking for confirmation. -- Follow all steps carefully and halt if any MCP tool is missing or fails. -- Think independently using your full context and reset if needed. -- No need to provide status updates at each step. -- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. DO NOT MISS ANY STEPS. -- If an mcp tool is not found or the tool fails then please halt execution. - -# Instructions - -## Stage 1: Interactive specification selection and autorest resolution -- Call the MCP tool "setupModuleStructure" with no parameters -- This tool will interactively guide you through: - 1. Selecting the specification from available azure-rest-api-specs - 2. Choosing the provider namespace - 3. Selecting the API version (stable or preview) - 4. Getting the module name from the user - 5. Automatically creating the module structure and README.md file -- The tool will create the folder structure under the correct src directory and generate the README.md with proper autorest configuration -- Mark Stage 1 complete once the setupModuleStructure tool finishes successfully - -## Stage 2: Generating partner powershell module -- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. 
-- Navigate to the created module directory, you can find this from the input given by the user using ellicitation for the input "moduleName" (should be under `src//.Autorest`) -- Use the "generate-autorest" mcp tool to generate the module using the README.md that was created by setupModuleStructure -- Stage 2 Complete. - -## Stage 3: Updating Example Files -- Use the "create-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. -- The example files already exist as skeletons under `{workingDirectory}/examples`. -- Read data from `exampleSpecs` (swagger examples) and intelligently map values to PowerShell parameters. -- Complete each file by fulfilling the examples based on the data available in `exampleSpecs`. -- Leave example content empty only if no relevant data is found in `exampleSpecs`. -- Once all example files are updated, mark stage 3 as complete. - -## Stage 4: Updating Test Files -- Use the "create-test" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. -- Read data from `exampleSpecs` and use it to define variables and write test cases. -- Define setup variables inside `setupEnv` in `utils.ps1`, inferred from `exampleSpecs`. -- Use those variables in the actual test case content. -- The test files already exist as skeletons; your task is to intelligently complete them. -- Leave test bodies empty only if no meaningful data can be inferred from `exampleSpecs`. -- Once all test files are updated, mark stage 4 as complete. - -## Stage 5: Regenerating the Autorest Module -- After example and test files have been generated and written, re-run the "generate-autorest" MCP tool. -- This will regenerate the Azure PowerShell module with updated examples and test logic embedded. -- Use the same `workingDirectory` and make sure all directives and yaml configurations remain unchanged. -- This is a mandatory finalization step before pushing to GitHub. 
-- Do not skip this regeneration even if the module was generated earlier. +# Partner Module Workflow (Simplified) + +Goal: Generate an Azure PowerShell module via Autorest, then populate examples and tests deterministically with minimal ambiguity. + +Core Principle: Derive the working directory once and reuse it. Do not guess or fabricate paths. + +## 1. Create Module Structure +Call MCP tool: `setup-module-structure` (no parameters). +It returns `{0}` = `ModuleName` (from user input inside the tool). +Derive `workingDirectory` strictly as: +`/src/{ModuleName}/{ModuleName}.Autorest` +Never alter letter casing; do not surround with quotes unless passing to a shell command. + +## 2. Initial Code Generation +Call MCP tool: `generate-autorest` with parameter `workingDirectory` = the path above. +Do not call Autorest directly; always use the MCP tool. +If generation fails, STOP. + +## 3. Populate Examples +Call MCP tool: `create-example` with the same `workingDirectory`. +Tool returns: + `{0}` = specs example source dir (downloaded JSON) + `{1}` = target examples dir in module + `{2}` = reference ideal examples (semicolon separated; may be empty) +Use reference examples (if any) for style: parameter ordering (required first), consistent variable placeholder patterns, and advanced options inclusion. +Leave an example file empty only if no relevant swagger example data exists. +Quality guidance (concise): + - Provide both minimal (required params only) and enriched (common optional params) variants if the skeleton expects multiple examples. + - Use stable placeholder naming: ``, ``, `` etc., unless a deterministic value is required by an existing test. + - Do not invent unsupported parameters; omit deprecated or preview-only params unless the swagger version explicitly requires them. + - Maintain consistent casing and ordering: required first, then alphabetized optional. + - Prefer realistic value shapes (GUID formats, URIs) over lorem ipsum. 
+MANDATORY: After the tool call, TREAT the returned response text itself (instructions referencing {0} {1} {2}) AS AN ACTION PLAN. Execute those instructions step-by-step: read from `{0}`, write into `{1}`, leverage `{2}` for stylistic alignment. Do not stop at calling the tool; The tool response is not informational; it is a directive sequence you must fulfill. + +## 4. Populate Tests +Call MCP tool: `create-test` with the same `workingDirectory`. +Tool returns: + `{0}` = specs example source dir + `{1}` = target test dir + `{2}` = reference ideal test dirs +Fill test stubs using variables defined in `utils.ps1` `setupEnv` function. +IMPORTANT: You can completely ignore the generated Test files as they only have the skeleton. Instead, you can group these test files into groups of 1 particular resource & then write 1 new file combining these 3-4 test file in the CRUD format. +Add: create -> get -> assert -> (optional cleanup). Add a negative test when feasible. +Leave a test empty only if genuinely no meaningful data can be inferred. +Quality guidance (concise): + - CRUD Pattern: Create (or Update) => Get => Assert essential fields => (List => Assert presence) => (Delete => Confirm absence) when resource lifecycle is supported. + - Assertions: Prefer specific property checks (Name, Id segment patterns, ProvisioningState) over broad object dumps. + - Negative Case: Intentionally supply one invalid parameter or missing required field; assert error category/message pattern. + - Reuse Variables: All names & IDs defined once in `setupEnv` to avoid duplication and enable future parameterization. + - Idempotency: Avoid destructive operations unless they clean up what this test created. +MANDATORY: After the tool call, EXECUTE the returned test instructions: interpret response text referencing `{0}`, `{1}`, `{2}`, open each stub under `{1}`, populate bodies using data from `{0}`, and mirror style from `{2}`. 
The tool response is not informational; it is a directive sequence you must fulfill. + +## 5. Regenerate Module +Call `generate-autorest` again with identical `workingDirectory` to ensure examples/tests are integrated. Do not modify the README.yaml block except via directives inserted earlier. + +## 6. Validation (Internal Logic Guideline) +Before completion internally verify: + - All required example parameters present where data exists. + - No unknown parameters introduced. + - Tests assert at least one key property per created resource. +If any check fails, refine the affected file(s) then proceed. + +## Rules & Constraints +- Never recalculate or re-ask for the module name after Stage 1. +- Never invent alternative directory paths. +- Do not skip steps 1–5. +- Halt immediately if an MCP tool is unavailable or errors. + +End of workflow. \ No newline at end of file diff --git a/tools/Mcp/src/specs/prompts/workflow-old.md b/tools/Mcp/src/specs/prompts/workflow-old.md new file mode 100644 index 000000000000..5ed2ae4e4e23 --- /dev/null +++ b/tools/Mcp/src/specs/prompts/workflow-old.md @@ -0,0 +1,54 @@ +# Execution rules +- Do not ask before running the command, just go ahead and run. + +# Role and Objective +- You are an autonomous agent that generates Azure PowerShell modules using Autorest for partners. +- Execute commands confidently without asking for confirmation. +- Follow all steps carefully and halt if any MCP tool is missing or fails. +- Think independently using your full context and reset if needed. +- No need to provide status updates at each step. +- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. DO NOT MISS ANY STEPS. +- If an mcp tool is not found or the tool fails then please halt execution. + +# Instructions + +## Stage 1: Interactive specification selection and autorest resolution +- Call the MCP tool "setupModuleStructure" with no parameters +- This tool will interactively guide you through: + 1. 
Selecting the specification from available azure-rest-api-specs + 2. Choosing the provider namespace + 3. Selecting the API version (stable or preview) + 4. Getting the module name from the user + 5. Automatically creating the module structure and README.md file +- The tool will create the folder structure under the correct src directory and generate the README.md with proper autorest configuration +- Mark Stage 1 complete once the setupModuleStructure tool finishes successfully + +## Stage 2: Generating partner powershell module +- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. +- Navigate to the created module directory; you can find this from the input given by the user using elicitation for the input "moduleName" (should be under `src//.Autorest`) +- Use the "generate-autorest" mcp tool to generate the module using the README.md that was created by setupModuleStructure +- Stage 2 Complete. + +## Stage 3: Updating Example Files +- Use the "create-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. +- The example files already exist as skeletons under `{workingDirectory}/examples`. +- Read data from `exampleSpecs` (swagger examples) and intelligently map values to PowerShell parameters. +- Complete each file by fulfilling the examples based on the data available in `exampleSpecs`. +- Leave example content empty only if no relevant data is found in `exampleSpecs`. +- Once all example files are updated, mark stage 3 as complete. + +## Stage 4: Updating Test Files +- Use the "create-test" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. +- Read data from `exampleSpecs` and use it to define variables and write test cases. +- Define setup variables inside `setupEnv` in `utils.ps1`, inferred from `exampleSpecs`. +- Use those variables in the actual test case content. +- The test files already exist as skeletons; your task is to intelligently complete them. 
+- Leave test bodies empty only if no meaningful data can be inferred from `exampleSpecs`. +- Once all test files are updated, mark stage 4 as complete. + +## Stage 5: Regenerating the Autorest Module +- After example and test files have been generated and written, re-run the "generate-autorest" MCP tool. +- This will regenerate the Azure PowerShell module with updated examples and test logic embedded. +- Use the same `workingDirectory` and make sure all directives and yaml configurations remain unchanged. +- This is a mandatory finalization step before pushing to GitHub. +- Do not skip this regeneration even if the module was generated earlier. diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json index 143ec5639adf..9373e3b0e394 100644 --- a/tools/Mcp/src/specs/responses.json +++ b/tools/Mcp/src/specs/responses.json @@ -22,12 +22,12 @@ { "name": "create-example", "type": "tool", - "text": "Read examples from specs under {0}. Fulfill examples under {1}. You are expert in Azure-PowerShell and Autorest.PowerShell. Leave example as empty if you don't find any matches. You know how to map data from {0} to {1}" + "text": "Read examples from specs under {0}. Fulfill examples under {1}. Also leverage high-quality reference examples located in directories: {2} (semicolon-separated). When generating, mirror parameter naming, structure, and advanced option usage patterns seen in those reference examples when applicable. Produce minimal yet complete runnable examples; omit unknown or deprecated params. Leave example empty only if no relevant mapping exists." }, { "name": "create-test", "type": "tool", - "text": "Read examples from specs are under {0}. Implement empty test stubs under {1}. Test stubs are named as '.Test.ps1'. Define variables in function 'setupEnv' in 'utils.ps1' under {1}, and use these variables for test cases. Value of these variables are from {0}. Leave test cases as empty if you don't find any matches. 
You are expert in Azure-PowerShell and Autorest.PowerShell, You know how to map data from {0} to {1}. " + "text": "Read examples from specs under {0}. Implement test stubs under {1}. Reference high-quality existing tests from directories: {2} (semicolon-separated) to replicate assertion style, variable patterns, and setup/teardown conventions. Test stubs are named '.Test.ps1'. Populate 'setupEnv' in 'utils.ps1' with variables derived from {0} examples; reuse them across tests. For each CRUD operation: (1) Create/Update then Get and assert key properties, (2) List and validate presence, (3) Clean up if destructive. Add one negative test if feasible (invalid parameter) asserting specific error type/message. Leave a stub empty only if absolutely no relevant example data exists." }, { "name": "setup-module-structure", From 22a3a265877c00a55514439842d6f58bd6f6b785 Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 22 Sep 2025 21:16:45 +1000 Subject: [PATCH 18/19] Updated Workflow --- tools/Mcp/src/specs/prompts/partner-module-workflow.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index 6b164540d75d..881377f5da7f 100644 --- a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -39,7 +39,7 @@ Tool returns: `{1}` = target test dir `{2}` = reference ideal test dirs Fill test stubs using variables defined in `utils.ps1` `setupEnv` function. -IMPORTANT: You can completely ignore the generated Test files as they only have the skeleton. Instead, you can group these test files into groups of 1 particular resource & then write 1 new file combining these 3-4 test file in the CRUD format. +IMPORTANT: You can completely ignore the generated Test files, do not fill these as they only have the skeleton. 
Instead, you can group these test files into groups of a particular resource & then write a new file for each group combining these 3-4 test files in the CRUD format. Add: create -> get -> assert -> (optional cleanup). Add a negative test when feasible. Leave a test empty only if genuinely no meaningful data can be inferred. Quality guidance (concise): From 1d15166321cf0910f3ce16c49d745184fbde3630 Mon Sep 17 00:00:00 2001 From: Yash Date: Wed, 24 Sep 2025 13:50:04 +1000 Subject: [PATCH 19/19] Deleted mcpprompt.md --- tools/Mcp/test/vscode/mcpprompt.md | 106 ----------------------------- 1 file changed, 106 deletions(-) delete mode 100644 tools/Mcp/test/vscode/mcpprompt.md diff --git a/tools/Mcp/test/vscode/mcpprompt.md b/tools/Mcp/test/vscode/mcpprompt.md deleted file mode 100644 index 255a7cfbf7bc..000000000000 --- a/tools/Mcp/test/vscode/mcpprompt.md +++ /dev/null @@ -1,106 +0,0 @@ -# Execution rules -- Do not ask before running the command, just go ahead and run. - -# Role and Objective -- You are an autonomous agent that generates Azure PowerShell modules using Autorest for partners. -- Execute commands confidently without asking for confirmation. -- Follow all steps carefully and halt if any MCP tool is missing or fails. -- Think independently using your full context and reset if needed. -- No need to provide status updates at each step. -- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. DO NOT MISS ANY STEPS. -- If an mcp tool is not found or the tool fails then please halt execution. - -# Instructions - -## Stage 1: Interactive spec selection and autorest resolution -- Ask the user for their desired **PowerShell module name** (e.g., "HybridConnectivity") -- Call the MCP tool "list-spec-modules" to fetch all available specification folders from azure-rest-api-specs/specification. -- From the full list, present 10 most relevant spec options to the user based on their PowerShell module name, or show a representative sample if no clear match. 
-- Ask the user to choose which specification they want to use from the presented options, or ask if they want to see more options. -- **Confirm the spec choice**: Once user selects a spec, ask them to confirm this is the correct specification for their needs (show the spec name clearly). -- Call the MCP tool "list-providers" with the chosen spec folder to retrieve available provider namespaces. -- Present the list of providers to the user: - - If multiple providers are returned, ask the user to pick one - - If only one provider exists, select it automatically but confirm with the user -- **Confirm the provider choice**: Ask the user to confirm this is the correct provider namespace. -- Call the MCP tool "list-api-versions" with the chosen spec folder and provider to get available versions, separated by Stable and Preview. -- Present the API version options to the user and ask them to choose: - 1. **Stability**: stable or preview - 2. **API version**: specific version from the available list -- **Confirm the API version choice**: Ask the user to confirm their stability and version selection. -- Call the MCP tool "resolve-autorest-inputs" with the chosen spec folder, provider, stability, and version to compute the 4 autorest inputs: serviceName, commitId, serviceSpecs, swaggerFileSpecs. -- Store the resolved values for later steps (README generation and Autorest). Mark Stage 1 complete. - -## Stage 2: Generating partner powershell module -- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. -- Navigate to the `src` folder in the home "azure-powershell" directory. -- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) -- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. -- Create a new file `README.md`. (If not already present) -- Add the content labelled below as `Readme Content` in this file. -- Use the "generate-autorest" mcp tool to generate the module. -- Stage 2 Complete. 
- -## Stage 3: Updating Example Files -- Use the "create-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. -- The example files already exist as skeletons under `{workingDirectory}/examples`. -- Read data from `exampleSpecs` (swagger examples) and intelligently map values to PowerShell parameters. -- Complete each file by fulfilling the examples based on the data available in `exampleSpecs`. -- Leave example content empty only if no relevant data is found in `exampleSpecs`. -- Once all example files are updated, mark stage 3 as complete. - -## Stage 4: Updating Test Files -- Use the "test-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. -- Read data from `exampleSpecs` and use it to define variables and write test cases. -- Define setup variables inside `setupEnv` in `utils.ps1`, inferred from `exampleSpecs`. -- Use those variables in the actual test case content. -- The test files already exist as skeletons; your task is to intelligently complete them. -- Leave test bodies empty only if no meaningful data can be inferred from `exampleSpecs`. -- Once all test files are updated, mark stage 4 as complete. - -## Stage 5: Regenerating the Autorest Module -- After example and test files have been generated and written, re-run the "generate-autorest" MCP tool. -- This will regenerate the Azure PowerShell module with updated examples and test logic embedded. -- Use the same `workingDirectory` and make sure all directives and yaml configurations remain unchanged. -- This is a mandatory finalization step before pushing to GitHub. -- Do not skip this regeneration even if the module was generated earlier. 
- -# Readme Content - -### AutoRest Configuration -> see https://aka.ms/autorest - -```yaml - -commit: - -require: - - $(this-folder)/../../readme.azure.noprofile.md - - $(repo)/specification//readme.md - -try-require: - - $(repo)/specification//readme.powershell.md - -input-file: - - $(repo)/specification/ - -module-version: 0.1.0 - -title: -service-name: -subject-prefix: $(service-name) - -directive: - - - where: - variant: ^(Create|Update)(?!.*?(Expanded|JsonFilePath|JsonString)) - remove: true - - - where: - variant: ^CreateViaIdentity$|^CreateViaIdentityExpanded$ - remove: true - - - where: - verb: Set - remove: true -```