From aa06e59f3342628e1a3b48f4f80eb027d9a7d1dc Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 18 Aug 2025 16:12:20 +1000 Subject: [PATCH 01/24] Added Prompt support --- tools/Mcp/src/CodegenServer.ts | 60 +++++---- tools/Mcp/src/services/promptsService.ts | 119 ++++++++++++++++++ tools/Mcp/src/services/toolsService.ts | 23 +--- .../specs/prompts/partner-module-workflow.md | 98 +++++++++++++++ tools/Mcp/src/specs/responses.json | 10 ++ tools/Mcp/src/specs/specs.json | 17 +++ tools/Mcp/src/types.ts | 10 +- 7 files changed, 289 insertions(+), 48 deletions(-) create mode 100644 tools/Mcp/src/specs/prompts/partner-module-workflow.md diff --git a/tools/Mcp/src/CodegenServer.ts b/tools/Mcp/src/CodegenServer.ts index 233d7b383473..cfc7f29a142f 100644 --- a/tools/Mcp/src/CodegenServer.ts +++ b/tools/Mcp/src/CodegenServer.ts @@ -1,13 +1,14 @@ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; import { z } from "zod"; -import { responseSchema, toolParameterSchema, toolSchema } from "./types.js"; +import { responseSchema, toolParameterSchema, toolSchema, promptSchema } from "./types.js"; import { ToolsService } from "./services/toolsService.js"; +import { PromptsService } from "./services/promptsService.js"; import { readFileSync } from "fs"; import path from "path"; import { fileURLToPath } from "url"; import { RequestOptions } from "https"; -import { ElicitRequest, ElicitResult } from "@modelcontextprotocol/sdk/types.js"; +import { /*ElicitRequest, ElicitResult*/ } from "@modelcontextprotocol/sdk/types.js"; // Elicit types not available in current sdk version const __dirname = path.dirname(fileURLToPath(import.meta.url)); const srcPath = path.resolve(__dirname, "..", "src"); @@ -55,13 +56,13 @@ export class CodegenServer { } // server elicitation request - public elicitInput( - params: ElicitRequest["params"], - options?: RequestOptions - ): Promise { - //TODO: add log - return 
this._mcp.server.elicitInput(params, options); - } + // Placeholder for future elicitInput when SDK exposes it + // public elicitInput( + // params: ElicitRequest["params"], + // options?: RequestOptions + // ): Promise { + // return this._mcp.server.elicitInput(params, options); + // } public static getInstance(): CodegenServer { if (!CodegenServer._instance) { @@ -91,28 +92,33 @@ export class CodegenServer { } initPrompts() { - this._mcp.prompt( - "create-greeting", - "Generate a customized greeting message", - { name: z.string().describe("Name of the person to greet"), style: z.string().describe("The style of greeting, such a formal, excited, or casual. If not specified casual will be used")}, - ({ name, style = "casual" }: { name: string, style?: string }) => { - return { - messages: [ - { - role: "user", - content: { - type: "text", - text: `Please generate a greeting in ${style} style to ${name}.`, - }, - }, - ], - }; - }); + const promptsService = PromptsService.getInstance().setServer(this); + const promptsSchemas = (specs.prompts || []) as promptSchema[]; + for (const schema of promptsSchemas) { + const parameter = promptsService.createPromptParametersFromSchema(schema.parameters); + const callback = promptsService.getPrompts(schema.callbackName, this._responses.get(schema.name)); + this._mcp.prompt( + schema.name, + schema.description, + parameter, + (args: any) => callback(args) + ); + } } initResponses() { (responses as responseSchema[])?.forEach((response: responseSchema) => { - this._responses.set(response.name, response.text); + let text = response.text; + if (text.startsWith("@file:")) { + const relPath = text.replace("@file:", ""); + const absPath = path.join(srcPath, "specs", relPath); + try { + text = readFileSync(absPath, "utf-8"); + } catch (e) { + console.error(`Failed to load prompt file ${absPath}:`, e); + } + } + this._responses.set(response.name, text); }); } } diff --git a/tools/Mcp/src/services/promptsService.ts 
b/tools/Mcp/src/services/promptsService.ts index e69de29bb2d1..bf638bf59fe3 100644 --- a/tools/Mcp/src/services/promptsService.ts +++ b/tools/Mcp/src/services/promptsService.ts @@ -0,0 +1,119 @@ +import { z, ZodRawShape } from "zod"; +import { promptSchema, toolParameterSchema } from "../types.js"; +import { CodegenServer } from "../CodegenServer.js"; + + +export class PromptsService { + private static _instance: PromptsService; + private _server: CodegenServer | null = null; + private constructor() {} + + static getInstance(): PromptsService { + if (!PromptsService._instance) { + PromptsService._instance = new PromptsService(); + } + return PromptsService._instance; + } + + setServer(server: CodegenServer): PromptsService { + this._server = server; + return this; + } + + getPrompts(name: string, responseTemplate: string | undefined) { + let func; + switch (name) { + case "createGreetingPrompt": + func = this.createGreetingPrompt; + break; + case "createPartnerModuleWorkflow": + func = this.createPartnerModuleWorkflow; + break; + default: + throw new Error(`Prompt ${name} not found`); + } + return this.constructCallback(func, responseTemplate); + } + + constructCallback(fn: (arr: Args) => Promise, responseTemplate: string | undefined) { + return async (args: Args) => { + const argsArray = await fn(args); + const response = this.getResponseString(argsArray, responseTemplate) ?? 
""; + return { + messages: [ + { + role: "user" as const, + content: { + type: "text" as const, + text: response + } + } + ] + }; + }; + } + + getResponseString(args: string[], responseTemplate: string | undefined): string | undefined { + if (!args || args.length === 0) { + return responseTemplate; + } + let response = responseTemplate; + for (let i = 0; i < args.length; i++) { + response = response?.replaceAll(`{${i}}`, args[i]); + } + return response; + } + + createPromptParametersFromSchema(schemas: toolParameterSchema[]) { + const parameter: { [k: string]: any } = {}; + for (const schema of schemas) { + const base = schema.optional ? z.any().optional() : z.any(); + switch (schema.type) { + case "string": + parameter[schema.name] = (schema.optional ? z.string().optional() : z.string()).describe(schema.description); + break; + case "number": + parameter[schema.name] = (schema.optional ? z.number().optional() : z.number()).describe(schema.description); + break; + case "boolean": + parameter[schema.name] = (schema.optional ? z.boolean().optional() : z.boolean()).describe(schema.description); + break; + case "array": + parameter[schema.name] = (schema.optional ? 
z.array(z.string()).optional() : z.array(z.string())).describe(schema.description); + break; + default: + throw new Error(`Unsupported parameter type: ${schema.type}`); + } + } + return parameter; + } + + // prompt implementations + createGreetingPrompt = async (args: Args): Promise => { + const values = Object.values(args); + const name = values[0] as unknown as string; // required + const style = (values[1] as unknown as string) || "casual"; // optional fallback + return [name, style]; + }; + + + createPartnerModuleWorkflow = async (args: Args): Promise => { + const { } = args as any; + return []; + }; +} + + +// Some Testing Specs: + + // { + // "name": "partner-module-workflow", + // "description": "Full autonomous workflow instructions to generate a partner Azure PowerShell module via Autorest.", + // "parameters": [ + // {"name": "serviceName", "description": "Service name placeholder. This also often corresponds with the Name of the Powershell Module.", "type": "string", "optional": true}, + // {"name": "commitId", "description": "Commit id of the swagger from azure-rest-api-specs", "type": "string", "optional": true}, + // {"name": "serviceSpecs", "description": "Service specs path under specification. Path of a swagger upto the resource-manager.", "type": "string", "optional": true}, + // {"name": "swaggerFileSpecs", "description": "Swagger JSON relative path. 
Entire path of the swagger down to the openapi file.", "type": "string", "optional": true} + // ], + // "callbackName": "createPartnerModuleWorkflow" + // } \ No newline at end of file diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index a0f0760cb728..f7f924dd3d49 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -1,4 +1,4 @@ -import { CallToolResult, ElicitRequest, ElicitResult } from '@modelcontextprotocol/sdk/types.js'; +import { CallToolResult } from '@modelcontextprotocol/sdk/types.js'; import { z, ZodRawShape, ZodType, ZodTypeAny } from "zod"; import * as utils from "./utils.js"; import path from 'path'; @@ -138,25 +138,8 @@ export class ToolsService { const exampleSpecs = await utils.getExampleJsonContent(exampleSpecsPath); for (const {name, content} of exampleSpecs) { const example = await utils.flattenJsonObject(content['parameters']); - try { - const response = await this._server!.elicitInput({ - "message": `Please review example data for ${name}: ${example.map(({key: k, value:v}) => ` \n${k}: ${v}`)}`, - "requestedSchema": { - "type": "object", - "properties": { - "skipAll": { - "type": "boolean", - "description": "If true, skip the review of all examples and proceed to the next step." - } - }, - } - }); - if (response.content && response.content['skipAll'] === true) { - break; - } - } catch (error) { - console.error(`Error eliciting input for example ${name}:`, error); - } + // Interactive elicitInput flow is disabled pending SDK support. + // Once available, re-enable to allow user review of each example. 
} return [exampleSpecsPath, examplePath]; } diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md new file mode 100644 index 000000000000..0fd3aebd48bb --- /dev/null +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -0,0 +1,98 @@ +# Execution rules +- Do not ask before running the command, just go ahead and run. + +# Role and Objective +- You are an autonomous agent that generates Azure PowerShell modules using Autorest for partners. +- Execute commands confidently without asking for confirmation. +- Follow all steps carefully and halt if any MCP tool is missing or fails. +- Think independently using your full context and reset if needed. +- No need to provide status updates at each step. +- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. DO NOT MISS ANY STEPS. +- If an mcp tool is not found or the tool fails then please halt execution. + +# Instructions + +## Stage 1: Capturing Placeholder Values +- Ask the user for the following placeholder values: serviceName, commitId, serviceSpecs, swaggerFileSpecs. + - Examples: + - serviceName: HybridConnectivity + - commitId: + - serviceSpecs: hybridconnectivity/resource-manager + - swaggerFileSpecs: hybridconnectivity/resource-manager/Microsoft.HybridConnectivity/stable/2024-12-01/hybridconnectivity.json +- Do not replace or modify this prompt file. +- Store the values for use in later steps like generating the README and executing Autorest. +- Once values are stored, mark Stage 1 as complete. + +## Stage 2: Generating partner powershell module +- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. +- Navigate to the `src` folder in the home "azure-powershell" directory. +- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) +- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. +- Create a new file `README.md`. 
(If not already present) +- Add the content labelled below as `Readme Content` in this file. +- Use the "generate-autorest" mcp tool to generate the module. +- Stage 2 Complete. + +## Stage 3: Updating Example Files +- Use the "create-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. +- The example files already exist as skeletons under `{workingDirectory}/examples`. +- Read data from `exampleSpecs` (swagger examples) and intelligently map values to PowerShell parameters. +- Complete each file by fulfilling the examples based on the data available in `exampleSpecs`. +- Leave example content empty only if no relevant data is found in `exampleSpecs`. +- Once all example files are updated, mark stage 3 as complete. + +## Stage 4: Updating Test Files +- Use the "test-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. +- Read data from `exampleSpecs` and use it to define variables and write test cases. +- Define setup variables inside `setupEnv` in `utils.ps1`, inferred from `exampleSpecs`. +- Use those variables in the actual test case content. +- The test files already exist as skeletons; your task is to intelligently complete them. +- Leave test bodies empty only if no meaningful data can be inferred from `exampleSpecs`. +- Once all test files are updated, mark stage 4 as complete. + +## Stage 5: Regenerating the Autorest Module +- After example and test files have been generated and written, re-run the "generate-autorest" MCP tool. +- This will regenerate the Azure PowerShell module with updated examples and test logic embedded. +- Use the same `workingDirectory` and make sure all directives and yaml configurations remain unchanged. +- This is a mandatory finalization step before pushing to GitHub. +- Do not skip this regeneration even if the module was generated earlier. 
+ +# Readme Content + +### AutoRest Configuration +> see https://aka.ms/autorest + +```yaml + +commit: + +require: + - $(this-folder)/../../readme.azure.noprofile.md + - $(repo)/specification//readme.md + +try-require: + - $(repo)/specification//readme.powershell.md + +input-file: + - $(repo)/ + +module-version: 0.1.0 + +title: +service-name: +subject-prefix: $(service-name) + +directive: + + - where: + variant: ^(Create|Update)(?!.*?(Expanded|JsonFilePath|JsonString)) + remove: true + + - where: + variant: ^CreateViaIdentity$|^CreateViaIdentityExpanded$ + remove: true + + - where: + verb: Set + remove: true +``` diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json index 74d83011ba8d..4b1dddf55193 100644 --- a/tools/Mcp/src/specs/responses.json +++ b/tools/Mcp/src/specs/responses.json @@ -28,5 +28,15 @@ "name": "create-test", "type": "tool", "text": "Read examples from specs are under {0}. Implement empty test stubs under {1}. Test stubs are named as '.Test.ps1'. Define variables in function 'setupEnv' in 'utils.ps1' under {1}, and use these variables for test cases. Value of these variables are from {0}. Leave test cases as empty if you don't find any matches. You are expert in Azure-PowerShell and Autorest.PowerShell, You know how to map data from {0} to {1}. " + }, + { + "name": "create-greeting", + "type": "prompt", + "text": "Please generate a greeting in {1} style to {0}." 
+ }, + { + "name": "partner-module-workflow", + "type": "prompt", + "text": "@file:prompts/partner-module-workflow.md" } ] \ No newline at end of file diff --git a/tools/Mcp/src/specs/specs.json b/tools/Mcp/src/specs/specs.json index 55a34c2ff2a2..98c85eba76d8 100644 --- a/tools/Mcp/src/specs/specs.json +++ b/tools/Mcp/src/specs/specs.json @@ -72,5 +72,22 @@ ], "callbackName": "createTestsFromSpecs" } + ], + "prompts": [ + { + "name": "create-greeting", + "description": "Generate a customized greeting message", + "parameters": [ + {"name": "name", "description": "Name of the person to greet", "type": "string"}, + {"name": "style", "description": "The style of greeting, such a formal, excited, or casual. If not specified casual will be used", "type": "string", "optional": true} + ], + "callbackName": "createGreetingPrompt" + }, + { + "name": "partner-module-workflow", + "description": "Full autonomous workflow instructions to generate a partner Azure PowerShell module via Autorest.", + "parameters": [], + "callbackName": "createPartnerModuleWorkflow" + } ] } \ No newline at end of file diff --git a/tools/Mcp/src/types.ts b/tools/Mcp/src/types.ts index 14ba8d9d9b06..593fe977211a 100644 --- a/tools/Mcp/src/types.ts +++ b/tools/Mcp/src/types.ts @@ -1,7 +1,8 @@ export interface toolParameterSchema { name: string; description: string; - type: string; + type: string; // string | number | boolean | array (of string) + optional?: boolean; // if true, parameter is optional } export interface toolSchema { @@ -11,6 +12,13 @@ export interface toolSchema { callbackName: string; } +export interface promptSchema { + name: string; + description: string; + parameters: toolParameterSchema[]; // reuse parameter schema + callbackName: string; // maps to PromptService internal function +} + export interface responseSchema { name: string; type: string; From 738f6efb86afbbf4da788eb5b8170196881ccadc Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 18 Aug 2025 17:15:56 +1000 Subject: [PATCH 
02/24] WhiteSpace --- src/Maps/Maps.Autorest/test/utils.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Maps/Maps.Autorest/test/utils.ps1 b/src/Maps/Maps.Autorest/test/utils.ps1 index 70e258271618..fa9014369210 100644 --- a/src/Maps/Maps.Autorest/test/utils.ps1 +++ b/src/Maps/Maps.Autorest/test/utils.ps1 @@ -67,4 +67,4 @@ function setupEnv() { function cleanupEnv() { # Clean resources you create for testing Remove-AzResourceGroup -Name $env.resourceGroup -} +} \ No newline at end of file From a775b623143e87d359bd02af26b165d7a63e7eb7 Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 18 Aug 2025 17:16:27 +1000 Subject: [PATCH 03/24] whitespace fix --- src/Maps/Maps.Autorest/test/utils.ps1 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Maps/Maps.Autorest/test/utils.ps1 b/src/Maps/Maps.Autorest/test/utils.ps1 index fa9014369210..70e258271618 100644 --- a/src/Maps/Maps.Autorest/test/utils.ps1 +++ b/src/Maps/Maps.Autorest/test/utils.ps1 @@ -67,4 +67,4 @@ function setupEnv() { function cleanupEnv() { # Clean resources you create for testing Remove-AzResourceGroup -Name $env.resourceGroup -} \ No newline at end of file +} From d2c409898435c1ac752abed96f9784ab385722d8 Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 18 Aug 2025 17:16:49 +1000 Subject: [PATCH 04/24] whitespace --- src/Maps/Maps.Autorest/test/utils.ps1 | 1 + 1 file changed, 1 insertion(+) diff --git a/src/Maps/Maps.Autorest/test/utils.ps1 b/src/Maps/Maps.Autorest/test/utils.ps1 index 70e258271618..7cd449c834ae 100644 --- a/src/Maps/Maps.Autorest/test/utils.ps1 +++ b/src/Maps/Maps.Autorest/test/utils.ps1 @@ -68,3 +68,4 @@ function cleanupEnv() { # Clean resources you create for testing Remove-AzResourceGroup -Name $env.resourceGroup } + From 52488136135fc281d2179517cf6d09e9bf6bd593 Mon Sep 17 00:00:00 2001 From: Yash <55773468+notyashhh@users.noreply.github.com> Date: Mon, 18 Aug 2025 17:18:56 +1000 Subject: [PATCH 05/24] Update tools/Mcp/test/vscode/mcpprompt.md 
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- tools/Mcp/test/vscode/mcpprompt.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/Mcp/test/vscode/mcpprompt.md b/tools/Mcp/test/vscode/mcpprompt.md index 0fd3aebd48bb..3e0a9d20dc06 100644 --- a/tools/Mcp/test/vscode/mcpprompt.md +++ b/tools/Mcp/test/vscode/mcpprompt.md @@ -74,7 +74,7 @@ try-require: - $(repo)/specification//readme.powershell.md input-file: - - $(repo)/ + - $(repo)/specification/ module-version: 0.1.0 From e0ba08332aa6d324490a63faa309543b4e0a2bbf Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 18 Aug 2025 17:22:27 +1000 Subject: [PATCH 06/24] fix whitespace --- src/Maps/Maps.Autorest/test/utils.ps1 | 1 - 1 file changed, 1 deletion(-) diff --git a/src/Maps/Maps.Autorest/test/utils.ps1 b/src/Maps/Maps.Autorest/test/utils.ps1 index 7cd449c834ae..70e258271618 100644 --- a/src/Maps/Maps.Autorest/test/utils.ps1 +++ b/src/Maps/Maps.Autorest/test/utils.ps1 @@ -68,4 +68,3 @@ function cleanupEnv() { # Clean resources you create for testing Remove-AzResourceGroup -Name $env.resourceGroup } - From 32b4b22336f8cd00911b0667aa4cba96f6e6fcdd Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 18 Aug 2025 17:29:24 +1000 Subject: [PATCH 07/24] Updated to use PromptParameterSchema --- tools/Mcp/src/services/promptsService.ts | 4 ++-- tools/Mcp/src/types.ts | 14 ++++++++++---- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/tools/Mcp/src/services/promptsService.ts b/tools/Mcp/src/services/promptsService.ts index bf638bf59fe3..6b39b1778004 100644 --- a/tools/Mcp/src/services/promptsService.ts +++ b/tools/Mcp/src/services/promptsService.ts @@ -1,5 +1,5 @@ import { z, ZodRawShape } from "zod"; -import { promptSchema, toolParameterSchema } from "../types.js"; +import { promptSchema, promptParameterSchema } from "../types.js"; import { CodegenServer } from "../CodegenServer.js"; @@ -64,7 +64,7 @@ export class PromptsService { return response; } - 
createPromptParametersFromSchema(schemas: toolParameterSchema[]) { + createPromptParametersFromSchema(schemas: promptParameterSchema[]) { const parameter: { [k: string]: any } = {}; for (const schema of schemas) { const base = schema.optional ? z.any().optional() : z.any(); diff --git a/tools/Mcp/src/types.ts b/tools/Mcp/src/types.ts index 593fe977211a..f578b44abc04 100644 --- a/tools/Mcp/src/types.ts +++ b/tools/Mcp/src/types.ts @@ -1,8 +1,14 @@ export interface toolParameterSchema { name: string; description: string; - type: string; // string | number | boolean | array (of string) - optional?: boolean; // if true, parameter is optional + type: string; +} + +export interface promptParameterSchema { + name: string; + description: string; + type: string; + optional?: boolean; } export interface toolSchema { @@ -15,8 +21,8 @@ export interface toolSchema { export interface promptSchema { name: string; description: string; - parameters: toolParameterSchema[]; // reuse parameter schema - callbackName: string; // maps to PromptService internal function + parameters: promptParameterSchema[]; + callbackName: string; } export interface responseSchema { From eb2c590a4870f12a0ca4e3f642c07e9b261ae96c Mon Sep 17 00:00:00 2001 From: Yash Date: Thu, 21 Aug 2025 12:34:46 +1000 Subject: [PATCH 08/24] reverted elicitInput --- tools/Mcp/src/CodegenServer.ts | 13 ++++++------- tools/Mcp/src/services/toolsService.ts | 21 +++++++++++++++++++-- 2 files changed, 25 insertions(+), 9 deletions(-) diff --git a/tools/Mcp/src/CodegenServer.ts b/tools/Mcp/src/CodegenServer.ts index cfc7f29a142f..43cb612cc824 100644 --- a/tools/Mcp/src/CodegenServer.ts +++ b/tools/Mcp/src/CodegenServer.ts @@ -56,13 +56,12 @@ export class CodegenServer { } // server elicitation request - // Placeholder for future elicitInput when SDK exposes it - // public elicitInput( - // params: ElicitRequest["params"], - // options?: RequestOptions - // ): Promise { - // return this._mcp.server.elicitInput(params, options); - 
// } + public elicitInput( + params: ElicitRequest["params"], + options?: RequestOptions + ): Promise { + return this._mcp.server.elicitInput(params, options); + } public static getInstance(): CodegenServer { if (!CodegenServer._instance) { diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index f7f924dd3d49..ab0c7e4f3822 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -138,8 +138,25 @@ export class ToolsService { const exampleSpecs = await utils.getExampleJsonContent(exampleSpecsPath); for (const {name, content} of exampleSpecs) { const example = await utils.flattenJsonObject(content['parameters']); - // Interactive elicitInput flow is disabled pending SDK support. - // Once available, re-enable to allow user review of each example. + try { + const response = await this._server!.elicitInput({ + "message": `Please review example data for ${name}: ${example.map(({key: k, value:v}) => ` \n${k}: ${v}`)}`, + "requestedSchema": { + "type": "object", + "properties": { + "skipAll": { + "type": "boolean", + "description": "If true, skip the review of all examples and proceed to the next step." 
+ } + }, + } + }); + if (response.content && response.content['skipAll'] === true) { + break; + } + } catch (error) { + console.error(`Error eliciting input for example ${name}:`, error); + } } return [exampleSpecsPath, examplePath]; } From 65086d399e1fa433845f85744d4c3c2214772d3a Mon Sep 17 00:00:00 2001 From: Yash Date: Thu, 21 Aug 2025 17:39:15 +1000 Subject: [PATCH 09/24] reverted import --- tools/Mcp/src/CodegenServer.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/Mcp/src/CodegenServer.ts b/tools/Mcp/src/CodegenServer.ts index 43cb612cc824..e2a10375fc4d 100644 --- a/tools/Mcp/src/CodegenServer.ts +++ b/tools/Mcp/src/CodegenServer.ts @@ -8,7 +8,7 @@ import { readFileSync } from "fs"; import path from "path"; import { fileURLToPath } from "url"; import { RequestOptions } from "https"; -import { /*ElicitRequest, ElicitResult*/ } from "@modelcontextprotocol/sdk/types.js"; // Elicit types not available in current sdk version +import { ElicitRequest, ElicitResult } from "@modelcontextprotocol/sdk/types.js"; const __dirname = path.dirname(fileURLToPath(import.meta.url)); const srcPath = path.resolve(__dirname, "..", "src"); From deef7564cfdf132ff89c0c532412b17b72ff4ad3 Mon Sep 17 00:00:00 2001 From: Yash Date: Fri, 22 Aug 2025 15:16:37 +1000 Subject: [PATCH 10/24] Updated mcpprotocol required version --- tools/Mcp/package-lock.json | 65 ++++++++++++++++++++++++++++++++++--- tools/Mcp/package.json | 2 +- 2 files changed, 61 insertions(+), 6 deletions(-) diff --git a/tools/Mcp/package-lock.json b/tools/Mcp/package-lock.json index 1f30ea26fcb6..087ee5c53505 100644 --- a/tools/Mcp/package-lock.json +++ b/tools/Mcp/package-lock.json @@ -9,7 +9,7 @@ "version": "1.0.0", "license": "ISC", "dependencies": { - "@modelcontextprotocol/sdk": "^1.9.0", + "@modelcontextprotocol/sdk": "^1.17.3", "js-yaml": "^4.1.0", "zod": "^3.24.2" }, @@ -23,14 +23,17 @@ } }, "node_modules/@modelcontextprotocol/sdk": { - "version": "1.11.0", - "resolved": 
"https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.11.0.tgz", - "integrity": "sha512-k/1pb70eD638anoi0e8wUGAlbMJXyvdV4p62Ko+EZ7eBe1xMx8Uhak1R5DgfoofsK5IBBnRwsYGTaLZl+6/+RQ==", + "version": "1.17.3", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.17.3.tgz", + "integrity": "sha512-JPwUKWSsbzx+DLFznf/QZ32Qa+ptfbUlHhRLrBQBAFu9iI1iYvizM4p+zhhRDceSsPutXp4z+R/HPVphlIiclg==", + "license": "MIT", "dependencies": { + "ajv": "^6.12.6", "content-type": "^1.0.5", "cors": "^2.8.5", - "cross-spawn": "^7.0.3", + "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", + "eventsource-parser": "^3.0.0", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "pkce-challenge": "^5.0.0", @@ -69,6 +72,22 @@ "node": ">= 0.6" } }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -352,6 +371,18 @@ "express": "^4.11 || 5 || ^5.0.0-beta.1" } }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "license": "MIT" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "license": "MIT" + }, "node_modules/finalhandler": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", @@ -520,6 +551,12 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "license": "MIT" + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -661,6 +698,15 @@ "node": ">= 0.10" } }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/qs": { "version": "6.14.0", "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", @@ -919,6 +965,15 @@ "node": ">= 0.8" } }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", diff --git a/tools/Mcp/package.json b/tools/Mcp/package.json index fff41806944a..9f6af0f20ed7 100644 --- a/tools/Mcp/package.json +++ b/tools/Mcp/package.json @@ -19,7 +19,7 @@ "license": "ISC", "description": "", "dependencies": { - "@modelcontextprotocol/sdk": "^1.9.0", + "@modelcontextprotocol/sdk": "^1.17.3", "js-yaml": "^4.1.0", "zod": 
"^3.24.2" }, From 06421e4200895954d3c627204440dab2dfccc01b Mon Sep 17 00:00:00 2001 From: Yash Date: Fri, 22 Aug 2025 16:45:16 +1000 Subject: [PATCH 11/24] Fuzzy Search --- tools/Mcp/src/services/toolsService.ts | 46 +++++++ tools/Mcp/src/services/utils.ts | 123 ++++++++++++++++++ .../specs/prompts/partner-module-workflow.md | 21 ++- tools/Mcp/src/specs/responses.json | 22 +++- tools/Mcp/src/specs/specs.json | 35 +++++ 5 files changed, 235 insertions(+), 12 deletions(-) diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index ab0c7e4f3822..d7d2b13b9dfa 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -5,6 +5,12 @@ import path from 'path'; import { get, RequestOptions } from 'http'; import { toolParameterSchema } from '../types.js'; import { CodegenServer } from '../CodegenServer.js'; +import { + listSpecModules, + listProvidersForService, + listApiVersions, + resolveAutorestInputs +} from './utils.js'; export class ToolsService { private static _instance: ToolsService; @@ -42,6 +48,18 @@ export class ToolsService { case "createTestsFromSpecs": func = this.createTestsFromSpecs; break; + case "listSpecModules": + func = this.toolListSpecModules; + break; + case "listProvidersForService": + func = this.toolListProvidersForService; + break; + case "listApiVersions": + func = this.toolListApiVersions; + break; + case "resolveAutorestInputs": + func = this.toolResolveAutorestInputs; + break; default: throw new Error(`Tool ${name} not found`); } @@ -167,4 +185,32 @@ export class ToolsService { const exampleSpecsPath = await utils.getExamplesFromSpecs(workingDirectory); return [exampleSpecsPath, testPath]; } + + toolListSpecModules = async (_args: Args): Promise => { + const modules = await listSpecModules(); + return [JSON.stringify(modules)]; + } + + toolListProvidersForService = async (args: Args): Promise => { + const service = z.string().parse(Object.values(args)[0]); + const 
providers = await listProvidersForService(service); + return [service, JSON.stringify(providers)]; + } + + toolListApiVersions = async (args: Args): Promise => { + const service = z.string().parse(Object.values(args)[0]); + const provider = z.string().parse(Object.values(args)[1]); + const res = await listApiVersions(service, provider); + return [service, provider, JSON.stringify(res.stable), JSON.stringify(res.preview)]; + } + + toolResolveAutorestInputs = async (args: Args): Promise => { + const service = z.string().parse(Object.values(args)[0]); + const provider = z.string().parse(Object.values(args)[1]); + const stability = z.enum(['stable','preview']).parse(Object.values(args)[2]); + const version = z.string().parse(Object.values(args)[3]); + const swaggerPath = Object.values(args)[4] ? z.string().parse(Object.values(args)[4]) : undefined; + const resolved = await resolveAutorestInputs({ service, provider, stability, version, swaggerPath }); + return [resolved.serviceName, resolved.commitId, resolved.serviceSpecs, resolved.swaggerFileSpecs]; + } } \ No newline at end of file diff --git a/tools/Mcp/src/services/utils.ts b/tools/Mcp/src/services/utils.ts index 154252fdf6c0..4c14c7133138 100644 --- a/tools/Mcp/src/services/utils.ts +++ b/tools/Mcp/src/services/utils.ts @@ -4,6 +4,10 @@ import { yamlContent } from '../types.js'; import { execSync } from 'child_process'; import path from 'path'; +const GITHUB_API_BASE = 'https://api.github.com'; +const REST_API_SPECS_OWNER = 'Azure'; +const REST_API_SPECS_REPO = 'azure-rest-api-specs'; + const _pwshCD = (path: string): string => { return `pwsh -Command "$path = resolve-path ${path} | Set-Location"` } const _autorestReset = "autorest --reset" const _autorest = "autorest" @@ -78,6 +82,125 @@ export async function getSwaggerContentFromUrl(swaggerUrl: string): Promise } } +/** + * GitHub helper: get latest commit SHA for azure-rest-api-specs main branch + */ +export async function getSpecsHeadCommitSha(branch: string = 
'main'): Promise { + const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/branches/${branch}`; + const res = await fetch(url); + if (!res.ok) { + throw new Error(`Failed to fetch branch '${branch}' info: ${res.status}`); + } + const data = await res.json(); + return data?.commit?.sha as string; +} + +/** + * List top-level service directories under specification/ + */ +export async function listSpecModules(): Promise { + const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/specification`; + const res = await fetch(url); + if (!res.ok) { + throw new Error(`Failed to list specification directory: ${res.status}`); + } + const list = await res.json(); + return (Array.isArray(list) ? list : []) + .filter((e: any) => e.type === 'dir') + .map((e: any) => e.name) + .sort((a: string, b: string) => a.localeCompare(b)); +} + +/** + * Given a service (spec folder), list provider namespaces under resource-manager. + */ +export async function listProvidersForService(service: string): Promise { + const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/specification/${service}/resource-manager`; + const res = await fetch(url); + if (!res.ok) { + // Sometimes service has alternate structure or doesn't exist + throw new Error(`Failed to list providers for service '${service}': ${res.status}`); + } + const list = await res.json(); + return (Array.isArray(list) ? list : []) + .filter((e: any) => e.type === 'dir') + .map((e: any) => e.name) + .sort((a: string, b: string) => a.localeCompare(b)); +} + +/** + * For service + provider, list API version directories under stable/ and preview/. 
+ * Returns map: { stable: string[], preview: string[] } + */ +export async function listApiVersions(service: string, provider: string): Promise<{ stable: string[]; preview: string[] }> { + const base = `specification/${service}/resource-manager/${provider}`; + const folders = ['stable', 'preview'] as const; + const result: { stable: string[]; preview: string[] } = { stable: [], preview: [] }; + for (const f of folders) { + const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/${base}/${f}`; + const res = await fetch(url); + if (!res.ok) { + // ignore missing + continue; + } + const list = await res.json(); + const versions = (Array.isArray(list) ? list : []) + .filter((e: any) => e.type === 'dir') + .map((e: any) => e.name) + .sort((a: string, b: string) => a.localeCompare(b, undefined, { numeric: true })); + result[f] = versions; + } + return result; +} + +/** + * For a given service/provider/version, find likely swagger files (.json) under that version path. + * Returns array of repo-relative file paths (starting with specification/...). + */ +export async function listSwaggerFiles(service: string, provider: string, stability: 'stable'|'preview', version: string): Promise { + const dir = `specification/${service}/resource-manager/${provider}/${stability}/${version}`; + const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/${dir}`; + const res = await fetch(url); + if (!res.ok) { + throw new Error(`Failed to list files for ${dir}: ${res.status}`); + } + const list = await res.json(); + const files: any[] = Array.isArray(list) ? list : []; + // Find JSON files; prefer names ending with provider or service + const jsons = files.filter(f => f.type === 'file' && f.name.endsWith('.json')); + const preferred = jsons.filter(f => new RegExp(`${provider.split('.').pop()}|${service}`, 'i').test(f.name)); + const ordered = (preferred.length ? 
preferred : jsons).map(f => f.path); + return ordered; +} + +/** + * Resolve the four Autorest inputs given service, provider, and version path. + */ +export async function resolveAutorestInputs(params: { + service: string; + provider: string; + stability: 'stable'|'preview'; + version: string; + swaggerPath?: string; // optional repo-relative path override +}): Promise<{ serviceName: string; commitId: string; serviceSpecs: string; swaggerFileSpecs: string }> { + const commitId = await getSpecsHeadCommitSha('main'); + const serviceSpecs = `${params.service}/resource-manager`; + let swaggerFileSpecs = params.swaggerPath ?? ''; + if (!swaggerFileSpecs) { + const candidates = await listSwaggerFiles(params.service, params.provider, params.stability, params.version); + if (candidates.length === 0) { + throw new Error(`No swagger files found for ${params.service}/${params.provider}/${params.stability}/${params.version}`); + } + swaggerFileSpecs = candidates[0]; + } + return { + serviceName: params.provider.replace(/^Microsoft\./, ''), + commitId, + serviceSpecs, + swaggerFileSpecs + }; +} + export async function findAllPolyMorphism(workingDirectory: string): Promise>> { const polymorphism = new Map>(); const moduleReadmePath = path.join(workingDirectory, "README.md"); diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index 0fd3aebd48bb..4ecca53e9e7c 100644 --- a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -12,16 +12,15 @@ # Instructions -## Stage 1: Capturing Placeholder Values -- Ask the user for the following placeholder values: serviceName, commitId, serviceSpecs, swaggerFileSpecs. 
- - Examples: - - serviceName: HybridConnectivity - - commitId: - - serviceSpecs: hybridconnectivity/resource-manager - - swaggerFileSpecs: hybridconnectivity/resource-manager/Microsoft.HybridConnectivity/stable/2024-12-01/hybridconnectivity.json -- Do not replace or modify this prompt file. -- Store the values for use in later steps like generating the README and executing Autorest. -- Once values are stored, mark Stage 1 as complete. +## Stage 1: Fuzzy selection and autorest inputs (reduced user input) +- Ask the user for only the approximate Azure service/module name (e.g., "hybrid connectivity"). +- Call the MCP tool "list-spec-modules" to fetch all service folders from azure-rest-api-specs/specification. +- Fuzzily match the user's input to the closest service name. Show top 3 matches and ask the user to confirm the service folder to use. +- Call the MCP tool "list-providers" with the chosen service to retrieve provider namespaces. If multiple providers are returned, ask the user to pick one; if only one, select it automatically. +- Ask the user what they want to call the PowerShell module title/service-name (e.g., HybridConnectivity). This is the display/module name, not the spec folder name. +- Call the MCP tool "list-api-versions" with service and provider to get available versions, separated by Stable and Preview. Ask the user to choose stability (stable/preview) and a specific API version. +- Call the MCP tool "resolve-autorest-inputs" with service, provider, stability, and version to compute the 4 inputs: serviceName, commitId, serviceSpecs, swaggerFileSpecs. +- Store the resolved values for later steps (README generation and Autorest). Mark Stage 1 complete. ## Stage 2: Generating partner powershell module - FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. 
@@ -74,7 +73,7 @@ try-require: - $(repo)/specification//readme.powershell.md input-file: - - $(repo)/ + - $(repo)/specification/ module-version: 0.1.0 diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json index 4b1dddf55193..1382bb1dcd44 100644 --- a/tools/Mcp/src/specs/responses.json +++ b/tools/Mcp/src/specs/responses.json @@ -29,6 +29,26 @@ "type": "tool", "text": "Read examples from specs are under {0}. Implement empty test stubs under {1}. Test stubs are named as '.Test.ps1'. Define variables in function 'setupEnv' in 'utils.ps1' under {1}, and use these variables for test cases. Value of these variables are from {0}. Leave test cases as empty if you don't find any matches. You are expert in Azure-PowerShell and Autorest.PowerShell, You know how to map data from {0} to {1}. " }, + { + "name": "list-spec-modules", + "type": "tool", + "text": "Available modules under azure-rest-api-specs/specification: {0}" + }, + { + "name": "list-providers", + "type": "tool", + "text": "Providers for service {0}: {1}" + }, + { + "name": "list-api-versions", + "type": "tool", + "text": "API versions for {0}/{1} — Stable: {2} | Preview: {3}" + }, + { + "name": "resolve-autorest-inputs", + "type": "tool", + "text": "Resolved inputs — serviceName: {0}, commitId: {1}, serviceSpecs: {2}, swaggerFileSpecs: {3}" + }, { "name": "create-greeting", "type": "prompt", @@ -37,6 +57,6 @@ { "name": "partner-module-workflow", "type": "prompt", - "text": "@file:prompts/partner-module-workflow.md" + "text": "@file:prompts/partner-module-workflow.md" } ] \ No newline at end of file diff --git a/tools/Mcp/src/specs/specs.json b/tools/Mcp/src/specs/specs.json index 98c85eba76d8..947baf2349e5 100644 --- a/tools/Mcp/src/specs/specs.json +++ b/tools/Mcp/src/specs/specs.json @@ -72,6 +72,41 @@ ], "callbackName": "createTestsFromSpecs" } + , + { + "name": "list-spec-modules", + "description": "List all top-level modules (service folders) under 
azure-rest-api-specs/specification.", + "parameters": [], + "callbackName": "listSpecModules" + }, + { + "name": "list-providers", + "description": "List provider namespaces for a given service under resource-manager.", + "parameters": [ + { "name": "service", "description": "Service folder name under specification (e.g., hybridconnectivity)", "type": "string" } + ], + "callbackName": "listProvidersForService" + }, + { + "name": "list-api-versions", + "description": "List available API versions for a given service and provider (stable/preview).", + "parameters": [ + { "name": "service", "description": "Service folder name under specification", "type": "string" }, + { "name": "provider", "description": "Provider namespace folder under the service (e.g., Microsoft.HybridConnectivity)", "type": "string" } + ], + "callbackName": "listApiVersions" + }, + { + "name": "resolve-autorest-inputs", + "description": "Resolve the four Autorest inputs (serviceName, commitId, serviceSpecs, swaggerFileSpecs) from service/provider/version.", + "parameters": [ + { "name": "service", "description": "Service folder name under specification", "type": "string" }, + { "name": "provider", "description": "Provider namespace under the service", "type": "string" }, + { "name": "stability", "description": "'stable' or 'preview'", "type": "string" }, + { "name": "version", "description": "API version (e.g., 2024-12-01)", "type": "string" } + ], + "callbackName": "resolveAutorestInputs" + } ], "prompts": [ { From 022babc3812cffa249e22583c4c23f74f5746efd Mon Sep 17 00:00:00 2001 From: Yash Date: Tue, 26 Aug 2025 12:12:05 +1000 Subject: [PATCH 12/24] Updated the fuzzy search flow --- .../specs/prompts/partner-module-workflow.md | 35 ++++++++++++------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index 4ecca53e9e7c..255a7cfbf7bc 100644 --- 
a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -12,24 +12,33 @@ # Instructions -## Stage 1: Fuzzy selection and autorest inputs (reduced user input) -- Ask the user for only the approximate Azure service/module name (e.g., "hybrid connectivity"). -- Call the MCP tool "list-spec-modules" to fetch all service folders from azure-rest-api-specs/specification. -- Fuzzily match the user's input to the closest service name. Show top 3 matches and ask the user to confirm the service folder to use. -- Call the MCP tool "list-providers" with the chosen service to retrieve provider namespaces. If multiple providers are returned, ask the user to pick one; if only one, select it automatically. -- Ask the user what they want to call the PowerShell module title/service-name (e.g., HybridConnectivity). This is the display/module name, not the spec folder name. -- Call the MCP tool "list-api-versions" with service and provider to get available versions, separated by Stable and Preview. Ask the user to choose stability (stable/preview) and a specific API version. -- Call the MCP tool "resolve-autorest-inputs" with service, provider, stability, and version to compute the 4 inputs: serviceName, commitId, serviceSpecs, swaggerFileSpecs. +## Stage 1: Interactive spec selection and autorest resolution +- Ask the user for their desired **PowerShell module name** (e.g., "HybridConnectivity") +- Call the MCP tool "list-spec-modules" to fetch all available specification folders from azure-rest-api-specs/specification. +- From the full list, present 10 most relevant spec options to the user based on their PowerShell module name, or show a representative sample if no clear match. +- Ask the user to choose which specification they want to use from the presented options, or ask if they want to see more options. 
+- **Confirm the spec choice**: Once user selects a spec, ask them to confirm this is the correct specification for their needs (show the spec name clearly). +- Call the MCP tool "list-providers" with the chosen spec folder to retrieve available provider namespaces. +- Present the list of providers to the user: + - If multiple providers are returned, ask the user to pick one + - If only one provider exists, select it automatically but confirm with the user +- **Confirm the provider choice**: Ask the user to confirm this is the correct provider namespace. +- Call the MCP tool "list-api-versions" with the chosen spec folder and provider to get available versions, separated by Stable and Preview. +- Present the API version options to the user and ask them to choose: + 1. **Stability**: stable or preview + 2. **API version**: specific version from the available list +- **Confirm the API version choice**: Ask the user to confirm their stability and version selection. +- Call the MCP tool "resolve-autorest-inputs" with the chosen spec folder, provider, stability, and version to compute the 4 autorest inputs: serviceName, commitId, serviceSpecs, swaggerFileSpecs. - Store the resolved values for later steps (README generation and Autorest). Mark Stage 1 complete. ## Stage 2: Generating partner powershell module - FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. - Navigate to the `src` folder in the home "azure-powershell" directory. -- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) -- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. +- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) +- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. - Create a new file `README.md`. (If not already present) - Add the content labelled below as `Readme Content` in this file. -- Use the "generate-autorest" mcp tool to generate the module. 
+- Use the "generate-autorest" mcp tool to generate the module. - Stage 2 Complete. ## Stage 3: Updating Example Files @@ -77,8 +86,8 @@ input-file: module-version: 0.1.0 -title: -service-name: +title: +service-name: subject-prefix: $(service-name) directive: From 893848a8aa4278a17c3a3cce78148fd0300182d0 Mon Sep 17 00:00:00 2001 From: Yash Date: Tue, 26 Aug 2025 12:13:51 +1000 Subject: [PATCH 13/24] updated old method as well --- tools/Mcp/test/vscode/mcpprompt.md | 38 ++++++++++++++++++------------ 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/tools/Mcp/test/vscode/mcpprompt.md b/tools/Mcp/test/vscode/mcpprompt.md index 3e0a9d20dc06..255a7cfbf7bc 100644 --- a/tools/Mcp/test/vscode/mcpprompt.md +++ b/tools/Mcp/test/vscode/mcpprompt.md @@ -12,25 +12,33 @@ # Instructions -## Stage 1: Capturing Placeholder Values -- Ask the user for the following placeholder values: serviceName, commitId, serviceSpecs, swaggerFileSpecs. - - Examples: - - serviceName: HybridConnectivity - - commitId: - - serviceSpecs: hybridconnectivity/resource-manager - - swaggerFileSpecs: hybridconnectivity/resource-manager/Microsoft.HybridConnectivity/stable/2024-12-01/hybridconnectivity.json -- Do not replace or modify this prompt file. -- Store the values for use in later steps like generating the README and executing Autorest. -- Once values are stored, mark Stage 1 as complete. +## Stage 1: Interactive spec selection and autorest resolution +- Ask the user for their desired **PowerShell module name** (e.g., "HybridConnectivity") +- Call the MCP tool "list-spec-modules" to fetch all available specification folders from azure-rest-api-specs/specification. +- From the full list, present 10 most relevant spec options to the user based on their PowerShell module name, or show a representative sample if no clear match. +- Ask the user to choose which specification they want to use from the presented options, or ask if they want to see more options. 
+- **Confirm the spec choice**: Once user selects a spec, ask them to confirm this is the correct specification for their needs (show the spec name clearly). +- Call the MCP tool "list-providers" with the chosen spec folder to retrieve available provider namespaces. +- Present the list of providers to the user: + - If multiple providers are returned, ask the user to pick one + - If only one provider exists, select it automatically but confirm with the user +- **Confirm the provider choice**: Ask the user to confirm this is the correct provider namespace. +- Call the MCP tool "list-api-versions" with the chosen spec folder and provider to get available versions, separated by Stable and Preview. +- Present the API version options to the user and ask them to choose: + 1. **Stability**: stable or preview + 2. **API version**: specific version from the available list +- **Confirm the API version choice**: Ask the user to confirm their stability and version selection. +- Call the MCP tool "resolve-autorest-inputs" with the chosen spec folder, provider, stability, and version to compute the 4 autorest inputs: serviceName, commitId, serviceSpecs, swaggerFileSpecs. +- Store the resolved values for later steps (README generation and Autorest). Mark Stage 1 complete. ## Stage 2: Generating partner powershell module - FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. - Navigate to the `src` folder in the home "azure-powershell" directory. -- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) -- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. +- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) +- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. - Create a new file `README.md`. (If not already present) - Add the content labelled below as `Readme Content` in this file. -- Use the "generate-autorest" mcp tool to generate the module. 
+- Use the "generate-autorest" mcp tool to generate the module. - Stage 2 Complete. ## Stage 3: Updating Example Files @@ -78,8 +86,8 @@ input-file: module-version: 0.1.0 -title: -service-name: +title: +service-name: subject-prefix: $(service-name) directive: From e5227f289f1bc095850940d6145a5a70e9b6717a Mon Sep 17 00:00:00 2001 From: Yash Date: Sun, 21 Sep 2025 02:45:28 +1000 Subject: [PATCH 14/24] Ellicitation support for Stage 1 Inputs, Added ResourcesService --- tools/Mcp/src/CodegenServer.ts | 24 +- .../Get-AzDatabricksAccessConnector.md | 177 ++++ ...icksOutboundNetworkDependenciesEndpoint.md | 126 +++ .../examples/Get-AzDatabricksVNetPeering.md | 189 ++++ .../examples/Get-AzDatabricksWorkspace.md | 181 ++++ .../New-AzDatabricksAccessConnector.md | 239 +++++ .../examples/New-AzDatabricksVNetPeering.md | 317 ++++++ .../examples/New-AzDatabricksWorkspace.md | 915 ++++++++++++++++++ ...cksWorkspaceProviderAuthorizationObject.md | 84 ++ .../Remove-AzDatabricksAccessConnector.md | 217 +++++ .../Remove-AzDatabricksVNetPeering.md | 232 +++++ .../examples/Remove-AzDatabricksWorkspace.md | 233 +++++ .../Update-AzDatabricksAccessConnector.md | 264 +++++ .../Update-AzDatabricksVNetPeering.md | 362 +++++++ .../examples/Update-AzDatabricksWorkspace.md | 799 +++++++++++++++ .../src/ideal-modules/Databricks/metadata.md | 57 ++ .../AzDatabricksAccessConnector.Tests.ps1 | 65 ++ .../tests/AzDatabricksVNetPeering.Tests.ps1 | 58 ++ .../tests/AzDatabricksWorkspace.Tests.ps1 | 86 ++ .../ideal-modules/Databricks/tests/utils.ps1 | 114 +++ tools/Mcp/src/services/resourcesService.ts | 60 ++ tools/Mcp/src/services/toolsService.ts | 195 +++- tools/Mcp/src/services/utils.ts | 26 + .../Mcp/src/specs/autorest-readme-template.md | 63 ++ .../specs/prompts/partner-module-workflow.md | 78 +- tools/Mcp/src/specs/responses.json | 25 +- tools/Mcp/src/specs/specs.json | 34 +- tools/Mcp/src/types.ts | 7 + 28 files changed, 5076 insertions(+), 151 deletions(-) create mode 100644 
tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksAccessConnector.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksVNetPeering.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksWorkspace.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksAccessConnector.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksVNetPeering.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspace.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspaceProviderAuthorizationObject.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksAccessConnector.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksVNetPeering.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksWorkspace.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksAccessConnector.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksVNetPeering.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksWorkspace.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/metadata.md create mode 100644 tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksAccessConnector.Tests.ps1 create mode 100644 tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksVNetPeering.Tests.ps1 create mode 100644 tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksWorkspace.Tests.ps1 create mode 100644 tools/Mcp/src/ideal-modules/Databricks/tests/utils.ps1 create mode 100644 tools/Mcp/src/specs/autorest-readme-template.md diff --git a/tools/Mcp/src/CodegenServer.ts 
b/tools/Mcp/src/CodegenServer.ts index e2a10375fc4d..b59c99ccd3d5 100644 --- a/tools/Mcp/src/CodegenServer.ts +++ b/tools/Mcp/src/CodegenServer.ts @@ -1,9 +1,10 @@ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js"; import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; import { z } from "zod"; -import { responseSchema, toolParameterSchema, toolSchema, promptSchema } from "./types.js"; +import { responseSchema, toolParameterSchema, toolSchema, promptSchema, resourceSchema } from "./types.js"; import { ToolsService } from "./services/toolsService.js"; import { PromptsService } from "./services/promptsService.js"; +import { ResourcesService } from "./services/resourcesService.js"; import { readFileSync } from "fs"; import path from "path"; import { fileURLToPath } from "url"; @@ -37,6 +38,7 @@ export class CodegenServer { this.initResponses(); this.initTools(); this.initPrompts(); + this.initResources(); } // dummy method for sending sampling request @@ -74,6 +76,9 @@ export class CodegenServer { await this._mcp.connect(transport); } + public getResponseTemplate(name: string): string | undefined { + return this._responses.get(name); + } initTools() { const toolsService = ToolsService.getInstance().setServer(this); @@ -105,6 +110,21 @@ export class CodegenServer { } } + initResources() { + const resourcesService = ResourcesService.getInstance().setServer(this); + const resourcesSchemas = (specs.resources || []) as resourceSchema[]; + for (const schema of resourcesSchemas) { + const parameter = resourcesService.createResourceParametersFromSchema(schema.parameters || []); + const callback = resourcesService.getResources(schema.callbackName, this._responses.get(schema.name)); + this._mcp.resource( + schema.name, + schema.description, + parameter, + (args: any) => callback(args) + ); + } + } + initResponses() { (responses as responseSchema[])?.forEach((response: responseSchema) => { let text = response.text; @@ -120,4 +140,6 @@ 
export class CodegenServer { this._responses.set(response.name, text); }); } + + } diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksAccessConnector.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksAccessConnector.md new file mode 100644 index 000000000000..10b2c575a472 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksAccessConnector.md @@ -0,0 +1,177 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/get-azdatabricksaccessconnector +schema: 2.0.0 +--- + +# Get-AzDatabricksAccessConnector + +## SYNOPSIS +Gets an Azure Databricks Access Connector. + +## SYNTAX + +### List1 (Default) +``` +Get-AzDatabricksAccessConnector [-SubscriptionId ] [-DefaultProfile ] + [] +``` + +### Get +``` +Get-AzDatabricksAccessConnector -Name -ResourceGroupName [-SubscriptionId ] + [-DefaultProfile ] [] +``` + +### List +``` +Get-AzDatabricksAccessConnector -ResourceGroupName [-SubscriptionId ] + [-DefaultProfile ] [] +``` + +### GetViaIdentity +``` +Get-AzDatabricksAccessConnector -InputObject [-DefaultProfile ] + [] +``` + +## DESCRIPTION +Gets an Azure Databricks Access Connector. + +## EXAMPLES + +### Example 1: List all access connectors under a subscription. +```powershell +Get-AzDatabricksAccessConnector +``` + +```output +Location Name ResourceGroupName +-------- ---- ----------------- +eastus azps-databricks-accessconnector azps_test_gp_db +``` + +This command lists all access connectors under a subscription. + +### Example 2: List all access connectors under a resource group. +```powershell +Get-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db +``` + +```output +Location Name ResourceGroupName +-------- ---- ----------------- +eastus azps-databricks-accessconnector azps_test_gp_db +``` + +This command lists all access connectors under a resource group. 
+ +### Example 3: Get a access connectors by name. +```powershell +Get-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db -Name azps-databricks-accessconnector +``` + +```output +Location Name ResourceGroupName +-------- ---- ----------------- +eastus azps-databricks-accessconnector azps_test_gp_db +``` + +This command gets a access connectors by name. + +## PARAMETERS + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity Parameter +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: GetViaIdentity +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the Azure Databricks Access Connector. + +```yaml +Type: System.String +Parameter Sets: Get +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: Get, List +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. 
+ +```yaml +Type: System.String[] +Parameter Sets: List1, Get, List +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IAccessConnector + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md new file mode 100644 index 000000000000..a594d207aaf6 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md @@ -0,0 +1,126 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/get-azdatabricksoutboundnetworkdependenciesendpoint +schema: 2.0.0 +--- + +# Get-AzDatabricksOutboundNetworkDependenciesEndpoint + +## SYNOPSIS +Gets the list of endpoints that VNET Injected Workspace calls Azure Databricks Control Plane. +You must configure outbound access with these endpoints. 
+For more information, see https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/udr + +## SYNTAX + +``` +Get-AzDatabricksOutboundNetworkDependenciesEndpoint -ResourceGroupName -WorkspaceName + [-SubscriptionId ] [-DefaultProfile ] + [] +``` + +## DESCRIPTION +Gets the list of endpoints that VNET Injected Workspace calls Azure Databricks Control Plane. +You must configure outbound access with these endpoints. +For more information, see https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/udr + +## EXAMPLES + +### Example 1: Gets the list of endpoints that VNET Injected Workspace calls Azure Databricks Control Plane. +```powershell +Get-AzDatabricksOutboundNetworkDependenciesEndpoint -ResourceGroupName azps_test_gp_db -WorkspaceName azps-databricks-workspace-t2 +``` + +```output +Category +-------- +Webapp +Control Plane NAT +Extended infrastructure +Azure Storage +Azure My SQL +Azure Servicebus +``` + +This command gets the list of endpoints that VNET Injected Workspace calls Azure Databricks Control Plane. +You must configure outbound access with these endpoints. +For more information, see https://learn.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/udr + +## PARAMETERS + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WorkspaceName +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IOutboundEnvironmentEndpoint + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksVNetPeering.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksVNetPeering.md new file mode 100644 index 000000000000..1cbc3cd9e420 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksVNetPeering.md @@ -0,0 +1,189 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/get-azdatabricksvnetpeering +schema: 2.0.0 +--- + +# Get-AzDatabricksVNetPeering + +## SYNOPSIS +Gets the workspace vNet Peering. 
+
+## SYNTAX
+
+### List (Default)
+```
+Get-AzDatabricksVNetPeering -ResourceGroupName <String> [-SubscriptionId <String[]>] -WorkspaceName <String>
+ [-DefaultProfile <PSObject>] [<CommonParameters>]
+```
+
+### Get
+```
+Get-AzDatabricksVNetPeering -Name <String> -ResourceGroupName <String> [-SubscriptionId <String[]>]
+ -WorkspaceName <String> [-DefaultProfile <PSObject>] [-PassThru]
+ [<CommonParameters>]
+```
+
+### GetViaIdentity
+```
+Get-AzDatabricksVNetPeering -InputObject <IDatabricksIdentity> [-DefaultProfile <PSObject>] [-PassThru]
+ [<CommonParameters>]
+```
+
+## DESCRIPTION
+Gets the workspace vNet Peering.
+
+## EXAMPLES
+
+### Example 1: List all vnet peering under a databricks.
+```powershell
+Get-AzDatabricksVNetPeering -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db
+```
+
+```output
+Name            ResourceGroupName
+----            -----------------
+vnet-peering-t1 azps_test_gp_db
+```
+
+This command lists all vnet peering under a databricks.
+
+### Example 2: Get a vnet peering.
+```powershell
+Get-AzDatabricksVNetPeering -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db -Name vnet-peering-t1
+```
+
+```output
+Name            ResourceGroupName
+----            -----------------
+vnet-peering-t1 azps_test_gp_db
+```
+
+This command gets a vnet peering.
+
+## PARAMETERS
+
+### -DefaultProfile
+The DefaultProfile parameter is not functional.
+Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription.
+
+```yaml
+Type: System.Management.Automation.PSObject
+Parameter Sets: (All)
+Aliases: AzureRMContext, AzureCredential
+
+Required: False
+Position: Named
+Default value: None
+Accept pipeline input: False
+Accept wildcard characters: False
+```
+
+### -InputObject
+Identity Parameter
+To construct, see NOTES section for INPUTOBJECT properties and create a hash table.
+ +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: GetViaIdentity +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the workspace vNet peering. + +```yaml +Type: System.String +Parameter Sets: Get +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PassThru +Returns true when the command succeeds + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: Get, GetViaIdentity +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: List, Get +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String[] +Parameter Sets: List, Get +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WorkspaceName +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: List, Get +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+ +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IVirtualNetworkPeering + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksWorkspace.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksWorkspace.md new file mode 100644 index 000000000000..53ab130b9a0c --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksWorkspace.md @@ -0,0 +1,181 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/get-azdatabricksworkspace +schema: 2.0.0 +--- + +# Get-AzDatabricksWorkspace + +## SYNOPSIS +Gets the workspace. + +## SYNTAX + +### List1 (Default) +``` +Get-AzDatabricksWorkspace [-SubscriptionId ] [-DefaultProfile ] + [] +``` + +### Get +``` +Get-AzDatabricksWorkspace -Name -ResourceGroupName [-SubscriptionId ] + [-DefaultProfile ] [] +``` + +### List +``` +Get-AzDatabricksWorkspace -ResourceGroupName [-SubscriptionId ] [-DefaultProfile ] + [] +``` + +### GetViaIdentity +``` +Get-AzDatabricksWorkspace -InputObject [-DefaultProfile ] + [] +``` + +## DESCRIPTION +Gets the workspace. + +## EXAMPLES + +### Example 1: Get a Databricks workspace with name. +```powershell +Get-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t3 +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t3 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t3 +``` + +This command gets a Databricks workspace in a resource group. + +### Example 2: List all Databricks workspaces in a subscription. 
+```powershell +Get-AzDatabricksWorkspace +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t1 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t1 +azps-databricks-workspace-t2 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t2 +azps-databricks-workspace-t3 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t3 +``` + +This command lists all Databricks workspaces in a subscription. + +### Example 3: List all Databricks workspaces in a resource group. +```powershell +Get-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t1 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t1 +azps-databricks-workspace-t2 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t2 +azps-databricks-workspace-t3 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t3 +``` + +This command lists all Databricks workspaces in a resource group. + +## PARAMETERS + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity Parameter +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. 
+ +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: GetViaIdentity +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: Get +Aliases: WorkspaceName + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: Get, List +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String[] +Parameter Sets: List1, Get, List +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+
+## INPUTS
+
+### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity
+
+## OUTPUTS
+
+### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IWorkspace
+
+## NOTES
+
+## RELATED LINKS
diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksAccessConnector.md b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksAccessConnector.md
new file mode 100644
index 000000000000..7d07a0d568ce
--- /dev/null
+++ b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksAccessConnector.md
@@ -0,0 +1,239 @@
+---
+external help file: Az.Databricks-help.xml
+Module Name: Az.Databricks
+online version: https://learn.microsoft.com/powershell/module/az.databricks/new-azdatabricksaccessconnector
+schema: 2.0.0
+---
+
+# New-AzDatabricksAccessConnector
+
+## SYNOPSIS
+Creates or updates Azure Databricks Access Connector.
+
+## SYNTAX
+
+```
+New-AzDatabricksAccessConnector -Name <String> -ResourceGroupName <String> [-SubscriptionId <String>]
+ -Location <String> [-IdentityType <ManagedServiceIdentityType>] [-Tag <Hashtable>]
+ [-UserAssignedIdentity <Hashtable>] [-DefaultProfile <PSObject>] [-AsJob] [-NoWait]
+ [-WhatIf] [-Confirm] [<CommonParameters>]
+```
+
+## DESCRIPTION
+Creates or updates Azure Databricks Access Connector.
+
+## EXAMPLES
+
+### Example 1: Creates or updates azure databricks accessConnector.
+```powershell
+New-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db -Name azps-databricks-accessconnector -Location eastus -IdentityType 'SystemAssigned'
+```
+
+```output
+Location Name                            ResourceGroupName
+-------- ----                            -----------------
+eastus   azps-databricks-accessconnector azps_test_gp_db
+```
+
+This command creates or updates azure databricks accessConnector.
+ +## PARAMETERS + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -IdentityType +Type of managed service identity (where both SystemAssigned and UserAssigned types are allowed). + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.ManagedServiceIdentityType +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Location +The geo-location where the resource lives + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Name +The name of the Azure Databricks Access Connector. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Tag +Resource tags. + +```yaml +Type: System.Collections.Hashtable +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -UserAssignedIdentity +The set of user assigned identities associated with the resource. +The userAssignedIdentities dictionary keys will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. +The dictionary values can be empty objects ({}) in requests. + +```yaml +Type: System.Collections.Hashtable +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. 
+ +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IAccessConnector + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksVNetPeering.md b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksVNetPeering.md new file mode 100644 index 000000000000..7b8eabffa0fc --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksVNetPeering.md @@ -0,0 +1,317 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/new-azdatabricksvnetpeering +schema: 2.0.0 +--- + +# New-AzDatabricksVNetPeering + +## SYNOPSIS +Creates vNet Peering for workspace. + +## SYNTAX + +``` +New-AzDatabricksVNetPeering -Name -ResourceGroupName -WorkspaceName + [-SubscriptionId ] [-AllowForwardedTraffic] [-AllowGatewayTransit] [-AllowVirtualNetworkAccess] + [-DatabricksAddressSpacePrefix ] [-DatabricksVirtualNetworkId ] + [-RemoteAddressSpacePrefix ] [-RemoteVirtualNetworkId ] [-UseRemoteGateway] + [-DefaultProfile ] [-AsJob] [-NoWait] [-WhatIf] [-Confirm] + [] +``` + +## DESCRIPTION +Creates vNet Peering for workspace. + +## EXAMPLES + +### Example 1: Create a vnet peering for databricks. 
+```powershell +New-AzDatabricksVNetPeering -Name vnet-peering-t1 -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db -RemoteVirtualNetworkId '/subscriptions/{subId}/resourceGroups/azps_test_gp_db/providers/Microsoft.Network/virtualNetworks/azps-VNnet-t1' +``` + +```output +Name ResourceGroupName +---- ----------------- +vnet-peering-t1 azps_test_gp_db +``` + +This command creates a vnet peering for databricks. + +## PARAMETERS + +### -AllowForwardedTraffic +Whether the forwarded traffic from the VMs in the local virtual network will be allowed/disallowed in remote virtual network. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AllowGatewayTransit +If gateway links can be used in remote virtual networking to link to this virtual network. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AllowVirtualNetworkAccess +Whether the VMs in the local virtual network space would be able to access the VMs in remote virtual network space. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DatabricksAddressSpacePrefix +A list of address blocks reserved for this virtual network in CIDR notation. 
+ +```yaml +Type: System.String[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DatabricksVirtualNetworkId +The Id of the databricks virtual network. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Name +The name of the workspace vNet peering. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RemoteAddressSpacePrefix +A list of address blocks reserved for this virtual network in CIDR notation. + +```yaml +Type: System.String[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RemoteVirtualNetworkId +The Id of the remote virtual network. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -UseRemoteGateway +If remote gateways can be used on this virtual network. +If the flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network will use gateways of remote virtual network for transit. +Only one peering can have this flag set to true. +This flag cannot be set if virtual network already has a gateway. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WorkspaceName +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. 
+ +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IVirtualNetworkPeering + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspace.md b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspace.md new file mode 100644 index 000000000000..2d17fb3e7d3b --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspace.md @@ -0,0 +1,915 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/new-azdatabricksworkspace +schema: 2.0.0 +--- + +# New-AzDatabricksWorkspace + +## SYNOPSIS +Creates a new workspace. 
+ +## SYNTAX + +``` +New-AzDatabricksWorkspace -Name -ResourceGroupName [-SubscriptionId ] + -Location [-ManagedResourceGroupName ] [-AmlWorkspaceId ] + [-Authorization ] [-DefaultCatalogInitialType ] + [-EnableNoPublicIP] [-EncryptionKeyName ] [-EncryptionKeySource ] + [-EncryptionKeyVaultUri ] [-EncryptionKeyVersion ] [-LoadBalancerBackendPoolName ] + [-LoadBalancerId ] [-ManagedDiskKeySource ] + [-ManagedDiskKeyVaultPropertiesKeyName ] [-ManagedDiskKeyVaultPropertiesKeyVaultUri ] + [-ManagedDiskKeyVaultPropertiesKeyVersion ] [-ManagedDiskRotationToLatestKeyVersionEnabled] + [-ManagedServiceKeySource ] [-ManagedServicesKeyVaultPropertiesKeyName ] + [-ManagedServicesKeyVaultPropertiesKeyVaultUri ] + [-ManagedServicesKeyVaultPropertiesKeyVersion ] [-NatGatewayName ] [-PrepareEncryption] + [-PrivateSubnetName ] [-PublicIPName ] [-PublicNetworkAccess ] + [-PublicSubnetName ] [-RequireInfrastructureEncryption] [-RequiredNsgRule ] + [-Sku ] [-SkuTier ] [-StorageAccountName ] [-StorageAccountSku ] + [-Tag ] [-UiDefinitionUri ] [-VirtualNetworkId ] [-VnetAddressPrefix ] + [-EnhancedSecurityMonitoring ] + [-AutomaticClusterUpdate ] [-ComplianceStandard ] + [-EnhancedSecurityCompliance ] [-AccessConnectorId ] + [-AccessConnectorIdentityType ] [-AccessConnectorUserAssignedIdentityId ] + [-DefaultStorageFirewall ] [-DefaultProfile ] [-AsJob] [-NoWait] + [-WhatIf] [-Confirm] [] +``` + +## DESCRIPTION +Creates a new workspace. + +## EXAMPLES + +### Example 1: Create a Databricks workspace. +```powershell +New-AzDatabricksWorkspace -Name azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db -Location eastus -ManagedResourceGroupName azps_test_gp_kv_t1 -Sku Premium +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t1 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t1 +``` + +This command creates a Databricks workspace. 
+ +### Example 2: Create a Databricks workspace with a customized virtual network. +```powershell +$dlg = New-AzDelegation -Name dbrdl -ServiceName "Microsoft.Databricks/workspaces" +$rdpRule = New-AzNetworkSecurityRuleConfig -Name azps-network-security-rule -Description "Allow RDP" -Access Allow -Protocol Tcp -Direction Inbound -Priority 100 -SourceAddressPrefix Internet -SourcePortRange * -DestinationAddressPrefix * -DestinationPortRange 3389 +$networkSecurityGroup = New-AzNetworkSecurityGroup -ResourceGroupName azps_test_gp_db -Location eastus -Name azps-network-security-group -SecurityRules $rdpRule +$kvSubnet = New-AzVirtualNetworkSubnetConfig -Name azps-vnetwork-sub-kv -AddressPrefix "110.0.1.0/24" -ServiceEndpoint "Microsoft.KeyVault" +$priSubnet = New-AzVirtualNetworkSubnetConfig -Name azps-vnetwork-sub-pri -AddressPrefix "110.0.2.0/24" -NetworkSecurityGroup $networkSecurityGroup -Delegation $dlg +$pubSubnet = New-AzVirtualNetworkSubnetConfig -Name azps-vnetwork-sub-pub -AddressPrefix "110.0.3.0/24" -NetworkSecurityGroup $networkSecurityGroup -Delegation $dlg +$testVN = New-AzVirtualNetwork -Name azps-virtual-network -ResourceGroupName azps_test_gp_db -Location eastus -AddressPrefix "110.0.0.0/16" -Subnet $kvSubnet,$priSubnet,$pubSubnet +$vNetResId = (Get-AzVirtualNetwork -Name azps-virtual-network -ResourceGroupName azps_test_gp_db).Subnets[0].Id +$ruleSet = New-AzKeyVaultNetworkRuleSetObject -DefaultAction Allow -Bypass AzureServices -IpAddressRange "110.0.1.0/24" -VirtualNetworkResourceId $vNetResId +New-AzKeyVault -ResourceGroupName azps_test_gp_db -VaultName azps-keyvault -NetworkRuleSet $ruleSet -Location eastus -Sku 'Premium' -EnablePurgeProtection +New-AzDatabricksWorkspace -Name azps-databricks-workspace-t2 -ResourceGroupName azps_test_gp_db -Location eastus -ManagedResourceGroupName azps_test_gp_kv_t2 -VirtualNetworkId $testVN.Id -PrivateSubnetName $priSubnet.Name -PublicSubnetName $pubSubnet.Name -Sku Premium +``` + +```output +Name 
ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t2 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t2 +``` + +This command creates a Databricks workspace with customized virtual network in a resource group. + +### Example 3: Create a Databricks workspace with enable encryption. +```powershell +New-AzDatabricksWorkspace -Name azps-databricks-workspace-t3 -ResourceGroupName azps_test_gp_db -Location eastus -PrepareEncryption -ManagedResourceGroupName azps_test_gp_kv_t3 -Sku premium +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t3 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t3 +``` + +This command creates a Databricks workspace and sets it to prepare for encryption. +Please refer to the examples of Update-AzDatabricksWorkspace for more settings to encryption. + +## PARAMETERS + +### -AccessConnectorId +The resource ID of Azure Databricks Access Connector Resource. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AccessConnectorIdentityType +The identity type of the Access Connector Resource. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.IdentityType +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AccessConnectorUserAssignedIdentityId +The resource ID of the User Assigned Identity associated with the Access Connector Resource. +This is required for type 'UserAssigned' and not valid for type 'SystemAssigned'. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AmlWorkspaceId +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Authorization +The workspace provider authorizations. +To construct, see NOTES section for AUTHORIZATION properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IWorkspaceProviderAuthorization[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AutomaticClusterUpdate +Status of automated cluster updates feature. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.AutomaticClusterUpdateValue +Parameter Sets: (All) +Aliases: AutomaticClusterUpdateValue + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ComplianceStandard +Compliance standards associated with the workspace. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.ComplianceStandard[] +Parameter Sets: (All) +Aliases: ComplianceSecurityProfileComplianceStandard + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultCatalogInitialType +Defines the initial type of the default catalog. 
+Possible values (case-insensitive): HiveMetastore, UnityCatalog + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.InitialType +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultStorageFirewall +Gets or Sets Default Storage Firewall configuration information + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.DefaultStorageFirewall +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EnableNoPublicIP +The value which should be used for this field. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeyName +The name of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeySource +The encryption keySource (provider). 
+Possible values (case-insensitive): Default, Microsoft.Keyvault + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.KeySource +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeyVaultUri +The Uri of KeyVault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeyVersion +The version of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EnhancedSecurityCompliance +Status of Compliance Security Profile feature. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.ComplianceSecurityProfileValue +Parameter Sets: (All) +Aliases: ComplianceSecurityProfileValue + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EnhancedSecurityMonitoring +Status of Enhanced Security Monitoring feature. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.EnhancedSecurityMonitoringValue +Parameter Sets: (All) +Aliases: EnhancedSecurityMonitoringValue + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -LoadBalancerBackendPoolName +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -LoadBalancerId +The value which should be used for this field. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Location +The geo-location where the resource lives + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeySource +The encryption keySource (provider). +Possible values (case-insensitive): Microsoft.Keyvault + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.EncryptionKeySource +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeyVaultPropertiesKeyName +The name of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeyVaultPropertiesKeyVaultUri +The URI of KeyVault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeyVaultPropertiesKeyVersion +The version of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskRotationToLatestKeyVersionEnabled +Indicate whether the latest key version should be automatically used for Managed Disk Encryption. 
+ +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedResourceGroupName +The managed resource group Id. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServiceKeySource +The encryption keySource (provider). +Possible values (case-insensitive): Microsoft.Keyvault + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.EncryptionKeySource +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServicesKeyVaultPropertiesKeyName +The name of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServicesKeyVaultPropertiesKeyVaultUri +The Uri of KeyVault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServicesKeyVaultPropertiesKeyVersion +The version of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Name +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: WorkspaceName + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NatGatewayName +The value which should be used for this field. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PrepareEncryption +The value which should be used for this field. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PrivateSubnetName +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PublicIPName +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PublicNetworkAccess +The network access type for accessing workspace. +Set value to disabled to access workspace only via private link. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.PublicNetworkAccess +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PublicSubnetName +The value which should be used for this field. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RequiredNsgRule +Gets or sets a value indicating whether data plane (clusters) to control plane communication happen over private endpoint. +Supported values are 'AllRules' and 'NoAzureDatabricksRules'. +'NoAzureServiceRules' value is for internal use only. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.RequiredNsgRules +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RequireInfrastructureEncryption +The value which should be used for this field. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Sku +The SKU name. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SkuTier +The SKU tier. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -StorageAccountName +The value which should be used for this field. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -StorageAccountSku +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Tag +Resource tags. + +```yaml +Type: System.Collections.Hashtable +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -UiDefinitionUri +The blob URI where the UI definition file is located. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -VirtualNetworkId +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -VnetAddressPrefix +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. 
+ +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IWorkspace + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspaceProviderAuthorizationObject.md b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspaceProviderAuthorizationObject.md new file mode 100644 index 000000000000..1d83466872a2 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspaceProviderAuthorizationObject.md @@ -0,0 +1,84 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/Az.Databricks/new-AzDatabricksWorkspaceProviderAuthorizationObject +schema: 2.0.0 +--- + +# New-AzDatabricksWorkspaceProviderAuthorizationObject + +## SYNOPSIS +Create an in-memory object for WorkspaceProviderAuthorization. + +## SYNTAX + +``` +New-AzDatabricksWorkspaceProviderAuthorizationObject -PrincipalId -RoleDefinitionId + [] +``` + +## DESCRIPTION +Create an in-memory object for WorkspaceProviderAuthorization. 
+ +## EXAMPLES + +### Example 1: Create an in-memory object for WorkspaceProviderAuthorization. +```powershell +New-AzDatabricksWorkspaceProviderAuthorizationObject -PrincipalId 024d7367-0890-4ad3-8140-e37374722820 -RoleDefinitionId 2124844c-7e23-48cc-bc52-a3af25f7a4ae +``` + +```output +PrincipalId RoleDefinitionId +----------- ---------------- +024d7367-0890-4ad3-8140-e37374722820 2124844c-7e23-48cc-bc52-a3af25f7a4ae +``` + +Create an in-memory object for WorkspaceProviderAuthorization. + +## PARAMETERS + +### -PrincipalId +The provider's principal identifier. +This is the identity that the provider will use to call ARM to manage the workspace resources. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RoleDefinitionId +The provider's role definition identifier. +This role will define all the permissions that the provider must have on the workspace's container resource group. +This role definition cannot have permission to delete the resource group. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+ +## INPUTS + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.WorkspaceProviderAuthorization + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksAccessConnector.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksAccessConnector.md new file mode 100644 index 000000000000..ffeec43eeb83 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksAccessConnector.md @@ -0,0 +1,217 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/remove-azdatabricksaccessconnector +schema: 2.0.0 +--- + +# Remove-AzDatabricksAccessConnector + +## SYNOPSIS +Deletes the Azure Databricks Access Connector. + +## SYNTAX + +### Delete (Default) +``` +Remove-AzDatabricksAccessConnector -Name -ResourceGroupName [-SubscriptionId ] + [-DefaultProfile ] [-AsJob] [-NoWait] [-PassThru] [-WhatIf] + [-Confirm] [] +``` + +### DeleteViaIdentity +``` +Remove-AzDatabricksAccessConnector -InputObject [-DefaultProfile ] [-AsJob] + [-NoWait] [-PassThru] [-WhatIf] [-Confirm] [] +``` + +## DESCRIPTION +Deletes the Azure Databricks Access Connector. + +## EXAMPLES + +### Example 1: Deletes the azure databricks accessConnector. +```powershell +Remove-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db -Name azps-databricks-accessconnector +``` + +This command deletes the azure databricks accessConnector. + +### Example 2: Deletes the azure databricks accessConnector by pipeline. +```powershell +Get-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db -Name azps-databricks-accessconnector | Remove-AzDatabricksAccessConnector +``` + +This command deletes the azure databricks accessConnector by pipeline. 
+ +## PARAMETERS + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity Parameter +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: DeleteViaIdentity +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the Azure Databricks Access Connector. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PassThru +Returns true when the command succeeds + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. 
+ +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+ +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### System.Boolean + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksVNetPeering.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksVNetPeering.md new file mode 100644 index 000000000000..4ec972648ab7 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksVNetPeering.md @@ -0,0 +1,232 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/remove-azdatabricksvnetpeering +schema: 2.0.0 +--- + +# Remove-AzDatabricksVNetPeering + +## SYNOPSIS +Deletes the workspace vNetPeering. + +## SYNTAX + +### Delete (Default) +``` +Remove-AzDatabricksVNetPeering -Name -ResourceGroupName [-SubscriptionId ] + -WorkspaceName [-DefaultProfile ] [-AsJob] [-NoWait] [-PassThru] + [-WhatIf] [-Confirm] [] +``` + +### DeleteViaIdentity +``` +Remove-AzDatabricksVNetPeering -InputObject [-DefaultProfile ] [-AsJob] + [-NoWait] [-PassThru] [-WhatIf] [-Confirm] [] +``` + +## DESCRIPTION +Deletes the workspace vNetPeering. + +## EXAMPLES + +### Example 1: Remove a vnet peering of databricks by name. +```powershell +Remove-AzDatabricksVNetPeering -Name vnet-peering-t1 -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db +``` + +This command removes a vnet peering of databricks by name. + +### Example 2: Remove a vnet peering of databricks by object. +```powershell +Get-AzDatabricksVNetPeering -Name vnet-peering-t1 -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db | Remove-AzDatabricksVNetPeering +``` + +This command removes a vnet peering of databricks by object. 
+ +## PARAMETERS + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity Parameter +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: DeleteViaIdentity +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the workspace vNet peering. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PassThru +Returns true when the command succeeds + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. 
+ +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WorkspaceName +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+ +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### System.Boolean + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksWorkspace.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksWorkspace.md new file mode 100644 index 000000000000..9fdfa9fe5a20 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksWorkspace.md @@ -0,0 +1,233 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/remove-azdatabricksworkspace +schema: 2.0.0 +--- + +# Remove-AzDatabricksWorkspace + +## SYNOPSIS +Deletes the workspace. + +## SYNTAX + +### Delete (Default) +``` +Remove-AzDatabricksWorkspace -Name -ResourceGroupName [-SubscriptionId ] + [-ForceDeletion] [-DefaultProfile ] [-AsJob] [-NoWait] [-PassThru] + [-WhatIf] [-Confirm] [] +``` + +### DeleteViaIdentity +``` +Remove-AzDatabricksWorkspace -InputObject [-ForceDeletion] [-DefaultProfile ] + [-AsJob] [-NoWait] [-PassThru] [-WhatIf] [-Confirm] [] +``` + +## DESCRIPTION +Deletes the workspace. + +## EXAMPLES + +### Example 1: Remove a Databricks workspace. +```powershell +Remove-AzDatabricksWorkspace -Name azps-databricks-workspace -ResourceGroupName azps_test_gp_db +``` + +This command removes a Databricks workspace from a resource group. + +### Example 2: Remove a Databricks workspace by object. +```powershell +Get-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t3 | Remove-AzDatabricksWorkspace +``` + +This command removes a Databricks workspace from a resource group. 
+ +## PARAMETERS + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ForceDeletion +Optional parameter to retain default unity catalog data. +By default the data will retained if Uc is enabled on the workspace. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity Parameter +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: DeleteViaIdentity +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the workspace. 
+ +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: WorkspaceName + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PassThru +Returns true when the command succeeds + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: Delete +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. 
+ +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### System.Boolean + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksAccessConnector.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksAccessConnector.md new file mode 100644 index 000000000000..f78a0c5a8782 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksAccessConnector.md @@ -0,0 +1,264 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/update-azdatabricksaccessconnector +schema: 2.0.0 +--- + +# Update-AzDatabricksAccessConnector + +## SYNOPSIS +Updates an Azure Databricks Access Connector. + +## SYNTAX + +### UpdateExpanded (Default) +``` +Update-AzDatabricksAccessConnector -Name -ResourceGroupName [-SubscriptionId ] + [-IdentityType ] [-IdentityUserAssignedIdentity ] [-Tag ] + [-DefaultProfile ] [-AsJob] [-NoWait] [-WhatIf] [-Confirm] + [] +``` + +### UpdateViaIdentityExpanded +``` +Update-AzDatabricksAccessConnector -InputObject + [-IdentityType ] [-IdentityUserAssignedIdentity ] [-Tag ] + [-DefaultProfile ] [-AsJob] [-NoWait] [-WhatIf] [-Confirm] + [] +``` + +## DESCRIPTION +Updates an Azure Databricks Access Connector. 
+ +## EXAMPLES + +### Example 1: Updates an azure databricks accessConnector. +```powershell +Update-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db -Name azps-databricks-accessconnector -Tag @{'key'='value'} +``` + +```output +Location Name ResourceGroupName +-------- ---- ----------------- +eastus azps-databricks-accessconnector azps_test_gp_db +``` + +This command updates an azure databricks accessConnector. + +### Example 2: Updates an azure databricks accessConnector by pipeline. +```powershell +Get-AzDatabricksAccessConnector -ResourceGroupName azps_test_gp_db -Name azps-databricks-accessconnector | Update-AzDatabricksAccessConnector -Tag @{'key'='value'} +``` + +```output +Location Name ResourceGroupName +-------- ---- ----------------- +eastus azps-databricks-accessconnector azps_test_gp_db +``` + +This command updates an azure databricks accessConnector by pipeline. + +## PARAMETERS + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -IdentityType +Type of managed service identity (where both SystemAssigned and UserAssigned types are allowed). 
+ +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.ManagedServiceIdentityType +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -IdentityUserAssignedIdentity +The set of user assigned identities associated with the resource. +The userAssignedIdentities dictionary keys will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. +The dictionary values can be empty objects ({}) in requests. + +```yaml +Type: System.Collections.Hashtable +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity Parameter +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: UpdateViaIdentityExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the Azure Databricks Access Connector. + +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. 
+ +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. +The value must be an UUID. + +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Tag +Resource tags. + +```yaml +Type: System.Collections.Hashtable +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+ +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IAccessConnector + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksVNetPeering.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksVNetPeering.md new file mode 100644 index 000000000000..2db4e1fea5c4 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksVNetPeering.md @@ -0,0 +1,362 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/update-azdatabricksvnetpeering +schema: 2.0.0 +--- + +# Update-AzDatabricksVNetPeering + +## SYNOPSIS +Update vNet Peering for workspace. + +## SYNTAX + +### UpdateExpanded (Default) +``` +Update-AzDatabricksVNetPeering -Name -ResourceGroupName -WorkspaceName + [-SubscriptionId ] [-AllowForwardedTraffic ] [-AllowGatewayTransit ] + [-AllowVirtualNetworkAccess ] [-DatabricksAddressSpacePrefix ] + [-DatabricksVirtualNetworkId ] [-RemoteAddressSpacePrefix ] + [-RemoteVirtualNetworkId ] [-UseRemoteGateway ] [-DefaultProfile ] [-AsJob] + [-NoWait] [-WhatIf] [-Confirm] [] +``` + +### UpdateViaIdentityExpanded +``` +Update-AzDatabricksVNetPeering -InputObject [-AllowForwardedTraffic ] + [-AllowGatewayTransit ] [-AllowVirtualNetworkAccess ] + [-DatabricksAddressSpacePrefix ] [-DatabricksVirtualNetworkId ] + [-RemoteAddressSpacePrefix ] [-RemoteVirtualNetworkId ] [-UseRemoteGateway ] + [-DefaultProfile ] [-AsJob] [-NoWait] [-WhatIf] [-Confirm] + [] +``` + +## DESCRIPTION +Update vNet Peering for workspace. + +## EXAMPLES + +### Example 1: Update AllowForwardedTraffic of vnet peering. 
+```powershell
+Update-AzDatabricksVNetPeering -Name vnet-peering-t1 -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db -AllowForwardedTraffic $True
+```
+
+```output
+Name ResourceGroupName
+---- -----------------
+vnet-peering-t1 azps_test_gp_db
+```
+
+This command updates AllowForwardedTraffic of vnet peering.
+
+### Example 2: Update AllowGatewayTransit of vnet peering by object.
+```powershell
+Get-AzDatabricksVNetPeering -WorkspaceName azps-databricks-workspace-t1 -ResourceGroupName azps_test_gp_db -Name vnet-peering-t1 | Update-AzDatabricksVNetPeering -AllowGatewayTransit $true
+```
+
+```output
+Name ResourceGroupName
+---- -----------------
+vnet-peering-t1 azps_test_gp_db
+```
+
+This command updates AllowGatewayTransit of vnet peering by object.
+
+## PARAMETERS
+
+### -AllowForwardedTraffic
+[System.Management.Automation.SwitchParameter]
+Whether the forwarded traffic from the VMs in the local virtual network will be allowed/disallowed in remote virtual network.
+
+```yaml
+Type: System.Boolean
+Parameter Sets: (All)
+Aliases:
+
+Required: False
+Position: Named
+Default value: None
+Accept pipeline input: False
+Accept wildcard characters: False
+```
+
+### -AllowGatewayTransit
+[System.Management.Automation.SwitchParameter]
+If gateway links can be used in remote virtual networking to link to this virtual network.
+
+```yaml
+Type: System.Boolean
+Parameter Sets: (All)
+Aliases:
+
+Required: False
+Position: Named
+Default value: None
+Accept pipeline input: False
+Accept wildcard characters: False
+```
+
+### -AllowVirtualNetworkAccess
+[System.Management.Automation.SwitchParameter]
+Whether the VMs in the local virtual network space would be able to access the VMs in remote virtual network space.
+ +```yaml +Type: System.Boolean +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DatabricksAddressSpacePrefix +A list of address blocks reserved for this virtual network in CIDR notation. + +```yaml +Type: System.String[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DatabricksVirtualNetworkId +The Id of the databricks virtual network. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The DefaultProfile parameter is not functional. +Use the SubscriptionId parameter when available if executing the cmdlet against a different subscription. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity parameter. +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: UpdateViaIdentityExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -Name +The name of the VNetPeering. 
+ +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: PeeringName + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RemoteAddressSpacePrefix +A list of address blocks reserved for this virtual network in CIDR notation. + +```yaml +Type: System.String[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RemoteVirtualNetworkId +The Id of the remote virtual network. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. + +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. + +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -UseRemoteGateway +[System.Management.Automation.SwitchParameter] +If remote gateways can be used on this virtual network. +If the flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network will use gateways of remote virtual network for transit. +Only one peering can have this flag set to true. 
+This flag cannot be set if virtual network already has a gateway. + +```yaml +Type: System.Boolean +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WorkspaceName +The name of the workspace. + +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). 
+ +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IVirtualNetworkPeering + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksWorkspace.md b/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksWorkspace.md new file mode 100644 index 000000000000..174152dd46df --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksWorkspace.md @@ -0,0 +1,799 @@ +--- +external help file: Az.Databricks-help.xml +Module Name: Az.Databricks +online version: https://learn.microsoft.com/powershell/module/az.databricks/update-azdatabricksworkspace +schema: 2.0.0 +--- + +# Update-AzDatabricksWorkspace + +## SYNOPSIS +Updates a workspace. + +## SYNTAX + +### UpdateExpanded (Default) +``` +Update-AzDatabricksWorkspace -Name -ResourceGroupName [-SubscriptionId ] + [-PrepareEncryption] [-EncryptionKeySource ] [-EncryptionKeyVaultUri ] + [-EncryptionKeyName ] [-EncryptionKeyVersion ] [-KeyVaultKeyName ] + [-KeyVaultKeyVersion ] [-KeyVaultUri ] [-AmlWorkspaceId ] [-SkuTier ] + [-Authorization ] [-DefaultCatalogInitialType ] + [-ManagedDiskKeySource ] [-ManagedDiskKeyVaultPropertiesKeyName ] + [-ManagedDiskKeyVaultPropertiesKeyVaultUri ] [-ManagedDiskKeyVaultPropertiesKeyVersion ] + [-ManagedDiskRotationToLatestKeyVersionEnabled] [-ManagedServiceKeySource ] + [-ManagedServicesKeyVaultPropertiesKeyName ] [-ManagedServicesKeyVaultPropertiesKeyVaultUri ] + [-ManagedServicesKeyVaultPropertiesKeyVersion ] [-UiDefinitionUri ] [-Tag ] + [-RequiredNsgRule ] [-PublicNetworkAccess ] [-EnableNoPublicIP] + [-EnhancedSecurityMonitoring ] + [-AutomaticClusterUpdate ] [-ComplianceStandard ] + [-EnhancedSecurityCompliance ] [-AccessConnectorId ] + [-AccessConnectorIdentityType ] [-AccessConnectorUserAssignedIdentityId ] + [-DefaultStorageFirewall ] [-DefaultProfile ] [-AsJob] 
[-NoWait] + [-WhatIf] [-Confirm] [] +``` + +### UpdateViaIdentityExpanded +``` +Update-AzDatabricksWorkspace -InputObject [-PrepareEncryption] + [-EncryptionKeySource ] [-EncryptionKeyVaultUri ] [-EncryptionKeyName ] + [-EncryptionKeyVersion ] [-KeyVaultKeyName ] [-KeyVaultKeyVersion ] + [-KeyVaultUri ] [-AmlWorkspaceId ] [-SkuTier ] + [-Authorization ] [-DefaultCatalogInitialType ] + [-ManagedDiskKeySource ] [-ManagedDiskKeyVaultPropertiesKeyName ] + [-ManagedDiskKeyVaultPropertiesKeyVaultUri ] [-ManagedDiskKeyVaultPropertiesKeyVersion ] + [-ManagedDiskRotationToLatestKeyVersionEnabled] [-ManagedServiceKeySource ] + [-ManagedServicesKeyVaultPropertiesKeyName ] [-ManagedServicesKeyVaultPropertiesKeyVaultUri ] + [-ManagedServicesKeyVaultPropertiesKeyVersion ] [-UiDefinitionUri ] [-Tag ] + [-RequiredNsgRule ] [-PublicNetworkAccess ] [-EnableNoPublicIP] + [-EnhancedSecurityMonitoring ] + [-AutomaticClusterUpdate ] [-ComplianceStandard ] + [-EnhancedSecurityCompliance ] [-AccessConnectorId ] + [-AccessConnectorIdentityType ] [-AccessConnectorUserAssignedIdentityId ] + [-DefaultStorageFirewall ] [-DefaultProfile ] [-AsJob] [-NoWait] + [-WhatIf] [-Confirm] [] +``` + +## DESCRIPTION +Updates a workspace. + +## EXAMPLES + +### Example 1: Updates the tags of a Databricks workspace. +```powershell +Get-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t1 | Update-AzDatabricksWorkspace -Tag @{"key"="value"} +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t1 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t1 +``` + +This command updates the tags of a Databricks workspace. + +### Example 2: Enable encryption on a Databricks workspace. 
+
+```powershell
+Update-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t2 -PrepareEncryption
+$updWsp = Get-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t2
+Set-AzKeyVaultAccessPolicy -VaultName azps-keyvault -ObjectId $updWsp.StorageAccountIdentityPrincipalId -PermissionsToKeys wrapkey,unwrapkey,get
+Update-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t2 -EncryptionKeySource 'Microsoft.KeyVault' -EncryptionKeyVaultUri https://azps-keyvault.vault.azure.net/ -EncryptionKeyName azps-k1 -EncryptionKeyVersion a563a8021cba47109d93bd6d690621a7
+```
+
+```output
+Name ResourceGroupName Location Managed Resource Group ID
+---- ----------------- -------- -------------------------
+azps-databricks-workspace-t2 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t2
+```
+
+Enabling encryption on a Databricks workspace takes five steps:
+1.Please make sure that KeyVault has Purge protection enabled.
+2.Update the workspace with `-PrepareEncryption` (if it was not created with it).
+3.Find `StorageAccountIdentityPrincipalId` in the output of the last step and grant key permissions to the principal.
+4.Update the workspace again to fill in information about the encryption key:
+ - `-EncryptionKeySource`
+ - `-EncryptionKeyVaultUri`
+ - `-EncryptionKeyName`
+ - `-EncryptionKeyVersion`
+5.Important! Please read the information in the following document in detail: https://learn.microsoft.com/en-us/azure/databricks/security/keys/cmk-managed-services-azure/customer-managed-key-managed-services-azure?WT.mc_id=Portal-Microsoft_Azure_Databricks#--use-the-azure-portal
+
+### Example 3: Disable encryption on a Databricks workspace.
+```powershell +Update-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t3 -EncryptionKeySource 'Default' +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t3 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t3 +``` + +To disable encryption, simply set `-EncryptionKeySource` to `'Default'`. + +### Example 4: Update NsgRule of the Databricks workspace. +```powershell +Update-AzDatabricksWorkspace -ResourceGroupName azps_test_gp_db -Name azps-databricks-workspace-t2 -RequiredNsgRule 'AllRules' +``` + +```output +Name ResourceGroupName Location Managed Resource Group ID +---- ----------------- -------- ------------------------- +azps-databricks-workspace-t2 azps_test_gp_db eastus /subscriptions/{subId}/resourceGroups/azps_test_gp_kv_t2 +``` + +This command updates NsgRule of the Databricks workspace. + +## PARAMETERS + +### -AccessConnectorId +The resource ID of Azure Databricks Access Connector Resource. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AccessConnectorIdentityType +The identity type of the Access Connector Resource. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.IdentityType +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AccessConnectorUserAssignedIdentityId +The resource ID of the User Assigned Identity associated with the Access Connector Resource. +This is required for type 'UserAssigned' and not valid for type 'SystemAssigned'. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AmlWorkspaceId +The value which should be used for this field. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AsJob +Run the command as a job + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Authorization +The workspace provider authorizations. +To construct, see NOTES section for AUTHORIZATION properties and create a hash table. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IWorkspaceProviderAuthorization[] +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -AutomaticClusterUpdate +Status of automated cluster updates feature. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.AutomaticClusterUpdateValue +Parameter Sets: (All) +Aliases: AutomaticClusterUpdateValue + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ComplianceStandard +Compliance standards associated with the workspace. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.ComplianceStandard[] +Parameter Sets: (All) +Aliases: ComplianceSecurityProfileComplianceStandard + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultCatalogInitialType +Defines the initial type of the default catalog. 
+Possible values (case-insensitive): HiveMetastore, UnityCatalog + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.InitialType +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultProfile +The credentials, account, tenant, and subscription used for communication with Azure. + +```yaml +Type: System.Management.Automation.PSObject +Parameter Sets: (All) +Aliases: AzureRMContext, AzureCredential + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -DefaultStorageFirewall +Gets or Sets Default Storage Firewall configuration information + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.DefaultStorageFirewall +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EnableNoPublicIP +The value which should be used for this field. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeyName +The name of Key Vault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeySource +The encryption keySource (provider). 
+Possible values (case-insensitive): Default, Microsoft.Keyvault + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.KeySource +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeyVaultUri +The URI (DNS name) of the Key Vault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EncryptionKeyVersion +The version of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EnhancedSecurityCompliance +Status of Compliance Security Profile feature. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.ComplianceSecurityProfileValue +Parameter Sets: (All) +Aliases: ComplianceSecurityProfileValue + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -EnhancedSecurityMonitoring +Status of Enhanced Security Monitoring feature. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.EnhancedSecurityMonitoringValue +Parameter Sets: (All) +Aliases: EnhancedSecurityMonitoringValue + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -InputObject +Identity parameter. +To construct, see NOTES section for INPUTOBJECT properties and create a hash table. 
+ +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity +Parameter Sets: UpdateViaIdentityExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: True (ByValue) +Accept wildcard characters: False +``` + +### -KeyVaultKeyName +The name of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -KeyVaultKeyVersion +The version of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -KeyVaultUri +The Uri of KeyVault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeySource +The encryption keySource (provider). +Possible values (case-insensitive): Microsoft.Keyvault + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.EncryptionKeySource +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeyVaultPropertiesKeyName +The name of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeyVaultPropertiesKeyVaultUri +The URI of KeyVault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskKeyVaultPropertiesKeyVersion +The version of KeyVault key. 
+ +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedDiskRotationToLatestKeyVersionEnabled +Indicate whether the latest key version should be automatically used for Managed Disk Encryption. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServiceKeySource +The encryption keySource (provider). +Possible values (case-insensitive): Microsoft.Keyvault + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.EncryptionKeySource +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServicesKeyVaultPropertiesKeyName +The name of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServicesKeyVaultPropertiesKeyVaultUri +The Uri of KeyVault. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ManagedServicesKeyVaultPropertiesKeyVersion +The version of KeyVault key. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Name +The name of the workspace. 
+ +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: WorkspaceName + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -NoWait +Run the command asynchronously + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PrepareEncryption +Prepare the workspace for encryption. +Enables the Managed Identity for managed storage account. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -PublicNetworkAccess +The network access type for accessing workspace. +Set value to disabled to access workspace only via private link. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.PublicNetworkAccess +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -RequiredNsgRule +Gets or sets a value indicating whether data plane (clusters) to control plane communication happen over private endpoint. +Supported values are 'AllRules' and 'NoAzureDatabricksRules'. +'NoAzureServiceRules' value is for internal use only. + +```yaml +Type: Microsoft.Azure.PowerShell.Cmdlets.Databricks.Support.RequiredNsgRules +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -ResourceGroupName +The name of the resource group. +The name is case insensitive. 
+ +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: True +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SkuTier +The SKU tier. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -SubscriptionId +The ID of the target subscription. + +```yaml +Type: System.String +Parameter Sets: UpdateExpanded +Aliases: + +Required: False +Position: Named +Default value: (Get-AzContext).Subscription.Id +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Tag +Resource tags. + +```yaml +Type: System.Collections.Hashtable +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -UiDefinitionUri +The blob URI where the UI definition file is located. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -Confirm +Prompts you for confirmation before running the cmdlet. + +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: cf + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### -WhatIf +Shows what would happen if the cmdlet runs. +The cmdlet is not run. 
+ +```yaml +Type: System.Management.Automation.SwitchParameter +Parameter Sets: (All) +Aliases: wi + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + +### CommonParameters +This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216). + +## INPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.IDatabricksIdentity + +## OUTPUTS + +### Microsoft.Azure.PowerShell.Cmdlets.Databricks.Models.Api20240501.IWorkspace + +## NOTES + +## RELATED LINKS diff --git a/tools/Mcp/src/ideal-modules/Databricks/metadata.md b/tools/Mcp/src/ideal-modules/Databricks/metadata.md new file mode 100644 index 000000000000..51eb8e7928cb --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/metadata.md @@ -0,0 +1,57 @@ +--- +Module Name: Az.Databricks +Module Guid: fd603f36-03d8-47f4-9f7c-c13a78761936 +Download Help Link: https://learn.microsoft.com/powershell/module/az.databricks +Help Version: 1.0.0.0 +Locale: en-US +--- + +# Az.Databricks Module +## Description +Microsoft Azure PowerShell: Databricks cmdlets + +## Az.Databricks Cmdlets +### [Get-AzDatabricksAccessConnector](Get-AzDatabricksAccessConnector.md) +Gets an Azure Databricks Access Connector. + +### [Get-AzDatabricksOutboundNetworkDependenciesEndpoint](Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md) +Gets the list of endpoints that VNET Injected Workspace calls Azure Databricks Control Plane. +You must configure outbound access with these endpoints. +For more information, see https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/udr + +### [Get-AzDatabricksVNetPeering](Get-AzDatabricksVNetPeering.md) +Gets the workspace vNet Peering. 
+ +### [Get-AzDatabricksWorkspace](Get-AzDatabricksWorkspace.md) +Gets the workspace. + +### [New-AzDatabricksAccessConnector](New-AzDatabricksAccessConnector.md) +Creates or updates Azure Databricks Access Connector. + +### [New-AzDatabricksVNetPeering](New-AzDatabricksVNetPeering.md) +Creates vNet Peering for workspace. + +### [New-AzDatabricksWorkspace](New-AzDatabricksWorkspace.md) +Creates a new workspace. + +### [New-AzDatabricksWorkspaceProviderAuthorizationObject](New-AzDatabricksWorkspaceProviderAuthorizationObject.md) +Create an in-memory object for WorkspaceProviderAuthorization. + +### [Remove-AzDatabricksAccessConnector](Remove-AzDatabricksAccessConnector.md) +Deletes the Azure Databricks Access Connector. + +### [Remove-AzDatabricksVNetPeering](Remove-AzDatabricksVNetPeering.md) +Deletes the workspace vNetPeering. + +### [Remove-AzDatabricksWorkspace](Remove-AzDatabricksWorkspace.md) +Deletes the workspace. + +### [Update-AzDatabricksAccessConnector](Update-AzDatabricksAccessConnector.md) +Updates an Azure Databricks Access Connector. + +### [Update-AzDatabricksVNetPeering](Update-AzDatabricksVNetPeering.md) +Update vNet Peering for workspace. + +### [Update-AzDatabricksWorkspace](Update-AzDatabricksWorkspace.md) +Updates a workspace. + diff --git a/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksAccessConnector.Tests.ps1 b/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksAccessConnector.Tests.ps1 new file mode 100644 index 000000000000..8b04726171bd --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksAccessConnector.Tests.ps1 @@ -0,0 +1,65 @@ +if (($null -eq $TestName) -or ($TestName -contains 'AzDatabricksAccessConnector')) { + $loadEnvPath = Join-Path $PSScriptRoot 'loadEnv.ps1' + if (-Not (Test-Path -Path $loadEnvPath)) { + $loadEnvPath = Join-Path $PSScriptRoot '..\loadEnv.ps1' + } + . 
($loadEnvPath) + $TestRecordingFile = Join-Path $PSScriptRoot 'AzDatabricksAccessConnector.Recording.json' + $currentPath = $PSScriptRoot + while (-not $mockingPath) { + $mockingPath = Get-ChildItem -Path $currentPath -Recurse -Include 'HttpPipelineMocking.ps1' -File + $currentPath = Split-Path -Path $currentPath -Parent + } + . ($mockingPath | Select-Object -First 1).FullName +} + +Describe 'AzDatabricksAccessConnector' { + It 'CreateExpanded' { + { + $config = New-AzDatabricksAccessConnector -ResourceGroupName $env.resourceGroup -Name $env.accessConnectorName1 -Location $env.location -IdentityType 'SystemAssigned' + $config.Name | Should -Be $env.accessConnectorName1 + } | Should -Not -Throw + } + + It 'List1' { + { + $config = Get-AzDatabricksAccessConnector -ResourceGroupName $env.resourceGroup + $config.Count | Should -BeGreaterThan 0 + } | Should -Not -Throw + } + + It 'Get' { + { + $config = Get-AzDatabricksAccessConnector -ResourceGroupName $env.resourceGroup -Name $env.accessConnectorName1 + $config.Name | Should -Be $env.accessConnectorName1 + } | Should -Not -Throw + } + + It 'List' { + { + $config = Get-AzDatabricksAccessConnector + $config.Count | Should -BeGreaterThan 0 + } | Should -Not -Throw + } + + It 'UpdateExpanded' { + { + $config = Update-AzDatabricksAccessConnector -ResourceGroupName $env.resourceGroup -Name $env.accessConnectorName1 -Tag @{'key' = 'value' } + $config.Name | Should -Be $env.accessConnectorName1 + } | Should -Not -Throw + } + + It 'UpdateViaIdentityExpanded' { + { + $config = Get-AzDatabricksAccessConnector -ResourceGroupName $env.resourceGroup -Name $env.accessConnectorName1 + $config = Update-AzDatabricksAccessConnector -InputObject $config -Tag @{'key' = 'value' } + $config.Name | Should -Be $env.accessConnectorName1 + } | Should -Not -Throw + } + + It 'Delete' { + { + Remove-AzDatabricksAccessConnector -ResourceGroupName $env.resourceGroup -Name $env.accessConnectorName1 + } | Should -Not -Throw + } +} \ No newline at end 
of file diff --git a/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksVNetPeering.Tests.ps1 b/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksVNetPeering.Tests.ps1 new file mode 100644 index 000000000000..d23158b163bc --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksVNetPeering.Tests.ps1 @@ -0,0 +1,58 @@ +if (($null -eq $TestName) -or ($TestName -contains 'AzDatabricksVNetPeering')) { + $loadEnvPath = Join-Path $PSScriptRoot 'loadEnv.ps1' + if (-Not (Test-Path -Path $loadEnvPath)) { + $loadEnvPath = Join-Path $PSScriptRoot '..\loadEnv.ps1' + } + . ($loadEnvPath) + $TestRecordingFile = Join-Path $PSScriptRoot 'AzDatabricksVNetPeering.Recording.json' + $currentPath = $PSScriptRoot + while (-not $mockingPath) { + $mockingPath = Get-ChildItem -Path $currentPath -Recurse -Include 'HttpPipelineMocking.ps1' -File + $currentPath = Split-Path -Path $currentPath -Parent + } + . ($mockingPath | Select-Object -First 1).FullName +} + +Describe 'AzDatabricksVNetPeering' { + It 'CreateExpanded' { + { + $config = New-AzDatabricksVNetPeering -Name $env.vNetName1 -WorkspaceName $env.workSpaceName3 -ResourceGroupName $env.resourceGroup -RemoteVirtualNetworkId "/subscriptions/$($env.SubscriptionId)/resourceGroups/$($env.resourceGroup)/providers/Microsoft.Network/virtualNetworks/$($env.vNetName)" + $config.Name | Should -Be $env.vNetName1 + } | Should -Not -Throw + } + + It 'List' { + { + $config = Get-AzDatabricksVNetPeering -WorkspaceName $env.workSpaceName3 -ResourceGroupName $env.resourceGroup + $config.Count | Should -BeGreaterThan 0 + } | Should -Not -Throw + } + + It 'Get' -Skip { + { + $config = Get-AzDatabricksVNetPeering -WorkspaceName $env.workSpaceName3 -ResourceGroupName $env.resourceGroup -Name $env.vNetName1 + $config.Name | Should -Be $env.vNetName1 + } | Should -Not -Throw + } + + It 'UpdateExpanded' -Skip { + { + $config = Update-AzDatabricksVNetPeering -WorkspaceName $env.workSpaceName3 -ResourceGroupName 
$env.resourceGroup -Name $env.vNetName1 -AllowForwardedTraffic $True + $config.Name | Should -Be $env.vNetName1 + } | Should -Not -Throw + } + + It 'UpdateViaIdentityExpanded' -Skip { + { + $config = Get-AzDatabricksVNetPeering -WorkspaceName $env.workSpaceName3 -ResourceGroupName $env.resourceGroup -Name $env.vNetName1 + $config = Update-AzDatabricksVNetPeering -InputObject $config -AllowForwardedTraffic $True + $config.Name | Should -Be $env.vNetName1 + } | Should -Not -Throw + } + + It 'Delete' { + { + Remove-AzDatabricksVNetPeering -WorkspaceName $env.workSpaceName3 -ResourceGroupName $env.resourceGroup -Name $env.vNetName1 + } | Should -Not -Throw + } +} \ No newline at end of file diff --git a/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksWorkspace.Tests.ps1 b/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksWorkspace.Tests.ps1 new file mode 100644 index 000000000000..62806d2d23b2 --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksWorkspace.Tests.ps1 @@ -0,0 +1,86 @@ +if (($null -eq $TestName) -or ($TestName -contains 'AzDatabricksWorkspace')) { + $loadEnvPath = Join-Path $PSScriptRoot 'loadEnv.ps1' + if (-Not (Test-Path -Path $loadEnvPath)) { + $loadEnvPath = Join-Path $PSScriptRoot '..\loadEnv.ps1' + } + . ($loadEnvPath) + $TestRecordingFile = Join-Path $PSScriptRoot 'AzDatabricksWorkspace.Recording.json' + $currentPath = $PSScriptRoot + while (-not $mockingPath) { + $mockingPath = Get-ChildItem -Path $currentPath -Recurse -Include 'HttpPipelineMocking.ps1' -File + $currentPath = Split-Path -Path $currentPath -Parent + } + . 
($mockingPath | Select-Object -First 1).FullName +} + +Describe 'AzDatabricksWorkspace' { + It 'CreateExpanded' { + { + $config = New-AzDatabricksWorkspace -Name $env.workSpaceName2 -ResourceGroupName $env.resourceGroup -Location $env.location -Sku premium + $config.Name | Should -Be $env.workSpaceName2 + } | Should -Not -Throw + } + + It 'List' { + { + $config = Get-AzDatabricksWorkspace -ResourceGroupName $env.resourceGroup + $config.Count | Should -BeGreaterThan 0 + } | Should -Not -Throw + } + + It 'List1' { + { + $config = Get-AzDatabricksWorkspace + $config.Count | Should -BeGreaterThan 0 + } | Should -Not -Throw + } + + It 'Get' { + { + $config = Get-AzDatabricksWorkspace -Name $env.workSpaceName2 -ResourceGroupName $env.resourceGroup + $config.Name | Should -Be $env.workSpaceName2 + } | Should -Not -Throw + } + + It 'OutboundNetworkDependenciesEndpointList' { + { + $config = Get-AzDatabricksOutboundNetworkDependenciesEndpoint -WorkspaceName $env.workSpaceName1 -ResourceGroupName $env.resourceGroup + $config.Count | Should -BeGreaterThan 0 + } | Should -Not -Throw + } + + It 'UpdateExpanded' { + { + $config = Update-AzDatabricksWorkspace -Name $env.workSpaceName2 -ResourceGroupName $env.resourceGroup -Tag @{"key" = "value" } + $config.Name | Should -Be $env.workSpaceName2 + } | Should -Not -Throw + } + + It 'UpdateViaIdentityExpanded' { + { + $config = Get-AzDatabricksWorkspace -Name $env.workSpaceName2 -ResourceGroupName $env.resourceGroup + $config = Update-AzDatabricksWorkspace -InputObject $config -Tag @{"key" = "value" } + $config.Name | Should -Be $env.workSpaceName2 + } | Should -Not -Throw + } + + It 'UpdateRequiredNsgRule-EnableNoPublicIP-PublicNetworkAccess' { + { + $config = Update-AzDatabricksWorkspace -Name $env.workSpaceName1 -ResourceGroupName $env.resourceGroup -RequiredNsgRule 'AllRules' -EnableNoPublicIP:$false -PublicNetworkAccess 'Enabled' -Tag @{"key" = "value" } + $config.RequiredNsgRule | Should -Be 'AllRules' + 
$config.EnableNoPublicIP | Should -Be 'false' + $config.PublicNetworkAccess | Should -Be 'Enabled' + + $config = Update-AzDatabricksWorkspace -Name $env.workSpaceName1 -ResourceGroupName $env.resourceGroup -RequiredNsgRule 'NoAzureDatabricksRules' -EnableNoPublicIP:$true -PublicNetworkAccess 'Disabled' + $config.RequiredNsgRule | Should -Be 'NoAzureDatabricksRules' + $config.EnableNoPublicIP | Should -Be 'true' + $config.PublicNetworkAccess | Should -Be 'Disabled' + } | Should -Not -Throw + } + + It 'Delete' { + { + Remove-AzDatabricksWorkspace -Name $env.workSpaceName2 -ResourceGroupName $env.resourceGroup + } | Should -Not -Throw + } +} \ No newline at end of file diff --git a/tools/Mcp/src/ideal-modules/Databricks/tests/utils.ps1 b/tools/Mcp/src/ideal-modules/Databricks/tests/utils.ps1 new file mode 100644 index 000000000000..0f321ab21a7c --- /dev/null +++ b/tools/Mcp/src/ideal-modules/Databricks/tests/utils.ps1 @@ -0,0 +1,114 @@ +function RandomString([bool]$allChars, [int32]$len) { + if ($allChars) { + return -join ((33..126) | Get-Random -Count $len | ForEach-Object {[char]$_}) + } else { + return -join ((48..57) + (97..122) | Get-Random -Count $len | ForEach-Object {[char]$_}) + } +} +function Start-TestSleep { + [CmdletBinding(DefaultParameterSetName = 'SleepBySeconds')] + param( + [parameter(Mandatory = $true, Position = 0, ParameterSetName = 'SleepBySeconds')] + [ValidateRange(0.0, 2147483.0)] + [double] $Seconds, + + [parameter(Mandatory = $true, ParameterSetName = 'SleepByMilliseconds')] + [ValidateRange('NonNegative')] + [Alias('ms')] + [int] $Milliseconds + ) + + if ($TestMode -ne 'playback') { + switch ($PSCmdlet.ParameterSetName) { + 'SleepBySeconds' { + Start-Sleep -Seconds $Seconds + } + 'SleepByMilliseconds' { + Start-Sleep -Milliseconds $Milliseconds + } + } + } +} + +$env = @{} +if ($UsePreviousConfigForRecord) { + $previousEnv = Get-Content (Join-Path $PSScriptRoot 'env.json') | ConvertFrom-Json + $previousEnv.psobject.properties | Foreach-Object { $env[$_.Name] =
$_.Value } +} +# Add script method called AddWithCache to $env, when useCache is set true, it will try to get the value from the $env first. +# example: $val = $env.AddWithCache('key', $val, $true) +$env | Add-Member -Type ScriptMethod -Value { param( [string]$key, [object]$val, [bool]$useCache) if ($this.Contains($key) -and $useCache) { return $this[$key] } else { $this[$key] = $val; return $val } } -Name 'AddWithCache' +function setupEnv() { + # Preload subscriptionId and tenant from context, which will be used in test + # as default. You could change them if needed. + $env.SubscriptionId = (Get-AzContext).Subscription.Id + $env.Tenant = (Get-AzContext).Tenant.Id + + $workSpaceName1 = RandomString -allChars $false -len 6 + $workSpaceName2 = RandomString -allChars $false -len 6 + $workSpaceName3 = RandomString -allChars $false -len 6 + $vNetName1 = RandomString -allChars $false -len 6 + $accessConnectorName1 = RandomString -allChars $false -len 6 + + $env.Add("workSpaceName1", $workSpaceName1) + $env.Add("workSpaceName2", $workSpaceName2) + $env.Add("workSpaceName3", $workSpaceName3) + $env.Add("vNetName1", $vNetName1) + $env.Add("accessConnectorName1", $accessConnectorName1) + + $networkSecurityRuleName = RandomString -allChars $false -len 6 + $networkSecurityGroupName = RandomString -allChars $false -len 6 + $vNetSubnetName1 = RandomString -allChars $false -len 6 + $vNetSubnetName2 = RandomString -allChars $false -len 6 + $vNetSubnetName3 = RandomString -allChars $false -len 6 + $vNetName = RandomString -allChars $false -len 6 + $keyVaultName = "azps" + (RandomString -allChars $false -len 6) + + $env.Add("networkSecurityRuleName", $networkSecurityRuleName) + $env.Add("networkSecurityGroupName", $networkSecurityGroupName) + $env.Add("vNetSubnetName1", $vNetSubnetName1) + $env.Add("vNetSubnetName2", $vNetSubnetName2) + $env.Add("vNetSubnetName3", $vNetSubnetName3) + $env.Add("vNetName", $vNetName) + $env.Add("keyVaultName", $keyVaultName) + + write-host "start to 
create test group" + $env.Add("location", "eastus") + $resourceGroup = "auto-test-databricks-" + (RandomString -allChars $false -len 2) + $env.Add("resourceGroup", $resourceGroup) + New-AzResourceGroup -Name $env.resourceGroup -Location $env.location + + $dlg = New-AzDelegation -Name dbrdl -ServiceName "Microsoft.Databricks/workspaces" + + write-host "start to create NetworkSecurity env" + $rdpRule = New-AzNetworkSecurityRuleConfig -Name $env.networkSecurityRuleName -Description "Allow RDP" -Access Allow -Protocol Tcp -Direction Inbound -Priority 100 -SourceAddressPrefix Internet -SourcePortRange * -DestinationAddressPrefix * -DestinationPortRange 3389 + $networkSecurityGroup = New-AzNetworkSecurityGroup -ResourceGroupName $env.resourceGroup -Location $env.location -Name $env.networkSecurityGroupName -SecurityRules $rdpRule + $kvSubnet = New-AzVirtualNetworkSubnetConfig -Name $env.vNetSubnetName1 -AddressPrefix "110.0.1.0/24" -ServiceEndpoint "Microsoft.KeyVault" + $priSubnet = New-AzVirtualNetworkSubnetConfig -Name $env.vNetSubnetName2 -AddressPrefix "110.0.2.0/24" -NetworkSecurityGroup $networkSecurityGroup -Delegation $dlg + $pubSubnet = New-AzVirtualNetworkSubnetConfig -Name $env.vNetSubnetName3 -AddressPrefix "110.0.3.0/24" -NetworkSecurityGroup $networkSecurityGroup -Delegation $dlg + + write-host "start to create VirtualNetwork env" + $testVN = New-AzVirtualNetwork -Name $env.vNetName -ResourceGroupName $env.resourceGroup -Location $env.location -AddressPrefix "110.0.0.0/16" -Subnet $kvSubnet,$priSubnet,$pubSubnet + $vNetResId = (Get-AzVirtualNetwork -Name $env.vNetName -ResourceGroupName $env.resourceGroup).Subnets[0].Id + $ruleSet = New-AzKeyVaultNetworkRuleSetObject -DefaultAction Allow -Bypass AzureServices -IpAddressRange "110.0.1.0/24" -VirtualNetworkResourceId $vNetResId + + write-host "start to create KeyVault env" + New-AzKeyVault -ResourceGroupName $env.resourceGroup -VaultName $env.keyVaultName -NetworkRuleSet $ruleSet -Location $env.location -Sku 
'Premium' -EnablePurgeProtection + + write-host "start to create Databricks(have vNet) env" + New-AzDatabricksWorkspace -Name $env.workSpaceName1 -ResourceGroupName $env.resourceGroup -Location $env.location -VirtualNetworkId $testVN.Id -PrivateSubnetName $priSubnet.Name -PublicSubnetName $pubSubnet.Name -Sku Premium + + write-host "start to create Databricks env" + New-AzDatabricksWorkspace -Name $env.workSpaceName3 -ResourceGroupName $env.resourceGroup -Location $env.location -Sku premium + + # For any resources you created for test, you should add it to $env here. + $envFile = 'env.json' + if ($TestMode -eq 'live') { + $envFile = 'localEnv.json' + } + set-content -Path (Join-Path $PSScriptRoot $envFile) -Value (ConvertTo-Json $env) +} +function cleanupEnv() { + # Clean resources you create for testing + # Remove-AzResourceGroup -Name $env.resourceGroup +} diff --git a/tools/Mcp/src/services/resourcesService.ts b/tools/Mcp/src/services/resourcesService.ts index e69de29bb2d1..89c8e433e08c 100644 --- a/tools/Mcp/src/services/resourcesService.ts +++ b/tools/Mcp/src/services/resourcesService.ts @@ -0,0 +1,60 @@ +import { z, ZodRawShape } from "zod"; +import { resourceSchema } from "../types.js"; +import { CodegenServer } from "../CodegenServer.js"; + +export class ResourcesService { + private static _instance: ResourcesService; + private _server: CodegenServer | null = null; + private constructor() {} + + static getInstance(): ResourcesService { + if (!ResourcesService._instance) { + ResourcesService._instance = new ResourcesService(); + } + return ResourcesService._instance; + } + + setServer(server: CodegenServer): ResourcesService { + this._server = server; + return this; + } + + getResources(name: string, responseTemplate: string | undefined) { + let func; + switch (name) { + case "autorestReadmeTemplate": + func = this.autorestReadmeTemplate; + break; + default: + throw new Error(`Resource ${name} not found`); + } + return this.constructCallback(func, 
responseTemplate); + } + + constructCallback(fn: (arr: Args) => Promise, responseTemplate: string | undefined) { + return async (args: Args) => { + const content = await fn(args); + return { + contents: [ + { + uri: `resource://template`, + mimeType: "text/plain", + text: content + } + ] + }; + }; + } + + createResourceParametersFromSchema(schemas: any[]) { + // Resources typically don't have parameters in MCP, but keeping for consistency + const parameter: { [k: string]: any } = {}; + return parameter; + } + + autorestReadmeTemplate = async (args: Args): Promise => { + const template = this._server?.getResponseTemplate('autorest-readme-template'); + return template || "Template Not Found!"; + }; + +} \ No newline at end of file diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index d7d2b13b9dfa..3701c7777d2c 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -48,17 +48,8 @@ export class ToolsService { case "createTestsFromSpecs": func = this.createTestsFromSpecs; break; - case "listSpecModules": - func = this.toolListSpecModules; - break; - case "listProvidersForService": - func = this.toolListProvidersForService; - break; - case "listApiVersions": - func = this.toolListApiVersions; - break; - case "resolveAutorestInputs": - func = this.toolResolveAutorestInputs; + case "setupModuleStructure": + func = this.setupModuleStructure; break; default: throw new Error(`Tool ${name} not found`); @@ -186,31 +177,167 @@ export class ToolsService { return [exampleSpecsPath, testPath]; } - toolListSpecModules = async (_args: Args): Promise => { - const modules = await listSpecModules(); - return [JSON.stringify(modules)]; - } + setupModuleStructure = async (args: Args): Promise => { + try { + // List available services with dropdown + const modules = await listSpecModules(); + const serviceResponse = await this._server!.elicitInput({ + message: `Select an Azure service from the 
dropdown below:`, + requestedSchema: { + type: "object", + properties: { + service: { + type: "string", + description: "Select a service from the dropdown", + enum: modules + } + }, + required: ["service"] + } + }); - toolListProvidersForService = async (args: Args): Promise => { - const service = z.string().parse(Object.values(args)[0]); - const providers = await listProvidersForService(service); - return [service, JSON.stringify(providers)]; - } + const selectedService = serviceResponse.content?.service as string; + if (!selectedService) { + throw new Error("No service selected"); + } - toolListApiVersions = async (args: Args): Promise => { - const service = z.string().parse(Object.values(args)[0]); - const provider = z.string().parse(Object.values(args)[1]); - const res = await listApiVersions(service, provider); - return [service, provider, JSON.stringify(res.stable), JSON.stringify(res.preview)]; - } + // List providers for the selected service with dropdown + const providers = await listProvidersForService(selectedService); + if (providers.length === 0) { + throw new Error(`No providers found for service '${selectedService}'`); + } + + const providerResponse = await this._server!.elicitInput({ + message: `Select a provider for ${selectedService} from the dropdown below:`, + requestedSchema: { + type: "object", + properties: { + provider: { + type: "string", + description: "Select a provider from the dropdown", + enum: providers + } + }, + required: ["provider"] + } + }); + + const selectedProvider = providerResponse.content?.provider as string; + if (!selectedProvider) { + throw new Error("No provider selected"); + } + + // List API versions with dropdown combining version and stability + const apiVersions = await listApiVersions(selectedService, selectedProvider); + const allVersions = [ + ...apiVersions.stable.map(v => ({ version: v, stability: 'stable' as const })), + ...apiVersions.preview.map(v => ({ version: v, stability: 'preview' as const })) + ]; + + 
if (allVersions.length === 0) { + throw new Error(`No API versions found for ${selectedService}/${selectedProvider}`); + } + + const versionOptions = allVersions.map(v => `${v.version} (${v.stability})`); + + const versionResponse = await this._server!.elicitInput({ + message: `Select an API version for ${selectedService}/${selectedProvider} from the dropdown below:`, + requestedSchema: { + type: "object", + properties: { + versionWithStability: { + type: "string", + description: "Select an API version with stability level", + enum: versionOptions + } + }, + required: ["versionWithStability"] + } + }); + + const selectedVersionWithStability = versionResponse.content?.versionWithStability as string; + if (!selectedVersionWithStability) { + throw new Error("Version not selected"); + } + + const versionMatch = selectedVersionWithStability.match(/^(.+) \((stable|preview)\)$/); + if (!versionMatch) { + throw new Error("Invalid version format selected"); + } + + const selectedVersion = versionMatch[1]; + const selectedStability = versionMatch[2] as 'stable' | 'preview'; - toolResolveAutorestInputs = async (args: Args): Promise => { - const service = z.string().parse(Object.values(args)[0]); - const provider = z.string().parse(Object.values(args)[1]); - const stability = z.enum(['stable','preview']).parse(Object.values(args)[2]); - const version = z.string().parse(Object.values(args)[3]); - const swaggerPath = Object.values(args)[4] ? 
z.string().parse(Object.values(args)[4]) : undefined; - const resolved = await resolveAutorestInputs({ service, provider, stability, version, swaggerPath }); - return [resolved.serviceName, resolved.commitId, resolved.serviceSpecs, resolved.swaggerFileSpecs]; + // Resolve Readme placeholder values based on Responses + const resolved = await resolveAutorestInputs({ + service: selectedService, + provider: selectedProvider, + stability: selectedStability, + version: selectedVersion + }); + + const moduleNameResponse = await this._server!.elicitInput({ + message: `Configuration resolved:\n- Service: ${selectedService}\n- Provider: ${selectedProvider}\n- Version: ${selectedVersion} (${selectedStability})\n- Service Name: ${resolved.serviceName}\n- Commit ID: ${resolved.commitId}\n- Service Specs: ${resolved.serviceSpecs}\n- Swagger File: ${resolved.swaggerFileSpecs}`, + requestedSchema: { + type: "object", + properties: { + moduleName: { + type: "string", + description: "Enter the PowerShell module name (e.g., 'HybridConnectivity')" + } + }, + required: ["moduleName"] + } + }); + + const moduleName = moduleNameResponse.content?.moduleName as string; + if (!moduleName) { + throw new Error("No module name provided"); + } + + // Create folder structure and README.md + const mcpPath = process.cwd(); // Current working directory is tools/Mcp + const azurePowerShellRoot = path.resolve(mcpPath, '..', '..'); // Go up two levels to azure-powershell root + const srcPath = path.join(azurePowerShellRoot, 'src'); + const modulePath = path.join(srcPath, moduleName); + const autorestPath = path.join(modulePath, `${moduleName}.Autorest`); + const readmePath = path.join(autorestPath, 'README.md'); + + await utils.createDirectoryIfNotExists(modulePath); + await utils.createDirectoryIfNotExists(autorestPath); + + let readmeContent = this._server!.getResponseTemplate('autorest-readme-template'); + if (!readmeContent) { + throw new Error('README template not found in server responses'); + } 
+ + // Replace placeholders + readmeContent = readmeContent + .replace('{commitId}', resolved.commitId) + .replace('{serviceSpecs}', resolved.serviceSpecs) + .replace(/\{serviceSpecs\}/g, resolved.serviceSpecs) + .replace('{swaggerFileSpecs}', resolved.swaggerFileSpecs) + .replace(/\{moduleName\}/g, moduleName); + + // Write README.md file + await utils.writeFileIfNotExists(readmePath, readmeContent); + + return [ + selectedService, + selectedProvider, + selectedVersion, + selectedStability, + resolved.serviceName, + resolved.commitId, + resolved.serviceSpecs, + resolved.swaggerFileSpecs, + moduleName, + autorestPath + ]; + + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error); + return [`Error during setup: ${errorMessage}`]; + } } } \ No newline at end of file diff --git a/tools/Mcp/src/services/utils.ts b/tools/Mcp/src/services/utils.ts index 4c14c7133138..4627b9ad0d10 100644 --- a/tools/Mcp/src/services/utils.ts +++ b/tools/Mcp/src/services/utils.ts @@ -364,6 +364,32 @@ export function unflattenJsonObject(keyValuePairs: Array<{ key: string; value: a return result; } +export async function createDirectoryIfNotExists(dirPath: string): Promise { + try { + if (!fs.existsSync(dirPath)) { + fs.mkdirSync(dirPath, { recursive: true }); + console.log(`Created directory: ${dirPath}`); + } + } catch (error) { + console.error(`Error creating directory ${dirPath}:`, error); + throw error; + } +} + +export async function writeFileIfNotExists(filePath: string, content: string): Promise { + try { + if (!fs.existsSync(filePath)) { + fs.writeFileSync(filePath, content, 'utf8'); + console.log(`Created file: ${filePath}`); + } else { + console.log(`File already exists: ${filePath}`); + } + } catch (error) { + console.error(`Error writing file ${filePath}:`, error); + throw error; + } +} + diff --git a/tools/Mcp/src/specs/autorest-readme-template.md b/tools/Mcp/src/specs/autorest-readme-template.md new file mode 100644 index 
000000000000..d24e3d1fb91d --- /dev/null +++ b/tools/Mcp/src/specs/autorest-readme-template.md @@ -0,0 +1,63 @@ + +# Az.{moduleName} +This directory contains the PowerShell module for the {moduleName} service. + +--- +## Info +- Modifiable: yes +- Generated: all +- Committed: yes +- Packaged: yes + +--- +## Detail +This module was primarily generated via [AutoRest](https://github.com/Azure/autorest) using the [PowerShell](https://github.com/Azure/autorest.powershell) extension. + +## Module Requirements +- [Az.Accounts module](https://www.powershellgallery.com/packages/Az.Accounts/), version 2.7.5 or greater + +## Authentication +AutoRest does not generate authentication code for the module. Authentication is handled via Az.Accounts by altering the HTTP payload before it is sent. + +## Development +For information on how to develop for `Az.{moduleName}`, see [how-to.md](how-to.md). + + +--- +### AutoRest Configuration +> see https://aka.ms/autorest + +```yaml + +commit: {commitId} + +require: + - $(this-folder)/../../readme.azure.noprofile.md + - $(repo)/specification/{serviceSpecs}/readme.md + +try-require: + - $(repo)/specification/{serviceSpecs}/readme.powershell.md + +input-file: + - $(repo)/{swaggerFileSpecs} + +module-version: 0.1.0 + +title: {moduleName} +service-name: {moduleName} +subject-prefix: $(service-name) + +directive: + + - where: + variant: ^(Create|Update)(?!.*?(Expanded|JsonFilePath|JsonString)) + remove: true + + - where: + variant: ^CreateViaIdentity$|^CreateViaIdentityExpanded$ + remove: true + + - where: + verb: Set + remove: true +``` diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index 255a7cfbf7bc..345afb5dc6d4 100644 --- a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -12,33 +12,21 @@ # Instructions -## Stage 1: Interactive spec selection and autorest resolution -- Ask the user for their 
desired **PowerShell module name** (e.g., "HybridConnectivity") -- Call the MCP tool "list-spec-modules" to fetch all available specification folders from azure-rest-api-specs/specification. -- From the full list, present 10 most relevant spec options to the user based on their PowerShell module name, or show a representative sample if no clear match. -- Ask the user to choose which specification they want to use from the presented options, or ask if they want to see more options. -- **Confirm the spec choice**: Once user selects a spec, ask them to confirm this is the correct specification for their needs (show the spec name clearly). -- Call the MCP tool "list-providers" with the chosen spec folder to retrieve available provider namespaces. -- Present the list of providers to the user: - - If multiple providers are returned, ask the user to pick one - - If only one provider exists, select it automatically but confirm with the user -- **Confirm the provider choice**: Ask the user to confirm this is the correct provider namespace. -- Call the MCP tool "list-api-versions" with the chosen spec folder and provider to get available versions, separated by Stable and Preview. -- Present the API version options to the user and ask them to choose: - 1. **Stability**: stable or preview - 2. **API version**: specific version from the available list -- **Confirm the API version choice**: Ask the user to confirm their stability and version selection. -- Call the MCP tool "resolve-autorest-inputs" with the chosen spec folder, provider, stability, and version to compute the 4 autorest inputs: serviceName, commitId, serviceSpecs, swaggerFileSpecs. -- Store the resolved values for later steps (README generation and Autorest). Mark Stage 1 complete. +## Stage 1: Interactive specification selection and autorest resolution +- Call the MCP tool "setupModuleStructure" with no parameters +- This tool will interactively guide you through: + 1. 
Selecting the specification from available azure-rest-api-specs + 2. Choosing the provider namespace + 3. Selecting the API version (stable or preview) + 4. Getting the module name from the user + 5. Automatically creating the module structure and README.md file +- The tool will create the folder structure under the correct src directory and generate the README.md with proper autorest configuration +- Mark Stage 1 complete once the setupModuleStructure tool finishes successfully ## Stage 2: Generating partner powershell module - FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. -- Navigate to the `src` folder in the home "azure-powershell" directory. -- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) -- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. -- Create a new file `README.md`. (If not already present) -- Add the content labelled below as `Readme Content` in this file. -- Use the "generate-autorest" mcp tool to generate the module. +- Navigate to the newly created module directory (should be under `src//.Autorest`) +- Use the "generate-autorest" mcp tool to generate the module using the README.md that was created by setupModuleStructure - Stage 2 Complete. ## Stage 3: Updating Example Files @@ -50,7 +38,7 @@ - Once all example files are updated, mark stage 3 as complete. ## Stage 4: Updating Test Files -- Use the "test-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. +- Use the "create-test" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. - Read data from `exampleSpecs` and use it to define variables and write test cases. - Define setup variables inside `setupEnv` in `utils.ps1`, inferred from `exampleSpecs`. - Use those variables in the actual test case content. @@ -64,43 +52,3 @@ - Use the same `workingDirectory` and make sure all directives and yaml configurations remain unchanged. 
- This is a mandatory finalization step before pushing to GitHub. - Do not skip this regeneration even if the module was generated earlier. - -# Readme Content - -### AutoRest Configuration -> see https://aka.ms/autorest - -```yaml - -commit: - -require: - - $(this-folder)/../../readme.azure.noprofile.md - - $(repo)/specification//readme.md - -try-require: - - $(repo)/specification//readme.powershell.md - -input-file: - - $(repo)/specification/ - -module-version: 0.1.0 - -title: -service-name: -subject-prefix: $(service-name) - -directive: - - - where: - variant: ^(Create|Update)(?!.*?(Expanded|JsonFilePath|JsonString)) - remove: true - - - where: - variant: ^CreateViaIdentity$|^CreateViaIdentityExpanded$ - remove: true - - - where: - verb: Set - remove: true -``` diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json index f2fd99e18987..ca1be795f75a 100644 --- a/tools/Mcp/src/specs/responses.json +++ b/tools/Mcp/src/specs/responses.json @@ -29,26 +29,6 @@ "type": "tool", "text": "Read examples from specs are under {0}. Implement empty test stubs under {1}. Test stubs are named as '.Test.ps1'. Define variables in function 'setupEnv' in 'utils.ps1' under {1}, and use these variables for test cases. Value of these variables are from {0}. Leave test cases as empty if you don't find any matches. You are expert in Azure-PowerShell and Autorest.PowerShell, You know how to map data from {0} to {1}. 
" }, - { - "name": "list-spec-modules", - "type": "tool", - "text": "Available modules under azure-rest-api-specs/specification: {0}" - }, - { - "name": "list-providers", - "type": "tool", - "text": "Providers for service {0}: {1}" - }, - { - "name": "list-api-versions", - "type": "tool", - "text": "API versions for {0}/{1} — Stable: {2} | Preview: {3}" - }, - { - "name": "resolve-autorest-inputs", - "type": "tool", - "text": "Resolved inputs — serviceName: {0}, commitId: {1}, serviceSpecs: {2}, swaggerFileSpecs: {3}" - }, { "name": "create-greeting", "type": "prompt", @@ -58,5 +38,10 @@ "name": "partner-module-workflow", "type": "prompt", "text": "@file:prompts/partner-module-workflow.md" + }, + { + "name": "autorest-readme-template", + "type": "resource", + "text": "@file:autorest-readme-template.md" } ] \ No newline at end of file diff --git a/tools/Mcp/src/specs/specs.json b/tools/Mcp/src/specs/specs.json index 6f38aeef45a6..17e2bf74e432 100644 --- a/tools/Mcp/src/specs/specs.json +++ b/tools/Mcp/src/specs/specs.json @@ -73,38 +73,10 @@ "callbackName": "createTestsFromSpecs" }, { - "name": "list-spec-modules", - "description": "List all top-level modules (service folders) under azure-rest-api-specs/specification.", + "name": "setupModuleStructure", + "description": "Setup Azure PowerShell module structure by selecting service, provider, and API version through interactive dropdowns", "parameters": [], - "callbackName": "listSpecModules" - }, - { - "name": "list-providers", - "description": "List provider namespaces for a given service under resource-manager.", - "parameters": [ - { "name": "service", "description": "Service folder name under specification (e.g., hybridconnectivity)", "type": "string" } - ], - "callbackName": "listProvidersForService" - }, - { - "name": "list-api-versions", - "description": "List available API versions for a given service and provider (stable/preview).", - "parameters": [ - { "name": "service", "description": "Service folder 
name under specification", "type": "string" }, - { "name": "provider", "description": "Provider namespace folder under the service (e.g., Microsoft.HybridConnectivity)", "type": "string" } - ], - "callbackName": "listApiVersions" - }, - { - "name": "resolve-autorest-inputs", - "description": "Resolve the four Autorest inputs (serviceName, commitId, serviceSpecs, swaggerFileSpecs) from service/provider/version.", - "parameters": [ - { "name": "service", "description": "Service folder name under specification", "type": "string" }, - { "name": "provider", "description": "Provider namespace under the service", "type": "string" }, - { "name": "stability", "description": "'stable' or 'preview'", "type": "string" }, - { "name": "version", "description": "API version (e.g., 2024-12-01)", "type": "string" } - ], - "callbackName": "resolveAutorestInputs" + "callbackName": "setupModuleStructure" } ], "prompts": [ diff --git a/tools/Mcp/src/types.ts b/tools/Mcp/src/types.ts index f578b44abc04..559fbcdf7385 100644 --- a/tools/Mcp/src/types.ts +++ b/tools/Mcp/src/types.ts @@ -25,6 +25,13 @@ export interface promptSchema { callbackName: string; } +export interface resourceSchema { + name: string; + description: string; + parameters?: any[]; + callbackName: string; +} + export interface responseSchema { name: string; type: string; From 8a16ffe01c96cd6ae25a21d0cabe78324507718d Mon Sep 17 00:00:00 2001 From: Yash Date: Sun, 21 Sep 2025 02:55:53 +1000 Subject: [PATCH 15/24] Basic Syntax Fixing --- tools/Mcp/src/services/toolsService.ts | 14 ++++---------- tools/Mcp/src/services/utils.ts | 20 -------------------- 2 files changed, 4 insertions(+), 30 deletions(-) diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index 3701c7777d2c..4c21b51ec1be 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -5,12 +5,6 @@ import path from 'path'; import { get, RequestOptions } from 'http'; import { 
toolParameterSchema } from '../types.js'; import { CodegenServer } from '../CodegenServer.js'; -import { - listSpecModules, - listProvidersForService, - listApiVersions, - resolveAutorestInputs -} from './utils.js'; export class ToolsService { private static _instance: ToolsService; @@ -180,7 +174,7 @@ export class ToolsService { setupModuleStructure = async (args: Args): Promise => { try { // List available services with dropdown - const modules = await listSpecModules(); + const modules = await utils.listSpecModules(); const serviceResponse = await this._server!.elicitInput({ message: `Select an Azure service from the dropdown below:`, requestedSchema: { @@ -202,7 +196,7 @@ export class ToolsService { } // List providers for the selected service with dropdown - const providers = await listProvidersForService(selectedService); + const providers = await utils.listProvidersForService(selectedService); if (providers.length === 0) { throw new Error(`No providers found for service '${selectedService}'`); } @@ -228,7 +222,7 @@ export class ToolsService { } // List API versions with dropdown combining version and stability - const apiVersions = await listApiVersions(selectedService, selectedProvider); + const apiVersions = await utils.listApiVersions(selectedService, selectedProvider); const allVersions = [ ...apiVersions.stable.map(v => ({ version: v, stability: 'stable' as const })), ...apiVersions.preview.map(v => ({ version: v, stability: 'preview' as const })) @@ -269,7 +263,7 @@ export class ToolsService { const selectedStability = versionMatch[2] as 'stable' | 'preview'; // Resolve Readme placeholder values based on Responses - const resolved = await resolveAutorestInputs({ + const resolved = await utils.resolveAutorestInputs({ service: selectedService, provider: selectedProvider, stability: selectedStability, diff --git a/tools/Mcp/src/services/utils.ts b/tools/Mcp/src/services/utils.ts index 4627b9ad0d10..e6e148727231 100644 --- a/tools/Mcp/src/services/utils.ts 
+++ b/tools/Mcp/src/services/utils.ts @@ -82,9 +82,6 @@ export async function getSwaggerContentFromUrl(swaggerUrl: string): Promise } } -/** - * GitHub helper: get latest commit SHA for azure-rest-api-specs main branch - */ export async function getSpecsHeadCommitSha(branch: string = 'main'): Promise { const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/branches/${branch}`; const res = await fetch(url); @@ -95,9 +92,6 @@ export async function getSpecsHeadCommitSha(branch: string = 'main'): Promise { const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/specification`; const res = await fetch(url); @@ -111,9 +105,6 @@ export async function listSpecModules(): Promise { .sort((a: string, b: string) => a.localeCompare(b)); } -/** - * Given a service (spec folder), list provider namespaces under resource-manager. - */ export async function listProvidersForService(service: string): Promise { const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/specification/${service}/resource-manager`; const res = await fetch(url); @@ -128,10 +119,6 @@ export async function listProvidersForService(service: string): Promise a.localeCompare(b)); } -/** - * For service + provider, list API version directories under stable/ and preview/. - * Returns map: { stable: string[], preview: string[] } - */ export async function listApiVersions(service: string, provider: string): Promise<{ stable: string[]; preview: string[] }> { const base = `specification/${service}/resource-manager/${provider}`; const folders = ['stable', 'preview'] as const; @@ -153,10 +140,6 @@ export async function listApiVersions(service: string, provider: string): Promis return result; } -/** - * For a given service/provider/version, find likely swagger files (.json) under that version path. - * Returns array of repo-relative file paths (starting with specification/...). 
- */ export async function listSwaggerFiles(service: string, provider: string, stability: 'stable'|'preview', version: string): Promise { const dir = `specification/${service}/resource-manager/${provider}/${stability}/${version}`; const url = `${GITHUB_API_BASE}/repos/${REST_API_SPECS_OWNER}/${REST_API_SPECS_REPO}/contents/${dir}`; @@ -173,9 +156,6 @@ export async function listSwaggerFiles(service: string, provider: string, stabil return ordered; } -/** - * Resolve the four Autorest inputs given service, provider, and version path. - */ export async function resolveAutorestInputs(params: { service: string; provider: string; From 6c796b428a1cd01c0161528abba69d678c169ff6 Mon Sep 17 00:00:00 2001 From: Yash Date: Sun, 21 Sep 2025 03:12:27 +1000 Subject: [PATCH 16/24] Updated return Spec --- tools/Mcp/src/services/toolsService.ts | 13 +------------ .../src/specs/prompts/partner-module-workflow.md | 2 +- tools/Mcp/src/specs/responses.json | 5 +++++ tools/Mcp/src/specs/specs.json | 2 +- 4 files changed, 8 insertions(+), 14 deletions(-) diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index 4c21b51ec1be..f84c43fc64a0 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -316,18 +316,7 @@ export class ToolsService { // Write README.md file await utils.writeFileIfNotExists(readmePath, readmeContent); - return [ - selectedService, - selectedProvider, - selectedVersion, - selectedStability, - resolved.serviceName, - resolved.commitId, - resolved.serviceSpecs, - resolved.swaggerFileSpecs, - moduleName, - autorestPath - ]; + return [moduleName]; } catch (error) { const errorMessage = error instanceof Error ? 
error.message : String(error); diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index 345afb5dc6d4..5ed2ae4e4e23 100644 --- a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -25,7 +25,7 @@ ## Stage 2: Generating partner powershell module - FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. -- Navigate to the newly created module directory (should be under `src//.Autorest`) +- Navigate to the created module directory, you can find this from the input given by the user using ellicitation for the input "moduleName" (should be under `src//.Autorest`) - Use the "generate-autorest" mcp tool to generate the module using the README.md that was created by setupModuleStructure - Stage 2 Complete. diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json index ca1be795f75a..143ec5639adf 100644 --- a/tools/Mcp/src/specs/responses.json +++ b/tools/Mcp/src/specs/responses.json @@ -29,6 +29,11 @@ "type": "tool", "text": "Read examples from specs are under {0}. Implement empty test stubs under {1}. Test stubs are named as '.Test.ps1'. Define variables in function 'setupEnv' in 'utils.ps1' under {1}, and use these variables for test cases. Value of these variables are from {0}. Leave test cases as empty if you don't find any matches. You are expert in Azure-PowerShell and Autorest.PowerShell, You know how to map data from {0} to {1}. " }, + { + "name": "setup-module-structure", + "type": "tool", + "text": "Created the module structure under the folder: {0}." 
+ }, { "name": "create-greeting", "type": "prompt", diff --git a/tools/Mcp/src/specs/specs.json b/tools/Mcp/src/specs/specs.json index 17e2bf74e432..44a8b484a32e 100644 --- a/tools/Mcp/src/specs/specs.json +++ b/tools/Mcp/src/specs/specs.json @@ -73,7 +73,7 @@ "callbackName": "createTestsFromSpecs" }, { - "name": "setupModuleStructure", + "name": "setup-module-structure", "description": "Setup Azure PowerShell module structure by selecting service, provider, and API version through interactive dropdowns", "parameters": [], "callbackName": "setupModuleStructure" From acebef04bde3d413b58d9da3c19f15328bc0a769 Mon Sep 17 00:00:00 2001 From: Yash Date: Sun, 21 Sep 2025 04:37:43 +1000 Subject: [PATCH 17/24] Enhanced Quality Generation --- tools/CreateMappings_rules.json | 40 ++++++ tools/Mcp/src/services/toolsService.ts | 8 +- tools/Mcp/src/services/utils.ts | 45 +++++++ .../specs/prompts/partner-module-workflow.md | 123 ++++++++++-------- tools/Mcp/src/specs/prompts/workflow-old.md | 54 ++++++++ tools/Mcp/src/specs/responses.json | 4 +- 6 files changed, 215 insertions(+), 59 deletions(-) create mode 100644 tools/Mcp/src/specs/prompts/workflow-old.md diff --git a/tools/CreateMappings_rules.json b/tools/CreateMappings_rules.json index 92e2a18850ed..504db9cd57f7 100644 --- a/tools/CreateMappings_rules.json +++ b/tools/CreateMappings_rules.json @@ -979,5 +979,45 @@ { "module": "DependencyMap", "alias": "DependencyMap" + }, + { + "alias": "YashMaps", + "module": "YashMaps" + }, + { + "alias": "YashMaps2", + "module": "YashMaps2" + }, + { + "alias": "TestArizeAI", + "module": "TestArizeAI" + }, + { + "alias": "TestData", + "module": "TestData" + }, + { + "alias": "YashMaps3", + "module": "YashMaps3" + }, + { + "alias": "YashMaps4", + "module": "YashMaps4" + }, + { + "alias": "YashMaps5", + "module": "YashMaps5" + }, + { + "alias": "YashMaps6", + "module": "YashMaps6" + }, + { + "alias": "TestPinecone", + "module": "TestPinecone" + }, + { + "module": "YashMaps7", + "alias": 
"YashMaps7" } ] diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index f84c43fc64a0..a702020ceea0 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -161,14 +161,16 @@ export class ToolsService { console.error(`Error eliciting input for example ${name}:`, error); } } - return [exampleSpecsPath, examplePath]; + const idealExamplePaths = utils.getIdealModuleExamplePaths(); + return [exampleSpecsPath, examplePath, idealExamplePaths]; } createTestsFromSpecs = async (args: Args): Promise => { const workingDirectory = z.string().parse(Object.values(args)[0]); const testPath = path.join(workingDirectory, "test"); const exampleSpecsPath = await utils.getExamplesFromSpecs(workingDirectory); - return [exampleSpecsPath, testPath]; + const idealTestPaths = utils.getIdealModuleTestPaths(); + return [exampleSpecsPath, testPath, idealTestPaths]; } setupModuleStructure = async (args: Args): Promise => { @@ -271,7 +273,7 @@ export class ToolsService { }); const moduleNameResponse = await this._server!.elicitInput({ - message: `Configuration resolved:\n- Service: ${selectedService}\n- Provider: ${selectedProvider}\n- Version: ${selectedVersion} (${selectedStability})\n- Service Name: ${resolved.serviceName}\n- Commit ID: ${resolved.commitId}\n- Service Specs: ${resolved.serviceSpecs}\n- Swagger File: ${resolved.swaggerFileSpecs}`, + message: `What would you like call the powershell module? 
\n\n Configuration resolved:\n- Service: ${selectedService}\n- Provider: ${selectedProvider}\n- Version: ${selectedVersion} (${selectedStability})\n- Service Name: ${resolved.serviceName}\n- Commit ID: ${resolved.commitId}\n- Service Specs: ${resolved.serviceSpecs}\n- Swagger File: ${resolved.swaggerFileSpecs}`, requestedSchema: { type: "object", properties: { diff --git a/tools/Mcp/src/services/utils.ts b/tools/Mcp/src/services/utils.ts index e6e148727231..2af6ddab731f 100644 --- a/tools/Mcp/src/services/utils.ts +++ b/tools/Mcp/src/services/utils.ts @@ -3,6 +3,7 @@ import yaml from "js-yaml"; import { yamlContent } from '../types.js'; import { execSync } from 'child_process'; import path from 'path'; +import { Dirent } from 'fs'; const GITHUB_API_BASE = 'https://api.github.com'; const REST_API_SPECS_OWNER = 'Azure'; @@ -370,6 +371,50 @@ export async function writeFileIfNotExists(filePath: string, content: string): P } } +export function getIdealModuleExamplePaths(): string { + const idealModulesRoot = path.join(process.cwd(), 'src', 'ideal-modules'); + try { + if (!fs.existsSync(idealModulesRoot)) { + return ''; + } + const modules: Dirent[] = fs.readdirSync(idealModulesRoot, { withFileTypes: true }); + const exampleDirs: string[] = []; + for (const mod of modules) { + if (!mod.isDirectory()) continue; + const candidate = path.join(idealModulesRoot, mod.name, 'examples'); + if (fs.existsSync(candidate)) { + exampleDirs.push(candidate); + } + } + return exampleDirs.join(';'); + } catch (err) { + console.error('Error collecting ideal module example paths:', err); + return ''; + } +} + +export function getIdealModuleTestPaths(): string { + const idealModulesRoot = path.join(process.cwd(), 'src', 'ideal-modules'); + try { + if (!fs.existsSync(idealModulesRoot)) { + return ''; + } + const modules: Dirent[] = fs.readdirSync(idealModulesRoot, { withFileTypes: true }); + const testDirs: string[] = []; + for (const mod of modules) { + if (!mod.isDirectory()) continue; + 
const candidate = path.join(idealModulesRoot, mod.name, 'tests'); + if (fs.existsSync(candidate)) { + testDirs.push(candidate); + } + } + return testDirs.join(';'); + } catch (err) { + console.error('Error collecting ideal module test paths:', err); + return ''; + } +} + diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index 5ed2ae4e4e23..6b164540d75d 100644 --- a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -1,54 +1,69 @@ -# Execution rules -- Do not ask before running the command, just go ahead and run. - -# Role and Objective -- You are an autonomous agent that generates Azure PowerShell modules using Autorest for partners. -- Execute commands confidently without asking for confirmation. -- Follow all steps carefully and halt if any MCP tool is missing or fails. -- Think independently using your full context and reset if needed. -- No need to provide status updates at each step. -- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. DO NOT MISS ANY STEPS. -- If an mcp tool is not found or the tool fails then please halt execution. - -# Instructions - -## Stage 1: Interactive specification selection and autorest resolution -- Call the MCP tool "setupModuleStructure" with no parameters -- This tool will interactively guide you through: - 1. Selecting the specification from available azure-rest-api-specs - 2. Choosing the provider namespace - 3. Selecting the API version (stable or preview) - 4. Getting the module name from the user - 5. Automatically creating the module structure and README.md file -- The tool will create the folder structure under the correct src directory and generate the README.md with proper autorest configuration -- Mark Stage 1 complete once the setupModuleStructure tool finishes successfully - -## Stage 2: Generating partner powershell module -- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. 
-- Navigate to the created module directory, you can find this from the input given by the user using ellicitation for the input "moduleName" (should be under `src//.Autorest`) -- Use the "generate-autorest" mcp tool to generate the module using the README.md that was created by setupModuleStructure -- Stage 2 Complete. - -## Stage 3: Updating Example Files -- Use the "create-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. -- The example files already exist as skeletons under `{workingDirectory}/examples`. -- Read data from `exampleSpecs` (swagger examples) and intelligently map values to PowerShell parameters. -- Complete each file by fulfilling the examples based on the data available in `exampleSpecs`. -- Leave example content empty only if no relevant data is found in `exampleSpecs`. -- Once all example files are updated, mark stage 3 as complete. - -## Stage 4: Updating Test Files -- Use the "create-test" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. -- Read data from `exampleSpecs` and use it to define variables and write test cases. -- Define setup variables inside `setupEnv` in `utils.ps1`, inferred from `exampleSpecs`. -- Use those variables in the actual test case content. -- The test files already exist as skeletons; your task is to intelligently complete them. -- Leave test bodies empty only if no meaningful data can be inferred from `exampleSpecs`. -- Once all test files are updated, mark stage 4 as complete. - -## Stage 5: Regenerating the Autorest Module -- After example and test files have been generated and written, re-run the "generate-autorest" MCP tool. -- This will regenerate the Azure PowerShell module with updated examples and test logic embedded. -- Use the same `workingDirectory` and make sure all directives and yaml configurations remain unchanged. -- This is a mandatory finalization step before pushing to GitHub. 
-- Do not skip this regeneration even if the module was generated earlier. +# Partner Module Workflow (Simplified) + +Goal: Generate an Azure PowerShell module via Autorest, then populate examples and tests deterministically with minimal ambiguity. + +Core Principle: Derive the working directory once and reuse it. Do not guess or fabricate paths. + +## 1. Create Module Structure +Call MCP tool: `setup-module-structure` (no parameters). +It returns `{0}` = `ModuleName` (from user input inside the tool). +Derive `workingDirectory` strictly as: +`/src/{ModuleName}/{ModuleName}.Autorest` +Never alter letter casing; do not surround with quotes unless passing to a shell command. + +## 2. Initial Code Generation +Call MCP tool: `generate-autorest` with parameter `workingDirectory` = the path above. +Do not call Autorest directly; always use the MCP tool. +If generation fails, STOP. + +## 3. Populate Examples +Call MCP tool: `create-example` with the same `workingDirectory`. +Tool returns: + `{0}` = specs example source dir (downloaded JSON) + `{1}` = target examples dir in module + `{2}` = reference ideal examples (semicolon separated; may be empty) +Use reference examples (if any) for style: parameter ordering (required first), consistent variable placeholder patterns, and advanced options inclusion. +Leave an example file empty only if no relevant swagger example data exists. +Quality guidance (concise): + - Provide both minimal (required params only) and enriched (common optional params) variants if the skeleton expects multiple examples. + - Use stable placeholder naming: ``, ``, `` etc., unless a deterministic value is required by an existing test. + - Do not invent unsupported parameters; omit deprecated or preview-only params unless the swagger version explicitly requires them. + - Maintain consistent casing and ordering: required first, then alphabetized optional. + - Prefer realistic value shapes (GUID formats, URIs) over lorem ipsum. 
+MANDATORY: After the tool call, TREAT the returned response text itself (instructions referencing {0} {1} {2}) AS AN ACTION PLAN. Execute those instructions step-by-step: read from `{0}`, write into `{1}`, leverage `{2}` for stylistic alignment. Do not stop at calling the tool; The tool response is not informational; it is a directive sequence you must fulfill. + +## 4. Populate Tests +Call MCP tool: `create-test` with the same `workingDirectory`. +Tool returns: + `{0}` = specs example source dir + `{1}` = target test dir + `{2}` = reference ideal test dirs +Fill test stubs using variables defined in `utils.ps1` `setupEnv` function. +IMPORTANT: You can completely ignore the generated Test files as they only have the skeleton. Instead, you can group these test files into groups of 1 particular resource & then write 1 new file combining these 3-4 test file in the CRUD format. +Add: create -> get -> assert -> (optional cleanup). Add a negative test when feasible. +Leave a test empty only if genuinely no meaningful data can be inferred. +Quality guidance (concise): + - CRUD Pattern: Create (or Update) => Get => Assert essential fields => (List => Assert presence) => (Delete => Confirm absence) when resource lifecycle is supported. + - Assertions: Prefer specific property checks (Name, Id segment patterns, ProvisioningState) over broad object dumps. + - Negative Case: Intentionally supply one invalid parameter or missing required field; assert error category/message pattern. + - Reuse Variables: All names & IDs defined once in `setupEnv` to avoid duplication and enable future parameterization. + - Idempotency: Avoid destructive operations unless they clean up what this test created. +MANDATORY: After the tool call, EXECUTE the returned test instructions: interpret response text referencing `{0}`, `{1}`, `{2}`, open each stub under `{1}`, populate bodies using data from `{0}`, and mirror style from `{2}`. 
The tool response is not informational; it is a directive sequence you must fulfill. + +## 5. Regenerate Module +Call `generate-autorest` again with identical `workingDirectory` to ensure examples/tests are integrated. Do not modify the README.yaml block except via directives inserted earlier. + +## 6. Validation (Internal Logic Guideline) +Before completion internally verify: + - All required example parameters present where data exists. + - No unknown parameters introduced. + - Tests assert at least one key property per created resource. +If any check fails, refine the affected file(s) then proceed. + +## Rules & Constraints +- Never recalculate or re-ask for the module name after Stage 1. +- Never invent alternative directory paths. +- Do not skip steps 1–5. +- Halt immediately if an MCP tool is unavailable or errors. + +End of workflow. \ No newline at end of file diff --git a/tools/Mcp/src/specs/prompts/workflow-old.md b/tools/Mcp/src/specs/prompts/workflow-old.md new file mode 100644 index 000000000000..5ed2ae4e4e23 --- /dev/null +++ b/tools/Mcp/src/specs/prompts/workflow-old.md @@ -0,0 +1,54 @@ +# Execution rules +- Do not ask before running the command, just go ahead and run. + +# Role and Objective +- You are an autonomous agent that generates Azure PowerShell modules using Autorest for partners. +- Execute commands confidently without asking for confirmation. +- Follow all steps carefully and halt if any MCP tool is missing or fails. +- Think independently using your full context and reset if needed. +- No need to provide status updates at each step. +- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. DO NOT MISS ANY STEPS. +- If an mcp tool is not found or the tool fails then please halt execution. + +# Instructions + +## Stage 1: Interactive specification selection and autorest resolution +- Call the MCP tool "setupModuleStructure" with no parameters +- This tool will interactively guide you through: + 1. 
Selecting the specification from available azure-rest-api-specs
+  2. Choosing the provider namespace
+  3. Selecting the API version (stable or preview)
+  4. Getting the module name from the user
+  5. Automatically creating the module structure and README.md file
+- The tool will create the folder structure under the correct src directory and generate the README.md with proper autorest configuration
+- Mark Stage 1 complete once the setupModuleStructure tool finishes successfully
+
+## Stage 2: Generating partner powershell module
+- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS.
+- Navigate to the created module directory, you can find this from the input given by the user using elicitation for the input "moduleName" (should be under `src/<moduleName>/<moduleName>.Autorest`)
+- Use the "generate-autorest" mcp tool to generate the module using the README.md that was created by setupModuleStructure
+- Stage 2 Complete.
+
+## Stage 3: Updating Example Files
+- Use the "create-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you.
+- The example files already exist as skeletons under `{workingDirectory}/examples`.
+- Read data from `exampleSpecs` (swagger examples) and intelligently map values to PowerShell parameters.
+- Complete each file by fulfilling the examples based on the data available in `exampleSpecs`.
+- Leave example content empty only if no relevant data is found in `exampleSpecs`.
+- Once all example files are updated, mark stage 3 as complete.
+
+## Stage 4: Updating Test Files
+- Use the "create-test" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you.
+- Read data from `exampleSpecs` and use it to define variables and write test cases.
+- Define setup variables inside `setupEnv` in `utils.ps1`, inferred from `exampleSpecs`.
+- Use those variables in the actual test case content.
+- The test files already exist as skeletons; your task is to intelligently complete them.
+- Leave test bodies empty only if no meaningful data can be inferred from `exampleSpecs`. +- Once all test files are updated, mark stage 4 as complete. + +## Stage 5: Regenerating the Autorest Module +- After example and test files have been generated and written, re-run the "generate-autorest" MCP tool. +- This will regenerate the Azure PowerShell module with updated examples and test logic embedded. +- Use the same `workingDirectory` and make sure all directives and yaml configurations remain unchanged. +- This is a mandatory finalization step before pushing to GitHub. +- Do not skip this regeneration even if the module was generated earlier. diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json index 143ec5639adf..9373e3b0e394 100644 --- a/tools/Mcp/src/specs/responses.json +++ b/tools/Mcp/src/specs/responses.json @@ -22,12 +22,12 @@ { "name": "create-example", "type": "tool", - "text": "Read examples from specs under {0}. Fulfill examples under {1}. You are expert in Azure-PowerShell and Autorest.PowerShell. Leave example as empty if you don't find any matches. You know how to map data from {0} to {1}" + "text": "Read examples from specs under {0}. Fulfill examples under {1}. Also leverage high-quality reference examples located in directories: {2} (semicolon-separated). When generating, mirror parameter naming, structure, and advanced option usage patterns seen in those reference examples when applicable. Produce minimal yet complete runnable examples; omit unknown or deprecated params. Leave example empty only if no relevant mapping exists." }, { "name": "create-test", "type": "tool", - "text": "Read examples from specs are under {0}. Implement empty test stubs under {1}. Test stubs are named as '.Test.ps1'. Define variables in function 'setupEnv' in 'utils.ps1' under {1}, and use these variables for test cases. Value of these variables are from {0}. Leave test cases as empty if you don't find any matches. 
You are expert in Azure-PowerShell and Autorest.PowerShell, You know how to map data from {0} to {1}. " + "text": "Read examples from specs under {0}. Implement test stubs under {1}. Reference high-quality existing tests from directories: {2} (semicolon-separated) to replicate assertion style, variable patterns, and setup/teardown conventions. Test stubs are named '.Test.ps1'. Populate 'setupEnv' in 'utils.ps1' with variables derived from {0} examples; reuse them across tests. For each CRUD operation: (1) Create/Update then Get and assert key properties, (2) List and validate presence, (3) Clean up if destructive. Add one negative test if feasible (invalid parameter) asserting specific error type/message. Leave a stub empty only if absolutely no relevant example data exists." }, { "name": "setup-module-structure", From 22a3a265877c00a55514439842d6f58bd6f6b785 Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 22 Sep 2025 21:16:45 +1000 Subject: [PATCH 18/24] Updated Workflow --- tools/Mcp/src/specs/prompts/partner-module-workflow.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index 6b164540d75d..881377f5da7f 100644 --- a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -39,7 +39,7 @@ Tool returns: `{1}` = target test dir `{2}` = reference ideal test dirs Fill test stubs using variables defined in `utils.ps1` `setupEnv` function. -IMPORTANT: You can completely ignore the generated Test files as they only have the skeleton. Instead, you can group these test files into groups of 1 particular resource & then write 1 new file combining these 3-4 test file in the CRUD format. +IMPORTANT: You can completely ignore the generated Test files, do not fill these as they only have the skeleton. 
Instead, you can group these test files into groups of a particular resource & then write a new file for each group combining these 3-4 test files in the CRUD format.
 Add: create -> get -> assert -> (optional cleanup). Add a negative test when feasible.
 Leave a test empty only if genuinely no meaningful data can be inferred.
 Quality guidance (concise):

From 1d15166321cf0910f3ce16c49d745184fbde3630 Mon Sep 17 00:00:00 2001
From: Yash
Date: Wed, 24 Sep 2025 13:50:04 +1000
Subject: [PATCH 19/24] Deleted mcpprompt.md

---
 tools/Mcp/test/vscode/mcpprompt.md | 106 -----------------------------
 1 file changed, 106 deletions(-)
 delete mode 100644 tools/Mcp/test/vscode/mcpprompt.md

diff --git a/tools/Mcp/test/vscode/mcpprompt.md b/tools/Mcp/test/vscode/mcpprompt.md
deleted file mode 100644
index 255a7cfbf7bc..000000000000
--- a/tools/Mcp/test/vscode/mcpprompt.md
+++ /dev/null
@@ -1,106 +0,0 @@
-# Execution rules
-- Do not ask before running the command, just go ahead and run.
-
-# Role and Objective
-- You are an autonomous agent that generates Azure PowerShell modules using Autorest for partners.
-- Execute commands confidently without asking for confirmation.
-- Follow all steps carefully and halt if any MCP tool is missing or fails.
-- Think independently using your full context and reset if needed.
-- No need to provide status updates at each step.
-- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. DO NOT MISS ANY STEPS.
-- If an mcp tool is not found or the tool fails then please halt execution.
-
-# Instructions
-
-## Stage 1: Interactive spec selection and autorest resolution
-- Ask the user for their desired **PowerShell module name** (e.g., "HybridConnectivity")
-- Call the MCP tool "list-spec-modules" to fetch all available specification folders from azure-rest-api-specs/specification.
-- From the full list, present 10 most relevant spec options to the user based on their PowerShell module name, or show a representative sample if no clear match.
-- Ask the user to choose which specification they want to use from the presented options, or ask if they want to see more options. -- **Confirm the spec choice**: Once user selects a spec, ask them to confirm this is the correct specification for their needs (show the spec name clearly). -- Call the MCP tool "list-providers" with the chosen spec folder to retrieve available provider namespaces. -- Present the list of providers to the user: - - If multiple providers are returned, ask the user to pick one - - If only one provider exists, select it automatically but confirm with the user -- **Confirm the provider choice**: Ask the user to confirm this is the correct provider namespace. -- Call the MCP tool "list-api-versions" with the chosen spec folder and provider to get available versions, separated by Stable and Preview. -- Present the API version options to the user and ask them to choose: - 1. **Stability**: stable or preview - 2. **API version**: specific version from the available list -- **Confirm the API version choice**: Ask the user to confirm their stability and version selection. -- Call the MCP tool "resolve-autorest-inputs" with the chosen spec folder, provider, stability, and version to compute the 4 autorest inputs: serviceName, commitId, serviceSpecs, swaggerFileSpecs. -- Store the resolved values for later steps (README generation and Autorest). Mark Stage 1 complete. - -## Stage 2: Generating partner powershell module -- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. -- Navigate to the `src` folder in the home "azure-powershell" directory. -- Create a new folder named and within it a new folder named `.Autorest`. (If not already present) -- Move into the new folder `/.Autorest`, using the command `cd /.Autorest`. -- Create a new file `README.md`. (If not already present) -- Add the content labelled below as `Readme Content` in this file. -- Use the "generate-autorest" mcp tool to generate the module. -- Stage 2 Complete. 
- -## Stage 3: Updating Example Files -- Use the "create-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. -- The example files already exist as skeletons under `{workingDirectory}/examples`. -- Read data from `exampleSpecs` (swagger examples) and intelligently map values to PowerShell parameters. -- Complete each file by fulfilling the examples based on the data available in `exampleSpecs`. -- Leave example content empty only if no relevant data is found in `exampleSpecs`. -- Once all example files are updated, mark stage 3 as complete. - -## Stage 4: Updating Test Files -- Use the "test-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. -- Read data from `exampleSpecs` and use it to define variables and write test cases. -- Define setup variables inside `setupEnv` in `utils.ps1`, inferred from `exampleSpecs`. -- Use those variables in the actual test case content. -- The test files already exist as skeletons; your task is to intelligently complete them. -- Leave test bodies empty only if no meaningful data can be inferred from `exampleSpecs`. -- Once all test files are updated, mark stage 4 as complete. - -## Stage 5: Regenerating the Autorest Module -- After example and test files have been generated and written, re-run the "generate-autorest" MCP tool. -- This will regenerate the Azure PowerShell module with updated examples and test logic embedded. -- Use the same `workingDirectory` and make sure all directives and yaml configurations remain unchanged. -- This is a mandatory finalization step before pushing to GitHub. -- Do not skip this regeneration even if the module was generated earlier. 
- -# Readme Content - -### AutoRest Configuration -> see https://aka.ms/autorest - -```yaml - -commit: - -require: - - $(this-folder)/../../readme.azure.noprofile.md - - $(repo)/specification//readme.md - -try-require: - - $(repo)/specification//readme.powershell.md - -input-file: - - $(repo)/specification/ - -module-version: 0.1.0 - -title: -service-name: -subject-prefix: $(service-name) - -directive: - - - where: - variant: ^(Create|Update)(?!.*?(Expanded|JsonFilePath|JsonString)) - remove: true - - - where: - variant: ^CreateViaIdentity$|^CreateViaIdentityExpanded$ - remove: true - - - where: - verb: Set - remove: true -``` From 45b8a1c1b20dc3dfe48b3536fdc8534c37714204 Mon Sep 17 00:00:00 2001 From: Yash Date: Tue, 30 Sep 2025 17:38:40 +1000 Subject: [PATCH 20/24] Removed Example Ellicitation, Improved Examples (Real Params) --- tools/CreateMappings_rules.json | 40 ------------- tools/Mcp/src/services/toolsService.ts | 24 +------- .../specs/prompts/partner-module-workflow.md | 56 +++++++++++-------- tools/Mcp/src/specs/responses.json | 4 +- tools/Mcp/src/specs/specs.json | 9 --- 5 files changed, 37 insertions(+), 96 deletions(-) diff --git a/tools/CreateMappings_rules.json b/tools/CreateMappings_rules.json index 504db9cd57f7..92e2a18850ed 100644 --- a/tools/CreateMappings_rules.json +++ b/tools/CreateMappings_rules.json @@ -979,45 +979,5 @@ { "module": "DependencyMap", "alias": "DependencyMap" - }, - { - "alias": "YashMaps", - "module": "YashMaps" - }, - { - "alias": "YashMaps2", - "module": "YashMaps2" - }, - { - "alias": "TestArizeAI", - "module": "TestArizeAI" - }, - { - "alias": "TestData", - "module": "TestData" - }, - { - "alias": "YashMaps3", - "module": "YashMaps3" - }, - { - "alias": "YashMaps4", - "module": "YashMaps4" - }, - { - "alias": "YashMaps5", - "module": "YashMaps5" - }, - { - "alias": "YashMaps6", - "module": "YashMaps6" - }, - { - "alias": "TestPinecone", - "module": "TestPinecone" - }, - { - "module": "YashMaps7", - "alias": "YashMaps7" } ] 
diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index a702020ceea0..4acfe80e249b 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -138,29 +138,7 @@ export class ToolsService { const workingDirectory = z.string().parse(Object.values(args)[0]); const examplePath = path.join(workingDirectory, "examples"); const exampleSpecsPath = await utils.getExamplesFromSpecs(workingDirectory); - const exampleSpecs = await utils.getExampleJsonContent(exampleSpecsPath); - for (const {name, content} of exampleSpecs) { - const example = await utils.flattenJsonObject(content['parameters']); - try { - const response = await this._server!.elicitInput({ - "message": `Please review example data for ${name}: ${example.map(({key: k, value:v}) => ` \n${k}: ${v}`)}`, - "requestedSchema": { - "type": "object", - "properties": { - "skipAll": { - "type": "boolean", - "description": "If true, skip the review of all examples and proceed to the next step." - } - }, - } - }); - if (response.content && response.content['skipAll'] === true) { - break; - } - } catch (error) { - console.error(`Error eliciting input for example ${name}:`, error); - } - } + // Interactive elicitation removed previously; also parameter export removed (simplified workflow). const idealExamplePaths = utils.getIdealModuleExamplePaths(); return [exampleSpecsPath, examplePath, idealExamplePaths]; } diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index 881377f5da7f..b73b79a2741c 100644 --- a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -16,39 +16,51 @@ Call MCP tool: `generate-autorest` with parameter `workingDirectory` = the path Do not call Autorest directly; always use the MCP tool. If generation fails, STOP. -## 3. Populate Examples +## 3. 
Populate Examples (Help-Driven Parameters)
 Call MCP tool: `create-example` with the same `workingDirectory`.
 Tool returns:
  `{0}` = specs example source dir (downloaded JSON)
  `{1}` = target examples dir in module
  `{2}` = reference ideal examples (semicolon separated; may be empty)
+
+Parameter Source of Truth: Discard any swagger fields not documented in help.
+Derive the help directory as: `helpDir = <repoRoot>/src/{ModuleName}/help/`.
+READ-ONLY: Do NOT copy, duplicate, or move help markdown files into the `.Autorest` or `examples` folder. They are only inspected to determine the allowed parameter set. Generating or pasting full help content into examples is prohibited.
+For each cmdlet example you generate:
+  1. Open the help markdown file: `helpDir/<CmdletName>.md`.
+  2.
Examine the allowed parameters from (a) syntax code fences ``` blocks containing the cmdlet invocation) and (b) `### -ParameterName` headings. + 3. Required ordering: parameters that appear in the first syntax signature first (in the order shown), followed by remaining optional parameters alphabetically. + 4. Ignore `CommonParameters` heading and any swagger example properties not in the allowed set. + 5. Use (or create if missing) only the example script files expected under `{1}`; never replicate help file text. + +Example Construction Rules: + - Minimal yet runnable. If swagger example provides values for disallowed params, omit them silently. + - Provide enriched variants only if distinct meaningful optional parameters remain after filtering. + - Use stable placeholders: ``, ``, etc. + - Never invent parameters or reuse removed swagger names under new casing. + - Leave the example file empty if no swagger fields map to documented parameters. + +MANDATORY EXECUTION: Treat the tool's response (with placeholders) as an action plan—read from `{0}`, consult (but do not copy) help files in `helpDir`, then generate/update ONLY the example scripts under `{1}` (respecting any existing skeleton), mirroring stylistic patterns from `{2}`. + +## 4. Populate Tests (Help-Driven Parameters) Call MCP tool: `create-test` with the same `workingDirectory`. Tool returns: `{0}` = specs example source dir `{1}` = target test dir `{2}` = reference ideal test dirs -Fill test stubs using variables defined in `utils.ps1` `setupEnv` function. -IMPORTANT: You can completely ignore the generated Test files, do not fill these as they only have the skeleton. Instead, you can group these test files into groups of a particular resource & then write new file for each group combining these 3-4 test file in the CRUD format. -Add: create -> get -> assert -> (optional cleanup). Add a negative test when feasible. -Leave a test empty only if genuinely no meaningful data can be inferred. 
-Quality guidance (concise): - - CRUD Pattern: Create (or Update) => Get => Assert essential fields => (List => Assert presence) => (Delete => Confirm absence) when resource lifecycle is supported. - - Assertions: Prefer specific property checks (Name, Id segment patterns, ProvisioningState) over broad object dumps. - - Negative Case: Intentionally supply one invalid parameter or missing required field; assert error category/message pattern. - - Reuse Variables: All names & IDs defined once in `setupEnv` to avoid duplication and enable future parameterization. - - Idempotency: Avoid destructive operations unless they clean up what this test created. -MANDATORY: After the tool call, EXECUTE the returned test instructions: interpret response text referencing `{0}`, `{1}`, `{2}`, open each stub under `{1}`, populate bodies using data from `{0}`, and mirror style from `{2}`. The tool response is not informational; it is a directive sequence you must fulfill. +Do NOT modify any pre-generated stub files. Instead CREATE NEW files: one per top-level resource (or logical resource group) named `.Crud.Tests.ps1`. +Each file covers (omit phases not supported): + 1. Create (New-* or equivalent) + 2. Get (Get-*) with property assertions + 3. List (Get-* plural) asserting presence + 4. Update/Set (if supported) asserting only changed fields + 5. Delete/Remove (cleanup) asserting absence (or expected NotFound) + 6. Negative (invalid parameter or missing required) expecting specific error pattern +Parameter Filtering: Apply the SAME help-driven filtering used for examples. Do not call cmdlets with parameters absent from their help markdown. +Variable Reuse: Define all common names/IDs once in `utils.ps1` `setupEnv`; reference them in test files. +Assertions: Prefer targeted property checks (Name, Id pattern, ProvisioningState) over full object dumps. +Idempotency: Ensure cleanup for resources created; avoid deleting shared or pre-existing resources. 
+MANDATORY: After the tool call, treat response text as an execution plan: read from `{0}`, create new files under `{1}`, mirror style from `{2}`, and enforce help-based parameter filtering.

 ## 5. Regenerate Module
 Call `generate-autorest` again with identical `workingDirectory` to ensure examples/tests are integrated. Do not modify the README.yaml block except via directives inserted earlier.
diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json
index 9373e3b0e394..1aa667f28370 100644
--- a/tools/Mcp/src/specs/responses.json
+++ b/tools/Mcp/src/specs/responses.json
@@ -22,12 +22,12 @@
     {
         "name": "create-example",
         "type": "tool",
-        "text": "Read examples from specs under {0}. Fulfill examples under {1}. Also leverage high-quality reference examples located in directories: {2} (semicolon-separated). When generating, mirror parameter naming, structure, and advanced option usage patterns seen in those reference examples when applicable. Produce minimal yet complete runnable examples; omit unknown or deprecated params. Leave example empty only if no relevant mapping exists."
+        "text": "Read swagger-derived example JSONs from {0}. Fulfill module example files under {1}. Also leverage high-quality reference examples located in directories: {2} (semicolon-separated). STRICT PARAMETER FILTERING: For each cmdlet, first open its help markdown file located in the module help directory: derive helpDir = parent of {1} with '.Autorest' suffix removed + '/help'; file name pattern: '<CmdletName>.md'. Only include parameters that appear either (a) in the syntax blocks (code fences containing the cmdlet invocation) or (b) as '### -ParameterName' headings in that help file. Ignore any swagger properties not represented as help parameters. Ordering: list required parameters first (as shown in the first syntax block), then remaining optional parameters alphabetically. Do not invent parameters.
If no matching parameters from swagger examples align with the help-defined set, leave that example empty. Ensure examples remain minimal yet runnable and mirror advanced usage patterns (e.g., pipeline vs explicit parameters) only when those parameters are defined in help." }, { "name": "create-test", "type": "tool", - "text": "Read examples from specs under {0}. Implement test stubs under {1}. Reference high-quality existing tests from directories: {2} (semicolon-separated) to replicate assertion style, variable patterns, and setup/teardown conventions. Test stubs are named '.Test.ps1'. Populate 'setupEnv' in 'utils.ps1' with variables derived from {0} examples; reuse them across tests. For each CRUD operation: (1) Create/Update then Get and assert key properties, (2) List and validate presence, (3) Clean up if destructive. Add one negative test if feasible (invalid parameter) asserting specific error type/message. Leave a stub empty only if absolutely no relevant example data exists." + "text": "Read swagger-derived example JSONs from {0}. DO NOT modify or populate any pre-generated stub files under {1}. Instead, CREATE NEW test files in {1}, one per top-level resource (or logical resource group). Name pattern: '.Crud.Tests.ps1'. In each file include Create, Get, List, Update/Set (if supported), Delete/Remove (cleanup), and a Negative test where meaningful. STYLE & PARAM SOURCE: Only use parameters that are documented in the cmdlet help markdown (helpDir derived as parent of {1} with '.Autorest' removed + '/help'; file name = '.md'). Extract allowed parameters from syntax code fences and '### -ParameterName' headings. Reject swagger-only properties not in help. Reference high-quality tests in {2} for structure and assertion style. Centralize reusable variables in 'utils.ps1' (setupEnv). Assertions: key identity props, provisioning state, collection membership, and absence after deletion. Idempotent: create what you delete. 
Skip generating a resource test file if no parameters from examples map to documented help parameters for its cmdlets." }, { "name": "setup-module-structure", diff --git a/tools/Mcp/src/specs/specs.json b/tools/Mcp/src/specs/specs.json index 44a8b484a32e..28e35181b7e1 100644 --- a/tools/Mcp/src/specs/specs.json +++ b/tools/Mcp/src/specs/specs.json @@ -80,15 +80,6 @@ } ], "prompts": [ - { - "name": "create-greeting", - "description": "Generate a customized greeting message", - "parameters": [ - {"name": "name", "description": "Name of the person to greet", "type": "string"}, - {"name": "style", "description": "The style of greeting, such a formal, excited, or casual. If not specified casual will be used", "type": "string", "optional": true} - ], - "callbackName": "createGreetingPrompt" - }, { "name": "partner-module-workflow", "description": "Full autonomous workflow instructions to generate a partner Azure PowerShell module via Autorest.", From 261e31dc5af333394c896100a2ec37b5bcee4352 Mon Sep 17 00:00:00 2001 From: Yash Date: Wed, 1 Oct 2025 18:17:13 +1000 Subject: [PATCH 21/24] Updated Readme --- tools/Mcp/README.md | 68 ++++++++++++++++++++++++++++++--------------- 1 file changed, 45 insertions(+), 23 deletions(-) diff --git a/tools/Mcp/README.md b/tools/Mcp/README.md index 38353aa1bb1c..f74084f81f46 100644 --- a/tools/Mcp/README.md +++ b/tools/Mcp/README.md @@ -1,42 +1,64 @@ # Azure PowerShell Codegen MCP Server -A Model Context Protocol (MCP) server that provides tools for generating and managing Azure PowerShell modules using AutoRest. This server helps automate common tasks in the Azure PowerShell code generation process, including handling polymorphism, model directives, and code generation. +A Model Context Protocol (MCP) server that provides tools for generating and managing Azure PowerShell modules using AutoRest. 
It now also orchestrates help‑driven example generation, CRUD test scaffolding, and an opinionated partner workflow to keep outputs deterministic and consistent. ## Overview This MCP server is designed to work with Azure PowerShell module development workflows. It provides specialized tools for: -- **AutoRest Code Generation**: Generate PowerShell modules from OpenAPI specifications +- **Module Scaffolding**: Interactive selection of service → provider → API version and creation of the `.Autorest` structure +- **AutoRest Code Generation**: Generate PowerShell modules from OpenAPI specifications (reset/generate/build sequence) +- **Example Generation**: Create example scripts from swagger example JSON while filtering strictly to parameters documented in help markdown +- **Test Generation**: Produce per‑resource CRUD test files (idempotent, includes negative test) using the same help‑driven parameter filtering +- **Help‑Driven Parameter Filtering**: Only parameters present in the generated help (`/src//help/*.md`) are allowed in examples/tests - **Model Management**: Handle model directives like `no-inline` and `model-cmdlet` -- **Polymorphism Support**: Automatically detect and configure polymorphic types -- **YAML Configuration**: Parse and manipulate AutoRest configuration files +- **Polymorphism Support**: Automatically detect and configure parent/child discriminator relationships +- **YAML Configuration Utilities**: Parse and manipulate AutoRest configuration blocks +- **Partner Workflow Prompt**: A single prompt that encodes the end‑to‑end deterministic workflow ## Features ### Available Tools -1. **generate-autorest** - - Generates PowerShell code using AutoRest - - Parameters: `workingDirectory` (absolute path to README.md) - -2. **no-inline** - - Converts flattened models to non-inline parameters - - Parameters: `modelNames` (array of model names to make non-inline) - - Useful for complex nested models that shouldn't be flattened - -3. 
**model-cmdlet** - - Creates `New-` cmdlets for specified models - - Parameters: `modelNames` (array of model names) - - Generates cmdlets with naming pattern: `New-Az{SubjectPrefix}{ModelName}Object` - -4. **polymorphism** - - Handles polymorphic type detection and configuration - - Parameters: `workingDirectory` (absolute path to README.md) - - Automatically identifies parent-child type relationships +1. **setup-module-structure** + - Interactive service → provider → API version selection and module name capture + - Scaffolds `src//.Autorest/` plus initial `README.md` + - Output placeholder `{0}` = module name + +2. **generate-autorest** + - Executes Autorest reset, generate, and PowerShell build steps within the given working directory + - Parameters: `workingDirectory` (absolute path to the Autorest folder containing README.md) + - Output placeholder `{0}` = working directory + +3. **create-example** + - Downloads swagger example JSON, filters parameters to those documented in help markdown (`/src//help/.md`), and writes example scripts under `examples/` + - Parameters: `workingDirectory` + - Output placeholders: `{0}` = harvested specs path, `{1}` = examples dir, `{2}` = reference ideal example dirs + +4. **create-test** + - Generates new `.Crud.Tests.ps1` files (does not modify stubs) with Create/Get/List/Update/Delete/Negative blocks, using help‑filtered parameters + - Parameters: `workingDirectory` + - Output placeholders: `{0}` = harvested specs path, `{1}` = test dir, `{2}` = reference ideal test dirs + +5. **polymorphism** + - Detects discriminator parents and child model names to aid directive insertion + - Parameters: `workingDirectory` + - Output placeholders: `{0}` = parents, `{1}` = children, `{2}` = working directory + +6. **no-inline** + - Lists models to be marked `no-inline` (caller inserts directive into README Autorest YAML) + - Parameters: `modelNames` (array) + - Output `{0}` = comma-separated model list + +7. 
**model-cmdlet** + - Lists models for which `New-` object construction cmdlets should be added via directives + - Parameters: `modelNames` (array) + - Output `{0}` = comma-separated model list ### Available Prompts -- **create-greeting**: Generate customized greeting messages (example prompt) +- **partner-module-workflow**: Canonical end‑to‑end instruction set (module structure → generation → examples → tests → regeneration) +- **create-greeting**: Sample/demo greeting prompt ## Installation From aad4068227f30f189fd30104bb595d72f25bc6d4 Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 6 Oct 2025 15:43:09 +1100 Subject: [PATCH 22/24] Logging changes + streamlined workflow --- .gitignore | 1 + tools/Mcp/src/CodegenServer.ts | 31 +++- tools/Mcp/src/index.ts | 14 +- tools/Mcp/src/services/logger.ts | 142 ++++++++++++++++++ tools/Mcp/src/services/toolsService.ts | 63 ++++++-- tools/Mcp/src/services/utils.ts | 55 ++++--- tools/Mcp/src/specs/example-instructions.md | 43 ++++++ .../specs/prompts/partner-module-workflow.md | 97 +++--------- tools/Mcp/src/specs/responses.json | 4 +- tools/Mcp/src/specs/test-instructions.md | 52 +++++++ 10 files changed, 382 insertions(+), 120 deletions(-) create mode 100644 tools/Mcp/src/services/logger.ts create mode 100644 tools/Mcp/src/specs/example-instructions.md create mode 100644 tools/Mcp/src/specs/test-instructions.md diff --git a/.gitignore b/.gitignore index 486ad1e55271..31d9092c1e2c 100644 --- a/.gitignore +++ b/.gitignore @@ -233,6 +233,7 @@ launchSettings.json /tools/Modules/tmp /tools/Az/Az.psm1 /tools/AzPreview/AzPreview.psm1 +/tools/Mcp/.logs /Azure.PowerShell.sln # Added due to scan diff --git a/tools/Mcp/src/CodegenServer.ts b/tools/Mcp/src/CodegenServer.ts index b59c99ccd3d5..f6bc21b213e5 100644 --- a/tools/Mcp/src/CodegenServer.ts +++ b/tools/Mcp/src/CodegenServer.ts @@ -10,6 +10,7 @@ import path from "path"; import { fileURLToPath } from "url"; import { RequestOptions } from "https"; import { ElicitRequest, ElicitResult 
} from "@modelcontextprotocol/sdk/types.js"; +import { logger } from "./services/logger.js"; const __dirname = path.dirname(fileURLToPath(import.meta.url)); const srcPath = path.resolve(__dirname, "..", "src"); @@ -105,7 +106,18 @@ export class CodegenServer { schema.name, schema.description, parameter, - (args: any) => callback(args) + async (args: any) => { + const correlationId = `${schema.name}-${Date.now()}-${Math.random().toString(16).slice(2,8)}`; + logger.debug('Prompt started', { prompt: schema.name, correlationId }); + try { + const result = await callback(args); + logger.info('Prompt completed', { prompt: schema.name, correlationId }); + return result; + } catch (err: any) { + logger.error('Prompt failed', { prompt: schema.name, correlationId }, err); + throw err; + } + } ); } } @@ -120,7 +132,18 @@ export class CodegenServer { schema.name, schema.description, parameter, - (args: any) => callback(args) + async (args: any) => { + const correlationId = `${schema.name}-${Date.now()}-${Math.random().toString(16).slice(2,8)}`; + logger.debug('Resource requested', { resource: schema.name, correlationId }); + try { + const result = await callback(args); + logger.info('Resource provided', { resource: schema.name, correlationId }); + return result; + } catch (err: any) { + logger.error('Resource failed', { resource: schema.name, correlationId }, err); + throw err; + } + } ); } } @@ -133,8 +156,8 @@ export class CodegenServer { const absPath = path.join(srcPath, "specs", relPath); try { text = readFileSync(absPath, "utf-8"); - } catch (e) { - console.error(`Failed to load prompt file ${absPath}:`, e); + } catch (e: any) { + logger.error(`Failed to load prompt file`, { absPath }, e as Error); } } this._responses.set(response.name, text); diff --git a/tools/Mcp/src/index.ts b/tools/Mcp/src/index.ts index a6fe0e5a776c..f0ab9f18e760 100644 --- a/tools/Mcp/src/index.ts +++ b/tools/Mcp/src/index.ts @@ -1,16 +1,16 @@ -import { StdioServerTransport } from 
"@modelcontextprotocol/sdk/server/stdio.js" +import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; import { CodegenServer } from "./CodegenServer.js"; -import * as utils from "./services/utils.js"; -import { yamlContent } from "./types.js"; +import { logger } from "./services/logger.js"; const server = CodegenServer.getInstance(); async function main() { + logger.info("Server startup begin"); server.init(); const transport = new StdioServerTransport(); await server.connect(transport); - const time = `Codegen MCP Server running on stdio at ${new Date()}`; - console.log(time); + logger.info("Codegen MCP Server startup complete"); + logger.info("Server listening (stdio)"); // const yaml = utils.getYamlContentFromReadMe("C:/workspace/azure-powershell/tools/Mcp/test/README.md") as yamlContent; // console.log(yaml['input-file']) @@ -21,6 +21,6 @@ async function main() { } main().catch((error) => { - console.error("Fatal error in main():", error); + logger.error("Fatal error in main()", undefined, error as Error); process.exit(1); -}) \ No newline at end of file +}); \ No newline at end of file diff --git a/tools/Mcp/src/services/logger.ts b/tools/Mcp/src/services/logger.ts new file mode 100644 index 000000000000..54ce0828ee9d --- /dev/null +++ b/tools/Mcp/src/services/logger.ts @@ -0,0 +1,142 @@ +/* + * Lightweight structured logger for the MCP Codegen server. + * + * Design goals: + * - Never emit on stdout (protocol channel) – only stderr. + * - Optional JSON line format for machine ingest (set MCP_LOG_JSON=1). + * - Human readable fallback when MCP_LOG_JSON not enabled. + * - Log levels with env control (MCP_LOG_LEVEL: debug|info|warn|error). + * - Daily rotating file output stored under tools/Mcp/.logs (gitignored). + * - Minimal runtime overhead when level filters out the message. + */ +/* NOTE: This file has been updated to support daily file rotation logging. 
*/ + +export type LogLevel = 'debug' | 'info' | 'warn' | 'error'; + +const LEVEL_ORDER: Record = { + debug: 10, + info: 20, + warn: 30, + error: 40, +}; + +let envLevel = (process.env.MCP_LOG_LEVEL || 'info').toLowerCase() as LogLevel; +let activeLevel: LogLevel = ['debug','info','warn','error'].includes(envLevel) ? envLevel : 'info'; +let jsonMode = process.env.MCP_LOG_JSON === '1' || process.env.MCP_LOG_JSON === 'true'; + +let seq = 0; // monotonically increasing sequence number for log correlation + +function levelEnabled(_level: LogLevel): boolean { + return true; +} + +function formatTs(d: Date): string { + return d.toISOString(); +} + +import fs from 'fs'; +import path from 'path'; + +// In ESM, __dirname is not defined. We deliberately rely on process.cwd(), which +// for the MCP server is expected to be the tools/Mcp directory. This avoids the +// need for fileURLToPath and remains robust when transpiled to build/. +// If the working directory differs, set MCP_LOG_ROOT to override. +const LOG_ROOT = process.env.MCP_LOG_ROOT ? 
path.resolve(process.env.MCP_LOG_ROOT) : process.cwd(); + +let currentDateStr: string | null = null; +let logStream: fs.WriteStream | null = null; +const logsDir = path.join(LOG_ROOT, '.logs'); + +function ensureStream(d: Date) { + const ds = d.toISOString().slice(0,10); // YYYY-MM-DD + if (ds !== currentDateStr || !logStream) { + currentDateStr = ds; + if (!fs.existsSync(logsDir)) fs.mkdirSync(logsDir, { recursive: true }); + if (logStream) { try { logStream.end(); } catch { /* ignore */ } } + const file = path.join(logsDir, `${ds}.log`); + logStream = fs.createWriteStream(file, { flags: 'a', encoding: 'utf-8' }); + } +} + +function writeLine(obj: any, fallback: string, ts: Date) { + ensureStream(ts); + if (jsonMode) { + try { + const ordered = Object.keys(obj).sort().reduce((acc: any, k) => { acc[k] = obj[k]; return acc; }, {} as any); + logStream!.write(JSON.stringify(ordered) + '\n'); + return; + } catch { /* fall back */ } + } + logStream!.write(fallback + '\n'); +} + +export interface LogContext { + // Arbitrary supplemental data – keep it small to avoid stdout noise. + [k: string]: any; +} + +// Timing helpers removed (wall clock timestamps only now) +export interface TimingHandle { end: () => number; startTime: bigint; } +export function startTimer(): TimingHandle { return { startTime: BigInt(0), end: () => 0 }; } + +const bufferLimit = 1000; +const ringBuffer: any[] = []; + +function baseLog(level: LogLevel, msg: string, ctx?: LogContext, err?: Error) { + if (!levelEnabled(level)) return; + const ts = new Date(); + const record: any = { + seq: ++seq, + ts: formatTs(ts), + level, + msg, + }; + if (ctx) record.ctx = ctx; + if (err) record.error = { name: err.name, message: err.message, stack: err.stack }; + ringBuffer.push(record); + if (ringBuffer.length > bufferLimit) ringBuffer.shift(); + const fallback = `[${record.ts}] [${level.toUpperCase()}] ${msg}` + (ctx ? ` ${JSON.stringify(ctx)}` : '') + (err ? 
` error=${err.message}` : ''); + writeLine(record, fallback, ts); +} + +function reconfigure(opts: { level?: LogLevel; json?: boolean }) { + if (opts.level && LEVEL_ORDER[opts.level] !== undefined) { + activeLevel = opts.level; + } + if (typeof opts.json === 'boolean') { + jsonMode = opts.json; + } +} + +export const logger = { + get level() { return activeLevel; }, + get jsonMode() { return jsonMode; }, + setLevel(level: LogLevel) { reconfigure({ level }); }, + setJsonMode(json: boolean) { reconfigure({ json }); }, + reconfigure, + debug: (msg: string, ctx?: LogContext) => baseLog('debug', msg, ctx), + info: (msg: string, ctx?: LogContext) => baseLog('info', msg, ctx), + warn: (msg: string, ctx?: LogContext) => baseLog('warn', msg, ctx), + error: (msg: string, ctx?: LogContext, err?: Error) => baseLog('error', msg, ctx, err), + timed(label: string, fn: () => Promise, ctx?: LogContext): Promise { + baseLog('debug', `${label} started`, ctx); + return fn().then(r => { baseLog('info', `${label} finished`, ctx); return r; }) + .catch(e => { baseLog('error', `${label} failed`, ctx, e as Error); throw e; }); + }, + recent(limit: number = 200) { return ringBuffer.slice(-limit); } +}; + +// Convenience wrapper for synchronous code blocks. 
+export function timedSync(label: string, fn: () => T, ctx?: LogContext): T { + baseLog('debug', `${label} started`, ctx); + try { + const result = fn(); + baseLog('info', `${label} finished`, ctx); + return result; + } catch (err: any) { + baseLog('error', `${label} failed`, ctx, err); + throw err; + } +} + +export default logger; diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index 4acfe80e249b..084dfccfe8f6 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -4,6 +4,7 @@ import * as utils from "./utils.js"; import path from 'path'; import { get, RequestOptions } from 'http'; import { toolParameterSchema } from '../types.js'; +import { logger } from './logger.js'; import { CodegenServer } from '../CodegenServer.js'; export class ToolsService { @@ -48,21 +49,46 @@ export class ToolsService { default: throw new Error(`Tool ${name} not found`); } - return this.constructCallback(func, responseTemplate); + return this.constructCallback(func, responseTemplate, name); } - constructCallback = (fn: (arr: Args) => Promise, responseTemplate: string|undefined): (args: Args) => Promise => { + constructCallback = (fn: (arr: Args) => Promise, responseTemplate: string|undefined, toolName: string): (args: Args) => Promise => { return async (args: Args): Promise => { - const argsArray = await fn(args); - const response = this.getResponseString(argsArray, responseTemplate) ?? 
""; - return { - content: [ - { - type: "text", - text: response - } - ] - }; + const argKeys = Object.keys(args as any); + const correlationId = `${toolName}-${Date.now()}-${Math.random().toString(16).slice(2,8)}`; + // Build a sanitized snapshot of arguments (stringified & truncated) for logging + const rawArgs: any = args as any; + const sanitized: Record = {}; + for (const k of argKeys) { + try { + const v = rawArgs[k]; + let str: string; + if (typeof v === 'string') str = v; + else if (typeof v === 'number' || typeof v === 'boolean') str = String(v); + else str = JSON.stringify(v); + if (str && str.length > 400) str = str.slice(0, 400) + `...[${str.length - 400} trunc]`; + sanitized[k] = str; + } catch { + sanitized[k] = '[unserializable]'; + } + } + logger.info('Tool invoked', { tool: toolName, correlationId, args: sanitized }); + try { + const argsArray = await fn(args); + const response = this.getResponseString(argsArray, responseTemplate) ?? ""; + logger.info(`Tool completed`, { tool: toolName, correlationId }); + return { + content: [ + { + type: "text", + text: response + } + ] + }; + } catch (err: any) { + logger.error(`Tool failed`, { tool: toolName, correlationId }, err); + throw err; + } }; } @@ -153,8 +179,12 @@ export class ToolsService { setupModuleStructure = async (args: Args): Promise => { try { + const runId = `setup-${Date.now()}-${Math.random().toString(16).slice(2,8)}`; + // List available services with dropdown const modules = await utils.listSpecModules(); + logger.debug('Eliciting user input', { step: 'service-select', runId, moduleCount: modules.length }); + const serviceResponse = await this._server!.elicitInput({ message: `Select an Azure service from the dropdown below:`, requestedSchema: { @@ -174,12 +204,14 @@ export class ToolsService { if (!selectedService) { throw new Error("No service selected"); } + logger.info('User input captured', { step: 'service', service: selectedService, runId }); // List providers for the selected 
service with dropdown const providers = await utils.listProvidersForService(selectedService); if (providers.length === 0) { throw new Error(`No providers found for service '${selectedService}'`); } + logger.debug('Eliciting user input', { step: 'provider-select', runId, providerCount: providers.length, service: selectedService }); const providerResponse = await this._server!.elicitInput({ message: `Select a provider for ${selectedService} from the dropdown below:`, @@ -200,6 +232,7 @@ export class ToolsService { if (!selectedProvider) { throw new Error("No provider selected"); } + logger.info('User input captured', { step: 'provider', provider: selectedProvider, runId }); // List API versions with dropdown combining version and stability const apiVersions = await utils.listApiVersions(selectedService, selectedProvider); @@ -213,6 +246,7 @@ export class ToolsService { } const versionOptions = allVersions.map(v => `${v.version} (${v.stability})`); + logger.debug('Eliciting user input', { step: 'version-select', runId, versionOptionCount: versionOptions.length, service: selectedService, provider: selectedProvider }); const versionResponse = await this._server!.elicitInput({ message: `Select an API version for ${selectedService}/${selectedProvider} from the dropdown below:`, @@ -241,6 +275,7 @@ export class ToolsService { const selectedVersion = versionMatch[1]; const selectedStability = versionMatch[2] as 'stable' | 'preview'; + logger.info('User input captured', { step: 'version', version: selectedVersion, stability: selectedStability, runId }); // Resolve Readme placeholder values based on Responses const resolved = await utils.resolveAutorestInputs({ @@ -249,6 +284,7 @@ export class ToolsService { stability: selectedStability, version: selectedVersion }); + logger.debug('Autorest inputs resolved', { runId, resolvedServiceName: resolved.serviceName, commitId: resolved.commitId }); const moduleNameResponse = await this._server!.elicitInput({ message: `What would you 
like to call the PowerShell module? \n\n Configuration resolved:\n- Service: ${selectedService}\n- Provider: ${selectedProvider}\n- Version: ${selectedVersion} (${selectedStability})\n- Service Name: ${resolved.serviceName}\n- Commit ID: ${resolved.commitId}\n- Service Specs: ${resolved.serviceSpecs}\n- Swagger File: ${resolved.swaggerFileSpecs}`, @@ -268,6 +304,7 @@ if (!moduleName) { throw new Error("No module name provided"); } + logger.info('User input captured', { step: 'moduleName', moduleName, runId }); // Create folder structure and README.md const mcpPath = process.cwd(); // Current working directory is tools/Mcp @@ -296,10 +333,12 @@ // Write README.md file await utils.writeFileIfNotExists(readmePath, readmeContent); + logger.info('Setup module structure complete', { runId, moduleName }); return [moduleName]; } catch (error) { const errorMessage = error instanceof Error ? error.message : String(error); + logger.error('Setup module structure failed', { errorMessage }); return [`Error during setup: ${errorMessage}`]; } } diff --git a/tools/Mcp/src/services/utils.ts b/tools/Mcp/src/services/utils.ts index 2af6ddab731f..a200bf69a601 100644 --- a/tools/Mcp/src/services/utils.ts +++ b/tools/Mcp/src/services/utils.ts @@ -1,7 +1,8 @@ import fs from 'fs'; import yaml from "js-yaml"; import { yamlContent } from '../types.js'; -import { execSync } from 'child_process'; +import { spawnSync } from 'child_process'; +import { logger } from './logger.js'; import path from 'path'; import { Dirent } from 'fs'; @@ -27,20 +28,30 @@ function testYaml() { } export function generateAndBuild(workingDirectory: string): void { - const genBuildCommands = [_autorestReset, _autorest, _pwshBuild] - + const genBuildCommands = [_autorestReset, _autorest, _pwshBuild]; for (const command of genBuildCommands) { - try { - console.log(`Executing command: ${command}`); - const result = execSync(command, { stdio: 'inherit', cwd: 
workingDirectory }); + logger.info(`Executing command`, { command }); + const [bin, ...args] = command.split(/\s+/); + const res = spawnSync(bin, args, { cwd: workingDirectory, encoding: 'utf-8' }); + if (res.error) { + logger.error(`Command spawn error`, { command }, res.error as any); + throw res.error; } - catch (error) { - console.error("Error executing command:", error); - throw error; + if (res.status !== 0) { + logger.error(`Command failed`, { command, status: res.status, stderr: trimLarge(res.stderr) }); + throw new Error(`Command failed: ${command}`); } + if (res.stdout) logger.debug(`Command stdout`, { command, stdout: trimLarge(res.stdout) }); + if (res.stderr) logger.debug(`Command stderr`, { command, stderr: trimLarge(res.stderr) }); + logger.info(`Command finished`, { command }); } } +function trimLarge(text: string, max = 4000): string { + if (!text) return ''; + return text.length > max ? text.slice(0, max) + `...[truncated ${text.length - max}]` : text; +} + export function getYamlContentFromReadMe(readmePath: string): string { const readmeContent = fs.readFileSync(readmePath, 'utf8'); const yamlRegex = /```\s*yaml(?:\w+)?\r?\n?(?[\s\S]*?)\r?\n```/g; @@ -78,7 +89,7 @@ export async function getSwaggerContentFromUrl(swaggerUrl: string): Promise } return await response.json(); } catch (error) { - console.error('Error fetching swagger content:', error); + logger.error('Error fetching swagger content', { swaggerUrl }, error as Error); throw error; } } @@ -259,7 +270,7 @@ export async function getExamplesFromSpecs(workingDirectory: string): Promise = []; if (!fs.existsSync(exampleSpecsPath)) { - console.error(`Example specs directory not found at ${exampleSpecsPath}`); + logger.warn(`Example specs directory not found`, { exampleSpecsPath }); } try { @@ -282,13 +293,13 @@ export function getExampleJsonContent(exampleSpecsPath: string): Array<{name: st const fileContent = fs.readFileSync(filePath, 'utf8'); const jsonContent = JSON.parse(fileContent); 
jsonList.push({name: jsonFile.split('.json')[0], content: jsonContent}); - console.log(`Loaded example JSON: ${jsonFile}`); + logger.debug(`Loaded example JSON`, { file: jsonFile }); } catch (error) { - console.error(`Error reading JSON file ${jsonFile}:`, error); + logger.error(`Error reading JSON file`, { file: jsonFile }, error as Error); } } } catch (error) { - console.error(`Error reading examples directory ${exampleSpecsPath}:`, error); + logger.error(`Error reading examples directory`, { exampleSpecsPath }, error as Error); } return jsonList; @@ -349,10 +360,10 @@ export async function createDirectoryIfNotExists(dirPath: string): Promise try { if (!fs.existsSync(dirPath)) { fs.mkdirSync(dirPath, { recursive: true }); - console.log(`Created directory: ${dirPath}`); + logger.info(`Created directory`, { dirPath }); } } catch (error) { - console.error(`Error creating directory ${dirPath}:`, error); + logger.error(`Error creating directory`, { dirPath }, error as Error); throw error; } } @@ -361,12 +372,12 @@ export async function writeFileIfNotExists(filePath: string, content: string): P try { if (!fs.existsSync(filePath)) { fs.writeFileSync(filePath, content, 'utf8'); - console.log(`Created file: ${filePath}`); + logger.info(`Created file`, { filePath }); } else { - console.log(`File already exists: ${filePath}`); + logger.debug(`File already exists`, { filePath }); } } catch (error) { - console.error(`Error writing file ${filePath}:`, error); + logger.error(`Error writing file`, { filePath }, error as Error); throw error; } } @@ -388,7 +399,7 @@ export function getIdealModuleExamplePaths(): string { } return exampleDirs.join(';'); } catch (err) { - console.error('Error collecting ideal module example paths:', err); + logger.error('Error collecting ideal module example paths', undefined, err as Error); return ''; } } @@ -410,7 +421,7 @@ export function getIdealModuleTestPaths(): string { } return testDirs.join(';'); } catch (err) { - console.error('Error 
collecting ideal module test paths:', err); + logger.error('Error collecting ideal module test paths', undefined, err as Error); return ''; } } diff --git a/tools/Mcp/src/specs/example-instructions.md b/tools/Mcp/src/specs/example-instructions.md new file mode 100644 index 000000000000..7b806760fb4c --- /dev/null +++ b/tools/Mcp/src/specs/example-instructions.md @@ -0,0 +1,43 @@ +## LLM Example Generation Directions + +You have just called tool `create-example` for a freshly generated module. + +Inputs: +- `{0}` = source swagger example JSON directory (read only) +- `{1}` = target examples directory (write here only) +- `{2}` = reference example dirs (style cues; may be empty) +- helpDir = parentOf({1}) with `.Autorest` removed + `/help` (read only) + +Goal: Produce minimal, runnable PowerShell example scripts for each relevant cmdlet using ONLY parameters documented in help. + +Algorithm (repeat per cmdlet needed): +1. Open `helpDir/.md`. +2. Collect allowed params = (a) params in first syntax line(s) in code fences + (b) every `### -ParamName` heading. Exclude `CommonParameters`. +3. For each swagger JSON in `{0}` referencing this cmdlet, map its fields to allowed params; drop non‑allowed silently. +4. Order parameters: required (in the order of the first syntax signature) then optional alphabetical. +5. Build one minimal example. Add a second variant ONLY if it demonstrates distinct optional parameters. + +Rules: +* Never invent or rename parameters; casing must match help. +* Value selection precedence (per allowed parameter): + 1. If the swagger example JSON (source `{0}`) contains a concrete value for that parameter (after mapping), use that value directly. + 2. If the swagger value is obviously redacted (e.g. `"string"`, `""`, `"XXXX"`, empty, or null) then fall back to a stable placeholder instead of using the dummy. + 3. Otherwise (no concrete usable value) use a stable placeholder: ``, ``, ``, ``, etc. 
+* Do not substitute placeholders where a good swagger value exists. +* If no allowed params remain after filtering, create/leave an empty file or a single comment line. +* Do not copy help prose; output only script lines (and brief inline comments if helpful). +* Mirror formatting style hints (indentation, spacing) from reference dirs `{2}` without copying their literal values. + +Output Handling: +- Modify/create files ONLY under `{1}`; no other directories. +- Preserve existing example files, updating parameter sets/order as needed. + +Quick Validation Checklist (stop if any fail): +1. All parameters exist in help. +2. Required parameters present & ordered first. +3. No swagger‑only or duplicate parameters. +4. Placeholders consistent. +5. No redundant variant scripts. + +Produce the final example script contents now; do not restate these instructions. + diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/specs/prompts/partner-module-workflow.md index b73b79a2741c..41d12715cd56 100644 --- a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/specs/prompts/partner-module-workflow.md @@ -1,81 +1,32 @@ -# Partner Module Workflow (Simplified) +# Partner Module Workflow (LLM Instructions) -Goal: Generate an Azure PowerShell module via Autorest, then populate examples and tests deterministically with minimal ambiguity. +You are automating a fresh Azure PowerShell partner module. Follow these steps exactly. If any tool fails or is unavailable: STOP (no retries, no guesses). -Core Principle: Derive the working directory once and reuse it. Do not guess or fabricate paths. +1. setup-module-structure + - Call tool `setup-module-structure` (no params). Capture ModuleName from tool output. + - Derive once: workingDirectory = `/src/{ModuleName}/{ModuleName}.Autorest`. + - Reuse this exact path (same casing) for all subsequent tool calls. -## 1. 
Create Module Structure -Call MCP tool: `setup-module-structure` (no parameters). -It returns `{0}` = `ModuleName` (from user input inside the tool). -Derive `workingDirectory` strictly as: -`/src/{ModuleName}/{ModuleName}.Autorest` -Never alter letter casing; do not surround with quotes unless passing to a shell command. +2. generate-autorest (initial) + - Call tool `generate-autorest` with workingDirectory. + - On failure: stop. -## 2. Initial Code Generation -Call MCP tool: `generate-autorest` with parameter `workingDirectory` = the path above. -Do not call Autorest directly; always use the MCP tool. -If generation fails, STOP. +3. create-example + - Call tool `create-example` with workingDirectory. + - Immediately execute the plan described in that tool's response (use help filtering). Do not restate or expand rules here. + - On failure: stop. -## 3. Populate Examples (Help-Driven Parameters) -Call MCP tool: `create-example` with the same `workingDirectory`. -Tool returns: - `{0}` = specs example source dir (downloaded JSON) - `{1}` = target examples dir in module - `{2}` = reference ideal examples (semicolon separated; may be empty) +4. create-test + - Call tool `create-test` with workingDirectory. + - Execute the plan in the tool response (CRUD phases, help-based parameter filtering). On failure: stop. -Parameter Source of Truth: Discard any swagger fields not documented in help. -Derive the help directory as: `helpDir = /src/{ModuleName}/help/`. -READ-ONLY: Do NOT copy, duplicate, or move help markdown files into the `.Autorest` or `examples` folder. They are only inspected to determine the allowed parameter set. Generating or pasting full help content into examples is prohibited. -For each cmdlet example you generate: - 1. Open the help markdown file: `helpDir/.md`. - 2. Examine the allowed parameters from (a) syntax code fences ``` blocks containing the cmdlet invocation) and (b) `### -ParameterName` headings. - 3. 
Required ordering: parameters that appear in the first syntax signature first (in the order shown), followed by remaining optional parameters alphabetically. - 4. Ignore `CommonParameters` heading and any swagger example properties not in the allowed set. - 5. Use (or create if missing) only the example script files expected under `{1}`; never replicate help file text. +5. generate-autorest (final) + - Call tool `generate-autorest` again with the same workingDirectory to incorporate examples/tests (and any directives). -Example Construction Rules: - - Minimal yet runnable. If swagger example provides values for disallowed params, omit them silently. - - Provide enriched variants only if distinct meaningful optional parameters remain after filtering. - - Use stable placeholders: ``, ``, etc. - - Never invent parameters or reuse removed swagger names under new casing. - - Leave the example file empty if no swagger fields map to documented parameters. +Rules: +* Never recompute or mutate workingDirectory. +* Do not fabricate paths, parameters, or file contents. +* Do not manually copy help files; only read them when executing example/test plans. +* Do not proceed past a failing step. -MANDATORY EXECUTION: Treat the tool's response (with placeholders) as an action plan—read from `{0}`, consult (but do not copy) help files in `helpDir`, then generate/update ONLY the example scripts under `{1}` (respecting any existing skeleton), mirroring stylistic patterns from `{2}`. - -## 4. Populate Tests (Help-Driven Parameters) -Call MCP tool: `create-test` with the same `workingDirectory`. -Tool returns: - `{0}` = specs example source dir - `{1}` = target test dir - `{2}` = reference ideal test dirs -Do NOT modify any pre-generated stub files. Instead CREATE NEW files: one per top-level resource (or logical resource group) named `.Crud.Tests.ps1`. -Each file covers (omit phases not supported): - 1. Create (New-* or equivalent) - 2. Get (Get-*) with property assertions - 3. 
List (Get-* plural) asserting presence - 4. Update/Set (if supported) asserting only changed fields - 5. Delete/Remove (cleanup) asserting absence (or expected NotFound) - 6. Negative (invalid parameter or missing required) expecting specific error pattern -Parameter Filtering: Apply the SAME help-driven filtering used for examples. Do not call cmdlets with parameters absent from their help markdown. -Variable Reuse: Define all common names/IDs once in `utils.ps1` `setupEnv`; reference them in test files. -Assertions: Prefer targeted property checks (Name, Id pattern, ProvisioningState) over full object dumps. -Idempotency: Ensure cleanup for resources created; avoid deleting shared or pre-existing resources. -MANDATORY: After the tool call, treat response text as an execution plan: read from `{0}`, create new files under `{1}`, mirror style from `{2}`, and enforce help-based parameter filtering. - -## 5. Regenerate Module -Call `generate-autorest` again with identical `workingDirectory` to ensure examples/tests are integrated. Do not modify the README.yaml block except via directives inserted earlier. - -## 6. Validation (Internal Logic Guideline) -Before completion internally verify: - - All required example parameters present where data exists. - - No unknown parameters introduced. - - Tests assert at least one key property per created resource. -If any check fails, refine the affected file(s) then proceed. - -## Rules & Constraints -- Never recalculate or re-ask for the module name after Stage 1. -- Never invent alternative directory paths. -- Do not skip steps 1–5. -- Halt immediately if an MCP tool is unavailable or errors. - -End of workflow. \ No newline at end of file +Completion: After step 5, stop. Provide a concise summary (steps succeeded, any optional directives applied). No additional creative output. 
\ No newline at end of file diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json index 1aa667f28370..b71f27a46619 100644 --- a/tools/Mcp/src/specs/responses.json +++ b/tools/Mcp/src/specs/responses.json @@ -22,12 +22,12 @@ { "name": "create-example", "type": "tool", - "text": "Read swagger-derived example JSONs from {0}. Fulfill module example files under {1}. Also leverage high-quality reference examples located in directories: {2} (semicolon-separated). STRICT PARAMETER FILTERING: For each cmdlet, first open its help markdown file located in the module help directory: derive helpDir = parent of {1} with '.Autorest' suffix removed + '/help'; file name pattern: '.md'. Only include parameters that appear either (a) in the syntax blocks (code fences containing the cmdlet invocation) or (b) as '### -ParameterName' headings in that help file. Ignore any swagger properties not represented as help parameters. Ordering: list required parameters first (as shown in the first syntax block), then remaining optional parameters alphabetically. Do not invent parameters. If no matching parameters from swagger examples align with the help-defined set, leave that example empty. Ensure examples remain minimal yet runnable and mirror advanced usage patterns (e.g., pipeline vs explicit parameters) only when those parameters are defined in help." + "text": "@file:example-instructions.md" }, { "name": "create-test", "type": "tool", - "text": "Read swagger-derived example JSONs from {0}. DO NOT modify or populate any pre-generated stub files under {1}. Instead, CREATE NEW test files in {1}, one per top-level resource (or logical resource group). Name pattern: '.Crud.Tests.ps1'. In each file include Create, Get, List, Update/Set (if supported), Delete/Remove (cleanup), and a Negative test where meaningful. 
STYLE & PARAM SOURCE: Only use parameters that are documented in the cmdlet help markdown (helpDir derived as parent of {1} with '.Autorest' removed + '/help'; file name = '.md'). Extract allowed parameters from syntax code fences and '### -ParameterName' headings. Reject swagger-only properties not in help. Reference high-quality tests in {2} for structure and assertion style. Centralize reusable variables in 'utils.ps1' (setupEnv). Assertions: key identity props, provisioning state, collection membership, and absence after deletion. Idempotent: create what you delete. Skip generating a resource test file if no parameters from examples map to documented help parameters for its cmdlets." + "text": "@file:test-instructions.md" }, { "name": "setup-module-structure", diff --git a/tools/Mcp/src/specs/test-instructions.md b/tools/Mcp/src/specs/test-instructions.md new file mode 100644 index 000000000000..1833175dbe68 --- /dev/null +++ b/tools/Mcp/src/specs/test-instructions.md @@ -0,0 +1,52 @@ +## LLM Test Generation Directions + +You have just called tool `create-test`. + +Inputs: +- `{0}` = swagger example JSON source dir (read only) +- `{1}` = target test dir (write only) +- `{2}` = reference test dirs (style cues) +- helpDir = parentOf({1}) with `.Autorest` removed + `/help` (read only) + +Goal: Create focused CRUD (+ negative) test scripts for each top-level resource using ONLY help-documented parameters. + +File Strategy: +- Do NOT edit existing stub files. +- Create new `.Crud.Tests.ps1` per resource group (skip if no allowed params after filtering). + +Phases (include only those supported): +Create → Get → List → Update/Set → Delete/Remove → Negative. + +Parameter Filtering (same as examples): +1. Allowed params = syntax line params + `### -ParamName` headings in help; exclude `CommonParameters`. +2. Drop swagger-only fields silently. + +Implementation Pattern inside each file: +1. Dot-source common `utils.ps1` (if present) for shared env setup. +2. 
Create: capture returned object; store name/id for reuse. +3. Get: assert key props (Name, Id format, ProvisioningState). Use precise assertions, not whole-object dumps. +4. List: ensure resource present (filter by name/id). +5. Update/Set (if available): change minimal field; assert only that field changed. +6. Delete/Remove: remove resource; confirm absence or specific NotFound. +7. Negative: one meaningful invalid input; assert expected error pattern/text. + +Rules: +* No invented params or renaming; casing must match help. +* Parameter value precedence (for Create / Update phases and any param reuse): + 1. Use the concrete value from the corresponding swagger example JSON (source `{0}`) if present for the mapped allowed parameter. + 2. If the swagger value is clearly a placeholder/dummy (`"string"`, `""`, `"XXXX"`, empty, null), fall back to a stable placeholder (``, ``, ``, etc.). + 3. If no swagger value exists, use the stable placeholder directly. +* Do not overwrite a good concrete swagger value with a placeholder. +* Reuse variable names consistently across phases. +* Ensure cleanup for every created resource. +* Skip generating a file if nothing valid to test. +* Keep tests deterministic; avoid random sleeps or nondeterministic waits. + +Quick Validation Checklist: +1. Params all in help. +2. Each phase present only if supported. +3. Assertions targeted & minimal. +4. Resource cleaned up. +5. Exactly one clear negative case (if meaningful). + +Output: Write only to `{1}`. Do not modify examples or help files. Produce final test file contents now. 
From e0f21e22544f2eaae979b4e5bc39bef084aaa24b Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 6 Oct 2025 16:26:57 +1100 Subject: [PATCH 23/24] Discussed Refactoring, Some Cleanup --- tools/Mcp/src/CodegenServer.ts | 2 +- .../autorest-readme-template.md | 0 .../{specs => assets}/example-instructions.md | 0 .../Get-AzDatabricksAccessConnector.md | 0 ...icksOutboundNetworkDependenciesEndpoint.md | 0 .../examples/Get-AzDatabricksVNetPeering.md | 0 .../examples/Get-AzDatabricksWorkspace.md | 0 .../New-AzDatabricksAccessConnector.md | 0 .../examples/New-AzDatabricksVNetPeering.md | 0 .../examples/New-AzDatabricksWorkspace.md | 0 ...cksWorkspaceProviderAuthorizationObject.md | 0 .../Remove-AzDatabricksAccessConnector.md | 0 .../Remove-AzDatabricksVNetPeering.md | 0 .../examples/Remove-AzDatabricksWorkspace.md | 0 .../Update-AzDatabricksAccessConnector.md | 0 .../Update-AzDatabricksVNetPeering.md | 0 .../examples/Update-AzDatabricksWorkspace.md | 0 .../ideal-modules/Databricks/metadata.md | 0 .../AzDatabricksAccessConnector.Tests.ps1 | 0 .../tests/AzDatabricksVNetPeering.Tests.ps1 | 0 .../tests/AzDatabricksWorkspace.Tests.ps1 | 0 .../ideal-modules/Databricks/tests/utils.ps1 | 0 .../partner-module-workflow.md | 4 +- .../{specs => assets}/test-instructions.md | 0 tools/Mcp/src/services/promptsService.ts | 15 ------ tools/Mcp/src/services/utils.ts | 2 +- tools/Mcp/src/specs/prompts/workflow-old.md | 54 ------------------- tools/Mcp/src/specs/responses.json | 13 ++--- 28 files changed, 8 insertions(+), 82 deletions(-) rename tools/Mcp/src/{specs => assets}/autorest-readme-template.md (100%) rename tools/Mcp/src/{specs => assets}/example-instructions.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/examples/Get-AzDatabricksAccessConnector.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/examples/Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md (100%) rename tools/Mcp/src/{ => 
assets}/ideal-modules/Databricks/examples/Get-AzDatabricksVNetPeering.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/examples/Get-AzDatabricksWorkspace.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/examples/New-AzDatabricksAccessConnector.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/examples/New-AzDatabricksVNetPeering.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/examples/New-AzDatabricksWorkspace.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/examples/New-AzDatabricksWorkspaceProviderAuthorizationObject.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/examples/Remove-AzDatabricksAccessConnector.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/examples/Remove-AzDatabricksVNetPeering.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/examples/Remove-AzDatabricksWorkspace.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/examples/Update-AzDatabricksAccessConnector.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/examples/Update-AzDatabricksVNetPeering.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/examples/Update-AzDatabricksWorkspace.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/metadata.md (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/tests/AzDatabricksAccessConnector.Tests.ps1 (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/tests/AzDatabricksVNetPeering.Tests.ps1 (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/tests/AzDatabricksWorkspace.Tests.ps1 (100%) rename tools/Mcp/src/{ => assets}/ideal-modules/Databricks/tests/utils.ps1 (100%) rename tools/Mcp/src/{specs/prompts => assets}/partner-module-workflow.md (83%) rename tools/Mcp/src/{specs => assets}/test-instructions.md (100%) delete mode 100644 tools/Mcp/src/specs/prompts/workflow-old.md diff --git 
a/tools/Mcp/src/CodegenServer.ts b/tools/Mcp/src/CodegenServer.ts index f6bc21b213e5..96e7a1b79031 100644 --- a/tools/Mcp/src/CodegenServer.ts +++ b/tools/Mcp/src/CodegenServer.ts @@ -153,7 +153,7 @@ export class CodegenServer { let text = response.text; if (text.startsWith("@file:")) { const relPath = text.replace("@file:", ""); - const absPath = path.join(srcPath, "specs", relPath); + const absPath = path.join(srcPath, relPath); try { text = readFileSync(absPath, "utf-8"); } catch (e: any) { diff --git a/tools/Mcp/src/specs/autorest-readme-template.md b/tools/Mcp/src/assets/autorest-readme-template.md similarity index 100% rename from tools/Mcp/src/specs/autorest-readme-template.md rename to tools/Mcp/src/assets/autorest-readme-template.md diff --git a/tools/Mcp/src/specs/example-instructions.md b/tools/Mcp/src/assets/example-instructions.md similarity index 100% rename from tools/Mcp/src/specs/example-instructions.md rename to tools/Mcp/src/assets/example-instructions.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksAccessConnector.md b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/Get-AzDatabricksAccessConnector.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksAccessConnector.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/Get-AzDatabricksAccessConnector.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/Get-AzDatabricksOutboundNetworkDependenciesEndpoint.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksVNetPeering.md 
b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/Get-AzDatabricksVNetPeering.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksVNetPeering.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/Get-AzDatabricksVNetPeering.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksWorkspace.md b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/Get-AzDatabricksWorkspace.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/Get-AzDatabricksWorkspace.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/Get-AzDatabricksWorkspace.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksAccessConnector.md b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/New-AzDatabricksAccessConnector.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksAccessConnector.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/New-AzDatabricksAccessConnector.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksVNetPeering.md b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/New-AzDatabricksVNetPeering.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksVNetPeering.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/New-AzDatabricksVNetPeering.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspace.md b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/New-AzDatabricksWorkspace.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspace.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/New-AzDatabricksWorkspace.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspaceProviderAuthorizationObject.md 
b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/New-AzDatabricksWorkspaceProviderAuthorizationObject.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/New-AzDatabricksWorkspaceProviderAuthorizationObject.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/New-AzDatabricksWorkspaceProviderAuthorizationObject.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksAccessConnector.md b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/Remove-AzDatabricksAccessConnector.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksAccessConnector.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/Remove-AzDatabricksAccessConnector.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksVNetPeering.md b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/Remove-AzDatabricksVNetPeering.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksVNetPeering.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/Remove-AzDatabricksVNetPeering.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksWorkspace.md b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/Remove-AzDatabricksWorkspace.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/Remove-AzDatabricksWorkspace.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/Remove-AzDatabricksWorkspace.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksAccessConnector.md b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/Update-AzDatabricksAccessConnector.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksAccessConnector.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/Update-AzDatabricksAccessConnector.md 
diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksVNetPeering.md b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/Update-AzDatabricksVNetPeering.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksVNetPeering.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/Update-AzDatabricksVNetPeering.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksWorkspace.md b/tools/Mcp/src/assets/ideal-modules/Databricks/examples/Update-AzDatabricksWorkspace.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/examples/Update-AzDatabricksWorkspace.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/examples/Update-AzDatabricksWorkspace.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/metadata.md b/tools/Mcp/src/assets/ideal-modules/Databricks/metadata.md similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/metadata.md rename to tools/Mcp/src/assets/ideal-modules/Databricks/metadata.md diff --git a/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksAccessConnector.Tests.ps1 b/tools/Mcp/src/assets/ideal-modules/Databricks/tests/AzDatabricksAccessConnector.Tests.ps1 similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksAccessConnector.Tests.ps1 rename to tools/Mcp/src/assets/ideal-modules/Databricks/tests/AzDatabricksAccessConnector.Tests.ps1 diff --git a/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksVNetPeering.Tests.ps1 b/tools/Mcp/src/assets/ideal-modules/Databricks/tests/AzDatabricksVNetPeering.Tests.ps1 similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksVNetPeering.Tests.ps1 rename to tools/Mcp/src/assets/ideal-modules/Databricks/tests/AzDatabricksVNetPeering.Tests.ps1 diff --git a/tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksWorkspace.Tests.ps1 
b/tools/Mcp/src/assets/ideal-modules/Databricks/tests/AzDatabricksWorkspace.Tests.ps1 similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/tests/AzDatabricksWorkspace.Tests.ps1 rename to tools/Mcp/src/assets/ideal-modules/Databricks/tests/AzDatabricksWorkspace.Tests.ps1 diff --git a/tools/Mcp/src/ideal-modules/Databricks/tests/utils.ps1 b/tools/Mcp/src/assets/ideal-modules/Databricks/tests/utils.ps1 similarity index 100% rename from tools/Mcp/src/ideal-modules/Databricks/tests/utils.ps1 rename to tools/Mcp/src/assets/ideal-modules/Databricks/tests/utils.ps1 diff --git a/tools/Mcp/src/specs/prompts/partner-module-workflow.md b/tools/Mcp/src/assets/partner-module-workflow.md similarity index 83% rename from tools/Mcp/src/specs/prompts/partner-module-workflow.md rename to tools/Mcp/src/assets/partner-module-workflow.md index 41d12715cd56..e456fae2b977 100644 --- a/tools/Mcp/src/specs/prompts/partner-module-workflow.md +++ b/tools/Mcp/src/assets/partner-module-workflow.md @@ -18,7 +18,7 @@ You are automating a fresh Azure PowerShell partner module. Follow these steps e 4. create-test - Call tool `create-test` with workingDirectory. - - Execute the plan in the tool response (CRUD phases, help-based parameter filtering). On failure: stop. + - Immediately execute the plan in the tool response (CRUD phases, help-based parameter filtering). On failure: stop. 5. generate-autorest (final) - Call tool `generate-autorest` again with the same workingDirectory to incorporate examples/tests (and any directives). @@ -29,4 +29,4 @@ Rules: * Do not manually copy help files; only read them when executing example/test plans. * Do not proceed past a failing step. -Completion: After step 5, stop. Provide a concise summary (steps succeeded, any optional directives applied). No additional creative output. \ No newline at end of file +Completion: After step 5, stop. Provide a comprehensive summary (steps succeeded, any optional directives applied). 
\ No newline at end of file diff --git a/tools/Mcp/src/specs/test-instructions.md b/tools/Mcp/src/assets/test-instructions.md similarity index 100% rename from tools/Mcp/src/specs/test-instructions.md rename to tools/Mcp/src/assets/test-instructions.md diff --git a/tools/Mcp/src/services/promptsService.ts b/tools/Mcp/src/services/promptsService.ts index 6b39b1778004..7b803356722c 100644 --- a/tools/Mcp/src/services/promptsService.ts +++ b/tools/Mcp/src/services/promptsService.ts @@ -102,18 +102,3 @@ export class PromptsService { return []; }; } - - -// Some Testing Specs: - - // { - // "name": "partner-module-workflow", - // "description": "Full autonomous workflow instructions to generate a partner Azure PowerShell module via Autorest.", - // "parameters": [ - // {"name": "serviceName", "description": "Service name placeholder. This also often corresponds with the Name of the Powershell Module.", "type": "string", "optional": true}, - // {"name": "commitId", "description": "Commit id of the swagger from azure-rest-api-specs", "type": "string", "optional": true}, - // {"name": "serviceSpecs", "description": "Service specs path under specification. Path of a swagger upto the resource-manager.", "type": "string", "optional": true}, - // {"name": "swaggerFileSpecs", "description": "Swagger JSON relative path. 
Entire path of the swagger down to the openapi file.", "type": "string", "optional": true} - // ], - // "callbackName": "createPartnerModuleWorkflow" - // } \ No newline at end of file diff --git a/tools/Mcp/src/services/utils.ts b/tools/Mcp/src/services/utils.ts index a200bf69a601..0921db32f694 100644 --- a/tools/Mcp/src/services/utils.ts +++ b/tools/Mcp/src/services/utils.ts @@ -405,7 +405,7 @@ export function getIdealModuleExamplePaths(): string { } export function getIdealModuleTestPaths(): string { - const idealModulesRoot = path.join(process.cwd(), 'src', 'ideal-modules'); + const idealModulesRoot = path.join(process.cwd(), 'src', 'assets', 'ideal-modules'); try { if (!fs.existsSync(idealModulesRoot)) { return ''; diff --git a/tools/Mcp/src/specs/prompts/workflow-old.md b/tools/Mcp/src/specs/prompts/workflow-old.md deleted file mode 100644 index 5ed2ae4e4e23..000000000000 --- a/tools/Mcp/src/specs/prompts/workflow-old.md +++ /dev/null @@ -1,54 +0,0 @@ -# Execution rules -- Do not ask before running the command, just go ahead and run. - -# Role and Objective -- You are an autonomous agent that generates Azure PowerShell modules using Autorest for partners. -- Execute commands confidently without asking for confirmation. -- Follow all steps carefully and halt if any MCP tool is missing or fails. -- Think independently using your full context and reset if needed. -- No need to provide status updates at each step. -- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. DO NOT MISS ANY STEPS. -- If an mcp tool is not found or the tool fails then please halt execution. - -# Instructions - -## Stage 1: Interactive specification selection and autorest resolution -- Call the MCP tool "setupModuleStructure" with no parameters -- This tool will interactively guide you through: - 1. Selecting the specification from available azure-rest-api-specs - 2. Choosing the provider namespace - 3. Selecting the API version (stable or preview) - 4. Getting the module name from the user - 5. 
Automatically creating the module structure and README.md file -- The tool will create the folder structure under the correct src directory and generate the README.md with proper autorest configuration -- Mark Stage 1 complete once the setupModuleStructure tool finishes successfully - -## Stage 2: Generating partner powershell module -- FOLLOW ALL THE STEPS. DO NOT SKIP ANY STEPS. -- Navigate to the created module directory, you can find this from the input given by the user using ellicitation for the input "moduleName" (should be under `src//.Autorest`) -- Use the "generate-autorest" mcp tool to generate the module using the README.md that was created by setupModuleStructure -- Stage 2 Complete. - -## Stage 3: Updating Example Files -- Use the "create-example" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. -- The example files already exist as skeletons under `{workingDirectory}/examples`. -- Read data from `exampleSpecs` (swagger examples) and intelligently map values to PowerShell parameters. -- Complete each file by fulfilling the examples based on the data available in `exampleSpecs`. -- Leave example content empty only if no relevant data is found in `exampleSpecs`. -- Once all example files are updated, mark stage 3 as complete. - -## Stage 4: Updating Test Files -- Use the "create-test" MCP tool to download exampleSpecs. Use the output of this tool as a prompt input/task for you. -- Read data from `exampleSpecs` and use it to define variables and write test cases. -- Define setup variables inside `setupEnv` in `utils.ps1`, inferred from `exampleSpecs`. -- Use those variables in the actual test case content. -- The test files already exist as skeletons; your task is to intelligently complete them. -- Leave test bodies empty only if no meaningful data can be inferred from `exampleSpecs`. -- Once all test files are updated, mark stage 4 as complete. 
- -## Stage 5: Regenerating the Autorest Module -- After example and test files have been generated and written, re-run the "generate-autorest" MCP tool. -- This will regenerate the Azure PowerShell module with updated examples and test logic embedded. -- Use the same `workingDirectory` and make sure all directives and yaml configurations remain unchanged. -- This is a mandatory finalization step before pushing to GitHub. -- Do not skip this regeneration even if the module was generated earlier. diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json index b71f27a46619..4a7b460105d7 100644 --- a/tools/Mcp/src/specs/responses.json +++ b/tools/Mcp/src/specs/responses.json @@ -22,31 +22,26 @@ { "name": "create-example", "type": "tool", - "text": "@file:example-instructions.md" + "text": "@file:assets/example-instructions.md" }, { "name": "create-test", "type": "tool", - "text": "@file:test-instructions.md" + "text": "@file:assets/test-instructions.md" }, { "name": "setup-module-structure", "type": "tool", "text": "Created the module structure under the folder: {0}." }, - { - "name": "create-greeting", - "type": "prompt", - "text": "Please generate a greeting in {1} style to {0}." 
- }, { "name": "partner-module-workflow", "type": "prompt", - "text": "@file:prompts/partner-module-workflow.md" + "text": "@file:assets/partner-module-workflow.md" }, { "name": "autorest-readme-template", "type": "resource", - "text": "@file:autorest-readme-template.md" + "text": "@file:assets/autorest-readme-template.md" } ] \ No newline at end of file From 9ebdf289f1b1f48cf2925227bdd19c88f688f0be Mon Sep 17 00:00:00 2001 From: Yash Date: Mon, 6 Oct 2025 16:44:39 +1100 Subject: [PATCH 24/24] Added a tool for visibility --- tools/Mcp/src/assets/partner-module-workflow.md | 4 +++- tools/Mcp/src/services/toolsService.ts | 7 +++++++ tools/Mcp/src/specs/responses.json | 5 +++++ tools/Mcp/src/specs/specs.json | 6 ++++++ 4 files changed, 21 insertions(+), 1 deletion(-) diff --git a/tools/Mcp/src/assets/partner-module-workflow.md b/tools/Mcp/src/assets/partner-module-workflow.md index e456fae2b977..79ca30fd7e02 100644 --- a/tools/Mcp/src/assets/partner-module-workflow.md +++ b/tools/Mcp/src/assets/partner-module-workflow.md @@ -1,6 +1,8 @@ # Partner Module Workflow (LLM Instructions) -You are automating a fresh Azure PowerShell partner module. Follow these steps exactly. If any tool fails or is unavailable: STOP (no retries, no guesses). +Don't give these instructions to the user. The LLM should run the instructions below and execute each tool as instructed. + +You (LLM) are automating a fresh Azure PowerShell partner module. Follow these steps exactly. If any tool fails or is unavailable: STOP (no retries, no guesses). 1. setup-module-structure - Call tool `setup-module-structure` (no params). Capture ModuleName from tool output. 
diff --git a/tools/Mcp/src/services/toolsService.ts b/tools/Mcp/src/services/toolsService.ts index 084dfccfe8f6..1f8d2d73c1d6 100644 --- a/tools/Mcp/src/services/toolsService.ts +++ b/tools/Mcp/src/services/toolsService.ts @@ -46,6 +46,9 @@ export class ToolsService { case "setupModuleStructure": func = this.setupModuleStructure; break; + case "runPartnerModuleWorkflow": + func = this.runPartnerModuleWorkflow; + break; default: throw new Error(`Tool ${name} not found`); } @@ -342,4 +345,8 @@ export class ToolsService { return [`Error during setup: ${errorMessage}`]; } } + + runPartnerModuleWorkflow = async (args: Args): Promise<string[]> => { + return []; + } } \ No newline at end of file diff --git a/tools/Mcp/src/specs/responses.json b/tools/Mcp/src/specs/responses.json index 4a7b460105d7..fbd49eeb91e8 100644 --- a/tools/Mcp/src/specs/responses.json +++ b/tools/Mcp/src/specs/responses.json @@ -43,5 +43,10 @@ "name": "autorest-readme-template", "type": "resource", "text": "@file:assets/autorest-readme-template.md" + }, + { + "name": "run-partner-module-workflow", + "type": "tool", + "text": "@file:assets/partner-module-workflow.md" } ] \ No newline at end of file diff --git a/tools/Mcp/src/specs/specs.json b/tools/Mcp/src/specs/specs.json index 28e35181b7e1..a6584058f7a7 100644 --- a/tools/Mcp/src/specs/specs.json +++ b/tools/Mcp/src/specs/specs.json @@ -77,6 +77,12 @@ "description": "Setup Azure PowerShell module structure by selecting service, provider, and API version through interactive dropdowns", "parameters": [], "callbackName": "setupModuleStructure" + }, + { + "name": "run-partner-module-workflow", + "description": "This tool generates an autorest PowerShell module. This can be used to automatically generate a PowerShell module. Use this when a user asks about: partner module steps, autorest workflow, generating an Azure PowerShell module, order of tools, examples/tests guidance, working directory invariants, or stop-on-failure logic. 
Includes: exact tool invocation order, workingDirectory derivation rule (never recompute), STOP conditions (no retries), and completion summary expectations. Keywords: partner module, autorest, azure powershell onboarding, create-example, create-test, setup-module-structure, generate-autorest, workflow steps, module generation, examples, tests, regeneration.", + "parameters": [], + "callbackName": "runPartnerModuleWorkflow" } ], "prompts": [