Skip to content

Commit 5c19018

Browse files
Changes default language model to llama3.2 (#1011)
Improves prompt for operationId. Adds support for completion temperature.
1 parent a3b74c3 commit 5c19018

File tree

5 files changed

+47
-8
lines changed

5 files changed

+47
-8
lines changed
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
// Licensed to the .NET Foundation under one or more agreements.
2+
// The .NET Foundation licenses this file to you under the MIT license.
3+
// See the LICENSE file in the project root for more information.
4+
5+
using System.Text.Json.Serialization;
6+
7+
namespace DevProxy.Abstractions.LanguageModel;
8+
9+
public class CompletionOptions
10+
{
11+
[JsonPropertyName("temperature")]
12+
public double? Temperature { get; set; }
13+
}

dev-proxy-abstractions/LanguageModel/ILanguageModelClient.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,6 @@ namespace DevProxy.Abstractions.LanguageModel;
77
public interface ILanguageModelClient
88
{
99
Task<ILanguageModelCompletionResponse?> GenerateChatCompletionAsync(ILanguageModelChatCompletionMessage[] messages);
10-
Task<ILanguageModelCompletionResponse?> GenerateCompletionAsync(string prompt);
10+
Task<ILanguageModelCompletionResponse?> GenerateCompletionAsync(string prompt, CompletionOptions? options = null);
1111
Task<bool> IsEnabledAsync();
1212
}

dev-proxy-abstractions/LanguageModel/LanguageModelConfiguration.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,6 @@ public class LanguageModelConfiguration
99
public bool Enabled { get; set; } = false;
1010
// default Ollama URL
1111
public string? Url { get; set; } = "http://localhost:11434";
12-
public string? Model { get; set; } = "phi3";
12+
public string? Model { get; set; } = "llama3.2";
1313
public bool CacheResponses { get; set; } = true;
1414
}

dev-proxy-abstractions/LanguageModel/OllamaLanguageModelClient.cs

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ private async Task<bool> IsEnabledInternalAsync()
7676
}
7777
}
7878

79-
public async Task<ILanguageModelCompletionResponse?> GenerateCompletionAsync(string prompt)
79+
public async Task<ILanguageModelCompletionResponse?> GenerateCompletionAsync(string prompt, CompletionOptions? options = null)
8080
{
8181
using var scope = _logger.BeginScope(nameof(OllamaLanguageModelClient));
8282

@@ -102,7 +102,7 @@ private async Task<bool> IsEnabledInternalAsync()
102102
return cachedResponse;
103103
}
104104

105-
var response = await GenerateCompletionInternalAsync(prompt);
105+
var response = await GenerateCompletionInternalAsync(prompt, options);
106106
if (response == null)
107107
{
108108
return null;
@@ -123,7 +123,7 @@ private async Task<bool> IsEnabledInternalAsync()
123123
}
124124
}
125125

126-
private async Task<OllamaLanguageModelCompletionResponse?> GenerateCompletionInternalAsync(string prompt)
126+
private async Task<OllamaLanguageModelCompletionResponse?> GenerateCompletionInternalAsync(string prompt, CompletionOptions? options = null)
127127
{
128128
Debug.Assert(_configuration != null, "Configuration is null");
129129

@@ -138,7 +138,8 @@ private async Task<bool> IsEnabledInternalAsync()
138138
{
139139
prompt,
140140
model = _configuration.Model,
141-
stream = false
141+
stream = false,
142+
options
142143
}
143144
);
144145
_logger.LogDebug("Response: {response}", response.StatusCode);

dev-proxy-plugins/RequestLogs/OpenApiSpecGeneratorPlugin.cs

Lines changed: 27 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -480,11 +480,36 @@ private static string GetLastNonTokenSegment(string[] segments)
480480

481481
private async Task<string> GetOperationIdAsync(string method, string serverUrl, string parametrizedPath)
482482
{
483-
var prompt = $"For the specified request, generate an operation ID, compatible with an OpenAPI spec. Respond with just the ID in plain-text format. For example, for request such as `GET https://api.contoso.com/books/{{books-id}}` you return `getBookById`. For a request like `GET https://api.contoso.com/books/{{books-id}}/authors` you return `getAuthorsForBookById`. Request: {method.ToUpper()} {serverUrl}{parametrizedPath}";
483+
var prompt = @"**Prompt:**
484+
Generate an operation ID for an OpenAPI specification based on the HTTP method and URL provided. Follow these rules:
485+
- The operation ID should be in camelCase format.
486+
- Start with a verb that matches the HTTP method (e.g., `get`, `create`, `update`, `delete`).
487+
- Use descriptive words from the URL path.
488+
- Replace path parameters (e.g., `{userId}`) with relevant nouns in singular form (e.g., `User`).
489+
- Do not provide explanations or any other text; respond only with the operation ID.
490+
491+
Example:
492+
**Request:** `GET https://api.contoso.com/books/{books-id}`
493+
getBook
494+
495+
Example:
496+
**Request:** `GET https://api.contoso.com/books/{books-id}/authors`
497+
getBookAuthors
498+
499+
Example:
500+
**Request:** `GET https://api.contoso.com/books/{books-id}/authors/{authors-id}`
501+
getBookAuthor
502+
503+
Example:
504+
**Request:** `POST https://api.contoso.com/books/{books-id}/authors`
505+
addBookAuthor
506+
507+
Now, generate the operation ID for the following:
508+
**Request:** `{request}`".Replace("{request}", $"{method.ToUpper()} {serverUrl}{parametrizedPath}");
484509
ILanguageModelCompletionResponse? id = null;
485510
if (await Context.LanguageModelClient.IsEnabledAsync())
486511
{
487-
id = await Context.LanguageModelClient.GenerateCompletionAsync(prompt);
512+
id = await Context.LanguageModelClient.GenerateCompletionAsync(prompt, new() { Temperature = 1 });
488513
}
489514
return id?.Response ?? $"{method}{parametrizedPath.Replace('/', '.')}";
490515
}

0 commit comments

Comments
 (0)