Commit 3cb3c00

Merge pull request #279722 from mrbullwinkle/mrb_06_30_2024_go_updates

[Azure OpenAI] Go quickstart updates

2 parents d0ed797 + b7a3e7b
1 file changed (+69 −24 lines)

articles/ai-services/openai/includes/chat-go.md

Lines changed: 69 additions & 24 deletions
@@ -7,7 +7,7 @@ ms.service: azure-ai-openai
 ms.topic: include
 author: mrbullwinkle
 ms.author: mbullwin
-ms.date: 08/30/2023
+ms.date: 06/30/2024
 ---

 [Source code](https://github.com/Azure/azure-sdk-for-go/tree/main/sdk/ai/azopenai) | [Package (Go)](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai)| [Samples](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai#pkg-examples)
@@ -40,13 +40,15 @@ import (
     "os"

     "github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai"
+    "github.com/Azure/azure-sdk-for-go/sdk/azcore"
     "github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
 )

 func main() {
     azureOpenAIKey := os.Getenv("AZURE_OPENAI_API_KEY")
-    //modelDeploymentID = deployment name, if model name and deployment name do not match change this value to name chosen when you deployed the model.
-    modelDeploymentID := "gpt-35-turbo"
+    modelDeploymentID := os.Getenv("YOUR_MODEL_DEPLOYMENT_NAME")
+    maxTokens:= int32(400)
+

     // Ex: "https://<your-azure-openai-host>.openai.azure.com"
     azureOpenAIEndpoint := os.Getenv("AZURE_OPENAI_ENDPOINT")
@@ -56,55 +58,84 @@ func main() {
         return
     }

-    keyCredential, err := azopenai.NewKeyCredential(azureOpenAIKey)
-
-    if err != nil {
-        // TODO: Update the following line with your application specific error handling logic
-        log.Fatalf("ERROR: %s", err)
-    }
+    keyCredential := azcore.NewKeyCredential(azureOpenAIKey)

+    // In Azure OpenAI you must deploy a model before you can use it in your client. For more information
+    // see here: https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource
     client, err := azopenai.NewClientWithKeyCredential(azureOpenAIEndpoint, keyCredential, nil)

     if err != nil {
-        // TODO: Update the following line with your application specific error handling logic
-        log.Fatalf("ERROR: %s", err)
+        // TODO: Update the following line with your application specific error handling logic
+        log.Printf("ERROR: %s", err)
+        return
     }

     // This is a conversation in progress.
     // NOTE: all messages, regardless of role, count against token usage for this API.
-    messages := []azopenai.ChatMessage{
+    messages := []azopenai.ChatRequestMessageClassification{
         // You set the tone and rules of the conversation with a prompt as the system role.
-        {Role: to.Ptr(azopenai.ChatRoleSystem), Content: to.Ptr("You are a helpful assistant.")},
+        &azopenai.ChatRequestSystemMessage{Content: to.Ptr("You are a helpful assistant.")},

         // The user asks a question
-        {Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr("Does Azure OpenAI support customer managed keys?")},
+        &azopenai.ChatRequestUserMessage{Content: azopenai.NewChatRequestUserMessageContent("Does Azure OpenAI support customer managed keys?")},

-        // The reply would come back from the Azure OpenAI model. You'd add it to the conversation so we can maintain context.
-        {Role: to.Ptr(azopenai.ChatRoleAssistant), Content: to.Ptr("Yes, customer managed keys are supported by Azure OpenAI")},
+        // The reply would come back from the model. You'd add it to the conversation so we can maintain context.
+        &azopenai.ChatRequestAssistantMessage{Content: to.Ptr("Yes, customer managed keys are supported by Azure OpenAI")},

         // The user answers the question based on the latest reply.
-        {Role: to.Ptr(azopenai.ChatRoleUser), Content: to.Ptr("Do other Azure AI services support this too?")},
+        &azopenai.ChatRequestUserMessage{Content: azopenai.NewChatRequestUserMessageContent("What other Azure Services support customer managed keys?")},

-        // from here you'd keep iterating, sending responses back from the chat completions API
+        // from here you'd keep iterating, sending responses back from ChatGPT
     }

+    gotReply := false
+
     resp, err := client.GetChatCompletions(context.TODO(), azopenai.ChatCompletionsOptions{
         // This is a conversation in progress.
         // NOTE: all messages count against token usage for this API.
-        Messages: messages,
-        Deployment: modelDeploymentID,
+        Messages:       messages,
+        DeploymentName: &modelDeploymentID,
+        MaxTokens:      &maxTokens,
     }, nil)

     if err != nil {
-        // TODO: Update the following line with your application specific error handling logic
-        log.Fatalf("ERROR: %s", err)
+        // TODO: Update the following line with your application specific error handling logic
+        log.Printf("ERROR: %s", err)
+        return
     }

     for _, choice := range resp.Choices {
-        fmt.Fprintf(os.Stderr, "Content[%d]: %s\n", *choice.Index, *choice.Message.Content)
+        gotReply = true
+
+        if choice.ContentFilterResults != nil {
+            fmt.Fprintf(os.Stderr, "Content filter results\n")
+
+            if choice.ContentFilterResults.Error != nil {
+                fmt.Fprintf(os.Stderr, " Error:%v\n", choice.ContentFilterResults.Error)
+            }
+
+            fmt.Fprintf(os.Stderr, " Hate: sev: %v, filtered: %v\n", *choice.ContentFilterResults.Hate.Severity, *choice.ContentFilterResults.Hate.Filtered)
+            fmt.Fprintf(os.Stderr, " SelfHarm: sev: %v, filtered: %v\n", *choice.ContentFilterResults.SelfHarm.Severity, *choice.ContentFilterResults.SelfHarm.Filtered)
+            fmt.Fprintf(os.Stderr, " Sexual: sev: %v, filtered: %v\n", *choice.ContentFilterResults.Sexual.Severity, *choice.ContentFilterResults.Sexual.Filtered)
+            fmt.Fprintf(os.Stderr, " Violence: sev: %v, filtered: %v\n", *choice.ContentFilterResults.Violence.Severity, *choice.ContentFilterResults.Violence.Filtered)
+        }
+
+        if choice.Message != nil && choice.Message.Content != nil {
+            fmt.Fprintf(os.Stderr, "Content[%d]: %s\n", *choice.Index, *choice.Message.Content)
+        }
+
+        if choice.FinishReason != nil {
+            // this choice's conversation is complete.
+            fmt.Fprintf(os.Stderr, "Finish reason[%d]: %s\n", *choice.Index, *choice.FinishReason)
+        }
+    }
+
+    if gotReply {
+        fmt.Fprintf(os.Stderr, "Received chat completions reply\n")
     }

 }
+
 ```

 > [!IMPORTANT]
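
Read outside of diff form, the request pattern these changes converge on is easier to follow. What follows is a condensed sketch, not the full quickstart file from this commit: it keeps the updated pieces (`azcore.NewKeyCredential`, the typed `ChatRequestMessageClassification` messages, and the pointer-valued `DeploymentName` and `MaxTokens` options) but trims the conversation and prints replies with `fmt.Println` instead of the quickstart's `os.Stderr` output and content-filter reporting.

```go
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/Azure/azure-sdk-for-go/sdk/ai/azopenai"
	"github.com/Azure/azure-sdk-for-go/sdk/azcore"
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
)

func main() {
	// Credentials and deployment name come from the environment, as in the updated quickstart.
	key := os.Getenv("AZURE_OPENAI_API_KEY")
	endpoint := os.Getenv("AZURE_OPENAI_ENDPOINT")
	deployment := os.Getenv("YOUR_MODEL_DEPLOYMENT_NAME")
	maxTokens := int32(400)

	// azcore.NewKeyCredential replaces the removed azopenai.NewKeyCredential and returns no error.
	client, err := azopenai.NewClientWithKeyCredential(endpoint, azcore.NewKeyCredential(key), nil)
	if err != nil {
		log.Printf("ERROR: %s", err)
		return
	}

	// Messages are now built from the typed ChatRequestMessageClassification implementations.
	messages := []azopenai.ChatRequestMessageClassification{
		&azopenai.ChatRequestSystemMessage{Content: to.Ptr("You are a helpful assistant.")},
		&azopenai.ChatRequestUserMessage{Content: azopenai.NewChatRequestUserMessageContent("Does Azure OpenAI support customer managed keys?")},
	}

	// DeploymentName and MaxTokens are passed as pointers in the updated options struct.
	resp, err := client.GetChatCompletions(context.TODO(), azopenai.ChatCompletionsOptions{
		Messages:       messages,
		DeploymentName: &deployment,
		MaxTokens:      &maxTokens,
	}, nil)
	if err != nil {
		log.Printf("ERROR: %s", err)
		return
	}

	// Print each returned choice's content, guarding against nil fields as the quickstart does.
	for _, choice := range resp.Choices {
		if choice.Message != nil && choice.Message.Content != nil {
			fmt.Println(*choice.Message.Content)
		}
	}
}
```

One consequence of the credential change is visible in the hunk above: `azcore.NewKeyCredential` doesn't return an error, so the `if err != nil` block that followed the old `azopenai.NewKeyCredential` call is removed entirely.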
@@ -126,7 +157,21 @@ go run chat_completions.go
 ## Output

 ```output
-Content[0]: Yes, many Azure AI services also support customer managed keys. These services enable you to bring your own encryption keys for data at rest, which provides you with more control over the security of your data.
+Content filter results
+ Hate: sev: safe, filtered: false
+ SelfHarm: sev: safe, filtered: false
+ Sexual: sev: safe, filtered: false
+ Violence: sev: safe, filtered: false
+Content[0]: As of my last update in early 2023, in Azure, several AI services support the use of customer-managed keys (CMKs) through Azure Key Vault. This allows customers to have control over the encryption keys used to secure their data at rest. The services that support this feature typically fall under Azure's range of cognitive services and might include:
+
+1. Azure Cognitive Search: It supports using customer-managed keys to encrypt the index data.
+2. Azure Form Recognizer: For data at rest, you can use customer-managed keys for added security.
+3. Azure Text Analytics: CMKs can be used for encrypting your data at rest.
+4. Azure Blob Storage: While not exclusively an AI service, it's often used in conjunction with AI services to store data, and it supports customer-managed keys for encrypting blob data.
+
+Note that the support for CMKs can vary by service and sometimes even by the specific feature within the service. Additionally, the landscape of cloud services is fast evolving, and new features, including security capabilities, are frequently added. Therefore, it's recommended to check the latest Azure documentation or contact Azure support for the most current information about CMK support for any specific Azure AI service.
+Finish reason[0]: stop
+Received chat completions reply
 ```

 ## Clean up resources
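
The commit also softens error handling from `log.Fatalf` to `log.Printf` followed by an early return. If you want richer diagnostics when a call fails (for example, a deployment name that doesn't exist), one option, sketched below and not part of this commit, is to unwrap the `*azcore.ResponseError` that Azure SDK clients return for service failures; the helper name `logServiceError` is hypothetical.

```go
package main

import (
	"errors"
	"log"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore"
)

// logServiceError is a hypothetical helper (not part of this commit) for inspecting
// errors returned by azopenai client calls such as GetChatCompletions.
func logServiceError(err error) {
	var respErr *azcore.ResponseError
	if errors.As(err, &respErr) {
		// azcore.ResponseError carries the HTTP status and the service error code;
		// a 404 here usually means DeploymentName doesn't match an existing deployment.
		log.Printf("service error: status=%d code=%s", respErr.StatusCode, respErr.ErrorCode)
		return
	}
	log.Printf("ERROR: %s", err)
}

func main() {
	// Demo with a synthetic error; in the quickstart you'd call this from the
	// `if err != nil` branches after NewClientWithKeyCredential or GetChatCompletions.
	err := &azcore.ResponseError{StatusCode: 404, ErrorCode: "DeploymentNotFound"}
	logServiceError(err)
}
```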
