diff --git a/03-CoreGenerativeAITechniques/02-retrieval-augmented-generation.md b/03-CoreGenerativeAITechniques/02-retrieval-augmented-generation.md
index 00c9031..fc88186 100644
--- a/03-CoreGenerativeAITechniques/02-retrieval-augmented-generation.md
+++ b/03-CoreGenerativeAITechniques/02-retrieval-augmented-generation.md
@@ -84,11 +84,14 @@ We'll use the Microsoft.Extension.AI along with the [Microsoft.Extensions.Vector
 3. Our next task then is to convert our knowledge store (the `movieData` object) into embeddings and then store them into the in-memory vector store. When we create the embeddings we'll use a different model - an embeddings model instead of a language model.
 
     ```csharp
-    var endpoint = new Uri("https://models.inference.ai.azure.com");
-    var modelId = "text-embedding-3-small";
     // get embeddings generator and generate embeddings for movies
-    IEmbeddingGenerator<string, Embedding<float>> generator =
-        new OllamaEmbeddingGenerator(new Uri("http://localhost:11434/"), "all-minilm");
+    var githubToken = Environment.GetEnvironmentVariable("GITHUB_TOKEN") ?? throw new InvalidOperationException("GITHUB_TOKEN environment variable is not set.");
+    var endpoint = new Uri("https://models.inference.ai.azure.com");
+    var modelId = "text-embedding-3-small";
+
+    var embeddingsClient = new EmbeddingsClient(endpoint, new AzureKeyCredential(githubToken));
+    IEmbeddingGenerator<string, Embedding<float>> generator = embeddingsClient.AsIEmbeddingGenerator(modelId);
+
     foreach (var movie in movieData)
     {
         movie.Vector = await generator.GenerateVectorAsync(movie.Description);
diff --git a/03-CoreGenerativeAITechniques/src/RAGSimple-02MEAIVectorsMemory/Program.cs b/03-CoreGenerativeAITechniques/src/RAGSimple-02MEAIVectorsMemory/Program.cs
index 3d30d95..5b594a9 100644
--- a/03-CoreGenerativeAITechniques/src/RAGSimple-02MEAIVectorsMemory/Program.cs
+++ b/03-CoreGenerativeAITechniques/src/RAGSimple-02MEAIVectorsMemory/Program.cs
@@ -1,4 +1,6 @@
-using Microsoft.Extensions.AI;
+using Azure;
+using Azure.AI.Inference;
+using Microsoft.Extensions.AI;
 using Microsoft.SemanticKernel.Connectors.InMemory;
 
 var vectorStore = new InMemoryVectorStore();
@@ -9,8 +11,12 @@ var movieData = MovieFactory.GetMovieVectorList();
 
 // get embeddings generator and generate embeddings for movies
-IEmbeddingGenerator<string, Embedding<float>> generator =
-    new OllamaEmbeddingGenerator(new Uri("http://localhost:11434/"), "all-minilm");
+var githubToken = Environment.GetEnvironmentVariable("GITHUB_TOKEN") ?? throw new InvalidOperationException("GITHUB_TOKEN environment variable is not set.");
+var endpoint = new Uri("https://models.inference.ai.azure.com");
+var modelId = "text-embedding-3-small";
+
+var embeddingsClient = new EmbeddingsClient(endpoint, new AzureKeyCredential(githubToken));
+IEmbeddingGenerator<string, Embedding<float>> generator = embeddingsClient.AsIEmbeddingGenerator(modelId);
 
 foreach (var movie in movieData)
 {
     movie.Vector = await generator.GenerateVectorAsync(movie.Description);
diff --git a/03-CoreGenerativeAITechniques/src/RAGSimple-02MEAIVectorsMemory/RAGSimple-02MEAIVectorsMemory.csproj b/03-CoreGenerativeAITechniques/src/RAGSimple-02MEAIVectorsMemory/RAGSimple-02MEAIVectorsMemory.csproj
index 09310eb..3e21ad8 100644
--- a/03-CoreGenerativeAITechniques/src/RAGSimple-02MEAIVectorsMemory/RAGSimple-02MEAIVectorsMemory.csproj
+++ b/03-CoreGenerativeAITechniques/src/RAGSimple-02MEAIVectorsMemory/RAGSimple-02MEAIVectorsMemory.csproj
@@ -7,7 +7,7 @@
     enable
   
-  
+  
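
For a quick local check of the switched-over embeddings path, here is a minimal sketch built only from the calls this change introduces (`EmbeddingsClient`, `AsIEmbeddingGenerator`, `GenerateVectorAsync`). It assumes the `GITHUB_TOKEN` environment variable is set and the `Azure.AI.Inference` and `Microsoft.Extensions.AI` packages referenced by this PR are available; the sample descriptions are illustrative placeholders, not part of the movie data in the repo.

```csharp
using Azure;
using Azure.AI.Inference;
using Microsoft.Extensions.AI;

// GitHub Models endpoint and embeddings model used by this change
var githubToken = Environment.GetEnvironmentVariable("GITHUB_TOKEN")
    ?? throw new InvalidOperationException("GITHUB_TOKEN environment variable is not set.");
var endpoint = new Uri("https://models.inference.ai.azure.com");
var modelId = "text-embedding-3-small";

// Wrap the Azure AI Inference EmbeddingsClient as an IEmbeddingGenerator
var embeddingsClient = new EmbeddingsClient(endpoint, new AzureKeyCredential(githubToken));
IEmbeddingGenerator<string, Embedding<float>> generator = embeddingsClient.AsIEmbeddingGenerator(modelId);

// Illustrative inputs standing in for the movie descriptions
string[] descriptions =
[
    "A hacker discovers reality is a simulation.",
    "A team travels through a wormhole to save humanity."
];

// Generate one embedding vector per description and print its dimensionality
foreach (var description in descriptions)
{
    ReadOnlyMemory<float> vector = await generator.GenerateVectorAsync(description);
    Console.WriteLine($"{vector.Length} dimensions: {description}");
}
```

If the token and packages are in place, each line should report the dimensionality of `text-embedding-3-small` vectors, confirming the generator works before the vectors are upserted into the in-memory vector store as shown in `Program.cs`.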