Commit 2f25ca8

removed commented code and final fixes
1 parent 7c705a7 commit 2f25ca8

2 files changed: +15 −52 lines changed


infra/scripts/index_scripts/04_cu_process_data_new_data.py

Lines changed: 12 additions & 7 deletions
@@ -10,10 +10,7 @@
 from azure.search.documents import SearchClient
 from azure.search.documents.indexes import SearchIndexClient
 from azure.storage.filedatalake import DataLakeServiceClient
-# --- REPLACED ---
-# from openai import AzureOpenAI
 from azure.ai.inference import ChatCompletionsClient, EmbeddingsClient
-# ----------------
 from content_understanding_client import AzureContentUnderstandingClient
 from azure_credential_utils import get_azure_credential
 from azure.search.documents.indexes.models import (
@@ -72,6 +69,17 @@ def get_secrets_from_kv(kv_name, secret_name):
 index_client = SearchIndexClient(endpoint=search_endpoint, credential=search_credential)
 print("Azure Search setup complete.")

+# ---------- Azure AI Foundry (Inference) clients (Managed Identity) ----------
+# For Azure OpenAI endpoints, the Inference SDK expects the deployment path and api_version + scopes.
+# chat deployment (already coming from Key Vault as `deployment`)
+chat_endpoint = f"{openai_api_base}/openai/deployments/{deployment}"
+chat_client = ChatCompletionsClient(
+    endpoint=chat_endpoint,
+    credential=credential,
+    credential_scopes=["https://cognitiveservices.azure.com/.default"],
+    api_version=openai_api_version,
+)
+
 # Delete the search index
 search_index_client = SearchIndexClient(search_endpoint, search_credential)
 search_index_client.delete_index(INDEX_NAME)
@@ -405,8 +413,6 @@ def call_gpt4(topics_str1, client):

 max_tokens = 3096
 res = call_gpt4(", ".join([]), chat_client)
-# (rest of topic mining and mapping logic unchanged)
-
 for object1 in res['topics']:
     cursor.execute("INSERT INTO km_mined_topics (label, description) VALUES (?,?)", (object1['label'], object1['description']))
 conn.commit()
@@ -424,8 +430,7 @@ def get_mined_topic_mapping(input_text, list_of_topics):
     prompt = f'''You are a data analysis assistant to help find the closest topic for a given text {input_text}
     from a list of topics - {list_of_topics}.
     ALWAYS only return a topic from list - {list_of_topics}. Do not add any other text.'''
-    response = openai_client.chat.completions.create(
-        model=deployment,
+    response = chat_client.complete(
         messages=[
             {"role": "system", "content": "You are a helpful assistant."},
             {"role": "user", "content": prompt},

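For reference, the pattern this file adopts can be exercised end to end roughly as follows. This is a minimal standalone sketch, not the script itself: the endpoint, deployment name, and API version are placeholders, and DefaultAzureCredential stands in for the script's get_azure_credential() helper and Key Vault lookups.

# Standalone sketch of the azure-ai-inference pattern used in 04_cu_process_data_new_data.py.
# All values below are placeholders; the script resolves them from Key Vault instead.
from azure.ai.inference import ChatCompletionsClient
from azure.identity import DefaultAzureCredential

openai_api_base = "https://<your-aoai-resource>.openai.azure.com"  # placeholder
deployment = "<your-chat-deployment>"                              # placeholder
openai_api_version = "2024-06-01"                                  # placeholder

# Azure OpenAI endpoints are addressed per deployment, with an AAD scope for managed identity.
chat_client = ChatCompletionsClient(
    endpoint=f"{openai_api_base}/openai/deployments/{deployment}",
    credential=DefaultAzureCredential(),
    credential_scopes=["https://cognitiveservices.azure.com/.default"],
    api_version=openai_api_version,
)

# Same call shape as get_mined_topic_mapping() after the change: plain dict messages.
response = chat_client.complete(
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Pick the closest topic to 'invoice dispute' from: billing, onboarding."},
    ],
    temperature=0,
)
print(response.choices[0].message.content)
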
src/api/services/history_service.py

Lines changed: 3 additions & 45 deletions
@@ -2,7 +2,6 @@
 import uuid
 from typing import Optional
 from fastapi import HTTPException, status
-# from openai import AsyncAzureOpenAI  # Commented out - replaced with Foundry SDK
 from azure.ai.projects import AIProjectClient
 from azure.ai.agents.models import MessageRole, ListSortOrder
 from common.config.config import Config
@@ -31,11 +30,9 @@ def __init__(self):
            and self.azure_cosmosdb_conversations_container
        )

-        # OpenAI configuration (partially commented out - now using Foundry SDK)
-        # self.azure_openai_endpoint = config.azure_openai_endpoint  # Not needed for Foundry SDK
-        # self.azure_openai_api_version = config.azure_openai_api_version  # Not needed for Foundry SDK
-        self.azure_openai_deployment_name = config.azure_openai_deployment_model  # Still needed for Foundry SDK model parameter
-        # self.azure_openai_resource = config.azure_openai_resource  # Not needed for Foundry SDK
+
+        self.azure_openai_deployment_name = config.azure_openai_deployment_model
+
         self.azure_client_id = config.azure_client_id

         # AI Project configuration for Foundry SDK
@@ -62,47 +59,8 @@ def init_cosmosdb_client(self):
             logger.exception("Failed to initialize CosmosDB client")
             raise

-    # def init_openai_client(self):
-    #     """
-    #     COMMENTED OUT - Replaced with Foundry SDK in generate_title method
-    #     This method was used for direct OpenAI API calls, but we now use
-    #     Azure AI Foundry SDK with AIProjectClient for title generation
-    #     """
-    #     user_agent = "GitHubSampleWebApp/AsyncAzureOpenAI/1.0.0"
-    #
-    #     try:
-    #         if not self.azure_openai_endpoint and not self.azure_openai_resource:
-    #             raise ValueError(
-    #                 "AZURE_OPENAI_ENDPOINT or AZURE_OPENAI_RESOURCE is required")
-    #
-    #         endpoint = self.azure_openai_endpoint or f"https://{self.azure_openai_resource}.openai.azure.com/"
-    #         ad_token_provider = None
-    #
-    #         logger.debug("Using Azure AD authentication for OpenAI")
-    #         ad_token_provider = get_bearer_token_provider(
-    #             get_azure_credential(client_id=self.azure_client_id), "https://cognitiveservices.azure.com/.default")
-    #
-    #         if not self.azure_openai_deployment_name:
-    #             raise ValueError("AZURE_OPENAI_MODEL is required")
-    #
-    #         return AsyncAzureOpenAI(
-    #             api_version=self.azure_openai_api_version,
-    #             azure_ad_token_provider=ad_token_provider,
-    #             default_headers={"x-ms-useragent": user_agent},
-    #             azure_endpoint=endpoint,
-    #         )
-    #     except Exception:
-    #         logger.exception("Failed to initialize Azure OpenAI client")
-    #         raise

     async def generate_title(self, conversation_messages):
-        """
-        Generate a conversation title using Azure AI Foundry SDK.
-
-        This method has been migrated from direct OpenAI API calls to use
-        Azure AI Foundry SDK with AIProjectClient for better resource management
-        and integration with Azure AI services.
-        """
         title_prompt = (
             "Summarize the conversation so far into a 4-word or less title. "
             "Do not use any quotation marks or punctuation. "

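For context on the history_service.py side, the imports kept by this commit (AIProjectClient, MessageRole, ListSortOrder) point at the agents-based Foundry flow. A rough synchronous sketch of a title-generation call in that style is below. The project endpoint, model deployment, and sample conversation are placeholders, the real service reads its settings from Config and exposes generate_title as an async method, and the client surface has shifted across azure-ai-projects releases, so treat this as an assumption-laden illustration rather than the service's actual code.

# Assumption-laden sketch of a Foundry agents title-generation flow (not the service's code).
# Endpoint and model are placeholders; method names follow azure-ai-projects/azure-ai-agents 1.x.
from azure.ai.projects import AIProjectClient
from azure.ai.agents.models import MessageRole, ListSortOrder
from azure.identity import DefaultAzureCredential

project_client = AIProjectClient(
    endpoint="https://<your-foundry-resource>.services.ai.azure.com/api/projects/<project>",  # placeholder
    credential=DefaultAzureCredential(),
)

title_prompt = (
    "Summarize the conversation so far into a 4-word or less title. "
    "Do not use any quotation marks or punctuation."
)

agents = project_client.agents
agent = agents.create_agent(model="<your-deployment>", name="title-agent", instructions=title_prompt)
thread = agents.threads.create()
agents.messages.create(
    thread_id=thread.id,
    role=MessageRole.USER,
    content="user: my laptop will not boot\nassistant: have you tried holding the power button?",
)
agents.runs.create_and_process(thread_id=thread.id, agent_id=agent.id)

# Newest agent message first; its text is the generated title.
for message in agents.messages.list(thread_id=thread.id, order=ListSortOrder.DESCENDING):
    if message.role == MessageRole.AGENT and message.text_messages:
        print(message.text_messages[-1].text.value)
        break

agents.delete_agent(agent.id)
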