|
46 | 46 | "https://cognitiveservices.azure.com/.default", |
47 | 47 | ) |
48 | 48 | client = AsyncAzureOpenAI( |
49 | | - api_version=os.environ.get("AZURE_OPENAI_VERSION"), |
50 | | - azure_endpoint=os.environ.get("AZURE_OPENAI_ENDPOINT"), |
| 49 | + api_version=os.environ["AZURE_OPENAI_VERSION"], |
| 50 | + azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"], |
51 | 51 | azure_ad_token_provider=token_provider, |
52 | 52 | ) |
53 | 53 | model = OpenAIChatModel( |
54 | | - os.environ.get("AZURE_OPENAI_CHAT_DEPLOYMENT"), |
| 54 | + os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"], |
55 | 55 | provider=OpenAIProvider(openai_client=client), |
56 | 56 | ) |
57 | 57 | elif API_HOST == "github": |
58 | | - client = AsyncOpenAI(api_key=os.environ.get("GITHUB_TOKEN"), base_url="https://models.inference.ai.azure.com") |
59 | | - model = OpenAIChatModel(os.getenv("GITHUB_MODEL", "gpt-4o"), provider=OpenAIProvider(openai_client=client)) |
| 58 | + client = AsyncOpenAI(api_key=os.environ["GITHUB_TOKEN"], base_url="https://models.inference.ai.azure.com") |
| 59 | + model = OpenAIChatModel( |
| 60 | + os.environ.get("GITHUB_MODEL", "gpt-4o-mini"), provider=OpenAIProvider(openai_client=client) |
| 61 | + ) |
60 | 62 | elif API_HOST == "ollama": |
61 | | - client = AsyncOpenAI(base_url=os.environ.get("OLLAMA_ENDPOINT", "http://localhost:11434/v1"), api_key="none") |
62 | | - model = OpenAIChatModel(os.environ.get("OLLAMA_MODEL"), provider=OpenAIProvider(openai_client=client)) |
| 63 | + client = AsyncOpenAI(base_url=os.environ["OLLAMA_ENDPOINT"], api_key="none") |
| 64 | + model = OpenAIChatModel(os.environ["OLLAMA_MODEL"], provider=OpenAIProvider(openai_client=client)) |
63 | 65 | else: |
64 | | - client = AsyncOpenAI() # fallback to default OpenAI client settings |
65 | | - model = OpenAIChatModel(os.getenv("OPENAI_MODEL", "gpt-4o-mini"), provider=OpenAIProvider(openai_client=client)) |
| 66 | + client = AsyncOpenAI() |
| 67 | + model = OpenAIChatModel( |
| 68 | + os.environ.get("OPENAI_MODEL", "gpt-4o-mini"), provider=OpenAIProvider(openai_client=client) |
| 69 | + ) |
66 | 70 |
|
67 | 71 |
|
68 | 72 | class IssueProposal(BaseModel): |
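For context, a minimal usage sketch of how the configured model might be handed to a pydantic_ai Agent that produces IssueProposal instances. This is not part of the commit: the prompt string is illustrative, and the output_type / result.output names assume a recent pydantic-ai release (older releases use result_type / result.data).

# Sketch only: assumes the module-level `model` and the IssueProposal class shown above.
import asyncio

from pydantic_ai import Agent

agent = Agent(model, output_type=IssueProposal)

async def main() -> None:
    # Illustrative prompt; the real instructions live elsewhere in the repo.
    result = await agent.run("Summarize this bug report as an issue proposal.")
    print(result.output)  # a validated IssueProposal instance

asyncio.run(main())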
|