
Commit f848161

qa validations
1 parent 8a715e6 commit f848161

3 files changed (+17, -10 lines)


azure/functions/decorators/function_app.py

Lines changed: 13 additions & 7 deletions
@@ -3217,7 +3217,8 @@ def decorator():
     def text_completion_input(self,
                               arg_name: str,
                               prompt: str,
-                              chat_model: Optional[OpenAIModels] = OpenAIModels.DefaultChatModel,  # NoQA
+                              chat_model: Optional[
+                                  OpenAIModels] = OpenAIModels.DefaultChatModel,
                               temperature: Optional[str] = "0.5",
                               top_p: Optional[str] = None,
                               max_tokens: Optional[str] = "100",
@@ -3373,7 +3374,8 @@ def decorator():
     def assistant_post_input(self, arg_name: str,
                              id: str,
                              user_message: str,
-                             chat_model: Optional[str] = OpenAIModels.DefaultChatModel,
+                             chat_model: Optional[
+                                 OpenAIModels] = OpenAIModels.DefaultChatModel,
                              chat_storage_connection_setting: Optional[str] = "AzureWebJobsStorage",  # noqa: E501
                              collection_name: Optional[str] = "ChatState",  # noqa: E501
                              temperature: Optional[str] = "0.5",
@@ -3409,7 +3411,7 @@ def assistant_post_input(self, arg_name: str,
         :param max_tokens: The maximum number of tokens to generate in the
         completion. The token count of your prompt plus max_tokens cannot
         exceed the model's context length. Most models have a context length of
-        2048 tokens (except for the newest models, which support 4096)
+        2048 tokens (except for the newest models, which support 4096)
         :param is_reasoning_model: Whether the configured chat completion model
         is a reasoning model or not. Properties max_tokens and temperature are not
         supported for reasoning models.
@@ -3449,7 +3451,8 @@ def embeddings_input(self,
                          arg_name: str,
                          input: str,
                          input_type: InputType,
-                         embeddings_model: Optional[str] = OpenAIModels.DefaultEmbeddingsModel,
+                         embeddings_model: Optional[
+                             OpenAIModels] = OpenAIModels.DefaultEmbeddingsModel,
                          max_chunk_length: Optional[int] = 8 * 1024,
                          max_overlap: Optional[int] = 128,
                          data_type: Optional[
@@ -3504,8 +3507,10 @@ def semantic_search_input(self,
                               search_connection_name: str,
                               collection: str,
                               query: Optional[str] = None,
-                              embeddings_model: Optional[OpenAIModels] = OpenAIModels.DefaultEmbeddingsModel,  # NoQA
-                              chat_model: Optional[OpenAIModels] = OpenAIModels.DefaultChatModel,  # NoQA
+                              embeddings_model: Optional[
+                                  OpenAIModels] = OpenAIModels.DefaultEmbeddingsModel,
+                              chat_model: Optional[
+                                  OpenAIModels] = OpenAIModels.DefaultChatModel,
                               system_prompt: Optional[str] = semantic_search_system_prompt,  # NoQA
                               max_knowledge_count: Optional[int] = 1,
                               temperature: Optional[str] = "0.5",
@@ -3595,7 +3600,8 @@ def embeddings_store_output(self,
                                 input_type: InputType,
                                 store_connection_name: str,
                                 collection: str,
-                                embeddings_model: Optional[OpenAIModels] = OpenAIModels.DefaultEmbeddingsModel,  # NoQA
+                                embeddings_model: Optional[
+                                    OpenAIModels] = OpenAIModels.DefaultEmbeddingsModel,
                                 max_chunk_length: Optional[int] = 8 * 1024,
                                 max_overlap: Optional[int] = 128,
                                 data_type: Optional[
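For context, the annotation change matters to callers: chat_model and embeddings_model are now typed as Optional[OpenAIModels] rather than Optional[str], so an enum member is the natural value to pass. A minimal sketch of decorator usage under the v2 programming model; the route name, prompt placeholder, and the OpenAIModels import path below are assumptions for illustration, not part of this commit:

    # Illustrative only: "ask" and "{Prompt}" are made-up values, and the
    # import path for OpenAIModels may differ in the published package.
    import azure.functions as func
    from azure.functions.decorators.openai import OpenAIModels  # assumed path

    app = func.FunctionApp()

    @app.route(route="ask")
    @app.text_completion_input(
        arg_name="response",
        prompt="{Prompt}",
        chat_model=OpenAIModels.DefaultChatModel,  # enum member, not a bare string
        max_tokens="100",
    )
    def ask(req: func.HttpRequest, response: str) -> func.HttpResponse:
        # The bound text-completion result arrives in the "response" parameter.
        return func.HttpResponse(response)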

azure/functions/decorators/openai.py

Lines changed: 2 additions & 1 deletion
@@ -51,7 +51,8 @@ def get_binding_name() -> str:
     def __init__(self,
                  name: str,
                  prompt: str,
-                 chat_model: Optional[OpenAIModels] = OpenAIModels.DefaultChatModel,
+                 chat_model: Optional[
+                     OpenAIModels] = OpenAIModels.DefaultChatModel,
                  temperature: Optional[str] = "0.5",
                  top_p: Optional[str] = None,
                  max_tokens: Optional[str] = "100",
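The same wrapping is applied on the binding class itself in openai.py. A rough sketch of constructing that binding directly; the class name TextCompletionInput and the argument values are assumptions based on the surrounding module, not confirmed by this diff:

    # Hypothetical direct construction; class name and import path are assumed.
    from azure.functions.decorators.openai import OpenAIModels, TextCompletionInput

    binding = TextCompletionInput(
        name="response",
        prompt="{Prompt}",
        chat_model=OpenAIModels.DefaultChatModel,
        max_tokens="100",
    )
    # Bindings expose their registered name, as the tests below exercise
    # for the embeddingsStore output binding.
    print(binding.get_binding_name())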

tests/decorators/test_openai.py

Lines changed: 2 additions & 2 deletions
@@ -182,7 +182,7 @@ def test_embeddings_store_output_valid_creation(self):
             max_overlap=1,
             max_chunk_length=1,
             collection="test_collection",
-            embeddings_model=OpenAIModels.DefaultEmbeddingsModel,
+            embeddings_model=OpenAIModels.DefaultEmbeddingsModel,  # noqa: E501
             dummy_field="dummy_field")
         self.assertEqual(output.get_binding_name(),
                          "embeddingsStore")
@@ -194,7 +194,7 @@ def test_embeddings_store_output_valid_creation(self):
             "input": "test_input",
             "inputType": "test_input_type",
             "collection": "test_collection",
-            "embeddingsModel": OpenAIModels.DefaultEmbeddingsModel,
+            "embeddingsModel": OpenAIModels.DefaultEmbeddingsModel,  # noqa: E501
             "storeConnectionName": "test_connection",
             "maxOverlap": 1,
             "maxChunkLength": 1,
