Skip to content

Commit 9c9752e

Browse files
committed
chore: fix ContextualAI property names
1 parent 361fd0c commit 9c9752e

File tree

3 files changed

+19
-19
lines changed

3 files changed

+19
-19
lines changed

test/collection/test_config.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1052,20 +1052,20 @@ def test_config_with_vectorizer_and_properties(
10521052
(
10531053
Configure.Generative.contextualai(
10541054
model="v2",
1055-
max_new_tokens=512,
10561055
temperature=0.7,
10571056
top_p=0.9,
1057+
max_new_tokens=512,
10581058
system_prompt="You are a helpful assistant that provides accurate and informative responses based on the given context.",
10591059
avoid_commentary=False,
10601060
),
10611061
{
10621062
"generative-contextualai": {
10631063
"model": "v2",
1064-
"maxNewTokensProperty": 512,
1065-
"temperatureProperty": 0.7,
1066-
"topPProperty": 0.9,
1067-
"systemPromptProperty": "You are a helpful assistant that provides accurate and informative responses based on the given context.",
1068-
"avoidCommentaryProperty": False,
1064+
"temperature": 0.7,
1065+
"topP": 0.9,
1066+
"maxNewTokens": 512,
1067+
"systemPrompt": "You are a helpful assistant that provides accurate and informative responses based on the given context.",
1068+
"avoidCommentary": False,
10691069
}
10701070
},
10711071
),

weaviate/collections/classes/config.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -468,11 +468,11 @@ class _GenerativeContextualAIConfig(_GenerativeProvider):
468468
default=GenerativeSearches.CONTEXTUALAI, frozen=True, exclude=True
469469
)
470470
model: Optional[str]
471-
maxNewTokensProperty: Optional[int]
472-
temperatureProperty: Optional[float]
473-
topPProperty: Optional[float]
474-
systemPromptProperty: Optional[str]
475-
avoidCommentaryProperty: Optional[bool]
471+
temperature: Optional[float]
472+
topP: Optional[float]
473+
maxNewTokens: Optional[int]
474+
systemPrompt: Optional[str]
475+
avoidCommentary: Optional[bool]
476476

477477

478478
class _GenerativeGoogleConfig(_GenerativeProvider):
@@ -886,12 +886,12 @@ def contextualai(
886886
avoid_commentary: If `True`, reduce conversational commentary in responses. Defaults to `None`, which uses the server-defined default
887887
"""
888888
return _GenerativeContextualAIConfig(
889-
maxNewTokensProperty=max_new_tokens,
890889
model=model,
891-
temperatureProperty=temperature,
892-
topPProperty=top_p,
893-
systemPromptProperty=system_prompt,
894-
avoidCommentaryProperty=avoid_commentary,
890+
temperature=temperature,
891+
topP=top_p,
892+
maxNewTokens=max_new_tokens,
893+
systemPrompt=system_prompt,
894+
avoidCommentary=avoid_commentary,
895895
)
896896

897897
@staticmethod

weaviate/collections/classes/generative.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -450,9 +450,9 @@ class _GenerativeContextualAI(_GenerativeConfigRuntime):
450450
default=GenerativeSearches.CONTEXTUALAI, frozen=True, exclude=True
451451
)
452452
model: Optional[str]
453-
max_new_tokens: Optional[int]
454453
temperature: Optional[float]
455454
top_p: Optional[float]
455+
max_new_tokens: Optional[int]
456456
system_prompt: Optional[str]
457457
avoid_commentary: Optional[bool]
458458
knowledge: Optional[List[str]]
@@ -463,9 +463,9 @@ def _to_grpc(self, opts: _GenerativeConfigRuntimeOptions) -> generative_pb2.Gene
463463
return_metadata=opts.return_metadata,
464464
contextualai=generative_pb2.GenerativeContextualAI(
465465
model=self.model,
466-
max_new_tokens=self.max_new_tokens,
467466
temperature=self.temperature,
468467
top_p=self.top_p,
468+
max_new_tokens=self.max_new_tokens,
469469
system_prompt=self.system_prompt,
470470
avoid_commentary=self.avoid_commentary or False,
471471
knowledge=_to_text_array(self.knowledge),
@@ -632,9 +632,9 @@ def contextualai(
632632
"""
633633
return _GenerativeContextualAI(
634634
model=model,
635-
max_new_tokens=max_new_tokens,
636635
temperature=temperature,
637636
top_p=top_p,
637+
max_new_tokens=max_new_tokens,
638638
system_prompt=system_prompt,
639639
avoid_commentary=avoid_commentary,
640640
knowledge=knowledge,

0 commit comments

Comments (0)