Skip to content

Commit c3d4061

Browse files
authored
Added support for max_tokens attribute in ChatVertexAI (#714)
1 parent 106872f commit c3d4061

File tree

2 files changed

+7
-0
lines changed

2 files changed

+7
-0
lines changed

libs/vertexai/langchain_google_vertexai/_base.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -225,6 +225,10 @@ def _is_gemini_model(self) -> bool:
225225
def _llm_type(self) -> str:
226226
return "vertexai"
227227

228+
@property
229+
def max_tokens(self) -> int | None:
230+
return self.max_output_tokens
231+
228232
@property
229233
def _identifying_params(self) -> Dict[str, Any]:
230234
"""Gets the identifying parameters."""

libs/vertexai/tests/unit_tests/test_chat_models.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -119,10 +119,13 @@ def test_tuned_model_name() -> None:
119119
model_name="gemini-pro",
120120
project="test-project",
121121
tuned_model_name="projects/123/locations/europe-west4/endpoints/456",
122+
max_tokens=500,
122123
)
123124
assert llm.model_name == "gemini-pro"
124125
assert llm.tuned_model_name == "projects/123/locations/europe-west4/endpoints/456"
125126
assert llm.full_model_name == "projects/123/locations/europe-west4/endpoints/456"
127+
assert llm.max_output_tokens == 500
128+
assert llm.max_tokens == 500
126129

127130

128131
def test_parse_examples_correct() -> None:

0 commit comments

Comments (0)