Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 8 additions & 6 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
name: CI
on:
push:
branches-ignore:
- 'generated'
- 'codegen/**'
- 'integrated/**'
- 'stl-preview-head/**'
- 'stl-preview-base/**'
branches:
- '**'
- '!integrated/**'
- '!stl-preview-head/**'
- '!stl-preview-base/**'
- '!generated'
- '!codegen/**'
- 'codegen/stl/**'
pull_request:
branches-ignore:
- 'stl-preview-head/**'
Expand Down
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "2.4.0"
".": "2.5.0"
}
6 changes: 3 additions & 3 deletions .stats.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
configured_endpoints: 74
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/togetherai%2Ftogetherai-452048e531c558b879791ccc9788c3f3c23c50c808c909a6d95f47af360566a4.yml
openapi_spec_hash: 11835b47ff4c2d1b4dbeed74c49908e1
config_hash: b66198d27b4d5c152688ff6cccfdeab5
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/togetherai%2Ftogetherai-31893d157d3c85caa1d8615b73a5fa431ea2cc126bd2410e0f84f3defd5c7dec.yml
openapi_spec_hash: b652a4d504b4a3dbf585ab803b0f59fc
config_hash: 52d213100a0ca1a4b2cdcd2718936b51
25 changes: 25 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,30 @@
# Changelog

## 2.5.0 (2026-03-18)

Full Changelog: [v2.4.0...v2.5.0](https://github.com/togethercomputer/together-py/compare/v2.4.0...v2.5.0)

### Features

* **api:** manual updates ([9db2163](https://github.com/togethercomputer/together-py/commit/9db2163681520a95a54e03472ed6719abc40dc05))
* **api:** manual updates ([a3436ea](https://github.com/togethercomputer/together-py/commit/a3436ea1f04273a0d91103cbd850136854389794))
* **api:** manual updates ([ca7f97d](https://github.com/togethercomputer/together-py/commit/ca7f97de14718352cb990c281d3fa053e954fc42))
* Update Llama 3.1 8B with Qwen 3.5 9B ([0680858](https://github.com/togethercomputer/together-py/commit/0680858c6f88e9e8ad72c76dc6d374e5074e12a2))


### Bug Fixes

* **deps:** bump minimum typing-extensions version ([94c9632](https://github.com/togethercomputer/together-py/commit/94c96328c13e6955f777e4c6a3517744b84a1d23))
* **jig:** use Together.get instead of Together._client.get so that registry errors are handled correctly ([#302](https://github.com/togethercomputer/together-py/issues/302)) ([aefd483](https://github.com/togethercomputer/together-py/commit/aefd483f7c335232fe798be6f92f52a960f70e88))
* **pydantic:** do not pass `by_alias` unless set ([dca687e](https://github.com/togethercomputer/together-py/commit/dca687e39cf86f80cfd051e4e1596e8f54753968))
* **types:** remove model enum constraint in chat completions ([c30e2a0](https://github.com/togethercomputer/together-py/commit/c30e2a0929888955fe7bae94f38a024ff0a601ed))


### Chores

* Fix unit tests broken by recent model deprecations ([#305](https://github.com/togethercomputer/together-py/issues/305)) ([28902b4](https://github.com/togethercomputer/together-py/commit/28902b4f67f0f5158ea09589fe63de5e51efc9b7))
* **internal:** tweak CI branches ([556c449](https://github.com/togethercomputer/together-py/commit/556c4491ed2c1ad39f4d587db2c25d94dd210192))

## 2.4.0 (2026-03-11)

Full Changelog: [v2.3.2...v2.4.0](https://github.com/togethercomputer/together-py/compare/v2.3.2...v2.4.0)
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,7 @@ chat_completion = client.chat.completions.create(
"role": "system",
}
],
model="meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
model="model",
reasoning={},
)
print(chat_completion.reasoning)
Expand Down
1 change: 1 addition & 0 deletions api.md
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,7 @@ Types:
from together.types.chat import (
ChatCompletion,
ChatCompletionChunk,
ChatCompletionPrompt,
ChatCompletionStructuredMessageImageURL,
ChatCompletionStructuredMessageText,
ChatCompletionStructuredMessageVideoURL,
Expand Down
2 changes: 1 addition & 1 deletion examples/fine_tuning.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
client = Together()

file_id = "file-bf72b951-fa1a-41af-a152-fe385dca0201"
fine_tune_model = client.fine_tuning.create(model="meta-llama/Meta-Llama-3-8B", training_file=file_id)
fine_tune_model = client.fine_tuning.create(model="openai/gpt-oss-20b", training_file=file_id)
print(fine_tune_model)

fine_tune_id = fine_tune_model.id
Expand Down
4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "together"
version = "2.4.0"
version = "2.5.0"
description = "The official Python library for the together API"
dynamic = ["readme"]
license = "Apache-2.0"
Expand All @@ -11,7 +11,7 @@ authors = [
dependencies = [
"httpx>=0.23.0, <1",
"pydantic>=1.9.0, <3",
"typing-extensions>=4.10, <5",
"typing-extensions>=4.14, <5",
"anyio>=3.5.0, <5",
"distro>=1.7.0, <2",
"sniffio",
Expand Down
16 changes: 11 additions & 5 deletions requirements-dev.lock
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ execnet==2.1.2
# via pytest-xdist
filelock==3.19.1 ; python_full_version < '3.10'
# via together
filelock==3.20.3 ; python_full_version >= '3.10'
filelock==3.25.2 ; python_full_version >= '3.10'
# via together
frozenlist==1.8.0
# via
Expand Down Expand Up @@ -87,7 +87,7 @@ pathspec==1.0.3
# via mypy
pillow==11.3.0 ; python_full_version < '3.10'
# via together
pillow==12.1.0 ; python_full_version >= '3.10'
pillow==12.1.1 ; python_full_version >= '3.10'
# via together
pluggy==1.6.0
# via pytest
Expand Down Expand Up @@ -128,7 +128,9 @@ six==1.17.0 ; python_full_version < '3.10'
# via python-dateutil
sniffio==1.3.1
# via together
tabulate==0.9.0
tabulate==0.9.0 ; python_full_version < '3.10'
# via together
tabulate==0.10.0 ; python_full_version >= '3.10'
# via together
time-machine==2.19.0 ; python_full_version < '3.10'
time-machine==3.2.0 ; python_full_version >= '3.10'
Expand All @@ -143,9 +145,13 @@ types-pyyaml==6.0.12.20250915
# via together
types-requests==2.32.4.20260107
# via types-tqdm
types-tabulate==0.9.0.20241207
types-tabulate==0.9.0.20241207 ; python_full_version < '3.10'
# via together
types-tabulate==0.10.0.20260308 ; python_full_version >= '3.10'
# via together
types-tqdm==4.67.3.20260205 ; python_full_version < '3.10'
# via together
types-tqdm==4.67.3.20260205
types-tqdm==4.67.3.20260303 ; python_full_version >= '3.10'
# via together
typing-extensions==4.15.0
# via
Expand Down
11 changes: 9 additions & 2 deletions src/together/_compat.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload
from datetime import date, datetime
from typing_extensions import Self, Literal
from typing_extensions import Self, Literal, TypedDict

import pydantic
from pydantic.fields import FieldInfo
Expand Down Expand Up @@ -131,6 +131,10 @@ def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
return model.model_dump_json(indent=indent)


class _ModelDumpKwargs(TypedDict, total=False):
by_alias: bool


def model_dump(
model: pydantic.BaseModel,
*,
Expand All @@ -142,14 +146,17 @@ def model_dump(
by_alias: bool | None = None,
) -> dict[str, Any]:
if (not PYDANTIC_V1) or hasattr(model, "model_dump"):
kwargs: _ModelDumpKwargs = {}
if by_alias is not None:
kwargs["by_alias"] = by_alias
return model.model_dump(
mode=mode,
exclude=exclude,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
# warnings are not supported in Pydantic v1
warnings=True if PYDANTIC_V1 else warnings,
by_alias=by_alias,
**kwargs,
)
return cast(
"dict[str, Any]",
Expand Down
2 changes: 1 addition & 1 deletion src/together/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

__title__ = "together"
__version__ = "2.4.0" # x-release-please-version
__version__ = "2.5.0" # x-release-please-version
6 changes: 2 additions & 4 deletions src/together/lib/cli/api/beta/jig/jig.py
Original file line number Diff line number Diff line change
Expand Up @@ -505,11 +505,9 @@ def state(self) -> State:
def registry(self) -> str:
"""Get registry and namespace for current user"""
if not self.state.registry_base_path:
response = self.together._client.get("/image-repositories/base-path", headers=self.together.auth_headers)
if not response.is_success:
raise JigError(f"Failed to get registry path (HTTP {response.status_code})")
response = self.together.get("/image-repositories/base-path", cast_to=dict[str, str])
# strip protocol for docker image format
self.state.registry_base_path = response.json()["base-path"].split("://", 1)[-1]
self.state.registry_base_path = response["base-path"].split("://", 1)[-1]
self.state.save()
return self.state.registry_base_path + "/"

Expand Down
2 changes: 1 addition & 1 deletion src/together/lib/cli/api/endpoints/create.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
@click.option(
"--model",
required=True,
help="The model to deploy (e.g. meta-llama/Llama-4-Scout-17B-16E-Instruct)",
help="The model to deploy",
)
@click.option(
"--min-replicas",
Expand Down
8 changes: 8 additions & 0 deletions src/together/lib/cli/api/fine_tuning/create.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,6 +148,12 @@ def get_confirmation_message(price: str, warning: str) -> str:
@click.option("--wandb-project-name", type=str, default=None, help="Wandb project name")
@click.option("--wandb-name", type=str, default=None, help="Wandb run name")
@click.option("--wandb-entity", type=str, default=None, help="Wandb entity name")
@click.option(
"--random-seed",
type=int,
default=None,
help="Random seed for reproducible training (e.g. 42). If not set, server default is used.",
)
@click.option(
"--confirm",
"-y",
Expand Down Expand Up @@ -232,6 +238,7 @@ def create(
wandb_project_name: str | None,
wandb_name: str | None,
wandb_entity: str | None,
random_seed: int | None,
confirm: bool | None,
train_on_inputs: bool | Literal["auto"] | None,
training_method: str | None,
Expand Down Expand Up @@ -275,6 +282,7 @@ def create(
wandb_project_name=wandb_project_name,
wandb_name=wandb_name,
wandb_entity=wandb_entity,
random_seed=random_seed,
train_on_inputs=train_on_inputs,
training_method=training_method,
dpo_beta=dpo_beta,
Expand Down
2 changes: 2 additions & 0 deletions src/together/lib/resources/fine_tuning.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ def create_finetune_request(
wandb_project_name: str | None = None,
wandb_name: str | None = None,
wandb_entity: str | None = None,
random_seed: int | None = None,
train_on_inputs: bool | Literal["auto"] | None = None,
training_method: str = "sft",
dpo_beta: float | None = None,
Expand Down Expand Up @@ -249,6 +250,7 @@ def create_finetune_request(
wandb_project_name=wandb_project_name,
wandb_name=wandb_name,
wandb_entity=wandb_entity,
random_seed=random_seed,
training_method=training_method_cls, # pyright: ignore[reportPossiblyUnboundVariable]
multimodal_params=multimodal_params,
from_checkpoint=from_checkpoint,
Expand Down
1 change: 1 addition & 0 deletions src/together/lib/types/fine_tuning.py
Original file line number Diff line number Diff line change
Expand Up @@ -483,6 +483,7 @@ class FinetuneRequest(BaseModel):
wandb_name: Union[str, None] = None
# wandb entity
wandb_entity: Union[str, None] = None
random_seed: Union[int, None] = None
# training type
training_type: Union[TrainingType, None] = None
# training method
Expand Down
Loading
Loading