
Commit ea03d45

Fixing dependencies.

Parent: 9089d25

9 files changed: +4511 −3392 lines

docs/api/chat.mdx

Lines changed: 1 addition & 1 deletion
@@ -2654,7 +2654,7 @@ def score(
         self.scorers.extend(
             [
                 dn.scorers.wrap_chat(
-                    scorer if isinstance(scorer, dn.Scorer) else dn.Scorer.from_callable(scorer),
+                    scorer if isinstance(scorer, dn.Scorer) else dn.Scorer(scorer),
                     filter=filter,
                 )
                 for scorer in scorers

docs/api/generator.mdx

Lines changed: 16 additions & 2 deletions
@@ -1646,12 +1646,18 @@ Create a new instance of TransformersGenerator from an already loaded model and
   (`Any`)
   –The loaded model for text generation.
 * **`tokenizer`**
+  (`AutoTokenizer`)
   –The tokenizer associated with the model.
 * **`pipeline`**
   (`TextGenerationPipeline | None`, default:
   `None`
   )
   –The text generation pipeline. Defaults to None.
+* **`params`**
+  (`GenerateParams | None`, default:
+  `None`
+  )
+  –Generation parameters. Defaults to None.
 
 **Returns:**
 
@@ -1674,8 +1680,9 @@ def from_obj(
 
         Args:
             model: The loaded model for text generation.
-            tokenizer : The tokenizer associated with the model.
+            tokenizer: The tokenizer associated with the model.
             pipeline: The text generation pipeline. Defaults to None.
+            params: Generation parameters. Defaults to None.
 
         Returns:
             The TransformersGenerator instance.
@@ -2491,12 +2498,18 @@ Create a new instance of TransformersGenerator from an already loaded model and
   (`Any`)
   –The loaded model for text generation.
 * **`tokenizer`**
+  (`AutoTokenizer`)
   –The tokenizer associated with the model.
 * **`pipeline`**
   (`TextGenerationPipeline | None`, default:
   `None`
   )
   –The text generation pipeline. Defaults to None.
+* **`params`**
+  (`GenerateParams | None`, default:
+  `None`
+  )
+  –Generation parameters. Defaults to None.
 
 **Returns:**
 
@@ -2519,8 +2532,9 @@ def from_obj(
 
         Args:
            model: The loaded model for text generation.
-            tokenizer : The tokenizer associated with the model.
+            tokenizer: The tokenizer associated with the model.
             pipeline: The text generation pipeline. Defaults to None.
+            params: Generation parameters. Defaults to None.
 
         Returns:
             The TransformersGenerator instance.
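
The new `params` argument lets a caller attach default generation parameters when wrapping already-loaded Hugging Face objects. A minimal usage sketch, assuming the import paths and the `GenerateParams` field names shown here (e.g. `max_tokens`, `temperature`) match the installed rigging version — treat it as illustrative rather than copied from this commit:

from transformers import AutoModelForCausalLM, AutoTokenizer

from rigging.generator import GenerateParams
from rigging.generator.transformers_ import TransformersGenerator

# Load the model and tokenizer up front; any causal LM id works here.
model_id = "microsoft/Phi-3-mini-4k-instruct"  # placeholder model id
model = AutoModelForCausalLM.from_pretrained(model_id)
tokenizer = AutoTokenizer.from_pretrained(model_id)

# Hand the pre-loaded objects to rigging, now with default generation params.
generator = TransformersGenerator.from_obj(
    model,
    tokenizer,
    params=GenerateParams(max_tokens=256, temperature=0.7),  # assumed field names
)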

docs/api/model.mdx

Lines changed: 1 addition & 1 deletion
@@ -710,7 +710,7 @@ def xml_example(cls) -> str:
             return f"<{cls.__xml_tag__}>{escape_xml(example)}</{cls.__xml_tag__}>"
 
         lines = []
-        attribute_parts = []
+        attribute_parts: list[str] = []
         element_fields = {}
 
         for field_name, field_info in cls.model_fields.items():

docs/api/prompt.mdx

Lines changed: 2 additions & 2 deletions
@@ -67,8 +67,8 @@ Prompt(
     max_parsing_rounds: int = DEFAULT_MAX_PARSE_ROUNDS,
     max_tool_rounds: int = DEFAULT_MAX_TOOL_ROUNDS,
     inputs: list[Input] = list(),
-    output: Output = lambda: ChatOutput(
-        id="chat", context=Ctx()
+    output: Output = (
+        lambda: ChatOutput(id="chat", context=(Ctx()))
     )(),
     watch_callbacks: list[WatchChatCallback] = list(),
     then_callbacks: list[ThenChatCallback] = list(),

poetry.lock

Lines changed: 4485 additions & 3381 deletions
Some generated files are not rendered by default.

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -13,7 +13,7 @@ packages = [{ include = "rigging" }]
 [tool.poetry.dependencies]
 python = ">=3.10,<3.14"
 pydantic = "^2.7.3"
-pydantic-xml = "^2.11.0"
+pydantic-xml = "<=2.17.0"
 loguru = "^0.7.2"
 litellm = "^1.67.2"
 xmltodict = "^0.13.0"
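
The constraint change drops the `^2.11.0` lower bound and caps pydantic-xml at 2.17.0; the regenerated poetry.lock above reflects that re-resolution. A quick, hedged way to confirm which version actually landed in an environment (standard-library call, nothing rigging-specific):

from importlib.metadata import version

# Prints the installed pydantic-xml version; expect something <= 2.17.0
# after re-locking with this pyproject constraint.
print(version("pydantic-xml"))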

rigging/chat.py

Lines changed: 2 additions & 2 deletions
@@ -59,7 +59,7 @@
 from rigging.util import flatten_list, get_callable_name
 
 if t.TYPE_CHECKING:
-    from dreadnode.metric import Scorer, ScorerCallable
+    from dreadnode.scorers import Scorer, ScorerCallable
     from dreadnode.scorers.rigging import ChatFilterFunction, ChatFilterMode
     from elasticsearch import AsyncElasticsearch  # type: ignore [import-not-found, unused-ignore]
 
@@ -1441,7 +1441,7 @@ def score(
         self.scorers.extend(
             [
                 dn.scorers.wrap_chat(
-                    scorer if isinstance(scorer, dn.Scorer) else dn.Scorer.from_callable(scorer),
+                    scorer if isinstance(scorer, dn.Scorer) else dn.Scorer(scorer),
                     filter=filter,
                 )
                 for scorer in scorers
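
Both this change and the matching update in docs/api/chat.mdx swap `dn.Scorer.from_callable(scorer)` for the plain `dn.Scorer(scorer)` constructor while keeping the same normalization idea: callers may pass either a ready-made scorer or a bare callable, and bare callables get wrapped on the way in. A self-contained sketch of that pattern with hypothetical stand-in names (not the dreadnode API):

import typing as t


class Scorer:
    """Hypothetical stand-in for a scorer wrapper around a plain callable."""

    def __init__(self, func: t.Callable[[t.Any], float], name: str | None = None) -> None:
        self.func = func
        self.name = name or getattr(func, "__name__", "scorer")

    def __call__(self, obj: t.Any) -> float:
        return self.func(obj)


def normalize_scorers(scorers: t.Sequence["Scorer | t.Callable[[t.Any], float]"]) -> list[Scorer]:
    # Mirrors the `scorer if isinstance(scorer, dn.Scorer) else dn.Scorer(scorer)` expression:
    # pass instances through untouched, wrap bare callables in the constructor.
    return [s if isinstance(s, Scorer) else Scorer(s) for s in scorers]


def length_score(chat: t.Any) -> float:
    # Toy metric: score by the length of the chat's string form.
    return float(len(str(chat)))


normalized = normalize_scorers([length_score, Scorer(length_score, name="explicit")])
print([s.name for s in normalized])  # ['length_score', 'explicit']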

rigging/generator/transformers_.py

Lines changed: 2 additions & 1 deletion
@@ -116,8 +116,9 @@ def from_obj(
 
         Args:
             model: The loaded model for text generation.
-            tokenizer : The tokenizer associated with the model.
+            tokenizer: The tokenizer associated with the model.
             pipeline: The text generation pipeline. Defaults to None.
+            params: Generation parameters. Defaults to None.
 
         Returns:
             The TransformersGenerator instance.

rigging/model.py

Lines changed: 1 addition & 1 deletion
@@ -357,7 +357,7 @@ def xml_example(cls) -> str:
             return f"<{cls.__xml_tag__}>{escape_xml(example)}</{cls.__xml_tag__}>"
 
         lines = []
-        attribute_parts = []
+        attribute_parts: list[str] = []
         element_fields = {}
 
         for field_name, field_info in cls.model_fields.items():
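
The only change in rigging/model.py and its docs mirror is annotating the empty list, which gives strict type checkers the element type they cannot infer from a bare `[]`. A tiny illustration of the pattern (hypothetical function, not taken from rigging):

def render_attributes(enabled: bool) -> str:
    # With a bare `attribute_parts = []`, mypy in strict mode can report
    # "Need type annotation" because the empty literal carries no element type;
    # the explicit annotation settles it at the assignment site.
    attribute_parts: list[str] = []
    if enabled:
        attribute_parts.append('enabled="true"')
    return " ".join(attribute_parts)


print(render_attributes(True))   # enabled="true"
print(render_attributes(False))  # prints an empty line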
