
Commit e5954e9

fix: fix dataclass decorator for generators
1 parent: c0df143

File tree

5 files changed (+5, −25 lines)
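
All five generator subclasses drop the redundant @dataclass decorator and the duplicated llm_client field. Below is a minimal sketch of why this works, assuming (as the diff suggests) that BaseGenerator is itself a dataclass that already declares llm_client; the BaseLLMClient and BaseGenerator classes here are stand-ins for the real graphgen.bases types, not their actual definitions.

# Minimal sketch with stand-in names, assuming BaseGenerator is itself a
# dataclass that already declares llm_client. A subclass inherits the
# generated __init__, so re-applying @dataclass and redeclaring the field
# is redundant and can drift out of sync with the base definition.
from dataclasses import dataclass
from typing import Optional


@dataclass
class BaseLLMClient:               # stand-in for graphgen.bases.BaseLLMClient
    model: str = "stub"


@dataclass
class BaseGenerator:               # stand-in for graphgen.bases.BaseGenerator
    llm_client: Optional[BaseLLMClient] = None


class AggregatedGenerator(BaseGenerator):
    # No @dataclass and no llm_client redeclaration needed: the inherited
    # dataclass __init__ already accepts and stores llm_client.
    pass


gen = AggregatedGenerator(llm_client=BaseLLMClient(model="demo"))
print(gen.llm_client)              # BaseLLMClient(model='demo')
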

graphgen/models/generator/aggregated_generator.py

Lines changed: 1 addition & 5 deletions
@@ -1,12 +1,10 @@
-from dataclasses import dataclass
 from typing import Any
 
-from graphgen.bases import BaseGenerator, BaseLLMClient
+from graphgen.bases import BaseGenerator
 from graphgen.templates import AGGREGATED_GENERATION_PROMPT
 from graphgen.utils import compute_content_hash, detect_main_language, logger
 
 
-@dataclass
 class AggregatedGenerator(BaseGenerator):
     """
     Aggregated Generator follows a TWO-STEP process:
@@ -15,8 +13,6 @@ class AggregatedGenerator(BaseGenerator):
     2. question generation: Generate relevant questions based on the rephrased text.
     """
 
-    llm_client: BaseLLMClient = None
-
     @staticmethod
     def build_prompt(
         batch: tuple[list[tuple[str, dict]], list[tuple[Any, Any, dict]]]
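
The AggregatedGenerator docstring describes a two-step flow: rephrase the source text, then generate questions from the rephrased text. The sketch below only illustrates that idea; the function names, prompts, and wiring are hypothetical and are not GraphGen's actual API.

# Purely illustrative sketch of a two-step flow (rephrase, then question
# generation). Names and prompts are hypothetical, not GraphGen's real API.
from typing import Callable


def rephrase(llm: Callable[[str], str], text: str) -> str:
    # Step 1: ask the LLM client to rephrase the input text.
    return llm(f"Rephrase the following text:\n{text}")


def generate_questions(llm: Callable[[str], str], rephrased: str) -> list[str]:
    # Step 2: ask for questions answerable from the rephrased text.
    raw = llm(f"Write questions answerable from this text:\n{rephrased}")
    return [line.strip() for line in raw.splitlines() if line.strip()]


def aggregated_generate(llm: Callable[[str], str], text: str) -> dict:
    rephrased = rephrase(llm, text)
    return {"rephrased": rephrased, "questions": generate_questions(llm, rephrased)}


if __name__ == "__main__":
    toy_llm = lambda prompt: "What is the topic?\nWhy does the rephrasing matter?"
    print(aggregated_generate(toy_llm, "Example source text about a topic."))
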

graphgen/models/generator/atomic_generator.py

Lines changed: 1 addition & 5 deletions
@@ -1,15 +1,11 @@
-from dataclasses import dataclass
 from typing import Any
 
-from graphgen.bases import BaseGenerator, BaseLLMClient
+from graphgen.bases import BaseGenerator
 from graphgen.templates import ATOMIC_GENERATION_PROMPT
 from graphgen.utils import compute_content_hash, detect_main_language, logger
 
 
-@dataclass
 class AtomicGenerator(BaseGenerator):
-    llm_client: BaseLLMClient = None
-
     @staticmethod
     def build_prompt(
         batch: tuple[list[tuple[str, dict]], list[tuple[Any, Any, dict]]]

graphgen/models/generator/cot_generator.py

Lines changed: 1 addition & 5 deletions
@@ -1,15 +1,11 @@
-from dataclasses import dataclass
 from typing import Any
 
-from graphgen.bases import BaseGenerator, BaseLLMClient
+from graphgen.bases import BaseGenerator
 from graphgen.templates import COT_GENERATION_PROMPT
 from graphgen.utils import compute_content_hash, detect_main_language, logger
 
 
-@dataclass
 class CoTGenerator(BaseGenerator):
-    llm_client: BaseLLMClient = None
-
     @staticmethod
     def build_prompt(
         batch: tuple[list[tuple[str, dict]], list[tuple[Any, Any, dict]]]

graphgen/models/generator/multi_hop_generator.py

Lines changed: 1 addition & 5 deletions
@@ -1,15 +1,11 @@
-from dataclasses import dataclass
 from typing import Any
 
-from graphgen.bases import BaseGenerator, BaseLLMClient
+from graphgen.bases import BaseGenerator
 from graphgen.templates import MULTI_HOP_GENERATION_PROMPT
 from graphgen.utils import compute_content_hash, detect_main_language, logger
 
 
-@dataclass
 class MultiHopGenerator(BaseGenerator):
-    llm_client: BaseLLMClient = None
-
     @staticmethod
     def build_prompt(
         batch: tuple[list[tuple[str, dict]], list[tuple[Any, Any, dict]]]

graphgen/models/generator/vqa_generator.py

Lines changed: 1 addition & 5 deletions
@@ -1,15 +1,11 @@
-from dataclasses import dataclass
 from typing import Any
 
-from graphgen.bases import BaseGenerator, BaseLLMClient
+from graphgen.bases import BaseGenerator
 from graphgen.templates import VQA_GENERATION_PROMPT
 from graphgen.utils import compute_content_hash, detect_main_language, logger
 
 
-@dataclass
 class VQAGenerator(BaseGenerator):
-    llm_client: BaseLLMClient = None
-
     @staticmethod
     def build_prompt(
         batch: tuple[list[tuple[str, dict]], list[tuple[Any, Any, dict]]]
