Skip to content

Commit 9a56fdc

Browse files
fix: fix cyclic import
1 parent e70fe78 commit 9a56fdc

File tree

3 files changed

+3
-3
lines changed

3 files changed

+3
-3
lines changed

graphgen/operators/split_graph.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -224,7 +224,7 @@ def _sort_edges(edges: list, edge_sampling: str) -> list:
224224
raise ValueError(f"Invalid edge sampling: {edge_sampling}")
225225
return edges
226226

227-
async def get_batches_with_strategy( # pylint: disable=too-many-arguments
227+
async def get_batches_with_strategy( # pylint: disable=too-many-branches
228228
nodes: list,
229229
edges: list,
230230
graph_storage: NetworkXStorage,

models/evaluate/uni_evaluator.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
import torch.multiprocessing as mp
88

99
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
10-
from models import TextPair
10+
from models.text.text_pair import TextPair
1111

1212

1313
def _add_questions(dimension: str, question: str, answer: str):

models/llm/openai_model.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
retry_if_exception_type,
1111
)
1212

13-
from models import TopkTokenModel, Token
13+
from models.llm.topk_token_model import TopkTokenModel, Token
1414

1515

1616
def get_top_response_tokens(response: openai.ChatCompletion) -> List[Token]:

0 commit comments

Comments (0)