
Commit 1a49aaa

fix: fix linter issue
1 parent: 2c9973d

File tree: 5 files changed (+18, -13 lines)


.github/workflows/CI.yml

Lines changed: 3 additions & 0 deletions
@@ -4,6 +4,9 @@ on:
   workflow_dispatch:
   pull_request:
     branches: ["main"]
+  push:
+    branches:
+      - main
 
 permissions:
   contents: read
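
Net effect of this hunk: the workflow now also runs on direct pushes to the main branch, in addition to manual dispatch and pull requests targeting main.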

examples/langchain_variable.py

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@
 cb = lai.langchain_callback()
 
 # Returns a langchain_openai.ChatOpenAI instance.
-gpt_4o = init_chat_model(
+gpt_4o = init_chat_model(  # type: ignore
     model_provider=prompt.provider,
     **prompt.settings,
 )
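
Why the suppression is plausibly needed (an assumption based on langchain's public API; the commit does not spell it out): init_chat_model is annotated to return the broad BaseChatModel, so code that relies on getting a concrete langchain_openai.ChatOpenAI back, as the comment above promises, trips strict type checkers, and the "# type: ignore" silences that mismatch without changing runtime behavior. A minimal standalone sketch; the model and provider values are invented for illustration:

    # Minimal sketch, assuming langchain >= 0.2 (where init_chat_model exists)
    # and langchain-openai installed for the "openai" provider.
    from langchain.chat_models import init_chat_model

    gpt_4o = init_chat_model(  # type: ignore
        model="gpt-4o",  # illustrative values, not taken from the commit
        model_provider="openai",
    )
    print(type(gpt_4o).__name__)  # a ChatOpenAI instance at runtime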

literalai/instrumentation/llamaindex/event_handler.py

Lines changed: 3 additions & 3 deletions
@@ -144,8 +144,8 @@ def extract_query(x: Union[str, QueryBundle]):
 class LiteralEventHandler(BaseEventHandler):
     """This class handles events coming from LlamaIndex."""
 
-    _client: "LiteralClient" = PrivateAttr(...)
-    _span_handler: "LiteralSpanHandler" = PrivateAttr(...)
+    _client: "LiteralClient" = PrivateAttr()
+    _span_handler: "LiteralSpanHandler" = PrivateAttr()
     runs: Dict[str, List[Step]] = {}
     streaming_run_ids: List[str] = []
     _standalone_step_id: Optional[str] = None

@@ -238,7 +238,7 @@ def handle(self, event: BaseEvent, **kwargs) -> None:
             thread_id=thread_id,
             content=query,
         )
-
+
         # Retrieval wraps the Embedding step in LlamaIndex
         if isinstance(event, RetrievalStartEvent):
            run = self._client.start_step(
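
For context, a minimal sketch of what the PrivateAttr change does (assuming Pydantic semantics, on which this handler is built): PrivateAttr() declares a private attribute with no default, to be assigned later, whereas PrivateAttr(...) stores the Ellipsis object itself as the default, which is almost certainly unintended and is the kind of thing a linter flags. The Handler class below is hypothetical:

    from pydantic import BaseModel, PrivateAttr

    class Handler(BaseModel):
        _client: str = PrivateAttr()  # no default; assigned in __init__
        _label: str = PrivateAttr(default="span")  # how an explicit default looks

        def __init__(self, client: str, **data):
            super().__init__(**data)
            self._client = client  # private attrs are set after super().__init__()

    h = Handler(client="literal")
    print(h._client, h._label)  # -> literal span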

literalai/observability/generation.py

Lines changed: 8 additions & 6 deletions
@@ -67,15 +67,16 @@ class BaseGeneration(Utils):
     to_dict(self) -> Dict:
         Converts the generation object to a dictionary.
     """
+
     id: Optional[str] = None
     prompt_id: Optional[str] = None
     provider: Optional[str] = None
     model: Optional[str] = None
     error: Optional[str] = None
-    settings: Optional[Dict] = Field(default_factory=dict)
-    variables: Optional[Dict] = Field(default_factory=dict)
-    tags: Optional[List[str]] = Field(default_factory=list)
-    metadata: Optional[Dict] = Field(default_factory=dict)
+    settings: Optional[Dict] = Field(default_factory=lambda: {})
+    variables: Optional[Dict] = Field(default_factory=lambda: {})
+    tags: Optional[List[str]] = Field(default_factory=lambda: [])
+    metadata: Optional[Dict] = Field(default_factory=lambda: {})
     tools: Optional[List[Dict]] = None
     token_count: Optional[int] = None
     input_token_count: Optional[int] = None

@@ -129,6 +130,7 @@ class CompletionGeneration(BaseGeneration, Utils):
         completion (Optional[str]): The generated completion text.
         type (GenerationType): The type of generation, which is set to GenerationType.COMPLETION.
     """
+
     prompt: Optional[str] = None
     completion: Optional[str] = None
     type = GenerationType.COMPLETION

@@ -177,8 +179,9 @@ class ChatGeneration(BaseGeneration, Utils):
         message_completion (Optional[GenerationMessage]): The completion message of the chat generation.
         type (GenerationType): The type of generation, which is set to GenerationType.CHAT.
     """
+
     type = GenerationType.CHAT
-    messages: Optional[List[GenerationMessage]] = Field(default_factory=list)
+    messages: Optional[List[GenerationMessage]] = Field(default_factory=lambda: [])
     message_completion: Optional[GenerationMessage] = None
 
     def to_dict(self):

@@ -213,4 +216,3 @@ def from_dict(self, generation_dict: Dict):
             messages=generation_dict.get("messages", []),
             message_completion=generation_dict.get("messageCompletion"),
         )
-
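
The switch from the bare dict/list constructors to lambdas returning literals is presumably the linter fix named in the commit message (an assumption; the message does not say which rule fired): some checkers reject default_factory=dict for a field annotated Optional[Dict], because the factory's inferred return type is not the Optional-wrapped annotation, while a lambda returning a literal checks cleanly. Runtime behavior is identical, as this hypothetical sketch shows: each instance still gets its own fresh empty container.

    from typing import Dict, List, Optional
    from pydantic import BaseModel, Field

    class Generation(BaseModel):
        settings: Optional[Dict] = Field(default_factory=lambda: {})
        tags: Optional[List[str]] = Field(default_factory=lambda: [])

    a, b = Generation(), Generation()
    assert a.settings == {} and a.settings is not b.settings  # fresh dict per instance
    assert a.tags == [] and a.tags is not b.tags  # fresh list per instance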

literalai/prompt_engineering/prompt.py

Lines changed: 3 additions & 3 deletions
@@ -67,7 +67,7 @@ class Prompt(Utils):
 
     Attributes
     ----------
-    template_messages : List[GenerationMessage]
+    template_messages : List[GenerationMessage]
         The messages that make up the prompt. Messages can be of type `text` or `image`.
         Messages can reference variables.
     variables : List[PromptVariable]

@@ -214,9 +214,9 @@ def to_langchain_chat_prompt_template(self, additional_messages=[]):
 
     class CustomChatPromptTemplate(ChatPromptTemplate):
         orig_messages: Optional[List[GenerationMessage]] = Field(
-            default_factory=list
+            default_factory=lambda: []
         )
-        default_vars: Optional[Dict] = Field(default_factory=dict)
+        default_vars: Optional[Dict] = Field(default_factory=lambda: {})
         prompt_id: Optional[str] = None
 
         def format_messages(self, **kwargs: Any) -> List[BaseMessage]:
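
The first hunk here appears to be a whitespace-only cleanup (the removed and added template_messages lines look identical, so the difference is presumably trailing whitespace); the second applies the same lambda default_factory pattern as in generation.py above.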
