Skip to content
This repository was archived by the owner on Aug 5, 2025. It is now read-only.

Commit cd3dc94

Browse files
Merge pull request #155 from Chainlit/clement/bump-version-09-12
chore: bump version
2 parents 87274ae + 2141421 commit cd3dc94

File tree

7 files changed

+32
-30
lines changed

7 files changed

+32
-30
lines changed

.github/workflows/CI.yml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,9 @@ on:
44
workflow_dispatch:
55
pull_request:
66
branches: ["main"]
7+
push:
8+
branches:
9+
- main
710

811
permissions:
912
contents: read

examples/langchain_variable.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@
3232
cb = lai.langchain_callback()
3333

3434
# Returns a langchain_openai.ChatOpenAI instance.
35-
gpt_4o = init_chat_model(
35+
gpt_4o = init_chat_model( # type: ignore
3636
model_provider=prompt.provider,
3737
**prompt.settings,
3838
)

literalai/instrumentation/llamaindex/event_handler.py

Lines changed: 15 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -100,7 +100,7 @@ def extract_document_info(nodes: List[NodeWithScore]):
100100

101101

102102
def build_message_dict(message: ChatMessage):
103-
message_dict = {
103+
message_dict: GenerationMessage = {
104104
"role": convert_message_role(message.role),
105105
"content": message.content,
106106
}
@@ -144,8 +144,8 @@ def extract_query(x: Union[str, QueryBundle]):
144144
class LiteralEventHandler(BaseEventHandler):
145145
"""This class handles events coming from LlamaIndex."""
146146

147-
_client: "LiteralClient" = PrivateAttr(...)
148-
_span_handler: "LiteralSpanHandler" = PrivateAttr(...)
147+
_client: "LiteralClient" = PrivateAttr()
148+
_span_handler: "LiteralSpanHandler" = PrivateAttr()
149149
runs: Dict[str, List[Step]] = {}
150150
streaming_run_ids: List[str] = []
151151
_standalone_step_id: Optional[str] = None
@@ -163,21 +163,18 @@ def __init__(
163163
object.__setattr__(self, "_client", literal_client)
164164
object.__setattr__(self, "_span_handler", llama_index_span_handler)
165165

166-
def _convert_message(
167-
self,
168-
message: ChatMessage,
169-
):
166+
def _convert_message(self, message: ChatMessage):
170167
tool_calls = message.additional_kwargs.get("tool_calls")
171-
msg = GenerationMessage(
172-
name=getattr(message, "name", None),
173-
role=convert_message_role(message.role),
174-
content="",
175-
)
176-
177-
msg["content"] = message.content
178-
179-
if tool_calls:
180-
msg["tool_calls"] = [tool_call.to_dict() for tool_call in tool_calls]
168+
msg: GenerationMessage = {
169+
"name": getattr(message, "name", None),
170+
"role": convert_message_role(message.role),
171+
"content": message.content,
172+
"tool_calls": (
173+
[tool_call.to_dict() for tool_call in tool_calls]
174+
if tool_calls
175+
else None
176+
),
177+
}
181178

182179
return msg
183180

@@ -238,7 +235,7 @@ def handle(self, event: BaseEvent, **kwargs) -> None:
238235
thread_id=thread_id,
239236
content=query,
240237
)
241-
238+
242239
# Retrieval wraps the Embedding step in LlamaIndex
243240
if isinstance(event, RetrievalStartEvent):
244241
run = self._client.start_step(

literalai/observability/generation.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -67,15 +67,16 @@ class BaseGeneration(Utils):
6767
to_dict(self) -> Dict:
6868
Converts the generation object to a dictionary.
6969
"""
70+
7071
id: Optional[str] = None
7172
prompt_id: Optional[str] = None
7273
provider: Optional[str] = None
7374
model: Optional[str] = None
7475
error: Optional[str] = None
75-
settings: Optional[Dict] = Field(default_factory=dict)
76-
variables: Optional[Dict] = Field(default_factory=dict)
77-
tags: Optional[List[str]] = Field(default_factory=list)
78-
metadata: Optional[Dict] = Field(default_factory=dict)
76+
settings: Optional[Dict] = Field(default_factory=lambda: {})
77+
variables: Optional[Dict] = Field(default_factory=lambda: {})
78+
tags: Optional[List[str]] = Field(default_factory=lambda: [])
79+
metadata: Optional[Dict] = Field(default_factory=lambda: {})
7980
tools: Optional[List[Dict]] = None
8081
token_count: Optional[int] = None
8182
input_token_count: Optional[int] = None
@@ -129,6 +130,7 @@ class CompletionGeneration(BaseGeneration, Utils):
129130
completion (Optional[str]): The generated completion text.
130131
type (GenerationType): The type of generation, which is set to GenerationType.COMPLETION.
131132
"""
133+
132134
prompt: Optional[str] = None
133135
completion: Optional[str] = None
134136
type = GenerationType.COMPLETION
@@ -177,8 +179,9 @@ class ChatGeneration(BaseGeneration, Utils):
177179
message_completion (Optional[GenerationMessage]): The completion message of the chat generation.
178180
type (GenerationType): The type of generation, which is set to GenerationType.CHAT.
179181
"""
182+
180183
type = GenerationType.CHAT
181-
messages: Optional[List[GenerationMessage]] = Field(default_factory=list)
184+
messages: Optional[List[GenerationMessage]] = Field(default_factory=lambda: [])
182185
message_completion: Optional[GenerationMessage] = None
183186

184187
def to_dict(self):
@@ -213,4 +216,3 @@ def from_dict(self, generation_dict: Dict):
213216
messages=generation_dict.get("messages", []),
214217
message_completion=generation_dict.get("messageCompletion"),
215218
)
216-

literalai/prompt_engineering/prompt.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ class Prompt(Utils):
6767
6868
Attributes
6969
----------
70-
template_messages : List[GenerationMessage]
70+
template_messages : List[GenerationMessage]
7171
The messages that make up the prompt. Messages can be of type `text` or `image`.
7272
Messages can reference variables.
7373
variables : List[PromptVariable]
@@ -214,9 +214,9 @@ def to_langchain_chat_prompt_template(self, additional_messages=[]):
214214

215215
class CustomChatPromptTemplate(ChatPromptTemplate):
216216
orig_messages: Optional[List[GenerationMessage]] = Field(
217-
default_factory=list
217+
default_factory=lambda: []
218218
)
219-
default_vars: Optional[Dict] = Field(default_factory=dict)
219+
default_vars: Optional[Dict] = Field(default_factory=lambda: {})
220220
prompt_id: Optional[str] = None
221221

222222
def format_messages(self, **kwargs: Any) -> List[BaseMessage]:

literalai/version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = "0.1.102"
1+
__version__ = "0.1.103"

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
setup(
44
name="literalai",
5-
version="0.1.102", # update version in literalai/version.py
5+
version="0.1.103", # update version in literalai/version.py
66
description="An SDK for observability in Python applications",
77
long_description=open("README.md").read(),
88
long_description_content_type="text/markdown",

0 commit comments

Comments (0)