@@ -83,15 +83,13 @@ defmodule HyperLLM.Chat do
8383
8484 ## Examples
8585
86- iex> chat = HyperLLM.Chat.start(model: "gpt-4o-mini")
87- %HyperLLM.Chat{messages: [], provider: HyperLLM.Provider.OpenAI, config: [model: "gpt-4o-mini"]}
88- iex> HyperLLM.Chat.append(chat, ["Hello", "World"])
89- %HyperLLM.Chat{messages: [%HyperLLM.Chat.Message{role: :user, content: "Hello"}, %HyperLLM.Chat.Message{role: :user, content: "World"}], provider: HyperLLM.Provider.OpenAI, config: [model: "gpt-4o-mini"]}
86+ iex> chat = HyperLLM.Chat.start(model: "gpt-4o-mini")
87+ iex> HyperLLM.Chat.append(chat, ["Hello", "World"])
88+ %HyperLLM.Chat{messages: [%HyperLLM.Chat.Message{role: :user, content: "Hello"}, %HyperLLM.Chat.Message{role: :user, content: "World"}], provider: HyperLLM.Provider.OpenAI, config: [model: "gpt-4o-mini"]}
9089
91- iex> chat = HyperLLM.Chat.start(model: "gpt-4o-mini")
92- %HyperLLM.Chat{messages: [], provider: HyperLLM.Provider.OpenAI, config: [model: "gpt-4o-mini"]}
93- iex> HyperLLM.Chat.append(chat, "Hello")
94- %HyperLLM.Chat{messages: [%HyperLLM.Chat.Message{role: :user, content: "Hello"}], provider: HyperLLM.Provider.OpenAI, config: [model: "gpt-4o-mini"]}
90+ iex> chat = HyperLLM.Chat.start(model: "gpt-4o-mini")
91+ iex> HyperLLM.Chat.append(chat, "Hello")
92+ %HyperLLM.Chat{messages: [%HyperLLM.Chat.Message{role: :user, content: "Hello"}], provider: HyperLLM.Provider.OpenAI, config: [model: "gpt-4o-mini"]}
9593 """
9694 @spec append(t(), [Message.t()]) :: t()
9795 def append(%__MODULE__{} = chat, messages) when is_list(messages) do
0 commit comments