Skip to content

Commit 101e541

Browse files
committed
feat: PoC for sending multiple messages, ref #65
1 parent e8f9021 commit 101e541

File tree

3 files changed

+69
-5
lines changed

3 files changed

+69
-5
lines changed

ac-local-plugin/code/LocalLlama.cpp

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,9 @@
3838
#include "aclp-llama-version.h"
3939
#include "aclp-llama-interface.hpp"
4040

41+
// TODO: remove this include
42+
#include <iostream>
43+
4144
namespace ac::local {
4245

4346
namespace {
@@ -81,6 +84,15 @@ class ChatSession {
8184
m_instance.stopSession();
8285
}
8386

87+
// Handle the "send-messages" op: accept a batch of chat messages and ack
// with an empty return frame. PoC for multi-message sends (ref #65).
// NOTE(review): messages are currently only logged — they are not yet fed
// into the llama chat session itself.
xec::coro<void> sendMessages(Schema::OpSendMessages::Params& params) {
    auto& messages = params.messages.value();
    for (const auto& msg : messages) {
        // TODO: remove debug output (and the temporary <iostream> include)
        std::cout << msg.role.value() << ": " << msg.content.value() << "\n";
    }
    // Acknowledge the op with an empty (nullptr) payload.
    co_await m_io.push(Frame_from(schema::SimpleOpReturn<Schema::OpSendMessages>{}, {}));
}
95+
8496
xec::coro<void> pushPrompt(Schema::OpAddChatPrompt::Params& params) {
8597
auto& prompt = params.prompt.value();
8698

@@ -441,6 +453,8 @@ struct LocalLlama {
441453
co_await chatSession.getResponse(*iparams, false);
442454
} else if (auto iparams = Frame_optTo(schema::OpParams<Schema::OpStreamChatResponse>{}, *f)) {
443455
co_await chatSession.getResponse(*iparams, true);
456+
} else if (auto iparams = Frame_optTo(schema::OpParams<Schema::OpSendMessages>{}, *f)) {
457+
co_await chatSession.sendMessages(*iparams);
444458
} else {
445459
err = unknownOpError(*f);
446460
}

ac-local-plugin/example/ep-chat.cpp

Lines changed: 24 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,8 @@ int main() try {
3131
std::cout << "Initial state: " << sid << '\n';
3232

3333
for (auto x : llama.stream<schema::StateLlama::OpLoadModel>({
34-
.ggufPath = AC_TEST_DATA_LLAMA_DIR "/gpt2-117m-q6_k.gguf"
34+
// .ggufPath = AC_TEST_DATA_LLAMA_DIR "/gpt2-117m-q6_k.gguf"
35+
.ggufPath = AC_TEST_DATA_LLAMA_DIR "/../../../tmp/Meta-Llama-3.1-8B-Instruct-Q6_K.gguf"
3536
})) {
3637
std::cout << "Model loaded: " << x.tag.value() << " " << x.progress.value() << '\n';
3738
}
@@ -42,11 +43,23 @@ int main() try {
4243
sid = llama.call<schema::StateModelLoaded::OpStartInstance>({
4344
.instanceType = "chat",
4445
.setup = "A chat between a human user and a helpful AI assistant.",
45-
.roleUser = roleUser,
46-
.roleAssistant = roleAssistant
46+
// .roleUser = roleUser,
47+
// .roleAssistant = roleAssistant
4748
});
4849
std::cout << "Instance started: " << sid << '\n';
4950

51+
std::vector<schema::Message> initMessages = {
52+
{roleUser, "I need assistance for API design"},
53+
{roleAssistant, "What aspect of API design are you looking for help with? Do you have a specific problem or question in mind?"},
54+
{roleUser, "It's a C++ implementation of a class"},
55+
};
56+
57+
llama.call<schema::StateChatInstance::OpSendMessages>({
58+
.messages = initMessages
59+
});
60+
61+
std::vector<schema::Message> messages;
62+
5063
while (true) {
5164
std::cout << roleUser <<": ";
5265
std::string user;
@@ -55,19 +68,26 @@ int main() try {
5568
}
5669
if (user == "/quit") break;
5770
user = ' ' + user;
71+
messages.push_back({roleUser, user});
72+
5873
llama.call<schema::StateChatInstance::OpAddChatPrompt>({
5974
.prompt = user
6075
});
6176

77+
std::string text;
6278
std::cout << roleAssistant << ": ";
6379
constexpr bool streamChat = false;
6480
if (streamChat) {
6581
for(auto t: llama.stream<schema::StateChatInstance::OpStreamChatResponse>({})) {
82+
text += t;
6683
std::cout << t << std::flush;
6784
}
6885
} else {
69-
std::cout << llama.call<schema::StateChatInstance::OpGetChatResponse>({}).response.value() << std::flush;
86+
auto res = llama.call<schema::StateChatInstance::OpGetChatResponse>({});
87+
text += res.response.value();
88+
std::cout << res.response.value() << std::flush;
7089
}
90+
messages.push_back({roleUser, text});
7191
std::cout << "\n";
7292
}
7393

ac-local-plugin/schema/ac/schema/LlamaCpp.hpp

Lines changed: 31 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,20 @@ struct StreamToken {
1919
using Type = std::string;
2020
};
2121

22+
struct Message {
23+
static constexpr auto id = "chat-message";
24+
static constexpr auto desc = "Chat message";
25+
26+
Field<std::string> role;
27+
Field<std::string> content;
28+
29+
template <typename Visitor>
30+
void visitFields(Visitor& v) {
31+
v(role, "role", "Messages to add to the chat session");
32+
v(content, "content", "Messages to add to the chat session");
33+
}
34+
};
35+
2236
struct StateLlama {
2337
static constexpr auto id = "llama.cpp";
2438
static constexpr auto desc = "Initial state";
@@ -68,6 +82,7 @@ struct StateModelLoaded {
6882
Field<std::vector<std::string>> ctrlVectorPaths = Default();
6983

7084
Field<std::string> setup = Default();
85+
Field<std::string> chatTemplate = Default();
7186
Field<std::string> roleUser = Default("User");
7287
Field<std::string> roleAssistant = Default("Assistant");
7388

@@ -93,7 +108,6 @@ struct StateModelLoaded {
93108
};
94109

95110
using Ops = std::tuple<OpStartInstance>;
96-
97111
};
98112

99113
struct StateGeneralInstance {
@@ -206,6 +220,22 @@ struct StateChatInstance {
206220
static constexpr auto id = "chat-instance";
207221
static constexpr auto desc = "Chat state";
208222

223+
struct OpSendMessages {
224+
static inline constexpr std::string_view id = "send-messages";
225+
static inline constexpr std::string_view desc = "Send messages to the chat session";
226+
227+
struct Params {
228+
Field<std::vector<Message>> messages;
229+
230+
template <typename Visitor>
231+
void visitFields(Visitor& v) {
232+
v(messages, "messages", "Messages to add to the chat session");
233+
}
234+
};
235+
236+
using Return = nullptr_t;
237+
};
238+
209239
struct OpAddChatPrompt {
210240
static inline constexpr std::string_view id = "add-chat-prompt";
211241
static inline constexpr std::string_view desc = "Add a prompt to the chat session as a user";

0 commit comments

Comments
 (0)