Skip to content

Commit 467fc03

Browse files
committed
fix(build): fix msvc build
1 parent 8fbd7e5 commit 467fc03

File tree

3 files changed

+4
-3
lines changed

3 files changed

+4
-3
lines changed

code/ac/llama/ChatFormat.hpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
#include "export.h"
66
#include "ChatMsg.hpp"
77

8+
#include <memory>
89
#include <span>
910

1011
struct llama_chat_message;

example/e-rag.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -202,7 +202,7 @@ std::string retrieveKnowledge(const std::string& query, VectorDatabase<Document>
202202

203203
std::string generateResponse(ac::llama::Session& session, const std::string& prompt, VectorDatabase<Document>& vdb, int maxTokens = 512) {
204204
ac::llama::ChatFormat chatFormat("llama3");
205-
ac::llama::ChatMsg msg{.text = prompt, .role = "user"};
205+
ac::llama::ChatMsg msg{.role = "user", .text = prompt};
206206

207207
// 1. Fill the context with the relevant recipes
208208
const std::string systemPrompt = "You are a recipe assistant. Given the following relevant recipes, select the most relevant one or paraphrase it:\n";
@@ -233,7 +233,7 @@ std::string generateResponse(ac::llama::Session& session, const std::string& pro
233233
response += g_chatInstance->model().vocab().tokenToString(token);
234234
}
235235

236-
g_messages.emplace_back(ac::llama::ChatMsg{.text = response, .role = "system"});
236+
g_messages.emplace_back(ac::llama::ChatMsg{.role = "system", .text = response});
237237

238238
return response;
239239
}

test/t-ChatFormat.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
#include <astl/u8c.h>
88
#include <doctest/doctest.h>
99
#include <vector>
10-
10+
#include <regex>
1111
#include <iostream>
1212

1313
#include "ac-test-data-llama-dir.h"

0 commit comments

Comments (0)