Skip to content

Commit 31555d7

Browse files
Merge pull request #56 from lucaromagnoli/feature/response-parser
Feature/response parser
2 parents 54f6536 + 102d793 commit 31555d7

16 files changed

+1263
-23
lines changed

CMakeLists.txt

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,7 @@ set(LLMCPP_SOURCES
6464
src/core/LLMClient.cpp
6565
src/core/JsonSchemaBuilder.cpp
6666
src/core/ClientFactory.cpp
67+
src/core/ResponseParser.cpp
6768
src/providers/ClientManager.cpp
6869
src/providers/ClientFactory.cpp
6970
src/openai/OpenAIClient.cpp
@@ -74,6 +75,7 @@ set(LLMCPP_SOURCES
7475
src/openai/OpenAIUtils.cpp
7576
src/anthropic/AnthropicClient.cpp
7677
src/anthropic/AnthropicHttpClient.cpp
78+
src/anthropic/AnthropicSchemaBuilder.cpp
7779
)
7880

7981
# Create library
@@ -112,6 +114,8 @@ if(LLMCPP_BUILD_EXAMPLES)
112114
add_subdirectory(examples)
113115
endif()
114116

117+
118+
115119
# Add tests if requested
116120
if(LLMCPP_BUILD_TESTS)
117121
add_subdirectory(tests)

debug_test.cpp

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
#include <iostream>

#include "core/ResponseParser.h"
#include "llmcpp.h"

// Standalone repro for the failing ResponseParser test: feeds the exact
// Anthropic-style text payload through parseStructuredResponse and prints
// every parsed result so the extraction behavior can be inspected directly.
int main() {
    // Exact same input as failing test
    std::string payload =
        "I'll create several jazzy chord progressions...\n\n<musical_aideas-musical_sequence>\n";
    payload += R"([
{
"description": "Test progression",
"sequence": [
{"note": 60, "start": 0.0, "duration": 1.0, "velocity": 90},
{"note": 64, "start": 1.0, "duration": 1.0, "velocity": 90},
{"note": 67, "start": 2.0, "duration": 2.0, "velocity": 90}
]
}
])";

    // Create response exactly like the test
    llmcpp::LLMResponse response;
    response.success = true;
    response.result = nlohmann::json{{"text", payload}};

    // Call parseStructuredResponse exactly like aideas does
    const auto parsed = llmcpp::ResponseParser::parseStructuredResponse(
        response, "Anthropic", "musical_aideas-musical_sequence");

    std::cout << "Results count: " << parsed.size() << std::endl;
    std::size_t index = 0;
    for (const auto& item : parsed) {
        std::cout << "Result " << index << ":" << std::endl;
        std::cout << "  source: " << item.source << std::endl;
        std::cout << "  description: " << item.description << std::endl;
        std::cout << "  data: " << item.data.dump(2) << std::endl;
        ++index;
    }

    return 0;
}
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
#pragma once

#include <climits>  // INT_MIN / INT_MAX — used as sentinel default arguments below
#include <cmath>    // INFINITY — used as sentinel default argument below

#include <nlohmann/json.hpp>
#include <string>
#include <vector>

// NOTE(review): a global `using json` alias in a public header leaks into every
// translation unit that includes it; kept as-is for source compatibility with
// the rest of the project.
using json = nlohmann::json;

namespace Anthropic {

/**
 * @brief Schema builder for Anthropic tool definitions
 *
 * Builds JSON schemas compatible with Anthropic's tool use API
 * following JSON Schema draft 2020-12 specification.
 *
 * Setters mutate the internal schema and return *this so calls can be
 * chained; build() returns the accumulated schema document.
 */
class SchemaBuilder {
private:
    json schema_;  // accumulated JSON Schema document

public:
    SchemaBuilder();

    // Chainable builder methods — each sets the corresponding JSON Schema
    // keyword and returns *this for fluent composition.
    SchemaBuilder& setType(const std::string& type);
    SchemaBuilder& setDescription(const std::string& description);
    SchemaBuilder& setMinimum(double minimum);
    SchemaBuilder& setMaximum(double maximum);
    SchemaBuilder& setMinLength(int minLength);
    SchemaBuilder& setMaxLength(int maxLength);
    SchemaBuilder& setMinItems(int minItems);
    SchemaBuilder& setMaxItems(int maxItems);
    SchemaBuilder& setRequired(const std::vector<std::string>& required);
    SchemaBuilder& setAdditionalProperties(bool allowed);
    SchemaBuilder& setProperties(const json& properties);
    SchemaBuilder& setItems(const json& itemSchema);

    // Build final schema
    json build() const;

    // Static convenience methods for common schemas. Sentinel defaults
    // (-1, INT_MIN/INT_MAX, +/-INFINITY) mean "no constraint" for the
    // corresponding bound.
    static json buildMusicalSequenceSchema();
    static json buildObjectSchema(const json& properties,
                                  const std::vector<std::string>& required = {});
    static json buildArraySchema(const json& itemSchema, int minItems = -1, int maxItems = -1);
    static json buildStringSchema(const std::string& description = "", int minLength = -1,
                                  int maxLength = -1);
    static json buildIntegerSchema(const std::string& description = "", int minimum = INT_MIN,
                                   int maximum = INT_MAX);
    static json buildNumberSchema(const std::string& description = "", double minimum = -INFINITY,
                                  double maximum = INFINITY);
};

}  // namespace Anthropic

include/anthropic/AnthropicTypes.h

Lines changed: 147 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -98,16 +98,18 @@ inline Model modelFromString(const std::string& modelStr) {
9898
*/
9999
inline std::vector<std::string> getAvailableModels() {
100100
return {// Latest Claude 4 models
101-
"claude-opus-4-1-20250805", "claude-opus-4-20250514", "claude-sonnet-4-20250514",
101+
toString(Model::CLAUDE_OPUS_4_1), toString(Model::CLAUDE_OPUS_4),
102+
toString(Model::CLAUDE_SONNET_4),
102103

103104
// Claude 3.7
104-
"claude-3-7-sonnet-20250219",
105+
toString(Model::CLAUDE_SONNET_3_7),
105106

106107
// Claude 3.5 models
107-
"claude-3-5-sonnet-20241022", "claude-3-5-sonnet-20240620", "claude-3-5-haiku-20241022",
108+
toString(Model::CLAUDE_SONNET_3_5_V2), toString(Model::CLAUDE_SONNET_3_5),
109+
toString(Model::CLAUDE_HAIKU_3_5),
108110

109111
// Legacy Claude 3 models
110-
"claude-3-opus-20240229", "claude-3-haiku-20240307"};
112+
toString(Model::CLAUDE_OPUS_3), toString(Model::CLAUDE_HAIKU_3)};
111113
}
112114

113115
/**
@@ -127,13 +129,64 @@ inline std::string toString(MessageRole role) {
127129
}
128130

129131
/**
130-
* Anthropic message content (text-only for now)
132+
* Anthropic message content (supports text, tool_use, and tool_result)
131133
*/
132134
struct MessageContent {
133135
std::string type = "text";
134136
std::string text;
135137

136-
json toJson() const { return json{{"type", type}, {"text", text}}; }
138+
// For tool_use content
139+
std::string id;
140+
std::string name;
141+
json input;
142+
143+
// For tool_result content
144+
std::string toolUseId;
145+
json content;
146+
bool isError = false;
147+
148+
json toJson() const {
149+
if (type == "text") {
150+
return json{{"type", type}, {"text", text}};
151+
} else if (type == "tool_use") {
152+
return json{{"type", type}, {"id", id}, {"name", name}, {"input", input}};
153+
} else if (type == "tool_result") {
154+
json j = {{"type", type}, {"tool_use_id", toolUseId}, {"content", content}};
155+
if (isError) {
156+
j["is_error"] = true;
157+
}
158+
return j;
159+
}
160+
return json{{"type", type}, {"text", text}};
161+
}
162+
163+
// Convenience constructors
164+
static MessageContent createText(const std::string& txt) {
165+
MessageContent content;
166+
content.type = "text";
167+
content.text = txt;
168+
return content;
169+
}
170+
171+
static MessageContent createToolUse(const std::string& toolId, const std::string& toolName,
172+
const json& toolInput) {
173+
MessageContent content;
174+
content.type = "tool_use";
175+
content.id = toolId;
176+
content.name = toolName;
177+
content.input = toolInput;
178+
return content;
179+
}
180+
181+
static MessageContent createToolResult(const std::string& useId, const json& result,
182+
bool error = false) {
183+
MessageContent content;
184+
content.type = "tool_result";
185+
content.toolUseId = useId;
186+
content.content = result;
187+
content.isError = error;
188+
return content;
189+
}
137190
};
138191

139192
/**
@@ -159,13 +212,61 @@ struct AnthropicConfig {
159212
std::string apiKey;
160213
std::string baseUrl = "https://api.anthropic.com";
161214
std::string anthropicVersion = "2023-06-01";
162-
Model defaultModel = Model::CLAUDE_SONNET_3_5_V2; // Latest stable model
215+
Model defaultModel = Model::CLAUDE_SONNET_3_5_V2;
163216
int timeoutSeconds = 30;
164217

165218
AnthropicConfig() = default;
166219
explicit AnthropicConfig(const std::string& key) : apiKey(key) {}
167220
};
168221

222+
/**
223+
* Tool definition for function calling
224+
*/
225+
struct Tool {
226+
std::string name;
227+
std::string description;
228+
json inputSchema;
229+
230+
json toJson() const {
231+
return json{{"name", name}, {"description", description}, {"input_schema", inputSchema}};
232+
}
233+
};
234+
235+
/**
236+
* Tool use content (when model calls a tool)
237+
*/
238+
struct ToolUse {
239+
std::string type = "tool_use";
240+
std::string id;
241+
std::string name;
242+
json input;
243+
244+
json toJson() const {
245+
return json{{"type", type}, {"id", id}, {"name", name}, {"input", input}};
246+
}
247+
};
248+
249+
/**
250+
* Tool result content (response to tool use)
251+
*/
252+
struct ToolResult {
253+
std::string type = "tool_result";
254+
std::string toolUseId;
255+
json content;
256+
bool isError = false;
257+
258+
json toJson() const {
259+
json j = {{"type", type}, {"tool_use_id", toolUseId}};
260+
261+
if (isError) {
262+
j["is_error"] = true;
263+
}
264+
265+
j["content"] = content;
266+
return j;
267+
}
268+
};
269+
169270
/**
170271
* Anthropic Messages API request
171272
*/
@@ -177,6 +278,8 @@ struct MessagesRequest {
177278
std::optional<double> topP;
178279
std::optional<std::string> system;
179280
std::vector<std::string> stopSequences;
281+
std::vector<Tool> tools; // Tool definitions for function calling
282+
std::optional<std::string> toolChoice; // "auto", "any", or specific tool name
180283

181284
json toJson() const {
182285
json j = {{"model", model}, {"messages", json::array()}};
@@ -202,6 +305,15 @@ struct MessagesRequest {
202305
if (!stopSequences.empty()) {
203306
j["stop_sequences"] = stopSequences;
204307
}
308+
if (!tools.empty()) {
309+
j["tools"] = json::array();
310+
for (const auto& tool : tools) {
311+
j["tools"].push_back(tool.toJson());
312+
}
313+
}
314+
if (toolChoice.has_value()) {
315+
j["tool_choice"] = json{{"type", toolChoice.value()}};
316+
}
205317

206318
return j;
207319
}
@@ -277,20 +389,32 @@ struct MessagesResponse {
277389
/**
278390
* Convert to common LLMResponse
279391
*/
280-
LLMResponse toLLMResponse() const {
392+
LLMResponse toLLMResponse(bool expectStructuredOutput = false) const {
281393
LLMResponse response;
282394
response.success = !content.empty();
283395

284-
// Combine all text content into result JSON
396+
// Combine all text content and parse as JSON
285397
std::string fullText;
286398
for (const auto& c : content) {
287399
if (c.type == "text") {
288400
fullText += c.text;
289401
}
290402
}
291403

292-
// Store content as a simple text result
293-
response.result = json{{"text", fullText}};
404+
// For tool calls, we return the input JSON; for text responses, handle based on expectation
405+
if (!content.empty() && content[0].type == "tool_use") {
406+
// Extract tool call input as the result
407+
response.result = content[0].input;
408+
} else {
409+
// For text responses, handle based on whether structured output is expected
410+
if (expectStructuredOutput) {
411+
// Parse as JSON for structured output
412+
response.result = json::parse(fullText);
413+
} else {
414+
// Wrap free-form text in text field
415+
response.result = json{{"text", fullText}};
416+
}
417+
}
294418

295419
response.usage.inputTokens = usage.inputTokens;
296420
response.usage.outputTokens = usage.outputTokens;
@@ -337,6 +461,18 @@ struct MessagesResponse {
337461
if (contentItem.contains("text")) {
338462
content.text = contentItem["text"];
339463
}
464+
// Parse tool_use content
465+
if (content.type == "tool_use") {
466+
if (contentItem.contains("id")) {
467+
content.id = contentItem["id"];
468+
}
469+
if (contentItem.contains("name")) {
470+
content.name = contentItem["name"];
471+
}
472+
if (contentItem.contains("input")) {
473+
content.input = contentItem["input"];
474+
}
475+
}
340476
response.content.push_back(content);
341477
}
342478
}

0 commit comments

Comments
 (0)