This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit fd7d834
fix: cleanup
1 parent afed644

2 files changed: +26, -23 lines

engine/config/model_config.h

Lines changed: 26 additions & 3 deletions
@@ -15,11 +15,34 @@ namespace config {
 
  namespace {
  const std::string kOpenAITransformReqTemplate =
- R"({ {% set first = true %} {% for key, value in input_request %} {% if key == \"messages\" or key == \"model\" or key == \"temperature\" or key == \"store\" or key == \"max_tokens\" or key == \"stream\" or key == \"presence_penalty\" or key == \"metadata\" or key == \"frequency_penalty\" or key == \"tools\" or key == \"tool_choice\" or key == \"logprobs\" or key == \"top_logprobs\" or key == \"logit_bias\" or key == \"n\" or key == \"modalities\" or key == \"prediction\" or key == \"response_format\" or key == \"service_tier\" or key == \"seed\" or key == \"stop\" or key == \"stream_options\" or key == \"top_p\" or key == \"parallel_tool_calls\" or key == \"user\" %} {% if not first %},{% endif %} \"{{ key }}\": {{ tojson(value) }} {% set first = false %} {% endif %} {% endfor %} })";
+ R"({ {% set first = true %} {% for key, value in input_request %} {% if key == "messages" or key == "model" or key == "temperature" or key == "store" or key == "max_tokens" or key == "stream" or key == "presence_penalty" or key == "metadata" or key == "frequency_penalty" or key == "tools" or key == "tool_choice" or key == "logprobs" or key == "top_logprobs" or key == "logit_bias" or key == "n" or key == "modalities" or key == "prediction" or key == "response_format" or key == "service_tier" or key == "seed" or key == "stop" or key == "stream_options" or key == "top_p" or key == "parallel_tool_calls" or key == "user" %} {% if not first %},{% endif %} "{{ key }}": {{ tojson(value) }} {% set first = false %} {% endif %} {% endfor %} })";
  const std::string kOpenAITransformRespTemplate =
- R"({ {%- set first = true -%} {%- for key, value in input_request -%} {%- if key == \"id\" or key == \"choices\" or key == \"created\" or key == \"model\" or key == \"service_tier\" or key == \"system_fingerprint\" or key == \"object\" or key == \"usage\" -%} {%- if not first -%},{%- endif -%} \"{{ key }}\": {{ tojson(value) }} {%- set first = false -%} {%- endif -%} {%- endfor -%} })";
+ R"({ {%- set first = true -%} {%- for key, value in input_request -%} {%- if key == "id" or key == "choices" or key == "created" or key == "model" or key == "service_tier" or key == "system_fingerprint" or key == "object" or key == "usage" -%} {%- if not first -%},{%- endif -%} "{{ key }}": {{ tojson(value) }} {%- set first = false -%} {%- endif -%} {%- endfor -%} })";
  const std::string kAnthropicTransformReqTemplate =
- R"({ {% set first = true %} {% for key, value in input_request %} {% if key == \"system\" or key == \"messages\" or key == \"model\" or key == \"temperature\" or key == \"store\" or key == \"max_tokens\" or key == \"stream\" or key == \"presence_penalty\" or key == \"metadata\" or key == \"frequency_penalty\" or key == \"tools\" or key == \"tool_choice\" or key == \"logprobs\" or key == \"top_logprobs\" or key == \"logit_bias\" or key == \"n\" or key == \"modalities\" or key == \"prediction\" or key == \"response_format\" or key == \"service_tier\" or key == \"seed\" or key == \"stop\" or key == \"stream_options\" or key == \"top_p\" or key == \"parallel_tool_calls\" or key == \"user\" %} {% if not first %},{% endif %} \"{{ key }}\": {{ tojson(value) }} {% set first = false %} {% endif %} {% endfor %} })";
+ R"({
+ {% for key, value in input_request %}
+ {% if key == "messages" %}
+ {% if input_request.messages.0.role == "system" %}
+ "system": "{{ input_request.messages.0.content }}",
+ "messages": [
+ {% for message in input_request.messages %}
+ {% if not loop.is_first %}
+ {"role": "{{ message.role }}", "content": "{{ message.content }}" } {% if not loop.is_last %},{% endif %}
+ {% endif %}
+ {% endfor %}
+ ]
+ {% else %}
+ "messages": [
+ {% for message in input_request.messages %}
+ {"role": " {{ message.role}}", "content": "{{ message.content }}" } {% if not loop.is_last %},{% endif %}
+ {% endfor %}
+ ]
+ {% endif %}
+ {% else if key == "system" or key == "model" or key == "temperature" or key == "store" or key == "max_tokens" or key == "stream" or key == "presence_penalty" or key == "metadata" or key == "frequency_penalty" or key == "tools" or key == "tool_choice" or key == "logprobs" or key == "top_logprobs" or key == "logit_bias" or key == "n" or key == "modalities" or key == "prediction" or key == "response_format" or key == "service_tier" or key == "seed" or key == "stop" or key == "stream_options" or key == "top_p" or key == "parallel_tool_calls" or key == "user" %}
+ "{{ key }}": {{ tojson(value) }}
+ {% endif %}
+ {% if not loop.is_last %},{% endif %}
+ {% endfor %} })";
  const std::string kAnthropicTransformRespTemplate = R"({
  "id": "{{ input_request.id }}",
  "created": null,

engine/extensions/remote-engine/remote_engine.cc

Lines changed: 0 additions & 20 deletions
@@ -34,15 +34,12 @@ size_t StreamWriteCallback(char* ptr, size_t size, size_t nmemb,
    while ((pos = context->buffer.find('\n')) != std::string::npos) {
      std::string line = context->buffer.substr(0, pos);
      context->buffer = context->buffer.substr(pos + 1);
-     // CTL_INF(line);
 
      // Skip empty lines
      if (line.empty() || line == "\r" ||
          line.find("event:") != std::string::npos)
        continue;
 
-     // Skip [DONE] message
-     // std::cout << line << std::endl;
      CTL_DBG(line);
      if (line == "data: [DONE]" ||
          line.find("message_stop") != std::string::npos) {
@@ -522,23 +519,6 @@ void RemoteEngine::HandleChatCompletion(
          std::string(e.what()));
    }
 
-   // Parse system for anthropic
-   if (is_anthropic(model)) {
-     bool has_system = false;
-     Json::Value msgs(Json::arrayValue);
-     for (auto& kv : (*json_body)["messages"]) {
-       if (kv["role"].asString() == "system") {
-         (*json_body)["system"] = kv["content"].asString();
-         has_system = true;
-       } else {
-         msgs.append(kv);
-       }
-     }
-     if (has_system) {
-       (*json_body)["messages"] = msgs;
-     }
-   }
- 
    // Render with error handling
    try {
      result = renderer_.Render(template_str, *json_body);
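The block deleted here is superseded by the new kAnthropicTransformReqTemplate in model_config.h, which now hoists a leading "system" message into Anthropic's top-level system field at render time and keeps only the remaining messages in the array. For reference, a standalone sketch of that transformation using jsoncpp, as the removed code did (HoistSystemMessage is a hypothetical name, not an engine API):

    #include <json/json.h>

    // Sketch of the hoisting the removed block performed
    // (and the Anthropic request template now handles).
    void HoistSystemMessage(Json::Value& body) {
      bool has_system = false;
      Json::Value msgs(Json::arrayValue);
      for (const auto& msg : body["messages"]) {
        if (msg["role"].asString() == "system") {
          body["system"] = msg["content"].asString();  // Anthropic top-level field
          has_system = true;
        } else {
          msgs.append(msg);
        }
      }
      if (has_system) {
        body["messages"] = msgs;  // drop the hoisted system entry
      }
    }

    int main() {
      Json::Value body;
      body["messages"][0]["role"] = "system";
      body["messages"][0]["content"] = "You are terse.";
      body["messages"][1]["role"] = "user";
      body["messages"][1]["content"] = "Hi";
      HoistSystemMessage(body);
      // body now carries a top-level "system" string and a single user message.
    }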

0 commit comments
