Commit 47c83e0

feat: plugins: add new filter llm_tag plugin (#123)
* feat(plugins): add filter_llm_tag plugin
* feat(config_format): add variant support for filter plugins

Add new LLM-based log tagging filter plugin with OpenAI-compatible API support.
Extend YAML config parser to support complex nested structures in filter plugins.
1 parent 0390f9f · commit 47c83e0

File tree: 11 files changed (+1990 −29 lines)

source/cmake/plugins_options.cmake

Lines changed: 1 addition & 0 deletions

@@ -89,6 +89,7 @@ DEFINE_OPTION(FLB_PROCESSOR_DEDUP "Enable record deduplication processo
 DEFINE_OPTION(FLB_FILTER_ALTER_SIZE "Enable alter_size filter" OFF)
 DEFINE_OPTION(FLB_FILTER_AWS "Enable aws filter" ON)
 DEFINE_OPTION(FLB_FILTER_CHECKLIST "Enable checklist filter" OFF)
+DEFINE_OPTION(FLB_FILTER_LLM_TAG "Enable LLM-based log tagging filter" ON)
 DEFINE_OPTION(FLB_FILTER_ECS "Enable AWS ECS filter" ON)
 DEFINE_OPTION(FLB_FILTER_EXPECT "Enable expect filter" ON)
 DEFINE_OPTION(FLB_FILTER_GEOIP2 "Enable geoip2 filter" OFF)
flb_openai_client.h (new file)

Lines changed: 119 additions & 0 deletions

/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */

/* Fluent Bit OpenAI Client
 * =========================
 * Copyright (C) 2015-2024 The Fluent Bit Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef FLB_OPENAI_CLIENT_H
#define FLB_OPENAI_CLIENT_H

#include <fluent-bit/flb_info.h>
#include <fluent-bit/flb_config.h>
#include <fluent-bit/flb_upstream.h>
#include <fluent-bit/tls/flb_tls.h>

/*
 * OpenAI-compatible client for LLM inference
 *
 * Supports:
 *   - llama.cpp server (llama-server)
 *   - vLLM
 *   - Ollama
 *   - Text Generation Inference (TGI)
 *   - OpenAI API
 *   - Any OpenAI-compatible endpoint
 */

/* OpenAI client context */
struct flb_openai_client {
    char *endpoint;                 /* Full endpoint URL */
    char *host;                     /* Parsed host */
    int port;                       /* Parsed port */
    char *path;                     /* Parsed path (e.g., /v1/chat/completions) */
    int use_tls;                    /* FLB_TRUE for HTTPS, FLB_FALSE for HTTP */

    char *api_key;                  /* Optional API key (Bearer token) */
    char *proxy;                    /* Optional proxy URL */
    char *proxy_host;               /* Parsed proxy host */
    int proxy_port;                 /* Parsed proxy port */

    struct flb_upstream *upstream;  /* Upstream connection pool */
    struct flb_tls *tls;            /* TLS configuration */
    struct flb_config *config;      /* Fluent Bit config */
};

/* Chat completion simple response */
struct flb_openai_response {
    char *content;                  /* Response text content */
    size_t content_len;             /* Response length */
    int status_code;                /* HTTP status code */
};

/*
 * Create OpenAI client
 *
 * @param endpoint  Full endpoint URL (e.g., http://127.0.0.1:8080)
 * @param api_key   Optional API key for authentication (NULL for local)
 * @param proxy     Optional proxy URL (NULL if not using proxy)
 * @param tls       Optional TLS configuration (NULL for HTTP)
 * @param config    Fluent Bit configuration
 *
 * @return Client context on success, NULL on failure
 */
struct flb_openai_client *flb_openai_client_create(const char *endpoint,
                                                   const char *api_key,
                                                   const char *proxy,
                                                   struct flb_tls *tls,
                                                   struct flb_config *config);

/*
 * Simple chat completion for yes/no classification
 *
 * This is a simplified API optimized for binary classification tasks.
 * It sends a request with a system prompt and user message, and returns
 * the text response.
 *
 * @param client         OpenAI client context
 * @param model_id       Model identifier (e.g., "qwen2.5-3b-instruct-q4")
 * @param system_prompt  System role message (classification instructions)
 * @param user_message   User role message (content to classify)
 * @param timeout_ms     Request timeout in milliseconds
 * @param response       Output: allocated response (caller must free)
 *
 * @return 0 on success, -1 on failure
 */
int flb_openai_chat_completion_simple(struct flb_openai_client *client,
                                      const char *model_id,
                                      const char *system_prompt,
                                      const char *user_message,
                                      int timeout_ms,
                                      struct flb_openai_response *response);

/*
 * Destroy OpenAI client
 *
 * @param client  Client context to destroy
 */
void flb_openai_client_destroy(struct flb_openai_client *client);

/*
 * Free response structure
 *
 * @param response  Response to free
 */
void flb_openai_response_destroy(struct flb_openai_response *response);

#endif
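
For orientation, here is a minimal sketch of how a caller might drive this client: create it against a local llama.cpp endpoint, ask the model a yes/no question about a log line, then release the response and the client. Only the four flb_openai_* signatures come from the header above; the endpoint URL, model id, prompts, timeout, and the classify_record() helper are illustrative assumptions, not the actual call pattern used by filter_llm_tag.

/*
 * Hypothetical usage sketch (not part of the commit). Assumes the
 * declarations from the new client header above are in scope.
 */
#include <strings.h>    /* strncasecmp() */

static int classify_record(struct flb_config *config, const char *log_line)
{
    struct flb_openai_client *client;
    struct flb_openai_response resp = {0};
    int ret = -1;

    /* local llama.cpp server: no API key, no proxy, plain HTTP */
    client = flb_openai_client_create("http://127.0.0.1:8080",
                                      NULL, NULL, NULL, config);
    if (!client) {
        return -1;
    }

    /* binary classification: ask the model to answer "yes" or "no" */
    if (flb_openai_chat_completion_simple(client,
                                          "qwen2.5-3b-instruct-q4",
                                          "Answer yes or no: is this log line an error?",
                                          log_line,
                                          5000 /* timeout_ms */,
                                          &resp) == 0) {
        ret = (resp.status_code == 200 &&
               resp.content != NULL &&
               strncasecmp(resp.content, "yes", 3) == 0);
        flb_openai_response_destroy(&resp);
    }

    flb_openai_client_destroy(client);
    return ret;
}

In this sketch the client is created and destroyed per call for clarity; a real filter would more plausibly keep one client (and its upstream connection pool) for the lifetime of the plugin instance.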

source/plugins/CMakeLists.txt

Lines changed: 1 addition & 0 deletions

@@ -420,6 +420,7 @@ endif()
 # =======
 REGISTER_FILTER_PLUGIN("filter_alter_size")
 REGISTER_FILTER_PLUGIN("filter_aws")
+REGISTER_FILTER_PLUGIN("filter_llm_tag")
 REGISTER_FILTER_PLUGIN("filter_checklist")
 REGISTER_FILTER_PLUGIN("filter_ecs")
 REGISTER_FILTER_PLUGIN("filter_record_modifier")
filter_llm_tag/CMakeLists.txt (new file)

Lines changed: 6 additions & 0 deletions

set(src
  llm_tag.c
  ${CMAKE_SOURCE_DIR}/src/flb_openai_client.c
  )

FLB_PLUGIN(filter_llm_tag "${src}" "")
