Commit a8d8733

feat: Add mcp support and example. (#12)
* feat: Add `mcp` support and example.
  - Add `cpp-httplib` as submodule.
* Update llama.cpp submodule to latest commit
* fix: Use quotes for httplib.
* fix: Update lint workflow to compile with MCP.
* chore: Cleanup
1 parent 5773124 · commit a8d8733

17 files changed: +1113 −11 lines changed

.github/workflows/lint.yml

Lines changed: 2 additions & 2 deletions
@@ -26,7 +26,7 @@ jobs:
       - name: Install clang tools
         run: |
           sudo apt-get update
-          sudo apt-get install -y clang-format-14 clang-tidy-14
+          sudo apt-get install -y clang-format-14 clang-tidy-14 libssl-dev
           sudo update-alternatives --install /usr/bin/clang-format clang-format /usr/bin/clang-format-14 100
           sudo update-alternatives --install /usr/bin/clang-tidy clang-tidy /usr/bin/clang-tidy-14 100

@@ -38,7 +38,7 @@ jobs:
           clang-tidy --version

       - name: Configure CMake
-        run: cmake -B build -S .
+        run: cmake -B build -S . -DAGENT_CPP_BUILD_MCP=ON

       - name: Run pre-commit
         run: pre-commit run --all-files --show-diff-on-failure --verbose

.github/workflows/update-llama-cpp.yml

Lines changed: 4 additions & 4 deletions
@@ -28,7 +28,7 @@ jobs:
       - name: Check for submodule updates
         id: check
         run: |
-          cd llama.cpp
+          cd deps/llama.cpp

           CURRENT_COMMIT=$(git rev-parse HEAD)
           echo "Current commit: $CURRENT_COMMIT"
@@ -69,11 +69,11 @@ jobs:

           git checkout -b "$BRANCH_NAME"

-          cd llama.cpp
+          cd deps/llama.cpp
           git checkout origin/master
-          cd ..
+          cd ../..

-          git add llama.cpp
+          git add deps/llama.cpp
           git commit -m "Update llama.cpp submodule to ${{ steps.check.outputs.latest_short }}"

           git push origin "$BRANCH_NAME"

.gitmodules

Lines changed: 4 additions & 1 deletion
@@ -1,3 +1,6 @@
 [submodule "llama.cpp"]
-    path = llama.cpp
+    path = deps/llama.cpp
     url = https://github.com/ggerganov/llama.cpp.git
+[submodule "vendor/cpp-httplib"]
+    path = deps/cpp-httplib
+    url = https://github.com/yhirose/cpp-httplib.git

CMakeLists.txt

Lines changed: 67 additions & 3 deletions
@@ -20,7 +20,7 @@ if(AGENT_CPP_BUNDLED_LLAMA)
   set(LLAMA_SOURCE_DIR "${LLAMA_CPP_DIR}")
   message(STATUS "Using custom llama.cpp from: ${LLAMA_SOURCE_DIR}")
 else()
-  set(LLAMA_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/llama.cpp")
+  set(LLAMA_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/deps/llama.cpp")
   if(NOT EXISTS "${LLAMA_SOURCE_DIR}/CMakeLists.txt")
     message(FATAL_ERROR
       "llama.cpp submodule not found at ${LLAMA_SOURCE_DIR}\n"
@@ -75,6 +75,30 @@ target_include_directories(agent
 target_link_libraries(agent PUBLIC model common llama)
 target_compile_features(agent PUBLIC cxx_std_17)

+# MCP Client library for connecting to MCP servers via HTTP
+option(AGENT_CPP_BUILD_MCP "Build MCP client (requires OpenSSL for HTTPS)" OFF)
+
+if(AGENT_CPP_BUILD_MCP)
+  find_package(OpenSSL REQUIRED)
+
+  add_library(mcp_client STATIC
+    src/mcp/mcp_client.cpp
+    src/mcp/mcp_tool.cpp
+  )
+  add_library(agent-cpp::mcp_client ALIAS mcp_client)
+  target_include_directories(mcp_client
+    PUBLIC
+      $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
+      $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/deps/cpp-httplib>
+      $<BUILD_INTERFACE:${LLAMA_SOURCE_DIR}/common>
+      $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}/agent-cpp>
+  )
+  target_link_libraries(mcp_client PUBLIC common OpenSSL::SSL OpenSSL::Crypto)
+  target_compile_features(mcp_client PUBLIC cxx_std_17)
+
+  message(STATUS "MCP client enabled (using cpp-httplib)")
+endif()
+
 if(AGENT_CPP_BUILD_TESTS)
   enable_testing()
@@ -98,6 +122,19 @@ if(AGENT_CPP_BUILD_TESTS)
   add_test(NAME ToolTests COMMAND test_tool)
   add_test(NAME CallbacksTests COMMAND test_callbacks)

+  if(AGENT_CPP_BUILD_MCP)
+    add_executable(test_mcp_client tests/test_mcp_client.cpp)
+    target_include_directories(test_mcp_client PRIVATE
+      src
+      tests
+      ${LLAMA_SOURCE_DIR}/common
+    )
+    target_link_libraries(test_mcp_client PRIVATE mcp_client common)
+    target_compile_features(test_mcp_client PRIVATE cxx_std_17)
+
+    add_test(NAME MCPClientTests COMMAND test_mcp_client)
+  endif()
+
   # On Windows, DLLs are placed in the bin/ directory by llama.cpp
   # We need to add this directory to PATH so tests can find the DLLs
   if(WIN32)
@@ -162,6 +199,21 @@ if(AGENT_CPP_BUILD_EXAMPLES)
   target_link_libraries(context-engineering-example PRIVATE agent model common llama)
   target_compile_features(context-engineering-example PRIVATE cxx_std_17)

+  # MCP client example (requires MCP support)
+  if(AGENT_CPP_BUILD_MCP)
+    add_executable(mcp-example examples/mcp/mcp.cpp)
+    target_include_directories(mcp-example PRIVATE
+      ${CMAKE_CURRENT_SOURCE_DIR}/src
+      ${CMAKE_CURRENT_SOURCE_DIR}/examples/shared
+      ${LLAMA_SOURCE_DIR}/common
+      ${LLAMA_SOURCE_DIR}/ggml/include
+      ${LLAMA_SOURCE_DIR}/include
+      ${LLAMA_SOURCE_DIR}/vendor
+    )
+    target_link_libraries(mcp-example PRIVATE agent model mcp_client common llama)
+    target_compile_features(mcp-example PRIVATE cxx_std_17)
+  endif()
+
   # Note: tracing-example is not included here as it requires additional
   # dependencies (OpenTelemetry, protobuf, curl). Build it separately from
   # examples/tracing/
@@ -176,17 +228,29 @@ if(AGENT_CPP_INSTALL)
   include(CMakePackageConfigHelpers)

   # Install public headers
-  install(FILES
+  set(INSTALL_HEADERS
     src/agent.h
     src/callbacks.h
     src/error.h
     src/model.h
     src/tool.h
+  )
+
+  if(AGENT_CPP_BUILD_MCP)
+    list(APPEND INSTALL_HEADERS src/mcp/mcp_client.h src/mcp/mcp_tool.h)
+  endif()
+
+  install(FILES ${INSTALL_HEADERS}
     DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/agent-cpp
   )

   # Install libraries with export set
-  install(TARGETS agent model
+  set(INSTALL_TARGETS agent model)
+  if(AGENT_CPP_BUILD_MCP)
+    list(APPEND INSTALL_TARGETS mcp_client)
+  endif()
+
+  install(TARGETS ${INSTALL_TARGETS}
     EXPORT agent-cpp-targets
     LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
     ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
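Editor's note: the new `mcp_client` target wraps MCP's Streamable HTTP transport on top of the vendored cpp-httplib, which is why OpenSSL becomes a hard requirement for `https://` servers. As a rough orientation only — this is not agent.cpp's `mcp_client` API — the sketch below sends the JSON-RPC `initialize` request that opens an MCP session, using cpp-httplib directly; the endpoint path, protocol version string, and client name are placeholder assumptions.

```cpp
// Sketch of the wire traffic only -- not the API from src/mcp/.
// Assumes a local MCP server at /mcp; the protocol version is an
// example value from the MCP specification.
#include <iostream>
#include <string>

// #define CPPHTTPLIB_OPENSSL_SUPPORT  // needed for https:// servers
#include "httplib.h"

int main() {
    httplib::Client cli("http://localhost:8000");

    // Streamable HTTP servers may answer with plain JSON or an SSE
    // stream, so the client advertises both content types.
    httplib::Headers headers = {
        {"Accept", "application/json, text/event-stream"},
    };

    // Every MCP session begins with a JSON-RPC "initialize" request.
    const std::string initialize = R"({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "initialize",
        "params": {
            "protocolVersion": "2025-03-26",
            "capabilities": {},
            "clientInfo": {"name": "wire-sketch", "version": "0.0.1"}
        }
    })";

    auto res = cli.Post("/mcp", headers, initialize, "application/json");
    if (res && res->status == 200) {
        std::cout << res->body << "\n";  // server capabilities and info
    } else {
        std::cerr << "initialize failed\n";
    }
}
```

Tool discovery (`tools/list`) and invocation (`tools/call`) follow the same request shape, which is why the library needs little beyond an HTTP client and a JSON parser.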

deps/cpp-httplib

Submodule cpp-httplib added at 59905c7

deps/llama.cpp

Submodule llama.cpp added at e443fbc

examples/mcp/CMakeLists.txt

Lines changed: 26 additions & 0 deletions
@@ -0,0 +1,26 @@
+cmake_minimum_required(VERSION 3.14)
+project(mcp-example VERSION 0.1.0)
+
+set(CMAKE_CXX_STANDARD 17)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
+
+set(AGENT_CPP_BUILD_MCP ON CACHE BOOL "Build MCP client" FORCE)
+
+add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/../.. ${CMAKE_CURRENT_BINARY_DIR}/agent-cpp)
+
+add_executable(mcp-example mcp.cpp)
+
+target_include_directories(mcp-example PRIVATE
+  ${CMAKE_CURRENT_SOURCE_DIR}/../../src
+  ${CMAKE_CURRENT_SOURCE_DIR}/../shared
+  ${LLAMA_SOURCE_DIR}/common
+  ${LLAMA_SOURCE_DIR}/ggml/include
+  ${LLAMA_SOURCE_DIR}/include
+  ${LLAMA_SOURCE_DIR}/vendor
+)
+
+target_link_libraries(mcp-example PRIVATE agent mcp_client common llama)
+target_compile_features(mcp-example PRIVATE cxx_std_17)
+
+message(STATUS "MCP example configured.")

examples/mcp/README.md

Lines changed: 105 additions & 0 deletions
@@ -0,0 +1,105 @@
+# MCP Client Example
+
+[MCP (Model Context Protocol)](https://modelcontextprotocol.io/) is an open protocol that allows AI applications to connect to external tools and data sources.
+
+This example demonstrates how to connect to an MCP server via HTTP and use its tools with an agent.cpp agent.
+
+## Building Blocks
+
+### Tools
+
+Tools are dynamically discovered from the MCP server at runtime. The client connects to the server, performs a handshake, and retrieves the available tool definitions.
+
+### Callbacks
+
+This example uses two shared callbacks from `examples/shared/`:
+
+- **LoggingCallback**: Displays tool execution information with colored output showing which tools are called and their results.
+
+- **ErrorRecoveryCallback**: Converts tool execution errors into JSON results, allowing the agent to see errors and potentially retry or adjust.
+
+## Building
+
+> [!IMPORTANT]
+> Check the [llama.cpp build documentation](https://github.com/ggml-org/llama.cpp/blob/master/docs/build.md) to find
+> CMake flags you might want to pass depending on your available hardware.
+
+```bash
+cd examples/mcp
+
+git -C ../.. submodule update --init --recursive
+
+# MCP requires OpenSSL for HTTPS support
+cmake -B build -DAGENT_CPP_BUILD_MCP=ON
+cmake --build build -j$(nproc)
+```
+
+### Using a custom llama.cpp
+
+If you have llama.cpp already downloaded:
+
+```bash
+cmake -B build -DLLAMA_CPP_DIR=/path/to/your/llama.cpp -DAGENT_CPP_BUILD_MCP=ON
+cmake --build build -j$(nproc)
+```
+
+## Usage
+
+```bash
+./build/mcp-example -m <path-to-model.gguf> -u <mcp-server-url>
+```
+
+Options:
+- `-m <path>` - Path to the GGUF model file (required)
+- `-u <url>` - MCP server URL (Streamable HTTP transport) (required)
+
+## Example
+
+This example includes a simple MCP server (`server.py`) with a `calculator` tool that performs basic math operations (similar to the calculator in `examples/shared`).
+
+### 1. Start the MCP Server
+
+The server uses [uv](https://docs.astral.sh/uv/) inline script metadata, so no installation is needed:
+
+```bash
+uv run server.py
+```
+
+This starts the MCP server on `http://localhost:8000/mcp`.
+
+### 2. Run the Agent
+
+```bash
+./build/mcp-example -m ../../granite-4.0-micro-Q8_0.gguf -u "http://localhost:8000/mcp"
+```
+
+### 3. Example Conversation
+
+```console
+$ ./build/mcp-example -m ../../granite-4.0-micro-Q8_0.gguf -u "http://localhost:8000/mcp"
+Connecting to MCP server: http://localhost:8000/mcp
+Initializing MCP session...
+MCP session initialized.
+
+Available tools (1):
+  - calculator: Perform basic mathematical operations.
+
+Loading model...
+Model loaded successfully
+
+MCP Agent ready!
+Connected to: http://localhost:8000/mcp
+Type an empty line to quit.
+
+> What is 42 multiplied by 17?
+
+<tool_call>
+{"name": "calculator", "arguments": "{\n  \"operation\": \"multiply\",\n  \"a\": 42,\n  \"b\": 17\n}"}
+</tool_call>
+
+[TOOL EXECUTION] Calling calculator
+[TOOL RESULT]
+{"result": 714}
+
+42 multiplied by 17 equals **714**.
+```
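Editor's note: one detail worth flagging in the transcript above is that the `arguments` field inside `<tool_call>` is a JSON-encoded *string*, not a JSON object, so the tool layer has to parse twice. A minimal sketch using `nlohmann::json` (which llama.cpp ships under `vendor/`, already on this example's include path); `handle_tool_call` is an invented helper name, not part of agent.cpp.

```cpp
// Sketch: unpacking the <tool_call> payload from the transcript above.
// "arguments" arrives as a JSON string rather than a JSON object, so
// it is parsed twice. handle_tool_call is hypothetical.
#include <iostream>
#include <string>

#include <nlohmann/json.hpp>

using json = nlohmann::json;

void handle_tool_call(const std::string &payload) {
    json call = json::parse(payload);
    std::string name = call.at("name").get<std::string>();

    // Second parse: the arguments string itself contains JSON.
    json args = json::parse(call.at("arguments").get<std::string>());

    std::cout << "tool: " << name
              << ", operation: " << args.at("operation").get<std::string>()
              << ", a: " << args.at("a").get<double>()
              << ", b: " << args.at("b").get<double>() << "\n";
    // In the example run, the MCP server's calculator tool then
    // returns {"result": 714}.
}

int main() {
    handle_tool_call(
        R"({"name": "calculator", "arguments": "{\n  \"operation\": \"multiply\",\n  \"a\": 42,\n  \"b\": 17\n}"})");
}
```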
