Skip to content

Commit 1f1c6ca

Browse files
committed
feat: cloudflare provider
1 parent b0732fd commit 1f1c6ca

File tree

3 files changed

+80
-2
lines changed

3 files changed

+80
-2
lines changed

lib/models.ex

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
defmodule HyperLLM.Models do
22
@providers %{
33
"anthropic" => HyperLLM.Provider.Anthropic,
4+
"cloudflare" => HyperLLM.Provider.Cloudflare,
45
"groq" => HyperLLM.Provider.Groq,
56
"openai" => HyperLLM.Provider.OpenAI
67
}
@@ -42,7 +43,7 @@ defmodule HyperLLM.Models do
4243
Example:
4344
4445
iex> HyperLLM.Models.list_providers()
45-
["anthropic", "groq", "openai"]
46+
["anthropic", "cloudflare", "groq", "openai"]
4647
"""
4748
def list_providers, do: Map.keys(@providers)
4849
end

lib/providers/cloudflare.ex

Lines changed: 77 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,77 @@
1+
defmodule HyperLLM.Provider.Cloudflare do
  @moduledoc """
  Provider implementation for Cloudflare.

  Uses Cloudflare's [OpenAI compatibility API](https://developers.cloudflare.com/workers-ai/configuration/open-ai-compatibility/) for chat completions.

  ## Configuration:

      config :hyper_llm,
        cloudflare: [
          api_key: "sk-...",
          account_id: "..."
        ]
  """

  @behaviour HyperLLM.Provider

  @doc """
  Sends `messages` to Cloudflare's chat completion endpoint.

  `config` must contain `:model` (raises `KeyError` otherwise). Returns
  `{:ok, content}` with the first choice's message content, or
  `{:error, reason}` where `reason` is a human-readable string.
  """
  @impl true
  def completion(messages, config) do
    model = Keyword.fetch!(config, :model)

    {_request, response} =
      request("/ai/v1/chat/completions",
        method: :post,
        receive_timeout: 30_000,
        json: %{
          model: model,
          messages: messages
        }
      )

    case response do
      # Success: the decoded JSON body uses string keys; take the first choice.
      # A 200 response without a non-empty "choices" list falls through to the
      # catch-all clause instead of crashing.
      %{status: 200, body: %{"choices" => [choice | _]}} ->
        {:ok, choice["message"]["content"]}

      # The body is a plain string-keyed map (see the 200 clause), so the error
      # message must be read with string keys — `body.error.message` would
      # raise a KeyError here instead of returning an error tuple.
      %{status: 400, body: body} ->
        {:error, get_in(body, ["error", "message"]) || "Bad request"}

      %{status: 401} ->
        {:error, "Cloudflare API key is invalid"}

      %{status: 404} ->
        {:error, "Cloudflare API not found"}

      %{status: 500} ->
        {:error, "Cloudflare Server error"}

      _ ->
        {:error, "Unknown error"}
    end
  end

  @doc """
  Check if a model is supported by the provider.

  Currently it's just checking that the model name starts with `@`
  (Cloudflare Workers AI model names look like `@cf/meta/...`).
  """
  @impl true
  def has_model?(model) do
    String.starts_with?(model, "@")
  end

  # Builds an authenticated request against the account-scoped Cloudflare API
  # base URL and runs it with the caller-supplied options (method, json, ...).
  # Credentials are read at runtime so config changes take effect without
  # recompiling.
  defp request(url, opts) do
    api_key = HyperLLM.config!(:cloudflare, :api_key)
    account_id = HyperLLM.config!(:cloudflare, :account_id)

    req =
      Req.new(
        auth: {:bearer, api_key},
        base_url: "https://api.cloudflare.com/client/v4/accounts/#{account_id}",
        url: url
      )

    Req.run(req, opts)
  end
end

test/models_test.exs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ defmodule HyperLLM.ModelsTest do
1919

2020
describe "list_providers/0" do
2121
test "returns a list of providers" do
22-
assert ["anthropic", "groq", "openai"] = HyperLLM.Models.list_providers()
22+
assert ["anthropic", "cloudflare", "groq", "openai"] = HyperLLM.Models.list_providers()
2323
end
2424
end
2525
end

0 commit comments

Comments
 (0)