Commit b49946f

Preserve URL path when building relative URLs in JS

It's necessary to preserve the path because the server might be hosted under a subdirectory, specified by `--url-prefix`. Fixes #732

1 parent 1e777d2 · commit b49946f
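
To see why the path matters, here is a rough sketch of how the URL API behaves (the host and the /llamafile prefix are made-up example values, not taken from the commit). Taking only the page's origin, or resolving against a prefix without a trailing slash, silently drops the subdirectory that --url-prefix adds:

  // Illustrative only; assumes the UI is served from http://example.com/llamafile/
  new URL('.', 'http://example.com/llamafile/').origin;        // "http://example.com" — prefix path is gone
  new URL('completion', 'http://example.com/llamafile').href;  // "http://example.com/completion" — no trailing
                                                                // slash, so the last path segment is replaced
  new URL('completion', 'http://example.com/llamafile/').href; // "http://example.com/llamafile/completion"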

File tree: 2 files changed, +18 −3 lines

llama.cpp/server/public/completion.js
Lines changed: 16 additions & 2 deletions

@@ -7,6 +7,20 @@ const paramDefaults = {
 
 let generation_settings = null;
 
+// Returns a new URL that starts with `urlPrefix` and ends with `path`. The
+// `path` must not begin with a slash. This is more robust than `new URL(path,
+// urlPrefix)` because it preserves the prefix's entire path, even when the
+// prefix has no trailing slash.
+function buildUrl(urlPrefix, path) {
+  if (path.startsWith('/')) {
+    throw new Error("path must not have a leading slash");
+  }
+  const base = new URL(urlPrefix);
+  if (!base.pathname.endsWith('/')) {
+    base.pathname += '/';
+  }
+  return new URL(path, base);
+}
 
 // Completes the prompt as a generator. Recommended for most use cases.
 //
@@ -28,7 +42,7 @@ export async function* llama(prompt, params = {}, config = {}) {
 
   const completionParams = { ...paramDefaults, ...params, prompt };
 
-  const response = await fetch(`${url_prefix}/completion`, {
+  const response = await fetch(buildUrl(url_prefix, 'completion'), {
     method: 'POST',
     body: JSON.stringify(completionParams),
     headers: {
@@ -196,7 +210,7 @@ export const llamaComplete = async (params, controller, callback) => {
 export const llamaModelInfo = async (config = {}) => {
   if (!generation_settings) {
     const url_prefix = config.url_prefix || "";
-    const props = await fetch(`${url_prefix}/props`).then(r => r.json());
+    const props = await fetch(buildUrl(url_prefix, 'props')).then(r => r.json());
     generation_settings = props.default_generation_settings;
   }
   return generation_settings;
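
As a quick illustration of the new helper (example URLs only, not part of the diff): buildUrl produces the same result whether or not the prefix carries a trailing slash, and it rejects paths that start with a slash:

  buildUrl('http://example.com/llamafile', 'completion').href;   // "http://example.com/llamafile/completion"
  buildUrl('http://example.com/llamafile/', 'completion').href;  // "http://example.com/llamafile/completion"
  buildUrl('http://example.com/llamafile', '/completion');       // throws: "path must not have a leading slash"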

llama.cpp/server/public/index.html
Lines changed: 2 additions & 1 deletion

@@ -419,7 +419,8 @@
       throw new Error("already running");
     }
     controller.value = new AbortController();
-    for await (const chunk of llama(prompt, llamaParams, { controller: controller.value, url_prefix: new URL('.', document.baseURI).origin })) {
+
+    for await (const chunk of llama(prompt, llamaParams, { controller: controller.value, url_prefix: document.baseURI })) {
       const data = chunk.data;
 
       if (data.stop) {
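
For the index.html side, a sketch of what each expression evaluates to when the UI is loaded from a prefixed location (example URL; assumes the page sets no <base> element):

  // Page loaded from http://example.com/llamafile/
  new URL('.', document.baseURI).origin;          // "http://example.com"            — old url_prefix, prefix lost
  document.baseURI;                               // "http://example.com/llamafile/" — new url_prefix, prefix kept
  buildUrl(document.baseURI, 'completion').href;  // "http://example.com/llamafile/completion"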

Comments (0)