
Commit 18f5277

Make endpoint url for Ollama configurable (#355)
1 parent fef25a2 · commit 18f5277

5 files changed: +89 additions, -4 deletions

README.md

Lines changed: 7 additions & 0 deletions
@@ -70,6 +70,13 @@ You can also run it with local model through ollama:
 git add <files...>
 OCO_AI_PROVIDER='ollama' opencommit
 ```
+If you have Ollama set up in Docker or on another machine with GPUs (not locally), you can change the default endpoint URL.
+You can do so by setting the `OCO_OLLAMA_API_URL` environment variable as follows:
+
+```sh
+OCO_OLLAMA_API_URL='http://192.168.1.10:11434/api/chat' opencommit
+```
+where 192.168.1.10 is an example address of the machine where Ollama is set up.
 
 ### Flags
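The commit also registers the new option as a config key, so it can presumably be persisted in the `~/.opencommit` config file rather than exported on every run. A hedged sketch follows; note that the `CONFIG_KEYS` enum in the diffs below maps `OCO_OLLAMA_API_URL` to the string `'OCO_API_URL'`, so the key name accepted by `config set` in this version may be `OCO_API_URL`.

```sh
# Hypothetical usage, assuming the standard `oco config set` flow;
# the accepted key name follows the CONFIG_KEYS mapping in this commit.
oco config set OCO_API_URL='http://192.168.1.10:11434/api/chat'
OCO_AI_PROVIDER='ollama' oco
```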
out/cli.cjs

Lines changed: 33 additions & 1 deletion
@@ -30629,6 +30629,26 @@ function getI18nLocal(value) {
 }
 
 // src/commands/config.ts
+var CONFIG_KEYS = /* @__PURE__ */ ((CONFIG_KEYS2) => {
+  CONFIG_KEYS2["OCO_OPENAI_API_KEY"] = "OCO_OPENAI_API_KEY";
+  CONFIG_KEYS2["OCO_ANTHROPIC_API_KEY"] = "OCO_ANTHROPIC_API_KEY";
+  CONFIG_KEYS2["OCO_AZURE_API_KEY"] = "OCO_AZURE_API_KEY";
+  CONFIG_KEYS2["OCO_TOKENS_MAX_INPUT"] = "OCO_TOKENS_MAX_INPUT";
+  CONFIG_KEYS2["OCO_TOKENS_MAX_OUTPUT"] = "OCO_TOKENS_MAX_OUTPUT";
+  CONFIG_KEYS2["OCO_OPENAI_BASE_PATH"] = "OCO_OPENAI_BASE_PATH";
+  CONFIG_KEYS2["OCO_DESCRIPTION"] = "OCO_DESCRIPTION";
+  CONFIG_KEYS2["OCO_EMOJI"] = "OCO_EMOJI";
+  CONFIG_KEYS2["OCO_MODEL"] = "OCO_MODEL";
+  CONFIG_KEYS2["OCO_LANGUAGE"] = "OCO_LANGUAGE";
+  CONFIG_KEYS2["OCO_MESSAGE_TEMPLATE_PLACEHOLDER"] = "OCO_MESSAGE_TEMPLATE_PLACEHOLDER";
+  CONFIG_KEYS2["OCO_PROMPT_MODULE"] = "OCO_PROMPT_MODULE";
+  CONFIG_KEYS2["OCO_AI_PROVIDER"] = "OCO_AI_PROVIDER";
+  CONFIG_KEYS2["OCO_GITPUSH"] = "OCO_GITPUSH";
+  CONFIG_KEYS2["OCO_ONE_LINE_COMMIT"] = "OCO_ONE_LINE_COMMIT";
+  CONFIG_KEYS2["OCO_AZURE_ENDPOINT"] = "OCO_AZURE_ENDPOINT";
+  CONFIG_KEYS2["OCO_OLLAMA_API_URL"] = "OCO_API_URL";
+  return CONFIG_KEYS2;
+})(CONFIG_KEYS || {});
 var MODEL_LIST = {
   openai: [
     "gpt-3.5-turbo",
@@ -30825,6 +30845,14 @@ var configValidators = {
       'Must be in format "https://<resource name>.openai.azure.com/"'
     );
     return value;
+  },
+  ["OCO_API_URL" /* OCO_OLLAMA_API_URL */](value) {
+    validateConfig(
+      "OCO_API_URL" /* OCO_OLLAMA_API_URL */,
+      typeof value === "string" && value.startsWith("http"),
+      `${value} is not a valid URL`
+    );
+    return value;
   }
 };
 var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -34151,12 +34179,16 @@ var api = new OpenAi();
 var config4 = getConfig();
 var OllamaAi = class {
   model = "mistral";
+  url = "http://localhost:11434/api/chat";
   setModel(model) {
     this.model = model ?? config4?.OCO_MODEL ?? "mistral";
   }
+  setUrl(url2) {
+    this.url = url2 ?? config4?.OCO_OLLAMA_API_URL ?? "http://localhost:11434/api/chat";
+  }
   async generateCommitMessage(messages) {
     const model = this.model;
-    const url2 = "http://localhost:11434/api/chat";
+    const url2 = this.url;
     const p4 = {
       model,
       messages,
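The `CONFIG_KEYS` IIFE at the top of this hunk is not hand-written; it is the shape a bundler such as esbuild emits for a TypeScript string enum. A minimal illustration with hypothetical names:

```ts
// TypeScript source (hypothetical name):
enum Demo {
  FOO = 'FOO_VALUE',
}

// Hand-written equivalent of the emitted shape; the real output passes
// `Demo2 || {}` so that re-declarations of the same enum merge.
var Demo2 = ((Demo3: Record<string, string>) => {
  Demo3['FOO'] = 'FOO_VALUE'; // each enum member becomes one assignment
  return Demo3;
})({});

console.log(Demo.FOO === Demo2.FOO); // true
```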

out/github-action.cjs

Lines changed: 33 additions & 1 deletion
@@ -49444,6 +49444,26 @@ function getI18nLocal(value) {
 }
 
 // src/commands/config.ts
+var CONFIG_KEYS = /* @__PURE__ */ ((CONFIG_KEYS2) => {
+  CONFIG_KEYS2["OCO_OPENAI_API_KEY"] = "OCO_OPENAI_API_KEY";
+  CONFIG_KEYS2["OCO_ANTHROPIC_API_KEY"] = "OCO_ANTHROPIC_API_KEY";
+  CONFIG_KEYS2["OCO_AZURE_API_KEY"] = "OCO_AZURE_API_KEY";
+  CONFIG_KEYS2["OCO_TOKENS_MAX_INPUT"] = "OCO_TOKENS_MAX_INPUT";
+  CONFIG_KEYS2["OCO_TOKENS_MAX_OUTPUT"] = "OCO_TOKENS_MAX_OUTPUT";
+  CONFIG_KEYS2["OCO_OPENAI_BASE_PATH"] = "OCO_OPENAI_BASE_PATH";
+  CONFIG_KEYS2["OCO_DESCRIPTION"] = "OCO_DESCRIPTION";
+  CONFIG_KEYS2["OCO_EMOJI"] = "OCO_EMOJI";
+  CONFIG_KEYS2["OCO_MODEL"] = "OCO_MODEL";
+  CONFIG_KEYS2["OCO_LANGUAGE"] = "OCO_LANGUAGE";
+  CONFIG_KEYS2["OCO_MESSAGE_TEMPLATE_PLACEHOLDER"] = "OCO_MESSAGE_TEMPLATE_PLACEHOLDER";
+  CONFIG_KEYS2["OCO_PROMPT_MODULE"] = "OCO_PROMPT_MODULE";
+  CONFIG_KEYS2["OCO_AI_PROVIDER"] = "OCO_AI_PROVIDER";
+  CONFIG_KEYS2["OCO_GITPUSH"] = "OCO_GITPUSH";
+  CONFIG_KEYS2["OCO_ONE_LINE_COMMIT"] = "OCO_ONE_LINE_COMMIT";
+  CONFIG_KEYS2["OCO_AZURE_ENDPOINT"] = "OCO_AZURE_ENDPOINT";
+  CONFIG_KEYS2["OCO_OLLAMA_API_URL"] = "OCO_API_URL";
+  return CONFIG_KEYS2;
+})(CONFIG_KEYS || {});
 var MODEL_LIST = {
   openai: [
     "gpt-3.5-turbo",
@@ -49640,6 +49660,14 @@ var configValidators = {
       'Must be in format "https://<resource name>.openai.azure.com/"'
     );
     return value;
+  },
+  ["OCO_API_URL" /* OCO_OLLAMA_API_URL */](value) {
+    validateConfig(
+      "OCO_API_URL" /* OCO_OLLAMA_API_URL */,
+      typeof value === "string" && value.startsWith("http"),
+      `${value} is not a valid URL`
+    );
+    return value;
   }
 };
 var defaultConfigPath = (0, import_path.join)((0, import_os.homedir)(), ".opencommit");
@@ -52966,12 +52994,16 @@ var api = new OpenAi();
 var config4 = getConfig();
 var OllamaAi = class {
   model = "mistral";
+  url = "http://localhost:11434/api/chat";
   setModel(model) {
     this.model = model ?? config4?.OCO_MODEL ?? "mistral";
   }
+  setUrl(url2) {
+    this.url = url2 ?? config4?.OCO_OLLAMA_API_URL ?? "http://localhost:11434/api/chat";
+  }
   async generateCommitMessage(messages) {
     const model = this.model;
-    const url2 = "http://localhost:11434/api/chat";
+    const url2 = this.url;
     const p3 = {
       model,
       messages,

src/commands/config.ts

Lines changed: 10 additions & 1 deletion
@@ -27,7 +27,8 @@ export enum CONFIG_KEYS {
   OCO_AI_PROVIDER = 'OCO_AI_PROVIDER',
   OCO_GITPUSH = 'OCO_GITPUSH',
   OCO_ONE_LINE_COMMIT = 'OCO_ONE_LINE_COMMIT',
-  OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT'
+  OCO_AZURE_ENDPOINT = 'OCO_AZURE_ENDPOINT',
+  OCO_OLLAMA_API_URL = 'OCO_API_URL',
 }
 
 export enum CONFIG_MODES {
@@ -270,6 +271,14 @@ export const configValidators = {
 
     return value;
   },
+  [CONFIG_KEYS.OCO_OLLAMA_API_URL](value: any) { // simple URL validator for the Ollama endpoint
+    validateConfig(
+      CONFIG_KEYS.OCO_OLLAMA_API_URL,
+      typeof value === 'string' && value.startsWith('http'),
+      `${value} is not a valid URL`
+    );
+    return value;
+  },
 };
 
 export type ConfigType = {
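For context, `validateConfig` itself is outside this diff. A minimal sketch of the contract the new validator appears to rely on, assuming it simply raises an error when the condition is false:

```ts
// Minimal sketch; validateConfig's real implementation lives elsewhere in
// src/commands/config.ts and is assumed to fail when the condition is false.
const validateConfig = (key: string, condition: boolean, message: string) => {
  if (!condition) {
    throw new Error(`Unsupported config key ${key}: ${message}`);
  }
};

// Stand-alone copy of the new validator's check:
const validateOllamaApiUrl = (value: any) => {
  validateConfig(
    'OCO_OLLAMA_API_URL',
    typeof value === 'string' && value.startsWith('http'),
    `${value} is not a valid URL`
  );
  return value;
};

validateOllamaApiUrl('http://192.168.1.10:11434/api/chat'); // passes
// validateOllamaApiUrl('192.168.1.10:11434');              // throws: no http(s) scheme
```

The check only requires an `http` prefix, so both `http://` and `https://` endpoints on any host and port pass, which is what makes remote or Dockerized Ollama instances reachable.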

src/engine/ollama.ts

Lines changed: 6 additions & 1 deletion
@@ -10,10 +10,15 @@ const config = getConfig();
 
 export class OllamaAi implements AiEngine {
   private model = "mistral"; // as default model of Ollama
+  private url = "http://localhost:11434/api/chat"; // default URL of Ollama API
 
   setModel(model: string) {
     this.model = model ?? config?.OCO_MODEL ?? 'mistral';
   }
+
+  setUrl(url: string) {
+    this.url = url ?? config?.OCO_OLLAMA_API_URL ?? 'http://localhost:11434/api/chat';
+  }
   async generateCommitMessage(
     messages: Array<ChatCompletionRequestMessage>
   ): Promise<string | undefined> {
@@ -22,7 +27,7 @@ export class OllamaAi implements AiEngine {
     //console.log(messages);
     //process.exit()
 
-    const url = 'http://localhost:11434/api/chat';
+    const url = this.url;
     const p = {
       model,
       messages,
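To show where the new setter fits, here is a hypothetical caller-side sketch. The engine-selection code is not part of this diff, and the import path and config shape are assumptions:

```ts
// Hypothetical wiring sketch, not code from this commit: roughly how the
// CLI might configure the engine when OCO_AI_PROVIDER is 'ollama'.
import { OllamaAi } from './ollama';

const ollama = new OllamaAi();
ollama.setModel(process.env.OCO_MODEL as string);
// New in this commit: point the engine at a remote Ollama instance.
// Passing undefined is fine; setUrl falls back to the config value and
// then to the http://localhost:11434/api/chat default.
ollama.setUrl(process.env.OCO_OLLAMA_API_URL as string);
```

Because `setUrl` mirrors the existing `setModel` fallback chain, setups that never set the variable keep hitting the localhost default unchanged.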
