From ff5b869ae869fdb5eebd342523910cfd317d8e4d Mon Sep 17 00:00:00 2001 From: Jorge Cortes Date: Thu, 10 Oct 2024 16:34:42 -0500 Subject: [PATCH] Ollama: new action components --- .../ollama/actions/copy-model/copy-model.mjs | 50 ++++++++++ .../actions/create-model/create-model.mjs | 55 +++++++++++ .../actions/delete-model/delete-model.mjs | 43 ++++++++ .../generate-chat-completion.mjs | 82 ++++++++++++++++ .../generate-completion.mjs | 90 +++++++++++++++++ .../generate-embeddings.mjs | 73 ++++++++++++++ .../list-local-models/list-local-models.mjs | 19 ++++ .../ollama/actions/pull-model/pull-model.mjs | 56 +++++++++++ .../ollama/actions/push-model/push-model.mjs | 59 +++++++++++ .../show-model-information.mjs | 51 ++++++++++ components/ollama/common/utils.mjs | 87 +++++++++++++++++ components/ollama/ollama.app.mjs | 97 ++++++++++++++++++- components/ollama/package.json | 7 +- pnpm-lock.yaml | 5 +- 14 files changed, 767 insertions(+), 7 deletions(-) create mode 100644 components/ollama/actions/copy-model/copy-model.mjs create mode 100644 components/ollama/actions/create-model/create-model.mjs create mode 100644 components/ollama/actions/delete-model/delete-model.mjs create mode 100644 components/ollama/actions/generate-chat-completion/generate-chat-completion.mjs create mode 100644 components/ollama/actions/generate-completion/generate-completion.mjs create mode 100644 components/ollama/actions/generate-embeddings/generate-embeddings.mjs create mode 100644 components/ollama/actions/list-local-models/list-local-models.mjs create mode 100644 components/ollama/actions/pull-model/pull-model.mjs create mode 100644 components/ollama/actions/push-model/push-model.mjs create mode 100644 components/ollama/actions/show-model-information/show-model-information.mjs create mode 100644 components/ollama/common/utils.mjs diff --git a/components/ollama/actions/copy-model/copy-model.mjs b/components/ollama/actions/copy-model/copy-model.mjs new file mode 100644 index 
0000000000000..895c39a745e2f --- /dev/null +++ b/components/ollama/actions/copy-model/copy-model.mjs @@ -0,0 +1,50 @@ +import app from "../../ollama.app.mjs"; + +export default { + key: "ollama-copy-model", + name: "Copy Model", + description: "Copies a model, creating a model with another name from an existing model. [See the documentation](https://github.com/ollama/ollama/blob/main/docs/api.md#copy-a-model).", + version: "0.0.1", + type: "action", + props: { + app, + source: { + propDefinition: [ + app, + "model", + ], + }, + destination: { + type: "string", + label: "New Model Name", + description: "The new name for the copied model.", + }, + }, + methods: { + copyModel(args = {}) { + return this.app.post({ + path: "/copy", + ...args, + }); + }, + }, + async run({ $ }) { + const { + copyModel, + source, + destination, + } = this; + + await copyModel({ + $, + data: { + source, + destination, + }, + }); + $.export("$summary", "Successfully copied model."); + return { + success: true, + }; + }, +}; diff --git a/components/ollama/actions/create-model/create-model.mjs b/components/ollama/actions/create-model/create-model.mjs new file mode 100644 index 0000000000000..367625a53b44e --- /dev/null +++ b/components/ollama/actions/create-model/create-model.mjs @@ -0,0 +1,55 @@ +import app from "../../ollama.app.mjs"; + +export default { + key: "ollama-create-model", + name: "Create Model", + description: "Create a model from a modelfile. [See the documentation](https://github.com/ollama/ollama/blob/main/docs/api.md#create-a-model).", + version: "0.0.1", + type: "action", + props: { + app, + name: { + type: "string", + label: "Name", + description: "The name of the model.", + }, + modelfile: { + type: "string", + label: "Model File", + description: "Contents of the Modelfile. Eg. 
`FROM llama3 SYSTEM You are mario from Super Mario Bros`", + }, + stream: { + propDefinition: [ + app, + "stream", + ], + }, + }, + methods: { + createModel(args = {}) { + return this.app.post({ + path: "/create", + ...args, + }); + }, + }, + async run({ $ }) { + const { + createModel, + name, + modelfile, + stream, + } = this; + + const response = await createModel({ + $, + data: { + name, + modelfile, + stream, + }, + }); + $.export("$summary", "Successfully created model."); + return response; + }, +}; diff --git a/components/ollama/actions/delete-model/delete-model.mjs b/components/ollama/actions/delete-model/delete-model.mjs new file mode 100644 index 0000000000000..f475c1391113e --- /dev/null +++ b/components/ollama/actions/delete-model/delete-model.mjs @@ -0,0 +1,43 @@ +import app from "../../ollama.app.mjs"; + +export default { + key: "ollama-delete-model", + name: "Delete Model", + description: "Delete a model and its data. [See the documentation](https://github.com/ollama/ollama/blob/main/docs/api.md#delete-a-model)", + version: "0.0.1", + type: "action", + props: { + app, + name: { + propDefinition: [ + app, + "model", + ], + }, + }, + methods: { + deleteModel(args = {}) { + return this.app.delete({ + path: "/delete", + ...args, + }); + }, + }, + async run({ $ }) { + const { + deleteModel, + name, + } = this; + + await deleteModel({ + $, + data: { + name, + }, + }); + $.export("$summary", "Successfully deleted model."); + return { + success: true, + }; + }, +}; diff --git a/components/ollama/actions/generate-chat-completion/generate-chat-completion.mjs b/components/ollama/actions/generate-chat-completion/generate-chat-completion.mjs new file mode 100644 index 0000000000000..3fb01de635b74 --- /dev/null +++ b/components/ollama/actions/generate-chat-completion/generate-chat-completion.mjs @@ -0,0 +1,82 @@ +import app from "../../ollama.app.mjs"; +import utils from "../../common/utils.mjs"; + +export default { + key: "ollama-generate-chat-completion", + 
name: "Generate Chat Completion", + description: "Generates the next message in a chat with a provided model. [See the documentation](https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-chat-completion).", + version: "0.0.1", + type: "action", + props: { + app, + model: { + propDefinition: [ + app, + "model", + ], + }, + messages: { + type: "string[]", + label: "Messages", + description: "The messages of the chat, this can be used to keep a chat memory. Each row should be set as a JSON format string. Eg. `{\"role\": \"user\", \"content\": \"Hello\"}`. The message object has the following fields:\n- `role`: the role of the message, either `system`, `user`, `assistant`, or `tool`.\n- `content`: The content of the message.\n- `images` (optional): a list of images to include in the message (for multimodal models such as `llava`).\n- `tool_calls`(optional): a list of tools the model wants to use.", + }, + tools: { + type: "string[]", + label: "Tools", + description: "A list of tools the model can use. 
Each row should be set as a JSON format string.", + optional: true, + }, + options: { + propDefinition: [ + app, + "options", + ], + }, + stream: { + propDefinition: [ + app, + "stream", + ], + }, + keepAlive: { + propDefinition: [ + app, + "keepAlive", + ], + }, + }, + methods: { + generateChatCompletion(args = {}) { + return this.app.post({ + path: "/chat", + ...args, + }); + }, + }, + async run({ $ }) { + const { + generateChatCompletion, + model, + messages, + tools, + options, + stream, + keepAlive, + } = this; + + const response = await generateChatCompletion({ + $, + data: { + model, + messages: utils.parseArray(messages), + tools: utils.parseArray(tools), + options: utils.parseOptions(options), + stream, + keep_alive: keepAlive, + }, + }); + + $.export("$summary", "Successfully generated chat completion."); + return response; + }, +}; diff --git a/components/ollama/actions/generate-completion/generate-completion.mjs b/components/ollama/actions/generate-completion/generate-completion.mjs new file mode 100644 index 0000000000000..28d90fe1f3a9a --- /dev/null +++ b/components/ollama/actions/generate-completion/generate-completion.mjs @@ -0,0 +1,90 @@ +import utils from "../../common/utils.mjs"; +import app from "../../ollama.app.mjs"; + +export default { + key: "ollama-generate-completion", + name: "Generate Completion", + description: "Generates a response for a given prompt with a provided model. 
[See the documentation](https://github.com/ollama/ollama/blob/main/docs/api.md#generate-a-completion).", + version: "0.0.1", + type: "action", + props: { + app, + model: { + propDefinition: [ + app, + "model", + ], + }, + prompt: { + propDefinition: [ + app, + "prompt", + ], + }, + suffix: { + propDefinition: [ + app, + "suffix", + ], + }, + images: { + propDefinition: [ + app, + "images", + ], + }, + options: { + propDefinition: [ + app, + "options", + ], + }, + stream: { + propDefinition: [ + app, + "stream", + ], + }, + keepAlive: { + propDefinition: [ + app, + "keepAlive", + ], + }, + }, + methods: { + generateCompletion(args = {}) { + return this.app.post({ + path: "/generate", + ...args, + }); + }, + }, + async run({ $ }) { + const { + generateCompletion, + model, + prompt, + suffix, + images, + options, + stream, + keepAlive, + } = this; + + const response = await generateCompletion({ + $, + data: { + model, + prompt, + suffix, + images, + options: utils.parseOptions(options), + stream, + keep_alive: keepAlive, + }, + }); + $.export("$summary", "Successfully generated completion."); + return response; + }, +}; diff --git a/components/ollama/actions/generate-embeddings/generate-embeddings.mjs b/components/ollama/actions/generate-embeddings/generate-embeddings.mjs new file mode 100644 index 0000000000000..954f11f29f062 --- /dev/null +++ b/components/ollama/actions/generate-embeddings/generate-embeddings.mjs @@ -0,0 +1,73 @@ +import utils from "../../common/utils.mjs"; +import app from "../../ollama.app.mjs"; + +export default { + key: "ollama-generate-embeddings", + name: "Generate Embeddings", + description: "Generate embeddings from a model. 
[See the documentation](https://github.com/ollama/ollama/blob/main/docs/api.md#generate-embeddings).", + version: "0.0.1", + type: "action", + props: { + app, + model: { + propDefinition: [ + app, + "model", + ], + }, + input: { + type: "string[]", + label: "Input", + description: "The list of texts to generate embeddings for.", + }, + truncate: { + type: "boolean", + label: "Truncate", + description: "Truncates the end of each input to fit within context length. Returns error if `false` and context length is exceeded. Defaults to `true`.", + optional: true, + }, + options: { + propDefinition: [ + app, + "options", + ], + }, + keepAlive: { + propDefinition: [ + app, + "keepAlive", + ], + }, + }, + methods: { + generateEmbeddings(args = {}) { + return this.app.post({ + path: "/embed", + ...args, + }); + }, + }, + async run({ $ }) { + const { + generateEmbeddings, + model, + input, + truncate, + options, + keepAlive, + } = this; + + const response = await generateEmbeddings({ + $, + data: { + model, + input, + truncate, + options: utils.parseOptions(options), + keep_alive: keepAlive, + }, + }); + $.export("$summary", "Successfully generated embeddings."); + return response; + }, +}; diff --git a/components/ollama/actions/list-local-models/list-local-models.mjs b/components/ollama/actions/list-local-models/list-local-models.mjs new file mode 100644 index 0000000000000..250a910f6dd74 --- /dev/null +++ b/components/ollama/actions/list-local-models/list-local-models.mjs @@ -0,0 +1,19 @@ +import app from "../../ollama.app.mjs"; + +export default { + key: "ollama-list-local-models", + name: "List Local Models", + description: "List models that are available locally. 
[See the documentation](https://github.com/ollama/ollama/blob/main/docs/api.md#list-local-models).", + version: "0.0.1", + type: "action", + props: { + app, + }, + async run({ $ }) { + const response = await this.app.listLocalModels({ + $, + }); + $.export("$summary", "Successfully listed local models."); + return response; + }, +}; diff --git a/components/ollama/actions/pull-model/pull-model.mjs b/components/ollama/actions/pull-model/pull-model.mjs new file mode 100644 index 0000000000000..2121c18bf972e --- /dev/null +++ b/components/ollama/actions/pull-model/pull-model.mjs @@ -0,0 +1,56 @@ +import app from "../../ollama.app.mjs"; + +export default { + key: "ollama-pull-model", + name: "Pull Model", + description: "Download a model from the ollama library. Cancelled pulls are resumed from where they left off, and multiple calls will share the same download progress. [See the documentation](https://github.com/ollama/ollama/blob/main/docs/api.md#pull-a-model).", + version: "0.0.1", + type: "action", + props: { + app, + name: { + type: "string", + label: "Model Name", + description: "The name of the model to pull.", + }, + insecure: { + propDefinition: [ + app, + "insecure", + ], + }, + stream: { + propDefinition: [ + app, + "stream", + ], + }, + }, + methods: { + pullModel(args = {}) { + return this.app.post({ + path: "/pull", + ...args, + }); + }, + }, + async run({ $ }) { + const { + pullModel, + name, + insecure, + stream, + } = this; + + const response = await pullModel({ + $, + data: { + name, + insecure, + stream, + }, + }); + $.export("$summary", "Successfully pulled model."); + return response; + }, +}; diff --git a/components/ollama/actions/push-model/push-model.mjs b/components/ollama/actions/push-model/push-model.mjs new file mode 100644 index 0000000000000..fa7b474b5351f --- /dev/null +++ b/components/ollama/actions/push-model/push-model.mjs @@ -0,0 +1,59 @@ +import app from "../../ollama.app.mjs"; + +export default { + key: "ollama-push-model", + 
name: "Push Model to Library", + description: "Upload a model to a model library. Requires registering for ollama.ai and adding a public key first. [See the documentation](https://github.com/ollama/ollama/blob/main/docs/api.md#push-a-model).", + version: "0.0.1", + type: "action", + props: { + app, + name: { + description: "Name of the model to push in the form of `<namespace>/<model>:<tag>`. Please make sure you follow [the instructions in this issue](https://github.com/ollama/ollama/issues/1140#issuecomment-1814823949) in order to push a model to your own library in [ollama.com](https://ollama.com/).", + propDefinition: [ + app, + "model", + ], + }, + insecure: { + propDefinition: [ + app, + "insecure", + ], + }, + stream: { + propDefinition: [ + app, + "stream", + ], + }, + }, + methods: { + pushModel(args = {}) { + return this.app.post({ + path: "/push", + ...args, + }); + }, + }, + async run({ $ }) { + const { + pushModel, + name, + insecure, + stream, + } = this; + + const response = await pushModel({ + $, + data: { + name, + insecure, + stream, + }, + }); + + $.export("$summary", "Successfully pushed model."); + return response; + }, +}; diff --git a/components/ollama/actions/show-model-information/show-model-information.mjs b/components/ollama/actions/show-model-information/show-model-information.mjs new file mode 100644 index 0000000000000..bbb82666f7c82 --- /dev/null +++ b/components/ollama/actions/show-model-information/show-model-information.mjs @@ -0,0 +1,51 @@ +import app from "../../ollama.app.mjs"; + +export default { + key: "ollama-show-model-information", + name: "Show Model Information", + description: "Show information about a model including details, modelfile, template, parameters, license, and system prompt. 
[See the documentation](https://github.com/ollama/ollama/blob/main/docs/api.md#show-model-information).", + version: "0.0.1", + type: "action", + props: { + app, + name: { + propDefinition: [ + app, + "model", + ], + }, + verbose: { + type: "boolean", + label: "Verbose", + description: "Show verbose output.", + optional: true, + }, + }, + methods: { + getModelInfo(args = {}) { + return this.app.post({ + path: "/show", + ...args, + }); + }, + }, + async run({ $ }) { + const { + getModelInfo, + name, + verbose, + } = this; + + const response = await getModelInfo({ + $, + data: { + name, + verbose, + }, + }); + + $.export("$summary", "Successfully retrieved model information."); + + return response; + }, +}; diff --git a/components/ollama/common/utils.mjs b/components/ollama/common/utils.mjs new file mode 100644 index 0000000000000..ac8fa4db58f74 --- /dev/null +++ b/components/ollama/common/utils.mjs @@ -0,0 +1,87 @@ +import { ConfigurationError } from "@pipedream/platform"; + +const parseJson = (input) => { + const parse = (value) => { + if (typeof(value) === "string") { + try { + return parseJson(JSON.parse(value)); + } catch (e) { + return value; + } + } else if (typeof(value) === "object" && value !== null) { + return Object.entries(value) + .reduce((acc, [ + key, + val, + ]) => Object.assign(acc, { + [key]: parse(val), + }), {}); + } + return value; + }; + + return parse(input); +}; + +function parseArray(value) { + try { + if (!value) { + return []; + } + + if (Array.isArray(value)) { + return value; + } + + const parsedValue = JSON.parse(value); + + if (!Array.isArray(parsedValue)) { + throw new Error("Not an array"); + } + + return parsedValue; + + } catch (e) { + throw new ConfigurationError("Make sure the custom expression contains a valid array object"); + } +} + +function isJson(value) { + try { + JSON.parse(value); + } catch (e) { + return false; + } + return true; +} + +function parseOptions(options) { + if (!options) { + return; + } + return 
Object.fromEntries( + Object.entries(options) + .map(([ + key, + value, + ]) => { + let parsedValue = isNaN(value) + ? value + : Number(value); + + parsedValue = isJson(value) + ? JSON.parse(value) + : parsedValue; + + return [ + key, + parsedValue, + ]; + }), + ); +} + +export default { + parseArray: (value) => parseArray(value).map(parseJson), + parseOptions, +}; diff --git a/components/ollama/ollama.app.mjs b/components/ollama/ollama.app.mjs index fd8f8a2f806a0..3066adcd87d0e 100644 --- a/components/ollama/ollama.app.mjs +++ b/components/ollama/ollama.app.mjs @@ -1,11 +1,100 @@ +import { axios } from "@pipedream/platform"; + export default { type: "app", app: "ollama", - propDefinitions: {}, + propDefinitions: { + model: { + type: "string", + label: "Model Name", + description: "Model names follow a `model:tag` format, where model can have an optional namespace such as `example/model`. Some examples are `orca-mini:3b-q4_1` and `llama3:70b`. The tag is optional and, if not provided, will default to latest. The tag is used to identify a specific version.", + async options() { + const { models } = await this.listLocalModels(); + return models.map(({ name }) => name); + }, + }, + prompt: { + type: "string", + label: "Prompt", + description: "The prompt to generate a response for.", + }, + suffix: { + type: "string", + label: "Suffix", + description: "The text after the model response.", + optional: true, + }, + images: { + type: "string[]", + label: "Images", + description: "A list of base64-encoded images (for multimodal models such as `llava`).", + optional: true, + }, + options: { + type: "object", + label: "Advanced Options", + description: "Additional model parameters listed in the documentation for the [Modelfile](https://github.com/ollama/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values) such as `temperature`", + optional: true, + }, + insecure: { + type: "boolean", + label: "Insecure", + description: "Allow insecure connections to the library. 
Only use this if you are pulling from your own library during development.", + optional: true, + }, + stream: { + type: "boolean", + label: "Stream", + description: "If `false` the response will be returned as a single response object, rather than a stream of objects.", + optional: true, + default: false, + }, + keepAlive: { + type: "string", + label: "Keep Alive", + description: "Controls how long the model will stay loaded into memory following the request (default: 5m).", + optional: true, + }, + }, methods: { - // this.$auth contains connected account data - authKeys() { - console.log(Object.keys(this.$auth)); + getUrl(path) { + return `${this.$auth.url}/api${path}`; + }, + getHeaders(headers) { + const { apiKey } = this.$auth; + return { + ...headers, + ...(apiKey && { + Authorization: `Bearer ${apiKey}`, + }), + }; + }, + _makeRequest({ + $ = this, path, headers, ...args + } = {}) { + return axios($, { + ...args, + url: this.getUrl(path), + headers: this.getHeaders(headers), + }); + }, + post(args = {}) { + return this._makeRequest({ + method: "POST", + ...args, + }); + }, + delete(args = {}) { + return this._makeRequest({ + method: "DELETE", + ...args, + }); + }, + listLocalModels(args = {}) { + return this._makeRequest({ + path: "/tags", + ...args, + }); }, }, }; diff --git a/components/ollama/package.json b/components/ollama/package.json index 8db90e984d109..00f24ba705318 100644 --- a/components/ollama/package.json +++ b/components/ollama/package.json @@ -1,6 +1,6 @@ { "name": "@pipedream/ollama", - "version": "0.0.1", + "version": "0.1.0", "description": "Pipedream Ollama Components", "main": "ollama.app.mjs", "keywords": [ @@ -11,5 +11,8 @@ "author": "Pipedream (https://pipedream.com/)", "publishConfig": { "access": "public" + }, + "dependencies": { + "@pipedream/platform": "3.0.3" } -} \ No newline at end of file +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1145af726f4d9..16b1c71203506 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -6677,7 
+6677,10 @@ importers: '@pipedream/platform': 2.0.0 components/ollama: - specifiers: {} + specifiers: + '@pipedream/platform': 3.0.3 + dependencies: + '@pipedream/platform': 3.0.3 components/omise: specifiers: