61 changes: 61 additions & 0 deletions components/mistral_ai/actions/create-batch-job/create-batch-job.mjs
@@ -0,0 +1,61 @@
import mistralAI from "../../mistral_ai.app.mjs";
import constants from "../../common/constants.mjs";
import { parseObj } from "../../common/utils.mjs";

export default {
key: "mistral_ai-create-batch-job",
name: "Create Batch Job",
description: "Create a new batch job, it will be queued for processing. [See the Documentation](https://docs.mistral.ai/api/#tag/batch/operation/jobs_api_routes_batch_get_batch_jobs)",
version: "0.0.1",
type: "action",
props: {
mistralAI,
inputFiles: {
propDefinition: [
mistralAI,
"fileIds",
],
},
modelId: {
propDefinition: [
mistralAI,
"modelId",
],
},
endpoint: {
type: "string",
label: "Endpoint",
description: "The endpoint to use for the batch job",
options: constants.BATCH_JOB_ENDPOINT_OPTIONS,
},
metadata: {
type: "object",
label: "Metadata",
description: "Optional metadata for the batch job in JSON format.",
optional: true,
},
timeoutHours: {
type: "integer",
label: "Timeout Hours",
description: "Optional timeout duration for the batch job in hours.",
optional: true,
default: 24,
},
},
async run({ $ }) {
const response = await this.mistralAI.createBatchJob({
$,
data: {
input_files: this.inputFiles,
endpoint: this.endpoint,
model: this.modelId,
metadata: parseObj(this.metadata),
timeout_hours: this.timeoutHours,
},
});
if (response?.id) {
$.export("$summary", `Successfully created batch job with ID: ${response.id}`);
}
return response;
},
};
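The app methods used by these actions (createBatchJob here, and the others below) live in mistral_ai.app.mjs, which this PR view doesn't include. A minimal sketch of the plumbing they imply, assuming the usual @pipedream/platform axios wrapper, an api_key auth field, and a POST /v1/batch/jobs route — all assumptions, not code from this PR:

import { axios } from "@pipedream/platform";

// Hypothetical sketch of mistral_ai.app.mjs (not in this diff).
export default {
  type: "app",
  app: "mistral_ai",
  methods: {
    _baseUrl() {
      return "https://api.mistral.ai/v1";
    },
    _makeRequest({
      $ = this, path, headers, ...opts
    }) {
      return axios($, {
        url: `${this._baseUrl()}${path}`,
        headers: {
          ...headers,
          Authorization: `Bearer ${this.$auth.api_key}`,
        },
        // method, data, responseType, etc. pass straight through to axios
        ...opts,
      });
    },
    createBatchJob(opts = {}) {
      return this._makeRequest({
        method: "POST",
        path: "/batch/jobs",
        ...opts,
      });
    },
  },
};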
32 changes: 32 additions & 0 deletions components/mistral_ai/actions/create-embeddings/create-embeddings.mjs
@@ -0,0 +1,32 @@
import mistralAI from "../../mistral_ai.app.mjs";
import { parseArray } from "../../common/utils.mjs";
import constants from "../../common/constants.mjs";

export default {
key: "mistral_ai-create-embeddings",
name: "Create Embeddings",
description: "Create new embedding in Mistral AI. [See the Documentation](https://docs.mistral.ai/api/#tag/embeddings)",
version: "0.0.1",
type: "action",
props: {
mistralAI,
input: {
type: "string",
label: "Input",
description: "The input text for which to create an embedding. May be a string or an array of strings.",
},
},
async run({ $ }) {
const response = await this.mistralAI.createEmbeddings({
$,
data: {
model: constants.EMBEDDINGS_MODEL,
input: parseArray(this.input),
},
});
if (response?.id) {
$.export("$summary", `Successfully created embedding with ID: ${response.id}`);
}
return response;
},
};
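Both helper imports above — parseObj in Create Batch Job and parseArray here — come from common/utils.mjs, which isn't part of this diff. A plausible sketch of both, consistent with how the actions use them (the bodies are assumptions, not the repo's implementation):

// Hypothetical sketch of common/utils.mjs (not in this diff).

// Normalizes a prop that may arrive as a JSON string or as an
// already-parsed object; returns undefined for empty input.
export function parseObj(obj) {
  if (!obj) {
    return undefined;
  }
  if (typeof obj === "string") {
    try {
      return JSON.parse(obj);
    } catch (err) {
      throw new Error(`Could not parse value as JSON: ${obj}`);
    }
  }
  return obj;
}

// Wraps a plain string in an array and parses JSON-encoded arrays, so a
// single string prop can carry either form.
export function parseArray(input) {
  if (Array.isArray(input)) {
    return input;
  }
  if (typeof input === "string" && input.trim().startsWith("[")) {
    try {
      return JSON.parse(input);
    } catch (err) {
      // not valid JSON; treat it as a plain string
    }
  }
  return [
    input,
  ];
}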
51 changes: 51 additions & 0 deletions components/mistral_ai/actions/download-batch-job-results/download-batch-job-results.mjs
@@ -0,0 +1,51 @@
import mistralAI from "../../mistral_ai.app.mjs";
import fs from "fs";

export default {
key: "mistral_ai-download-batch-job-results",
name: "Download Batch Job Results",
description: "Download a batch job results file to the /tmp directory. [See the Documentation](https://docs.mistral.ai/api/#tag/files/operation/files_api_routes_download_file)",
version: "0.0.1",
type: "action",
props: {
mistralAI,
fileId: {
propDefinition: [
mistralAI,
"fileIds",
() => ({
sampleType: "batch_result",
}),
],
type: "string",
label: "File ID",
description: "The identifier of a batch result file to download",
},
filename: {
type: "string",
label: "File Name",
description: "The filename to save the results file in the /tmp directory",
},
},
async run({ $ }) {
const response = await this.mistralAI.downloadFile({
$,
fileId: this.fileId,
responseType: "arraybuffer",
});

const buffer = Buffer.isBuffer(response)
? response
: Buffer.from(response);
const filename = this.filename;
const filePath = `/tmp/${filename}`;
fs.writeFileSync(filePath, buffer);

$.export("$summary", "Successfully downloaded batch results file");

return [
filename,
filePath,
];
},
};
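`downloadFile` would follow the same pattern as the plumbing sketched earlier; note that the responseType: "arraybuffer" option set by the action needs to reach axios so the binary content survives. A fragment for the hypothetical methods block (the /files/{id}/content route matches Mistral's documented download path, but treat the helper shape as an assumption):

// Hypothetical sketch: downloadFile inside the methods block of
// mistral_ai.app.mjs. responseType: "arraybuffer" flows through
// _makeRequest to axios, so binary content is returned intact.
downloadFile({ fileId, ...opts }) {
  return this._makeRequest({
    path: `/files/${fileId}/content`,
    ...opts,
  });
},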
76 changes: 76 additions & 0 deletions components/mistral_ai/actions/generate-text/generate-text.mjs
@@ -0,0 +1,76 @@
import mistralAI from "../../mistral_ai.app.mjs";

export default {
key: "mistral_ai-generate-text",
name: "Generate Text",
description: "Generate text using Mistral AI models. [See the Documentation](https://docs.mistral.ai/api/#tag/chat/operation/chat_completion_v1_chat_completions_post)",
version: "0.0.1",
type: "action",
props: {
mistralAI,
message: {
type: "string",
label: "Message",
description: "The prompt message to send",
},
modelId: {
propDefinition: [
mistralAI,
"modelId",
],
},
temperature: {
type: "string",
label: "Temperature",
description: "The sampling temperature to use, we recommend between 0.0 and 0.7. Higher values like 0.7 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or top_p but not both. The default value varies depending on the model you are targeting.",
optional: true,
},
topP: {
type: "string",
label: "Top P",
description: "Nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both.",
optional: true,
},
maxTokens: {
type: "integer",
label: "Max Tokens",
description: "The maximum number of tokens to generate in the completion. The token count of your prompt plus max_tokens cannot exceed the model's context length.",
optional: true,
},
randomSeed: {
type: "integer",
label: "Random Seed",
description: "The seed to use for random sampling. If set, different calls will generate deterministic results.",
optional: true,
},
n: {
type: "integer",
label: "N",
description: "Number of completions to return for each request, input tokens are only billed once.",
optional: true,
},
},
async run({ $ }) {
const response = await this.mistralAI.sendPrompt({
$,
data: {
model: this.modelId,
messages: [
{
content: this.message,
role: "user",
},
],
temperature: this.temperature && +this.temperature,
top_p: this.topP && +this.topP,
max_tokens: this.maxTokens,
random_seed: this.randomSeed,
n: this.n,
},
});
if (response?.id) {
$.export("$summary", `Successfully retrieved response with ID: ${response.id}`);
}
return response;
},
};
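`sendPrompt` presumably wraps Mistral's POST /v1/chat/completions endpoint. A sketch of how it might look inside the methods block of mistral_ai.app.mjs, reusing the hypothetical _makeRequest helper sketched earlier:

// Hypothetical sketch: sendPrompt inside the methods block of
// mistral_ai.app.mjs, forwarding the request body built by this action.
sendPrompt(opts = {}) {
  return this._makeRequest({
    method: "POST",
    path: "/chat/completions",
    ...opts,
  });
},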
28 changes: 28 additions & 0 deletions components/mistral_ai/actions/get-batch-job-details/get-batch-job-details.mjs
@@ -0,0 +1,28 @@
import mistralAI from "../../mistral_ai.app.mjs";

export default {
key: "mistral_ai-get-batch-job-details",
name: "Get Batch Job Details",
description: "Get the details of a batch job by its ID. [See the Documentation](https://docs.mistral.ai/api/#tag/batch/operation/jobs_api_routes_batch_get_batch_job)",
version: "0.0.1",
type: "action",
props: {
mistralAI,
batchJobId: {
propDefinition: [
mistralAI,
"batchJobId",
],
},
},
async run({ $ }) {
const response = await this.mistralAI.getBatchJob({
$,
jobId: this.batchJobId,
});
if (response?.id) {
$.export("$summary", `Successfully retrieved details for batch job with ID: ${this.batchJobId}`);
}
return response;
},
};
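`getBatchJob` would fetch a single job by ID; Mistral documents this as GET /v1/batch/jobs/{job_id}. A fragment under the same _makeRequest assumption:

// Hypothetical sketch: getBatchJob inside the methods block of
// mistral_ai.app.mjs.
getBatchJob({ jobId, ...opts }) {
  return this._makeRequest({
    path: `/batch/jobs/${jobId}`,
    ...opts,
  });
},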
23 changes: 23 additions & 0 deletions components/mistral_ai/actions/list-models/list-models.mjs
@@ -0,0 +1,23 @@
import mistralAI from "../../mistral_ai.app.mjs";

export default {
key: "mistral_ai-list-models",
name: "List Models",
description: "Retrieve a list of available Mistral AI models that the user is authorized to access. [See the Documentation](https://docs.mistral.ai/api/#tag/models)",
version: "0.0.1",
type: "action",
props: {
mistralAI,
},
async run({ $ }) {
const { data } = await this.mistralAI.listModels({
$,
});
if (data?.length) {
$.export("$summary", `Successfully retrieved ${data.length} model${data.length === 1
? ""
: "s"}`);
}
return data;
},
};
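`listModels` returns Mistral's standard list envelope ({ object, data, ... }), which is why the action destructures data. A one-line fragment under the same assumptions:

// Hypothetical sketch: listModels inside the methods block of
// mistral_ai.app.mjs.
listModels(opts = {}) {
  return this._makeRequest({
    path: "/models",
    ...opts,
  });
},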
59 changes: 59 additions & 0 deletions components/mistral_ai/actions/upload-file/upload-file.mjs
@@ -0,0 +1,59 @@
import mistralAI from "../../mistral_ai.app.mjs";
import { ConfigurationError } from "@pipedream/platform";
import fs from "fs";
import FormData from "form-data";

export default {
key: "mistral_ai-upload-file",
name: "Upload File",
description: "Upload a file that can be used across various endpoints. [See the Documentation](https://docs.mistral.ai/api/#tag/files/operation/files_api_routes_upload_file)",
version: "0.0.1",
type: "action",
props: {
mistralAI,
filePath: {
type: "string",
label: "File Path",
description: "The path to a file in the `/tmp` directory. The size of individual files can be a maximum of 512 MB. The Fine-tuning API only supports .jsonl files. [See the Pipedream documentation on working with files](https://pipedream.com/docs/code/nodejs/working-with-files/#writing-a-file-to-tmp)",
},
purpose: {
type: "string",
label: "Purpose",
description: "The purpose of the file",
options: [
"fine-tune",
"batch",
"ocr",
],
optional: true,
},
},
async run({ $ }) {
const filePath = this.filePath.startsWith("/tmp/")
? this.filePath
: `/tmp/${this.filePath}`;

if (!fs.existsSync(filePath)) {
throw new ConfigurationError(`File \`${filePath}\` not found`);
}

const fileContent = fs.createReadStream(filePath);
const form = new FormData();
form.append("file", fileContent);
if (this.purpose) {
form.append("purpose", this.purpose);
}

const response = await this.mistralAI.uploadFile({
$,
data: form,
headers: form.getHeaders(),
});

if (response?.filename) {
$.export("$summary", `Successfully uploaded file: ${response.filename}`);
}

return response;
},
};
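`uploadFile` would similarly forward the multipart form to Mistral's POST /v1/files endpoint; the boundary-bearing headers come from form.getHeaders(), which is why the action passes them explicitly. A sketch under the same _makeRequest assumption:

// Hypothetical sketch: uploadFile inside the methods block of
// mistral_ai.app.mjs. The headers option carries the multipart boundary
// from form.getHeaders(); _makeRequest merges it with the auth header.
uploadFile(opts = {}) {
  return this._makeRequest({
    method: "POST",
    path: "/files",
    ...opts,
  });
},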
26 changes: 26 additions & 0 deletions components/mistral_ai/common/constants.mjs
@@ -0,0 +1,26 @@
const DEFAULT_PAGE_SIZE = 100;

const EMBEDDINGS_MODEL = "mistral-embed";

const BATCH_JOB_STATUS_OPTIONS = [
"SUCCESS",
"FAILED",
"TIMEOUT_EXCEEDED",
"CANCELLATION_REQUESTED",
"CANCELLED",
];

const BATCH_JOB_ENDPOINT_OPTIONS = [
"/v1/chat/completions",
"/v1/embeddings",
"/v1/fim/completions",
"/v1/moderations",
"/v1/chat/moderations",
];

export default {
DEFAULT_PAGE_SIZE,
EMBEDDINGS_MODEL,
BATCH_JOB_STATUS_OPTIONS,
BATCH_JOB_ENDPOINT_OPTIONS,
};
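For context on how these constants fit together: BATCH_JOB_ENDPOINT_OPTIONS feeds the endpoint prop of Create Batch Job, and each input file is a .jsonl document whose lines pair a custom_id with a request body for that endpoint (the model is set at the job level). An illustrative line for /v1/chat/completions — the field names follow Mistral's batch format as best understood here, so treat them as an assumption:

{"custom_id": "request-0", "body": {"max_tokens": 128, "messages": [{"role": "user", "content": "Summarize this document."}]}}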