diff --git a/components/mistral_ai/actions/create-batch-job/create-batch-job.mjs b/components/mistral_ai/actions/create-batch-job/create-batch-job.mjs new file mode 100644 index 0000000000000..0c32b766ea2bf --- /dev/null +++ b/components/mistral_ai/actions/create-batch-job/create-batch-job.mjs @@ -0,0 +1,61 @@ +import mistralAI from "../../mistral_ai.app.mjs"; +import constants from "../../common/constants.mjs"; +import { parseObj } from "../../common/utils.mjs"; + +export default { + key: "mistral_ai-create-batch-job", + name: "Create Batch Job", + description: "Create a new batch job, it will be queued for processing. [See the Documentation](https://docs.mistral.ai/api/#tag/batch/operation/jobs_api_routes_batch_get_batch_jobs)", + version: "0.0.1", + type: "action", + props: { + mistralAI, + inputFiles: { + propDefinition: [ + mistralAI, + "fileIds", + ], + }, + modelId: { + propDefinition: [ + mistralAI, + "modelId", + ], + }, + endpoint: { + type: "string", + label: "Endpoint", + description: "The endpoint to use for the batch job", + options: constants.BATCH_JOB_ENDPOINT_OPTIONS, + }, + metadata: { + type: "object", + label: "Metadata", + description: "Optional metadata for the batch job in JSON format.", + optional: true, + }, + timeoutHours: { + type: "integer", + label: "Timeout Hours", + description: "Optional timeout duration for the batch job in hours.", + optional: true, + default: 24, + }, + }, + async run({ $ }) { + const response = await this.mistralAI.createBatchJob({ + $, + data: { + input_files: this.inputFiles, + endpoint: this.endpoint, + model: this.modelId, + metadata: parseObj(this.metadata), + timeout_hours: this.timeoutHours, + }, + }); + if (response?.id) { + $.export("$summary", `Successfully created batch job with ID: ${response.id}`); + } + return response; + }, +}; diff --git a/components/mistral_ai/actions/create-embeddings/create-embeddings.mjs b/components/mistral_ai/actions/create-embeddings/create-embeddings.mjs new file mode 
100644 index 0000000000000..5b8af6258943a --- /dev/null +++ b/components/mistral_ai/actions/create-embeddings/create-embeddings.mjs @@ -0,0 +1,32 @@ +import mistralAI from "../../mistral_ai.app.mjs"; +import { parseArray } from "../../common/utils.mjs"; +import constants from "../../common/constants.mjs"; + +export default { + key: "mistral_ai-create-embeddings", + name: "Create Embeddings", + description: "Create new embedding in Mistral AI. [See the Documentation](https://docs.mistral.ai/api/#tag/embeddings)", + version: "0.0.1", + type: "action", + props: { + mistralAI, + input: { + type: "string", + label: "Input", + description: "The input text for which to create an embedding. May be a string or an array of strings.", + }, + }, + async run({ $ }) { + const response = await this.mistralAI.createEmbeddings({ + $, + data: { + model: constants.EMBEDDINGS_MODEL, + input: parseArray(this.input), + }, + }); + if (response?.id) { + $.export("$summary", `Successfully created embedding with ID: ${response.id}`); + } + return response; + }, +}; diff --git a/components/mistral_ai/actions/download-batch-job-results/download-batch-job-results.mjs b/components/mistral_ai/actions/download-batch-job-results/download-batch-job-results.mjs new file mode 100644 index 0000000000000..8a1b7141b5171 --- /dev/null +++ b/components/mistral_ai/actions/download-batch-job-results/download-batch-job-results.mjs @@ -0,0 +1,51 @@ +import mistralAI from "../../mistral_ai.app.mjs"; +import fs from "fs"; + +export default { + key: "mistral_ai-download-batch-job-results", + name: "Download Batch Job Results", + description: "Download a batch job results file to the /tmp directory. 
[See the Documentation](https://docs.mistral.ai/api/#tag/files/operation/files_api_routes_download_file)", + version: "0.0.1", + type: "action", + props: { + mistralAI, + fileId: { + propDefinition: [ + mistralAI, + "fileIds", + () => ({ + sampleType: "batch_result", + }), + ], + type: "string", + label: "File ID", + description: "The identifier of a batch result file to download", + }, + filename: { + type: "string", + label: "File Name", + description: "The filename to save the results file in the /tmp directory", + }, + }, + async run({ $ }) { + const response = await this.mistralAI.downloadFile({ + $, + fileId: this.fileId, + responseType: "arraybuffer", + }); + + const buffer = Buffer.isBuffer(response) + ? response + : Buffer.from(response); + const filename = this.filename; + const filePath = `/tmp/${filename}`; + fs.writeFileSync(filePath, buffer); + + $.export("$summary", "Successfully downloaded batch results file"); + + return [ + filename, + filePath, + ]; + }, +}; diff --git a/components/mistral_ai/actions/generate-text/generate-text.mjs b/components/mistral_ai/actions/generate-text/generate-text.mjs new file mode 100644 index 0000000000000..4de61ba1a40fc --- /dev/null +++ b/components/mistral_ai/actions/generate-text/generate-text.mjs @@ -0,0 +1,76 @@ +import mistralAI from "../../mistral_ai.app.mjs"; + +export default { + key: "mistral_ai-generate-text", + name: "Generate Text", + description: "Generate text using Mistral AI models. [See the Documentation](https://docs.mistral.ai/api/#tag/chat/operation/chat_completion_v1_chat_completions_post)", + version: "0.0.1", + type: "action", + props: { + mistralAI, + message: { + type: "string", + label: "Message", + description: "The prompt message to send", + }, + modelId: { + propDefinition: [ + mistralAI, + "modelId", + ], + }, + temperature: { + type: "string", + label: "Temperature", + description: "The sampling temperature to use, we recommend between 0.0 and 0.7. 
Higher values like 0.7 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or top_p but not both. The default value varies depending on the model you are targeting.", + optional: true, + }, + topP: { + type: "string", + label: "Top P", + description: "Nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or temperature but not both.", + optional: true, + }, + maxTokens: { + type: "integer", + label: "Max Tokens", + description: "The maximum number of tokens to generate in the completion. The token count of your prompt plus max_tokens cannot exceed the model's context length.", + optional: true, + }, + randomSeed: { + type: "integer", + label: "Random Seed", + description: "The seed to use for random sampling. If set, different calls will generate deterministic results.", + optional: true, + }, + n: { + type: "integer", + label: "N", + description: "Number of completions to return for each request, input tokens are only billed once.", + optional: true, + }, + }, + async run({ $ }) { + const response = await this.mistralAI.sendPrompt({ + $, + data: { + model: this.modelId, + messages: [ + { + content: this.message, + role: "user", + }, + ], + temperature: this.temperature && +this.temperature, + top_p: this.topP && +this.topP, + max_tokens: this.maxTokens, + random_seed: this.randomSeed, + n: this.n, + }, + }); + if (response?.id) { + $.export("$summary", `Successfully retrieved response with ID: ${response.id}`); + } + return response; + }, +}; diff --git a/components/mistral_ai/actions/get-batch-job-details/get-batch-job-details.mjs b/components/mistral_ai/actions/get-batch-job-details/get-batch-job-details.mjs new file mode 100644 index 0000000000000..fb52ad5309efd --- /dev/null +++ 
b/components/mistral_ai/actions/get-batch-job-details/get-batch-job-details.mjs @@ -0,0 +1,28 @@ +import mistralAI from "../../mistral_ai.app.mjs"; + +export default { + key: "mistral_ai-get-batch-job-details", + name: "Get Batch Job Details", + description: "Get the details of a batch job by its ID. [See the Documentation](https://docs.mistral.ai/api/#tag/batch/operation/jobs_api_routes_batch_get_batch_job)", + version: "0.0.1", + type: "action", + props: { + mistralAI, + batchJobId: { + propDefinition: [ + mistralAI, + "batchJobId", + ], + }, + }, + async run({ $ }) { + const response = await this.mistralAI.getBatchJobDetails({ + $, + jobId: this.batchJobId, + }); + if (response?.id) { + $.export("$summary", `Successfully retrieved details for batch job with ID: ${this.batchJobId}`); + } + return response; + }, +}; diff --git a/components/mistral_ai/actions/list-models/list-models.mjs b/components/mistral_ai/actions/list-models/list-models.mjs new file mode 100644 index 0000000000000..2370d4ee9f98b --- /dev/null +++ b/components/mistral_ai/actions/list-models/list-models.mjs @@ -0,0 +1,23 @@ +import mistralAI from "../../mistral_ai.app.mjs"; + +export default { + key: "mistral_ai-list-models", + name: "List Models", + description: "Retrieve a list of available Mistral AI models that the user is authorized to access. [See the Documentation](https://docs.mistral.ai/api/#tag/models)", + version: "0.0.1", + type: "action", + props: { + mistralAI, + }, + async run({ $ }) { + const { data } = await this.mistralAI.listModels({ + $, + }); + if (data?.length) { + $.export("$summary", `Successfully retrieved ${data.length} model${data.length === 1 + ? 
"" + : "s"}`); + } + return data; + }, +}; diff --git a/components/mistral_ai/actions/upload-file/upload-file.mjs b/components/mistral_ai/actions/upload-file/upload-file.mjs new file mode 100644 index 0000000000000..6f6583088e4df --- /dev/null +++ b/components/mistral_ai/actions/upload-file/upload-file.mjs @@ -0,0 +1,59 @@ +import mistralAI from "../../mistral_ai.app.mjs"; +import { ConfigurationError } from "@pipedream/platform"; +import fs from "fs"; +import FormData from "form-data"; + +export default { + key: "mistral_ai-upload-file", + name: "Upload File", + description: "Upload a file that can be used across various endpoints. [See the Documentation](https://docs.mistral.ai/api/#tag/files/operation/files_api_routes_upload_file)", + version: "0.0.1", + type: "action", + props: { + mistralAI, + filePath: { + type: "string", + label: "File Path", + description: "The path to a file in the `/tmp` directory. The size of individual files can be a maximum of 512 MB. The Fine-tuning API only supports .jsonl files. [See the Pipedream documentation on working with files](https://pipedream.com/docs/code/nodejs/working-with-files/#writing-a-file-to-tmp)", + }, + purpose: { + type: "string", + label: "Purpose", + description: "The purpose of the file", + options: [ + "fine-tune", + "batch", + "ocr", + ], + optional: true, + }, + }, + async run({ $ }) { + const filePath = this.filePath.startsWith("/tmp/") + ? 
this.filePath + : `/tmp/${this.filePath}`; + + if (!fs.existsSync(filePath)) { + throw new ConfigurationError(`File \`${filePath}\` not found`); + } + + const fileContent = fs.createReadStream(filePath); + const form = new FormData(); + form.append("file", fileContent); + if (this.purpose) { + form.append("purpose", this.purpose); + } + + const response = await this.mistralAI.uploadFile({ + $, + data: form, + headers: form.getHeaders(), + }); + + if (response?.filename) { + $.export("$summary", `Successfully uploaded file: ${response.filename}`); + } + + return response; + }, +}; diff --git a/components/mistral_ai/common/constants.mjs b/components/mistral_ai/common/constants.mjs new file mode 100644 index 0000000000000..f0d5c96e9b0c0 --- /dev/null +++ b/components/mistral_ai/common/constants.mjs @@ -0,0 +1,26 @@ +const DEFAULT_PAGE_SIZE = 100; + +const EMBEDDINGS_MODEL = "mistral-embed"; + +const BATCH_JOB_STATUS_OPTIONS = [ + "SUCCESS", + "FAILED", + "TIMEOUT_EXCEEDED", + "CANCELLATION_REQUESTED", + "CANCELLED", +]; + +const BATCH_JOB_ENDPOINT_OPTIONS = [ + "/v1/chat/completions", + "/v1/embeddings", + "/v1/fim/completions", + "/v1/moderations", + "/v1/chat/moderations", +]; + +export default { + DEFAULT_PAGE_SIZE, + EMBEDDINGS_MODEL, + BATCH_JOB_STATUS_OPTIONS, + BATCH_JOB_ENDPOINT_OPTIONS, +}; diff --git a/components/mistral_ai/common/utils.mjs b/components/mistral_ai/common/utils.mjs new file mode 100644 index 0000000000000..92f83d199aed5 --- /dev/null +++ b/components/mistral_ai/common/utils.mjs @@ -0,0 +1,36 @@ +function parseArray(arr) { + if (!arr) { + return undefined; + } + + if (typeof arr === "string") { + try { + return JSON.parse(arr); + } catch { + return arr; + } + } + + return arr; +} + +function parseObj(obj) { + if (!obj) { + return undefined; + } + + if (typeof obj === "string") { + try { + return JSON.parse(obj); + } catch { + return obj; + } + } + + return obj; +} + +export { + parseArray, + parseObj, +}; diff --git 
a/components/mistral_ai/mistral_ai.app.mjs b/components/mistral_ai/mistral_ai.app.mjs index ff6fa3a29205a..5d957ab6fa074 100644 --- a/components/mistral_ai/mistral_ai.app.mjs +++ b/components/mistral_ai/mistral_ai.app.mjs @@ -1,11 +1,167 @@ +import { axios } from "@pipedream/platform"; +import constants from "./common/constants.mjs"; + export default { type: "app", app: "mistral_ai", - propDefinitions: {}, + propDefinitions: { + fileIds: { + type: "string[]", + label: "File IDs", + description: "Array of input file UUIDs for batch processing", + async options({ + page, sampleType, + }) { + const { data } = await this.listFiles({ + params: { + page, + page_size: constants.DEFAULT_PAGE_SIZE, + sample_type: sampleType, + }, + }); + return data?.map(({ + id: value, filename: label, + }) => ({ + value, + label, + })) || []; + }, + }, + modelId: { + type: "string", + label: "Model ID", + description: "The identifier of the model to use", + async options() { + const { data } = await this.listModels(); + return data?.map(({ + id: value, name: label, + }) => ({ + value, + label, + })) || []; + }, + }, + batchJobId: { + type: "string", + label: "Batch Job ID", + description: "The identifier of the batch job to retrieve", + async options({ page }) { + const { data } = await this.listBatchJobs({ + params: { + page, + page_size: constants.DEFAULT_PAGE_SIZE, + }, + }); + return data?.map(({ id }) => id) || []; + }, + }, + }, methods: { - // this.$auth contains connected account data - authKeys() { - console.log(Object.keys(this.$auth)); + _baseUrl() { + return "https://api.mistral.ai/v1"; + }, + _makeRequest({ + $ = this, + path, + headers, + ...otherOpts + }) { + return axios($, { + ...otherOpts, + url: `${this._baseUrl()}${path}`, + headers: { + "Authorization": `Bearer ${this.$auth.api_key}`, + "Content-Type": "application/json", + ...headers, + }, + }); + }, + listModels(opts = {}) { + return this._makeRequest({ + path: "/models", + ...opts, + }); + }, + listBatchJobs(opts = 
{}) { + return this._makeRequest({ + path: "/batch/jobs", + ...opts, + }); + }, + listFiles(opts = {}) { + return this._makeRequest({ + path: "/files", + ...opts, + }); + }, + getBatchJobDetails({ + jobId, ...opts + }) { + return this._makeRequest({ + path: `/batch/jobs/${jobId}`, + ...opts, + }); + }, + downloadFile({ + fileId, ...opts + }) { + return this._makeRequest({ + path: `/files/${fileId}/content`, + ...opts, + }); + }, + uploadFile(opts = {}) { + return this._makeRequest({ + method: "POST", + path: "/files", + ...opts, + }); + }, + createEmbeddings(opts = {}) { + return this._makeRequest({ + method: "POST", + path: "/embeddings", + ...opts, + }); + }, + sendPrompt(opts = {}) { + return this._makeRequest({ + method: "POST", + path: "/chat/completions", + ...opts, + }); + }, + createBatchJob(opts = {}) { + return this._makeRequest({ + method: "POST", + path: "/batch/jobs", + ...opts, + }); + }, + async *paginate({ + fn, + params = {}, + max, + }) { + params = { + ...params, + page: 0, + page_size: constants.DEFAULT_PAGE_SIZE, + }; + let total, count = 0; + do { + const { data } = await fn({ + params, + }); + for (const item of data) { + yield item; + if (max && ++count >= max) { + return; + } + } + total = data?.length; + params.page++; + } while (total); }, }, -}; \ No newline at end of file +}; diff --git a/components/mistral_ai/package.json b/components/mistral_ai/package.json index 02d8030515df4..091e79e2483ca 100644 --- a/components/mistral_ai/package.json +++ b/components/mistral_ai/package.json @@ -1,6 +1,6 @@ { "name": "@pipedream/mistral_ai", - "version": "0.0.1", + "version": "0.1.0", "description": "Pipedream Mistral AI Components", "main": "mistral_ai.app.mjs", "keywords": [ @@ -11,5 +11,9 @@ "author": "Pipedream (https://pipedream.com/)", "publishConfig": { "access": "public" + }, + "dependencies": { + "@pipedream/platform": "^3.0.3", + "form-data": "^4.0.2" } -} \ No newline at end of file +} diff --git 
a/components/mistral_ai/sources/common/base.mjs b/components/mistral_ai/sources/common/base.mjs new file mode 100644 index 0000000000000..2f76cc3c2ff65 --- /dev/null +++ b/components/mistral_ai/sources/common/base.mjs @@ -0,0 +1,86 @@ +import mistralAI from "../../mistral_ai.app.mjs"; +import { + DEFAULT_POLLING_SOURCE_TIMER_INTERVAL, ConfigurationError, +} from "@pipedream/platform"; + +export default { + props: { + mistralAI, + db: "$.service.db", + timer: { + type: "$.interface.timer", + default: { + intervalSeconds: DEFAULT_POLLING_SOURCE_TIMER_INTERVAL, + }, + }, + }, + methods: { + _getLastTs() { + return this.db.get("lastTs") || 0; + }, + _setLastTs(lastTs) { + this.db.set("lastTs", lastTs); + }, + async getPaginatedResults(fn, params, max) { + const items = this.mistralAI.paginate({ + fn, + params, + max, + }); + + const results = []; + for await (const item of items) { + results.push(item); + } + + return results; + }, + async processEvent(max) { + const fn = this.getResourceFn(); + const params = this.getParams(); + const paginated = this.isPaginated(); + + let results; + if (paginated) { + results = await this.getPaginatedResults(fn, params, max); + this.findAndSetLastTs(results); + } else { + results = (await fn({ + params, + })).data; + } + + results.forEach((result) => { + const meta = this.generateMeta(result); + this.$emit(result, meta); + }); + }, + findAndSetLastTs(results) { + let maxTs = this._getLastTs(); + for (const result of results) { + maxTs = Math.max(result.created_at, maxTs); + } + this._setLastTs(maxTs); + }, + getParams() { + return {}; + }, + isPaginated() { + return true; + }, + getResourceFn() { + throw new ConfigurationError("getResourceFn is not implemented"); + }, + generateMeta() { + throw new ConfigurationError("generateMeta is not implemented"); + }, + }, + hooks: { + async deploy() { + await this.processEvent(25); + }, + }, + async run() { + await this.processEvent(); + }, +}; diff --git 
a/components/mistral_ai/sources/new-batch-job-completed/new-batch-job-completed.mjs b/components/mistral_ai/sources/new-batch-job-completed/new-batch-job-completed.mjs new file mode 100644 index 0000000000000..60740d3a698bf --- /dev/null +++ b/components/mistral_ai/sources/new-batch-job-completed/new-batch-job-completed.mjs @@ -0,0 +1,41 @@ +import common from "../common/base.mjs"; +import constants from "../../common/constants.mjs"; + +export default { + ...common, + key: "mistral_ai-new-batch-job-completed", + name: "New Batch Job Completed", + description: "Emit new event when a new batch job is completed. [See the Documentation](https://docs.mistral.ai/api/#tag/batch/operation/jobs_api_routes_batch_get_batch_jobs)", + version: "0.0.1", + type: "source", + dedupe: "unique", + props: { + ...common.props, + status: { + type: "string", + label: "Status", + description: "Filter the results by the batch job status", + options: constants.BATCH_JOB_STATUS_OPTIONS, + optional: true, + }, + }, + methods: { + ...common.methods, + getResourceFn() { + return this.mistralAI.listBatchJobs; + }, + getParams() { + return { + status: this.status, + created_after: this._getLastTs(), + }; + }, + generateMeta(job) { + return { + id: job.id, + summary: `New Batch Job ${job.status} with ID: ${job.id}`, + ts: job.created_at, + }; + }, + }, +}; diff --git a/components/mistral_ai/sources/new-batch-job-failure/new-batch-job-failure.mjs b/components/mistral_ai/sources/new-batch-job-failure/new-batch-job-failure.mjs new file mode 100644 index 0000000000000..cf46681b8b564 --- /dev/null +++ b/components/mistral_ai/sources/new-batch-job-failure/new-batch-job-failure.mjs @@ -0,0 +1,30 @@ +import common from "../common/base.mjs"; + +export default { + ...common, + key: "mistral_ai-new-batch-job-failure", + name: "New Batch Job Failure", + description: "Emit new event when a new batch job fails. 
[See the Documentation](https://docs.mistral.ai/api/#tag/batch/operation/jobs_api_routes_batch_get_batch_jobs)", + version: "0.0.1", + type: "source", + dedupe: "unique", + methods: { + ...common.methods, + getResourceFn() { + return this.mistralAI.listBatchJobs; + }, + getParams() { + return { + status: "FAILED", + created_after: this._getLastTs(), + }; + }, + generateMeta(job) { + return { + id: job.id, + summary: `New Batch Job Failed with ID: ${job.id}`, + ts: job.created_at, + }; + }, + }, +}; diff --git a/components/mistral_ai/sources/new-model-added/new-model-added.mjs b/components/mistral_ai/sources/new-model-added/new-model-added.mjs new file mode 100644 index 0000000000000..a2faded0df7fe --- /dev/null +++ b/components/mistral_ai/sources/new-model-added/new-model-added.mjs @@ -0,0 +1,29 @@ +import common from "../common/base.mjs"; +import sampleEmit from "./test-event.mjs"; + +export default { + ...common, + key: "mistral_ai-new-model-added", + name: "New Model Added", + description: "Emit new event when a new AI model is registered or becomes available. 
[See the Documentation](https://docs.mistral.ai/api/#tag/models)", + version: "0.0.1", + type: "source", + dedupe: "unique", + methods: { + ...common.methods, + getResourceFn() { + return this.mistralAI.listModels; + }, + isPaginated() { + return false; + }, + generateMeta(model) { + return { + id: model.id, + summary: `New Model: ${model.name}`, + ts: model.created, + }; + }, + }, + sampleEmit, +}; diff --git a/components/mistral_ai/sources/new-model-added/test-event.mjs b/components/mistral_ai/sources/new-model-added/test-event.mjs new file mode 100644 index 0000000000000..d18980647c472 --- /dev/null +++ b/components/mistral_ai/sources/new-model-added/test-event.mjs @@ -0,0 +1,23 @@ +export default { + "id": "mistral-ocr-latest", + "object": "model", + "created": 1743621323, + "owned_by": "mistralai", + "capabilities": { + "completion_chat": false, + "completion_fim": false, + "function_calling": false, + "fine_tuning": false, + "vision": false, + "classification": false + }, + "name": "mistral-ocr-2503", + "description": "Official mistral-ocr-2503 Mistral AI model", + "max_context_length": 32768, + "aliases": [ + "mistral-ocr-2503" + ], + "deprecation": null, + "default_model_temperature": null, + "type": "base" +} \ No newline at end of file diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 4c0245be37368..2b8f373305357 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1401,8 +1401,7 @@ importers: components/better_uptime: {} - components/bettercontact: - specifiers: {} + components/bettercontact: {} components/bettervoice: dependencies: @@ -1930,8 +1929,7 @@ importers: specifier: ^4.2.0 version: 4.2.0 - components/bytebot: - specifiers: {} + components/bytebot: {} components/byteforms: dependencies: @@ -8076,7 +8074,14 @@ importers: specifier: ^1.4.1 version: 1.6.6 - components/mistral_ai: {} + components/mistral_ai: + dependencies: + '@pipedream/platform': + specifier: ^3.0.3 + version: 3.0.3 + form-data: + specifier: ^4.0.2 + version: 4.0.2 
components/mitra: {} @@ -8611,8 +8616,7 @@ importers: specifier: ^1.3.0 version: 1.6.6 - components/notiff: - specifiers: {} + components/notiff: {} components/notion: dependencies: @@ -12782,8 +12786,7 @@ importers: specifier: ^1.6.0 version: 1.6.6 - components/teltel: - specifiers: {} + components/teltel: {} components/temi: dependencies: @@ -16455,8 +16458,8 @@ packages: '@dabh/diagnostics@2.0.3': resolution: {integrity: sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA==} - '@definitelytyped/header-parser@0.2.18': - resolution: {integrity: sha512-3JWGzhieGOx+zhy+qaPDoiby2TPA1PZGpEJHt0VwR1aK0R9dER5BoBvnT5zSafg9kHQTw4aBRFbt3o41FNkaLw==} + '@definitelytyped/header-parser@0.2.19': + resolution: {integrity: sha512-zu+RxQpUCgorYUQZoyyrRIn9CljL1CeM4qak3NDeMO1r7tjAkodfpAGnVzx/6JR2OUk0tAgwmZxNMSwd9LVgxw==} engines: {node: '>=18.18.0'} '@definitelytyped/typescript-versions@0.1.8': @@ -22126,10 +22129,6 @@ packages: resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} engines: {node: '>= 0.4'} - es-set-tostringtag@2.0.3: - resolution: {integrity: sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==} - engines: {node: '>= 0.4'} - es-set-tostringtag@2.1.0: resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==} engines: {node: '>= 0.4'} @@ -32198,7 +32197,7 @@ snapshots: enabled: 2.0.0 kuler: 2.0.0 - '@definitelytyped/header-parser@0.2.18': + '@definitelytyped/header-parser@0.2.19': dependencies: '@definitelytyped/typescript-versions': 0.1.8 '@definitelytyped/utils': 0.1.8 @@ -33940,7 +33939,7 @@ snapshots: '@pipedream/monday@0.7.0': dependencies: '@pipedream/platform': 3.0.3 - form-data: 4.0.1 + form-data: 4.0.2 lodash.flatmap: 4.5.0 lodash.map: 4.6.0 lodash.uniqby: 4.7.0 @@ -37039,7 +37038,7 @@ snapshots: define-properties: 1.2.1 es-abstract: 1.23.5 
es-array-method-boxes-properly: 1.0.0 - es-object-atoms: 1.0.0 + es-object-atoms: 1.1.1 is-string: 1.0.7 array.prototype.tosorted@1.1.4: @@ -37057,7 +37056,7 @@ snapshots: define-properties: 1.2.1 es-abstract: 1.23.5 es-errors: 1.3.0 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 is-array-buffer: 3.0.4 is-shared-array-buffer: 1.0.3 @@ -38838,7 +38837,7 @@ snapshots: dts-critic@3.3.11(typescript@5.7.2): dependencies: - '@definitelytyped/header-parser': 0.2.18 + '@definitelytyped/header-parser': 0.2.19 command-exists: 1.2.9 rimraf: 3.0.2 semver: 6.3.1 @@ -38848,7 +38847,7 @@ snapshots: dtslint@4.2.1(typescript@5.7.2): dependencies: - '@definitelytyped/header-parser': 0.2.18 + '@definitelytyped/header-parser': 0.2.19 '@definitelytyped/typescript-versions': 0.1.8 '@definitelytyped/utils': 0.1.8 dts-critic: 3.3.11(typescript@5.7.2) @@ -38982,7 +38981,7 @@ snapshots: es-define-property: 1.0.0 es-errors: 1.3.0 es-object-atoms: 1.0.0 - es-set-tostringtag: 2.0.3 + es-set-tostringtag: 2.1.0 es-to-primitive: 1.2.1 function.prototype.name: 1.1.6 get-intrinsic: 1.2.4 @@ -39036,7 +39035,7 @@ snapshots: es-define-property@1.0.0: dependencies: - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 es-define-property@1.0.1: {} @@ -39045,8 +39044,8 @@ snapshots: es-get-iterator@1.1.3: dependencies: call-bind: 1.0.7 - get-intrinsic: 1.2.4 - has-symbols: 1.0.3 + get-intrinsic: 1.3.0 + has-symbols: 1.1.0 is-arguments: 1.1.1 is-map: 2.0.3 is-set: 2.0.3 @@ -39060,9 +39059,9 @@ snapshots: define-properties: 1.2.1 es-abstract: 1.23.5 es-errors: 1.3.0 - es-set-tostringtag: 2.0.3 + es-set-tostringtag: 2.1.0 function-bind: 1.1.2 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 globalthis: 1.0.4 gopd: 1.0.1 has-property-descriptors: 1.0.2 @@ -39080,12 +39079,6 @@ snapshots: dependencies: es-errors: 1.3.0 - es-set-tostringtag@2.0.3: - dependencies: - get-intrinsic: 1.2.4 - has-tostringtag: 1.0.2 - hasown: 2.0.2 - es-set-tostringtag@2.1.0: dependencies: es-errors: 1.3.0 @@ -40307,7 +40300,7 @@ snapshots: 
dependencies: call-bind: 1.0.7 es-errors: 1.3.0 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 get-tsconfig@4.8.1: dependencies: @@ -41320,7 +41313,7 @@ snapshots: is-array-buffer@3.0.4: dependencies: call-bind: 1.0.7 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 is-arrayish@0.2.1: {} @@ -41491,7 +41484,7 @@ snapshots: is-symbol@1.0.4: dependencies: - has-symbols: 1.0.3 + has-symbols: 1.1.0 is-typed-array@1.1.13: dependencies: @@ -41518,7 +41511,7 @@ snapshots: is-weakset@2.0.3: dependencies: call-bind: 1.0.7 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 is-wsl@2.2.0: dependencies: @@ -41621,8 +41614,8 @@ snapshots: iterator.prototype@1.1.3: dependencies: define-properties: 1.2.1 - get-intrinsic: 1.2.4 - has-symbols: 1.0.3 + get-intrinsic: 1.3.0 + has-symbols: 1.1.0 reflect.getprototypeof: 1.0.6 set-function-name: 2.0.2 @@ -44710,7 +44703,7 @@ snapshots: axios: 0.28.1 base-64: 0.1.0 build-url: 1.3.3 - form-data: 4.0.1 + form-data: 4.0.2 https-proxy-agent: 5.0.1 joi: 17.13.3 jsonwebtoken: 9.0.2 @@ -44884,7 +44877,7 @@ snapshots: define-properties: 1.2.1 es-abstract: 1.23.5 es-aggregate-error: 1.0.13 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 iterate-value: 1.0.2 prompts@2.4.2: @@ -45536,7 +45529,7 @@ snapshots: define-properties: 1.2.1 es-abstract: 1.23.5 es-errors: 1.3.0 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 globalthis: 1.0.4 which-builtin-type: 1.1.4 @@ -46146,7 +46139,7 @@ snapshots: safe-array-concat@1.1.2: dependencies: call-bind: 1.0.7 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 has-symbols: 1.0.3 isarray: 2.0.5 @@ -46255,7 +46248,7 @@ snapshots: define-data-property: 1.1.4 es-errors: 1.3.0 function-bind: 1.1.2 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 gopd: 1.0.1 has-property-descriptors: 1.0.2 @@ -46713,7 +46706,7 @@ snapshots: es-abstract: 1.23.5 es-errors: 1.3.0 es-object-atoms: 1.0.0 - get-intrinsic: 1.2.4 + get-intrinsic: 1.3.0 gopd: 1.0.1 has-symbols: 1.0.3 internal-slot: 1.0.7