From 146b899a5ce53f851ee0d519d3b532dc50826d1a Mon Sep 17 00:00:00 2001 From: zakkor Date: Mon, 13 Jan 2025 14:28:01 +0200 Subject: [PATCH 1/7] Add support for Anthropic models (wip -- tool use support has not been added yet) --- .../AnthropicAccount.class/README.md | 1 + .../AnthropicAccount.class/class/cleanUp..st | 7 ++ .../class/defaultAccount..st | 4 + .../class/defaultAccount.st | 4 + .../class/defaultApiKey..st | 4 + .../class/defaultApiKey.st | 5 + .../class/defaultBaseUrl..st | 4 + .../class/defaultBaseUrl.st | 5 + .../class/defaultRateLimitsPerTier.st | 98 +++++++++++++++++++ .../class/isValidUsageTier..st | 5 + .../class/openExpenseWatcher.st | 7 ++ .../class/rateLimitsFromSpec..st | 7 ++ .../instance/apiKey..st | 4 + .../AnthropicAccount.class/instance/apiKey.st | 4 + .../instance/baseUrl..st | 4 + .../instance/baseUrl.st | 4 + .../instance/controlConnectionDuring..st | 6 ++ .../instance/customRateLimits.st | 4 + .../instance/defaultBaseUrl.st | 4 + .../instance/defaultRateLimits.st | 5 + .../instance/defaultUsageTier.st | 4 + .../instance/expensesPerModel.st | 10 ++ .../instance/expensesPerUser.st | 5 + .../instance/hasApiKey.st | 4 + .../instance/ignoreCertificate..st | 5 + .../instance/ignoreCertificate.st | 5 + .../instance/initialize.st | 9 ++ .../instance/noteExpense.forUser.model..st | 8 ++ .../instance/openExpenseWatcher.st | 13 +++ .../rateLimitForModel.type.ifUnknown..st | 6 ++ .../instance/rateLimits.st | 14 +++ .../instance/rateLimitsForModel.ifUnknown..st | 13 +++ .../requestsPerDayForModel.ifUnknown..st | 4 + .../requestsPerMinuteForModel.ifUnknown..st | 4 + .../instance/resetExpenses.st | 4 + .../setCustomRateLimitsForModel.to..st | 7 ++ .../tokensPerDayForModel.ifUnknown..st | 4 + .../tokensPerMinuteForModel.ifUnknown..st | 4 + .../instance/totalExpense.st | 5 + .../instance/totalExpensePerModel.st | 4 + .../instance/totalExpensePerUser.st | 4 + .../instance/usageTier..st | 5 + .../instance/usageTier.st | 5 + .../methodProperties.json | 
45 +++++++++ .../AnthropicAccount.class/properties.json | 19 ++++ .../README.md | 1 + .../class/bestName.st | 3 + .../class/cheapestName.st | 3 + .../class/claude35Haiku20241022Name.st | 3 + .../class/claude35HaikuLatestName.st | 3 + .../class/claude35Sonnet20240620Name.st | 3 + .../class/claude35Sonnet20241022Name.st | 3 + .../class/claude35SonnetLatestName.st | 3 + .../class/claude3Haiku20240307Name.st | 3 + .../class/claude3Opus20240229Name.st | 3 + .../class/claude3OpusLatestName.st | 3 + .../class/claude3Sonnet20240229Name.st | 3 + .../class/defaultModelNames.st | 18 ++++ .../class/initialize.st | 12 +++ .../class/unload.st | 4 + .../instance/addToolSpec.toInput..st | 20 ++++ .../instance/assignExpense.toMessages..st | 16 +++ .../instance/centsPerCompletionToken.st | 22 +++++ .../instance/centsPerPromptToken.st | 22 +++++ .../instance/countTokensInConversation..st | 54 ++++++++++ .../instance/countTokensInMessage..st | 7 ++ ...okensInMessage.hasMultipleToolMessages..st | 59 +++++++++++ .../instance/countTokensInToolSpec..st | 9 ++ .../instance/defaultConfig.st | 4 + .../instance/defaultName.st | 4 + .../instance/expenseForReplies.after..st | 12 +++ .../instance/expenseForUsage..st | 17 ++++ .../instance/getAnswerFor..st | 4 + .../instance/getAnswerFor.config..st | 5 + .../instance/getAnswers.for..st | 4 + .../instance/getAnswers.for.config..st | 50 ++++++++++ ...for.config.logRawOutput.deferStreaming..st | 34 +++++++ ...ngReplies.for.from.config.logRawOutput..st | 61 ++++++++++++ .../instance/isLegacy.st | 6 ++ .../instance/maxCompletionTokens.st | 11 +++ .../instance/maxPromptTokens.st | 4 + .../instance/maxTokens.st | 9 ++ .../instance/maximumPriceFor..st | 7 ++ .../instance/maximumPriceFor.answers..st | 8 ++ .../instance/minimumPriceFor..st | 7 ++ .../instance/minimumPriceFor.answers..st | 8 ++ .../instance/nameForRateLimits.st | 5 + .../parseMessageFrom.for.logRawOutput..st | 25 +++++ .../parseMessagesFrom.for.logRawOutput..st | 21 ++++ 
.../parseStreamedChunk.toolSpec.addTo..st | 27 +++++ ...reamedToolCallChunkFrom.toolSpec.addTo..st | 15 +++ ...medToolCallChunksFrom.toolSpec.message..st | 13 +++ .../instance/parseTokenProbabilitiesFrom..st | 4 + .../instance/parseToolCallFrom.toolSpec..st | 15 +++ .../instance/parseToolCallsFrom.toolSpec..st | 5 + .../instance/pathToEndpoint.st | 4 + .../instance/priceFor..st | 5 + .../instance/priceFor.answers..st | 5 + .../priceFor.answers.completionSize..st | 16 +++ .../instance/priceFor.completionMessage..st | 13 +++ .../instance/priceFor.completionSize..st | 8 ++ .../instance/priceForPrompt..st | 6 ++ .../instance/printFunction.on..st | 23 +++++ .../printFunctionCallArguments.on..st | 12 +++ .../printFunctionParameterSchema.on..st | 7 ++ ...printFunctionParameterSchema.on.indent..st | 22 +++++ .../printFunctionParameterType.on.indent..st | 50 ++++++++++ .../instance/printOn..st | 16 +++ .../instance/printToolSpec.on..st | 12 +++ .../instance/releaseDate.st | 21 ++++ .../instance/resolvedName.st | 6 ++ .../instance/shouldStreamRequests.st | 5 + .../instance/sortKey.st | 11 +++ .../instance/streamEventDataFrom..st | 46 +++++++++ .../methodProperties.json | 71 ++++++++++++++ .../properties.json | 14 +++ .../AnthropicModel.class/README.md | 1 + .../class/updateModels.st | 8 ++ .../AnthropicModel.class/instance/account..st | 4 + .../AnthropicModel.class/instance/account.st | 4 + .../instance/asStringOrText.st | 10 ++ .../instance/assureAvailableOr..st | 12 +++ .../instance/baseConfig..st | 4 + .../instance/baseConfig.st | 4 + .../AnthropicModel.class/instance/baseUrl.st | 4 + .../instance/countTokensIn..st | 6 ++ .../instance/defaultAccount.st | 4 + .../instance/defaultConfig.st | 4 + .../instance/defaultName.st | 4 + .../instance/expenseForUsage..st | 4 + .../handleTransientServerErrorsDuring..st | 17 ++++ .../instance/initialize.st | 8 ++ .../invokeWithConfig.documents.editInput..st | 8 ++ ...fig.documents.editInput.handleResponse..st | 73 ++++++++++++++ 
.../instance/invokeWithConfig.editInput..st | 7 ++ ...okeWithConfig.editInput.handleResponse..st | 8 ++ .../AnthropicModel.class/instance/isLegacy.st | 4 + .../instance/maxCharactersInTokens..st | 4 + .../instance/maxTokensInStringOfSize..st | 5 + .../AnthropicModel.class/instance/name..st | 4 + .../AnthropicModel.class/instance/name.st | 4 + .../instance/nameForRateLimits.st | 4 + .../instance/pathToEndpoint.st | 4 + .../AnthropicModel.class/instance/printOn..st | 4 + .../instance/requestsPerMinute.st | 4 + .../instance/tokensPerMinute.st | 4 + .../AnthropicModel.class/instance/url.st | 4 + .../methodProperties.json | 33 +++++++ .../AnthropicModel.class/properties.json | 16 +++ .../instance/addModelItemsToWindowMenu..st | 5 + .../methodProperties.json | 2 +- .../instance/asOpenAIObject.st | 2 +- .../instance/basicContent.st | 29 +++++- .../methodProperties.json | 4 +- .../monticello.meta/categories.st | 3 +- 155 files changed, 1758 insertions(+), 6 deletions(-) create mode 100644 packages/SemanticText.package/AnthropicAccount.class/README.md create mode 100644 packages/SemanticText.package/AnthropicAccount.class/class/cleanUp..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/class/isValidUsageTier..st create mode 100644 
packages/SemanticText.package/AnthropicAccount.class/class/openExpenseWatcher.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/class/rateLimitsFromSpec..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/apiKey..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/apiKey.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/controlConnectionDuring..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/customRateLimits.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/defaultBaseUrl.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/defaultRateLimits.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/defaultUsageTier.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerModel.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerUser.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/hasApiKey.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/initialize.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/noteExpense.forUser.model..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/openExpenseWatcher.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitForModel.type.ifUnknown..st create 
mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/rateLimits.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitsForModel.ifUnknown..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerDayForModel.ifUnknown..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerMinuteForModel.ifUnknown..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/resetExpenses.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/setCustomRateLimitsForModel.to..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerDayForModel.ifUnknown..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerMinuteForModel.ifUnknown..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/totalExpense.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerModel.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerUser.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/usageTier..st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/instance/usageTier.st create mode 100644 packages/SemanticText.package/AnthropicAccount.class/methodProperties.json create mode 100644 packages/SemanticText.package/AnthropicAccount.class/properties.json create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/README.md create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/class/bestName.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/class/cheapestName.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Haiku20241022Name.st create mode 100644 
packages/SemanticText.package/AnthropicConversationModel.class/class/claude35HaikuLatestName.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20240620Name.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20241022Name.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/class/claude35SonnetLatestName.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Haiku20240307Name.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Opus20240229Name.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/class/claude3OpusLatestName.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Sonnet20240229Name.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/class/defaultModelNames.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/class/initialize.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/class/unload.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/addToolSpec.toInput..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/assignExpense.toMessages..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerCompletionToken.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerPromptToken.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInConversation..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage..st create mode 100644 
packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage.hasMultipleToolMessages..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInToolSpec..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultConfig.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultName.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForReplies.after..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForUsage..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor.config..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/handleAsyncReplies.for.config.logRawOutput.deferStreaming..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/isLegacy.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/maxCompletionTokens.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/maxPromptTokens.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/maxTokens.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor..st create mode 100644 
packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor.answers..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor.answers..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/nameForRateLimits.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedChunk.toolSpec.addTo..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunkFrom.toolSpec.addTo..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunksFrom.toolSpec.message..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/parseTokenProbabilitiesFrom..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallFrom.toolSpec..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallsFrom.toolSpec..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/pathToEndpoint.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers.completionSize..st create mode 100644 
packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionMessage..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionSize..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/priceForPrompt..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunction.on..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionCallArguments.on..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on.indent..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterType.on.indent..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/printOn..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/printToolSpec.on..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/releaseDate.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/resolvedName.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/shouldStreamRequests.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/sortKey.st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/streamEventDataFrom..st create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json create mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/properties.json create mode 100644 
packages/SemanticText.package/AnthropicModel.class/README.md create mode 100644 packages/SemanticText.package/AnthropicModel.class/class/updateModels.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/account..st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/account.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/asStringOrText.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/assureAvailableOr..st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/baseConfig..st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/baseConfig.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/baseUrl.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/countTokensIn..st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/defaultAccount.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/defaultConfig.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/defaultName.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/expenseForUsage..st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/handleTransientServerErrorsDuring..st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/initialize.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput..st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput.handleResponse..st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput..st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput.handleResponse..st create 
mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/isLegacy.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/maxCharactersInTokens..st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/maxTokensInStringOfSize..st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/name..st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/name.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/nameForRateLimits.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/pathToEndpoint.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/printOn..st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/requestsPerMinute.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/tokensPerMinute.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/instance/url.st create mode 100644 packages/SemanticText.package/AnthropicModel.class/methodProperties.json create mode 100644 packages/SemanticText.package/AnthropicModel.class/properties.json diff --git a/packages/SemanticText.package/AnthropicAccount.class/README.md b/packages/SemanticText.package/AnthropicAccount.class/README.md new file mode 100644 index 0000000..0b5945b --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/README.md @@ -0,0 +1 @@ +I represent an account for the Anthropic Platform (https://console.anthropic.com). I hold information about the subscription state, account-specific rate limits, and track expenses from API calls. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/cleanUp..st b/packages/SemanticText.package/AnthropicAccount.class/class/cleanUp..st new file mode 100644 index 0000000..8ba8c51 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/cleanUp..st @@ -0,0 +1,7 @@ +initialize-release +cleanUp: aggressive + + aggressive ifTrue: [ + self allSubInstancesDo: [:account | + account apiKey: nil]. + self defaultAccount: nil]. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount..st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount..st new file mode 100644 index 0000000..08da088 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount..st @@ -0,0 +1,4 @@ +accessing +defaultAccount: anAccount + + DefaultAccount := anAccount. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount.st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount.st new file mode 100644 index 0000000..95ab19d --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount.st @@ -0,0 +1,4 @@ +accessing +defaultAccount + + ^ DefaultAccount ifNil: [DefaultAccount := self new] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey..st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey..st new file mode 100644 index 0000000..6d25afb --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey..st @@ -0,0 +1,4 @@ +preferences +defaultApiKey: aString + + self defaultAccount apiKey: (aString ifEmpty: []). 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey.st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey.st new file mode 100644 index 0000000..2306aed --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey.st @@ -0,0 +1,5 @@ +preferences +defaultApiKey + + + ^ (DefaultAccount ifNotNil: [:account | account apiKey]) ifNil: [''] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl..st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl..st new file mode 100644 index 0000000..802099e --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl..st @@ -0,0 +1,4 @@ +preferences +defaultBaseUrl: aString + + self defaultAccount baseUrl: (aString ifEmpty: []). \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl.st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl.st new file mode 100644 index 0000000..9b0e31c --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl.st @@ -0,0 +1,5 @@ +preferences +defaultBaseUrl + + + ^ (DefaultAccount ifNotNil: [:account | account baseUrl]) ifNil: [''] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st new file mode 100644 index 0000000..e5421d0 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st @@ -0,0 +1,98 @@ +constants +defaultRateLimitsPerTier + "See: https://docs.anthropic.com/en/api/rate-limits. NOTE(review): the table below was copied from OpenAIAccount and still lists OpenAI models and tiers; replace with Anthropic's published per-tier limits." + + | flat | + self flag: #modelConstants. 
+ flat := Dictionary new + at: #free put: + (Dictionary new + at: 'gpt-3.5-turbo' put: #(3 200 40000 nil 200000); + at: 'text-embedding-3-large' put: #(3000 200 1000000 nil 3000000); + at: 'text-embedding-3-small' put: #(3000 200 1000000 nil 3000000); + at: 'text-embedding-ada-002' put: #(3000 200 1000000 nil 3000000); + at: 'whisper-1' put: #(3 200 nil nil nil); + at: 'tts-1' put: #(3 200 nil nil nil); + yourself); + at: 1 put: + (Dictionary new + at: 'gpt-4o' put: #(500 nil 30000 nil 90000); + at: 'gpt-4o-mini' put: #(500 10000 200000 nil 2000000); + at: 'gpt-4-turbo' put: #(500 nil 30000 nil 90000); + at: 'gpt-4' put: #(500 10000 10000 nil 100000); + at: 'gpt-3.5-turbo' put: #(3500 10000 200000 nil 2000000); + at: 'text-embedding-3-large' put: #(3000 nil 1000000 nil 3000000); + at: 'text-embedding-3-small' put: #(3000 nil 1000000 nil 3000000); + at: 'text-embedding-ada-002' put: #(3000 nil 1000000 nil 3000000); + at: 'whisper-1' put: #(500 nil nil nil nil); + at: 'tts-1' put: #(500 nil nil nil nil); + at: 'tts-1-hd' put: #(500 nil nil nil nil); + yourself); + at: 2 put: + (Dictionary new + at: 'gpt-4o' put: #(5000 nil 450000 nil 1350000); + at: 'gpt-4o-mini' put: #(5000 nil 2000000 nil 20000000); + at: 'gpt-4-turbo' put: #(5000 nil 450000 nil 1350000); + at: 'gpt-4' put: #(5000 nil 40000 nil 200000); + at: 'gpt-3.5-turbo' put: #(3500 nil 2000000 nil 5000000); + at: 'text-embedding-3-large' put: #(5000 nil 1000000 nil 20000000); + at: 'text-embedding-3-small' put: #(5000 nil 1000000 nil 20000000); + at: 'text-embedding-ada-002' put: #(5000 nil 1000000 nil 20000000); + at: 'whisper-1' put: #(2500 nil nil nil nil); + at: 'tts-1' put: #(2500 nil nil nil nil); + at: 'tts-1-hd' put: #(2500 nil nil nil nil); + yourself); + at: 3 put: + (Dictionary new + at: 'gpt-4o' put: #(5000 nil 800000 nil 50000000); + at: 'gpt-4o-mini' put: #(5000 nil 4000000 nil 40000000); + at: 'gpt-4-turbo' put: #(5000 nil 600000 nil 40000000); + at: 'gpt-4' put: #(5000 nil 80000 nil 5000000); + 
at: 'gpt-3.5-turbo' put: #(3500 nil 4000000 nil 100000000); + at: 'text-embedding-3-large' put: #(5000 nil 5000000 nil 100000000); + at: 'text-embedding-3-small' put: #(5000 nil 5000000 nil 100000000); + at: 'text-embedding-ada-002' put: #(5000 nil 5000000 nil 100000000); + at: 'whisper-1' put: #(5000 nil nil nil nil); + at: 'tts-1' put: #(5000 nil nil nil nil); + at: 'tts-1-hd' put: #(5000 nil nil nil nil); + yourself); + at: 4 put: + (Dictionary new + at: 'gpt-4o' put: #(10000 nil 2000000 nil 200000000); + at: 'gpt-4o-mini' put: #(10000 nil 10000000 nil 1000000000); + at: 'gpt-4-turbo' put: #(10000 nil 800000 nil 80000000); + at: 'gpt-4' put: #(10000 nil 300000 nil 30000000); + at: 'gpt-3.5-turbo' put: #(10000 nil 10000000 nil 1000000000); + at: 'text-embedding-3-large' put: #(10000 nil 5000000 nil 500000000); + at: 'text-embedding-3-small' put: #(10000 nil 5000000 nil 500000000); + at: 'text-embedding-ada-002' put: #(10000 nil 5000000 nil 500000000); + at: 'whisper-1' put: #(7500 nil nil nil nil); + at: 'tts-1' put: #(7500 nil nil nil nil); + at: 'tts-1-hd' put: #(7500 nil nil nil nil); + yourself); + at: 5 put: + (Dictionary new + at: 'gpt-4o' put: #(10000 nil 30000000 nil 5000000000); + at: 'gpt-4o-mini' put: #(30000 nil 150000000 nil 15000000000); + at: 'gpt-4-turbo' put: #(10000 nil 2000000 nil 300000000); + at: 'gpt-4' put: #(10000 nil 1000000 nil 150000000); + at: 'gpt-3.5-turbo' put: #(10000 nil 50000000 nil 10000000000); + at: 'text-embedding-3-large' put: #(10000 nil 10000000 nil 4000000000); + at: 'text-embedding-3-small' put: #(10000 nil 10000000 nil 4000000000); + at: 'text-embedding-ada-002' put: #(10000 nil 10000000 nil 4000000000); + at: 'whisper-1' put: #(10000 nil nil nil nil); + at: 'tts-1' put: #(10000 nil nil nil nil); + at: 'tts-1-hd' put: #(10000 nil nil nil nil); + yourself); + yourself. 
+ ^ flat collect: [:modelRateLimits | + modelRateLimits collect: [:rateLimits | + (self rateLimitsFromSpec: rateLimits) + collect: [:limit | limit ifNil: [Float infinity]]]] + +"NOTE(review): removed a leftover LLM prompt that was accidentally committed here. +These rate limits were copied from OpenAIAccount and still describe OpenAI +models and usage tiers; they must be replaced with Anthropic's published +per-tier limits (see https://docs.anthropic.com/en/api/rate-limits) before +this class reports meaningful rate-limit data. +" \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/isValidUsageTier..st b/packages/SemanticText.package/AnthropicAccount.class/class/isValidUsageTier..st new file mode 100644 index 0000000..b4775ac --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/isValidUsageTier..st @@ -0,0 +1,5 @@ +constants +isValidUsageTier: tier + + ^ tier = #free or: + [tier isInteger and: [tier strictlyPositive]] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/openExpenseWatcher.st b/packages/SemanticText.package/AnthropicAccount.class/class/openExpenseWatcher.st new file mode 100644 index 0000000..c067965 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/openExpenseWatcher.st @@ -0,0 +1,7 @@ +support +openExpenseWatcher + " + AnthropicAccount openExpenseWatcher + " + + ^ self defaultAccount openExpenseWatcher \ No newline at end of file diff --git 
a/packages/SemanticText.package/AnthropicAccount.class/class/rateLimitsFromSpec..st b/packages/SemanticText.package/AnthropicAccount.class/class/rateLimitsFromSpec..st new file mode 100644 index 0000000..a247865 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/rateLimitsFromSpec..st @@ -0,0 +1,7 @@ +support +rateLimitsFromSpec: spec + "{requestsPerMinute. requestsPerDay. tokensPerMinute. tokensPerDay. batchQueueLimit}" + + ^ (#(rpm rpd tpm tpd batchQueueLimit) with: spec collect: [:key :limit | + key -> limit]) + as: Dictionary \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/apiKey..st b/packages/SemanticText.package/AnthropicAccount.class/instance/apiKey..st new file mode 100644 index 0000000..49d303f --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/apiKey..st @@ -0,0 +1,4 @@ +accessing +apiKey: aString + + apiKey := aString. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/apiKey.st b/packages/SemanticText.package/AnthropicAccount.class/instance/apiKey.st new file mode 100644 index 0000000..f7400f9 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/apiKey.st @@ -0,0 +1,4 @@ +accessing +apiKey + + ^ apiKey \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl..st b/packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl..st new file mode 100644 index 0000000..12be3db --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl..st @@ -0,0 +1,4 @@ +accessing +baseUrl: aStringOrNil + + baseUrl := aStringOrNil. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl.st b/packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl.st new file mode 100644 index 0000000..e9c03f0 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl.st @@ -0,0 +1,4 @@ +accessing +baseUrl + + ^ baseUrl ifNil: [self defaultBaseUrl] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/controlConnectionDuring..st b/packages/SemanticText.package/AnthropicAccount.class/instance/controlConnectionDuring..st new file mode 100644 index 0000000..1cd49ec --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/controlConnectionDuring..st @@ -0,0 +1,6 @@ +connection +controlConnectionDuring: aBlock + + self ignoreCertificate ifFalse: [^ aBlock value]. + + ^ aBlock on: SqueakSSLCertificateError do: [:ex | ex resume] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/customRateLimits.st b/packages/SemanticText.package/AnthropicAccount.class/instance/customRateLimits.st new file mode 100644 index 0000000..9f31706 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/customRateLimits.st @@ -0,0 +1,4 @@ +accessing - rate limits +customRateLimits + + ^ customRateLimits \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/defaultBaseUrl.st b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultBaseUrl.st new file mode 100644 index 0000000..48f2125 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultBaseUrl.st @@ -0,0 +1,4 @@ +initialize-release +defaultBaseUrl + + ^ 'https://api.anthropic.com' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/defaultRateLimits.st 
b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultRateLimits.st new file mode 100644 index 0000000..65d08f6 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultRateLimits.st @@ -0,0 +1,5 @@ +accessing - rate limits +defaultRateLimits + + ^ self class defaultRateLimitsPerTier at: + (self usageTier ifNil: [^ Dictionary new]) \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/defaultUsageTier.st b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultUsageTier.st new file mode 100644 index 0000000..bd058b1 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultUsageTier.st @@ -0,0 +1,4 @@ +initialize-release +defaultUsageTier + + ^ #free \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerModel.st b/packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerModel.st new file mode 100644 index 0000000..b1ce7f5 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerModel.st @@ -0,0 +1,10 @@ +accessing - expenses +expensesPerModel + + | expensesPerModel | + expensesPerModel := Dictionary new. + self expensesPerUser keysAndValuesDo: [:user :expensePerModel | + expensePerModel keysAndValuesDo: [:model :expense | + (expensesPerModel at: model ifAbsentPut: [Dictionary new]) + at: user put: expense]]. 
+ ^ expensesPerModel \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerUser.st b/packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerUser.st new file mode 100644 index 0000000..4237bea --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerUser.st @@ -0,0 +1,5 @@ +accessing - expenses +expensesPerUser + "See https://www.anthropic.com/pricing for current prices and https://console.anthropic.com/settings/usage for your current usage." + + ^ expensesPerUser \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/hasApiKey.st b/packages/SemanticText.package/AnthropicAccount.class/instance/hasApiKey.st new file mode 100644 index 0000000..10f8203 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/hasApiKey.st @@ -0,0 +1,4 @@ +testing +hasApiKey + + ^ self apiKey notNil \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate..st b/packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate..st new file mode 100644 index 0000000..4b2b32e --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate..st @@ -0,0 +1,5 @@ +accessing +ignoreCertificate: aBoolean + "If set to true, SSL certificates for the baseUrl will be ignored. WARNING: While this still ensures privacy (encryption), this opens the door to man-in-the-middle attacks (i.e., you cannot be sure that the remote host is what it says it is.)! Thus, use with caution." + + ignoreCertificate := aBoolean. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate.st b/packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate.st new file mode 100644 index 0000000..886a039 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate.st @@ -0,0 +1,5 @@ +accessing +ignoreCertificate + "If set to true, SSL certificates for the baseUrl will be ignored. WARNING: While this still ensures privacy (encryption), this opens the door to man-in-the-middle attacks (i.e., you cannot be sure that the remote host is what it says it is.)! Thus, use with caution." + + ^ ignoreCertificate \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/initialize.st b/packages/SemanticText.package/AnthropicAccount.class/instance/initialize.st new file mode 100644 index 0000000..a3dd161 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/initialize.st @@ -0,0 +1,9 @@ +initialize-release +initialize + + super initialize. + + ignoreCertificate := false. + usageTier := self defaultUsageTier. + customRateLimits := Dictionary new. + self resetExpenses. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/noteExpense.forUser.model..st b/packages/SemanticText.package/AnthropicAccount.class/instance/noteExpense.forUser.model..st new file mode 100644 index 0000000..9abf757 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/noteExpense.forUser.model..st @@ -0,0 +1,8 @@ +accessing - expenses +noteExpense: expense forUser: userName model: modelName + + | expensesPerModel | + expensesPerModel := expensesPerUser at: userName ifAbsentPut: [Dictionary new]. + expensesPerModel at: modelName put: + (expensesPerModel at: modelName ifAbsent: [OpenAIAmount zero]) + + expense. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/openExpenseWatcher.st b/packages/SemanticText.package/AnthropicAccount.class/instance/openExpenseWatcher.st new file mode 100644 index 0000000..9bc81bd --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/openExpenseWatcher.st @@ -0,0 +1,13 @@ +ui +openExpenseWatcher + "For the actual usage without estimations, see: https://platform.openai.com/account/usage" + + | field inspector | + inspector := Inspector on: self. + field := inspector newCustomField valueGetter: [:account | account totalExpense]. + inspector addCustomField: field. + field rememberInspector. + ^ (World dropInspectorField: field event: self currentEvent) + in: [:answer | + answer = World ifFalse: "Morphic-ct.2143" + [answer openAsTool]] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitForModel.type.ifUnknown..st b/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitForModel.type.ifUnknown..st new file mode 100644 index 0000000..557af84 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitForModel.type.ifUnknown..st @@ -0,0 +1,6 @@ +accessing - rate limits +rateLimitForModel: modelName type: type ifUnknown: aBlock + + | rateLimit | + rateLimit := self rateLimitsForModel: modelName ifUnknown: [^ aBlock value]. + ^ rateLimit at: type ifAbsent: aBlock \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimits.st b/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimits.st new file mode 100644 index 0000000..12858de --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimits.st @@ -0,0 +1,14 @@ +accessing - rate limits +rateLimits + + | rateLimits | + rateLimits := self defaultRateLimits copy. 
+ "merge" + self customRateLimits keysAndValuesDo: [:modelName :modelLimit | + rateLimits at: modelName put: + (rateLimits + at: modelName + ifPresent: [:defaultModelLimit | + defaultModelLimit , modelLimit] + ifAbsent: [modelLimit])]. + ^ rateLimits \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitsForModel.ifUnknown..st b/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitsForModel.ifUnknown..st new file mode 100644 index 0000000..f88d738 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitsForModel.ifUnknown..st @@ -0,0 +1,13 @@ +accessing - rate limits +rateLimitsForModel: modelName ifUnknown: aBlock + "See: https://platform.openai.com/account/rate-limits. Hypothetically we could scrape this, in practice, users may enter relevant rate limits manually." + + ^ self rateLimits at: modelName ifAbsent: + ["search for rate limits for more general model name (prefix, e.g., without version number/context size)" + | parts | + parts := modelName ifNotEmpty: [modelName splitBy: '-']. + parts ifNotEmpty: + [^ self + rateLimitsForModel: (parts allButLast joinSeparatedBy: '-') + ifUnknown: aBlock]. 
+ aBlock value] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerDayForModel.ifUnknown..st b/packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerDayForModel.ifUnknown..st new file mode 100644 index 0000000..65d8701 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerDayForModel.ifUnknown..st @@ -0,0 +1,4 @@ +accessing - rate limits +requestsPerDayForModel: modelName ifUnknown: aBlock + + ^ self rateLimitForModel: modelName type: #rpd ifUnknown: aBlock \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerMinuteForModel.ifUnknown..st b/packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerMinuteForModel.ifUnknown..st new file mode 100644 index 0000000..c20c563 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerMinuteForModel.ifUnknown..st @@ -0,0 +1,4 @@ +accessing - rate limits +requestsPerMinuteForModel: modelName ifUnknown: aBlock + + ^ self rateLimitForModel: modelName type: #rpm ifUnknown: aBlock \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/resetExpenses.st b/packages/SemanticText.package/AnthropicAccount.class/instance/resetExpenses.st new file mode 100644 index 0000000..08050c8 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/resetExpenses.st @@ -0,0 +1,4 @@ +accessing - expenses +resetExpenses + + expensesPerUser := Dictionary new. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/setCustomRateLimitsForModel.to..st b/packages/SemanticText.package/AnthropicAccount.class/instance/setCustomRateLimitsForModel.to..st new file mode 100644 index 0000000..399a414 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/setCustomRateLimitsForModel.to..st @@ -0,0 +1,7 @@ +accessing - rate limits +setCustomRateLimitsForModel: modelName to: rateLimitsSpec + "rateLimitsSpec: {requestsPerMinute. requestsPerDay. tokensPerMinute. tokensPerDay. batchQueueLimit} + See: https://platform.openai.com/account/limits. Hypothetically we could scrape this, in practice, users may enter relevant rate limits manually." + + self customRateLimits at: modelName put: + (self class rateLimitsFromSpec: rateLimitsSpec). \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerDayForModel.ifUnknown..st b/packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerDayForModel.ifUnknown..st new file mode 100644 index 0000000..aeb6ea9 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerDayForModel.ifUnknown..st @@ -0,0 +1,4 @@ +accessing - rate limits +tokensPerDayForModel: modelName ifUnknown: aBlock + + ^ self rateLimitForModel: modelName type: #tpd ifUnknown: aBlock \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerMinuteForModel.ifUnknown..st b/packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerMinuteForModel.ifUnknown..st new file mode 100644 index 0000000..1ff707f --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerMinuteForModel.ifUnknown..st @@ -0,0 +1,4 @@ +accessing - rate limits +tokensPerMinuteForModel: modelName ifUnknown: aBlock + + ^ self rateLimitForModel: modelName type: #tpm ifUnknown: aBlock \ No newline at end of 
file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpense.st b/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpense.st new file mode 100644 index 0000000..68de177 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpense.st @@ -0,0 +1,5 @@ +accessing - expenses +totalExpense + + ^ self expensesPerUser inject: OpenAIAmount zero into: [:sum :expenses | + expenses inject: OpenAIAmount zero into: [:userSum :expense | userSum + expense]] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerModel.st b/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerModel.st new file mode 100644 index 0000000..61d03e5 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerModel.st @@ -0,0 +1,4 @@ +accessing - expenses +totalExpensePerModel + + ^ self expensesPerModel collect: [:expenses | expenses sum] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerUser.st b/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerUser.st new file mode 100644 index 0000000..1fb0428 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerUser.st @@ -0,0 +1,4 @@ +accessing - expenses +totalExpensePerUser + + ^ self expensesPerUser collect: [:expenses | expenses sum] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/usageTier..st b/packages/SemanticText.package/AnthropicAccount.class/instance/usageTier..st new file mode 100644 index 0000000..43e4c3c --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/usageTier..st @@ -0,0 +1,5 @@ +accessing +usageTier: tier + "Rate and usage limits depend on the usage tier of your account, which is assigned by based on the duration of 
your subscription and the magnitude of your payments. Must be #free or a positive integer. You can find your current tier here: https://console.anthropic.com/settings/limits" + + usageTier := tier. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/usageTier.st b/packages/SemanticText.package/AnthropicAccount.class/instance/usageTier.st new file mode 100644 index 0000000..1b30a9d --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/usageTier.st @@ -0,0 +1,5 @@ +accessing +usageTier + "Rate and usage limits depend on the usage tier of your account, which is assigned based on the duration of your subscription and the magnitude of your payments. Must be #free or a positive integer. You can find your current tier here: https://console.anthropic.com/settings/limits" + + ^ usageTier \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json b/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json new file mode 100644 index 0000000..3ac2ef9 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json @@ -0,0 +1,45 @@ +{ + "class" : { + "cleanUp:" : "ct 8/20/2023 12:56", + "defaultAccount" : "ct 8/20/2023 12:52", + "defaultAccount:" : "ct 8/20/2023 12:52", + "defaultApiKey" : "zakkor 1/10/2025 12:49", + "defaultApiKey:" : "ct 10/15/2023 22:20", + "defaultBaseUrl" : "zakkor 1/10/2025 12:49", + "defaultBaseUrl:" : "ct 12/1/2023 23:30", + "defaultRateLimitsPerTier" : "ct 11/8/2024 00:23", + "isValidUsageTier:" : "ct 11/28/2023 12:39", + "openExpenseWatcher" : "zakkor 1/10/2025 12:50", + "rateLimitsFromSpec:" : "ct 6/20/2024 00:00" }, + "instance" : { + "apiKey" : "ct 8/20/2023 12:55", + "apiKey:" : "ct 8/20/2023 12:55", + "baseUrl" : "ct 12/1/2023 23:29", + "baseUrl:" : "ct 12/1/2023 23:31", + "controlConnectionDuring:" : "ct 12/11/2023 18:35", + "customRateLimits" : "ct 11/28/2023 13:27", 
+ "defaultBaseUrl" : "zakkor 1/10/2025 12:54", + "defaultRateLimits" : "ct 11/28/2023 13:16", + "defaultUsageTier" : "ct 11/28/2023 14:54", + "expensesPerModel" : "ct 8/20/2023 20:15", + "expensesPerUser" : "ct 8/27/2023 20:57", + "hasApiKey" : "ct 10/15/2023 21:44", + "ignoreCertificate" : "ct 12/11/2023 18:44", + "ignoreCertificate:" : "ct 12/11/2023 18:44", + "initialize" : "ct 12/11/2023 18:33", + "noteExpense:forUser:model:" : "ct 8/20/2023 19:11", + "openExpenseWatcher" : "ct 11/26/2023 21:56", + "rateLimitForModel:type:ifUnknown:" : "ct 11/28/2023 18:46", + "rateLimits" : "ct 2/21/2024 15:12", + "rateLimitsForModel:ifUnknown:" : "ct 2/21/2024 15:12", + "requestsPerDayForModel:ifUnknown:" : "ct 11/28/2023 18:46", + "requestsPerMinuteForModel:ifUnknown:" : "ct 11/28/2023 13:33", + "resetExpenses" : "ct 8/20/2023 21:04", + "setCustomRateLimitsForModel:to:" : "ct 4/27/2024 22:13", + "tokensPerDayForModel:ifUnknown:" : "ct 11/28/2023 13:33", + "tokensPerMinuteForModel:ifUnknown:" : "ct 11/28/2023 13:33", + "totalExpense" : "ct 8/20/2023 20:17", + "totalExpensePerModel" : "ct 8/20/2023 19:03", + "totalExpensePerUser" : "ct 8/20/2023 18:52", + "usageTier" : "ct 11/28/2023 14:56", + "usageTier:" : "ct 11/28/2023 14:56" } } diff --git a/packages/SemanticText.package/AnthropicAccount.class/properties.json b/packages/SemanticText.package/AnthropicAccount.class/properties.json new file mode 100644 index 0000000..0c114c8 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/properties.json @@ -0,0 +1,19 @@ +{ + "category" : "SemanticText-Providers-Anthropic", + "classinstvars" : [ + ], + "classvars" : [ + "DefaultAccount" ], + "commentStamp" : "Ed 1/10/2025 12:45", + "instvars" : [ + "baseUrl", + "apiKey", + "ignoreCertificate", + "usageTier", + "customRateLimits", + "expensesPerUser" ], + "name" : "AnthropicAccount", + "pools" : [ + ], + "super" : "Object", + "type" : "normal" } diff --git 
a/packages/SemanticText.package/AnthropicConversationModel.class/README.md b/packages/SemanticText.package/AnthropicConversationModel.class/README.md new file mode 100644 index 0000000..9259a15 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/README.md @@ -0,0 +1 @@ +I generate answers for a SemanticConversation using a large language model (LLM) such as Claude from the Anthropic API. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/bestName.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/bestName.st new file mode 100644 index 0000000..72d296f --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/bestName.st @@ -0,0 +1,3 @@ +constants +bestName + ^ self claude35SonnetLatestName \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/cheapestName.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/cheapestName.st new file mode 100644 index 0000000..5ffad18 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/cheapestName.st @@ -0,0 +1,3 @@ +constants +cheapestName + ^ self claude35HaikuLatestName \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Haiku20241022Name.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Haiku20241022Name.st new file mode 100644 index 0000000..89e4b2e --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Haiku20241022Name.st @@ -0,0 +1,3 @@ +constants +claude35Haiku20241022Name + ^ 'claude-3-5-haiku-20241022' \ No newline at end of file diff --git 
a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35HaikuLatestName.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35HaikuLatestName.st new file mode 100644 index 0000000..3fef37b --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35HaikuLatestName.st @@ -0,0 +1,3 @@ +constants +claude35HaikuLatestName + ^ 'claude-3-5-haiku-latest' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20240620Name.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20240620Name.st new file mode 100644 index 0000000..c746d8f --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20240620Name.st @@ -0,0 +1,3 @@ +constants +claude35Sonnet20240620Name + ^ 'claude-3-5-sonnet-20240620' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20241022Name.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20241022Name.st new file mode 100644 index 0000000..a7e2933 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20241022Name.st @@ -0,0 +1,3 @@ +constants +claude35Sonnet20241022Name + ^ 'claude-3-5-sonnet-20241022' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35SonnetLatestName.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35SonnetLatestName.st new file mode 100644 index 0000000..14b84e6 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35SonnetLatestName.st @@ -0,0 +1,3 @@ +constants +claude35SonnetLatestName + ^ 'claude-3-5-sonnet-latest' \ No newline at end of file diff --git 
a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Haiku20240307Name.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Haiku20240307Name.st new file mode 100644 index 0000000..c0e7afc --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Haiku20240307Name.st @@ -0,0 +1,3 @@ +constants +claude3Haiku20240307Name + ^ 'claude-3-haiku-20240307' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Opus20240229Name.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Opus20240229Name.st new file mode 100644 index 0000000..4d9a6fb --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Opus20240229Name.st @@ -0,0 +1,3 @@ +constants +claude3Opus20240229Name + ^ 'claude-3-opus-20240229' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3OpusLatestName.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3OpusLatestName.st new file mode 100644 index 0000000..99fb8d3 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3OpusLatestName.st @@ -0,0 +1,3 @@ +constants +claude3OpusLatestName + ^ 'claude-3-opus-latest' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Sonnet20240229Name.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Sonnet20240229Name.st new file mode 100644 index 0000000..57a3218 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Sonnet20240229Name.st @@ -0,0 +1,3 @@ +constants +claude3Sonnet20240229Name + ^ 'claude-3-sonnet-20240229' \ No newline at end of file diff --git 
a/packages/SemanticText.package/AnthropicConversationModel.class/class/defaultModelNames.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/defaultModelNames.st new file mode 100644 index 0000000..711673f --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/defaultModelNames.st @@ -0,0 +1,18 @@ +constants +defaultModelNames + + self flag: #modelConstants. + + "first is default" + ^ {"recommended" + self claude35SonnetLatestName. + self claude35HaikuLatestName. + self claude3OpusLatestName. + + "all other versions (pinned), ordered by version descending, date descending" + self claude35Sonnet20241022Name. + self claude35Sonnet20240620Name. + self claude35Haiku20241022Name. + self claude3Opus20240229Name. + self claude3Sonnet20240229Name. + self claude3Haiku20240307Name} \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/initialize.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/initialize.st new file mode 100644 index 0000000..19cbcd7 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/initialize.st @@ -0,0 +1,12 @@ +initialize-release +initialize + + self defaultModelNames do: [:modelName | + | model | + model := self new name: modelName. + SemanticText registeredConversationModels + detect: [:ea | ea name = model name] + ifNone: + [SemanticText registerConversationModel: model. + SemanticText defaultConversationModelOrNil ifNil: + [SemanticText defaultConversationModel: model]]]. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/unload.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/unload.st new file mode 100644 index 0000000..94f056e --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/unload.st @@ -0,0 +1,4 @@ +initialize-release +unload + + SemanticText unregisterConversationModelsOf: self \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/addToolSpec.toInput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/addToolSpec.toInput..st new file mode 100644 index 0000000..00d3281 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/addToolSpec.toInput..st @@ -0,0 +1,20 @@ +private - requests +addToolSpec: aToolSpec toInput: input + + input tools: + (aToolSpec tools asArray collect: [:tool | tool asOpenAIObject]). + + aToolSpec forcedTools ifNotNil: [:forcedTools | + input tool_choice: + (forcedTools = #any + ifTrue: [#required] + ifFalse: + [forcedTools + ifEmpty: [#none] + ifNotEmpty: + [| forcedTool | + forcedTools size > 1 ifTrue: [^ self error: 'cannot force multiple tools']. + forcedTool := forcedTools anyOne. + (forcedTool isString or: [forcedTool isText]) + ifTrue: [forcedTool := aToolSpec toolNamed: forcedTool]. 
+ forcedTool asOpenAIToolChoiceObject]])] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/assignExpense.toMessages..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/assignExpense.toMessages..st new file mode 100644 index 0000000..0a8f69c --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/assignExpense.toMessages..st @@ -0,0 +1,16 @@ +private +assignExpense: expense toMessages: messages + + messages size = 1 + ifTrue: + [messages first expense: expense] + ifFalse: + [| estimatedTokenCounts | + estimatedTokenCounts := messages collect: [:message | + self countTokensInMessage: message]. + messages + with: + (expense asApproximated "because token counts are estimated" + distributeBulkToShares: estimatedTokenCounts) + do: [:message :unitPrice | + message expense: unitPrice]]. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerCompletionToken.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerCompletionToken.st new file mode 100644 index 0000000..993823d --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerCompletionToken.st @@ -0,0 +1,22 @@ +accessing +centsPerCompletionToken + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic." + + self flag: #modelConstants. + + ^ self resolvedName caseOf: { + "Claude 3.5 Latest" + [self class claude35SonnetLatestName] -> [0.0015s]. "Output: $15/MTok" + [self class claude35HaikuLatestName] -> [0.0004s]. "Output: $4/MTok" + [self class claude3OpusLatestName] -> [0.0075s]. "Output: $75/MTok" + + "Claude 3.5 Pinned Versions" + [self class claude35Sonnet20241022Name] -> [0.0015s]. "Output: $15/MTok" + [self class claude35Sonnet20240620Name] -> [0.0015s]. 
"Output: $15/MTok" + [self class claude35Haiku20241022Name] -> [0.0004s]. "Output: $4/MTok" + + "Claude 3 Pinned Versions" + [self class claude3Opus20240229Name] -> [0.0075s]. "Output: $75/MTok" + [self class claude3Sonnet20240229Name] -> [0.0015s]. "Output: $15/MTok" + [self class claude3Haiku20240307Name] -> [0.000125s] "Output: $1.25/MTok" + } \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerPromptToken.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerPromptToken.st new file mode 100644 index 0000000..9696c93 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerPromptToken.st @@ -0,0 +1,22 @@ +accessing +centsPerPromptToken + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic." + + self flag: #modelConstants. + + ^ self resolvedName caseOf: { + "Claude 3.5 Latest" + [self class claude35SonnetLatestName] -> [0.0003s]. "Input: $3/MTok" + [self class claude35HaikuLatestName] -> [0.00008s]. "Input: $0.80/MTok" + [self class claude3OpusLatestName] -> [0.0015s]. "Input: $15/MTok" + + "Claude 3.5 Pinned Versions" + [self class claude35Sonnet20241022Name] -> [0.0003s]. "Input: $3/MTok" + [self class claude35Sonnet20240620Name] -> [0.0003s]. "Input: $3/MTok" + [self class claude35Haiku20241022Name] -> [0.00008s]. "Input: $0.80/MTok" + + "Claude 3 Pinned Versions" + [self class claude3Opus20240229Name] -> [0.0015s]. "Input: $15/MTok" + [self class claude3Sonnet20240229Name] -> [0.0003s]. 
"Input: $3/MTok" + [self class claude3Haiku20240307Name] -> [0.000025s] "Input: $0.25/MTok" + } \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInConversation..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInConversation..st new file mode 100644 index 0000000..77e9078 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInConversation..st @@ -0,0 +1,54 @@ +service +countTokensInConversation: aConversation + "Note: (Upper) approximation! Token counts are approximated (see #countTokensIn:), and the precise tokens generated for tool specs, tool calls, and tool messages are unknown and have been reverse-engineered but not been comprehensively tested. + Adopted from https://github.com/forestwanglin/openai-java/blob/main/jtokkit/src/main/java/xyz/felh/openai/jtokkit/utils/ToolContentFormat.java and https://github.com/hmarr/openai-chat-tokens." + + | tools pendingTools toolMessages hasMultipleToolMessages tokens | + tools := aConversation activeToolSpec ifNotNil: [:toolSpec | toolSpec tools]. + pendingTools := tools. + toolMessages := aConversation messages select: [:message | message role = #tool]. + hasMultipleToolMessages := toolMessages size > 1. + tokens := aConversation messages detectSum: [:message | + | compiledContent | + compiledContent := message basicContent ifNil: ['']. + (pendingTools isEmptyOrNil not and: [message role = #system]) ifTrue: + [compiledContent := compiledContent , String cr. + pendingTools := nil]. + self + countTokensInMessage: + (message shallowCopy + content: compiledContent; + yourself) + hasMultipleToolMessages: hasMultipleToolMessages]. + + tools isEmptyOrNil ifFalse: + [tokens := tokens + (self countTokensInToolSpec: aConversation activeToolSpec). 
+ + (aConversation messages anySatisfy: [:message | message role = #system]) + ifTrue: + ["'Tools typically add a system message, but reuse the first one if it's already there. This offsets the extra 9 tokens added by the tool definitions.'" + tokens := tokens - 4]]. + + hasMultipleToolMessages ifTrue: + [| toolMessagesWithContentCount | + tokens := tokens + (toolMessages size * 2 + 1). + (toolMessagesWithContentCount := toolMessages count: [:message | message contentString isEmptyOrNil not]) > 0 ifTrue: + [tokens := tokens + 1 - toolMessagesWithContentCount]]. + + (aConversation activeToolSpec ifNotNil: [:toolSpec | toolSpec forcedTools]) ifNotNil: [:forcedTools | + tokens := tokens + + (forcedTools isCollection + ifTrue: + [forcedTools + ifEmpty: [1] + ifNotEmpty: + [forcedTools detectSum: [:tool | + | toolName | + toolName := (tool isString or: [tool isText]) + ifTrue: [tool asString] + ifFalse: [tool asOpenAIToolChoiceObject function name]. + (self countTokensIn: toolName) + 4]]] + ifFalse: + [1 flag: #assumption "i don't know"])]. + + ^ tokens \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage..st new file mode 100644 index 0000000..682d5e3 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage..st @@ -0,0 +1,7 @@ +private - tokens +countTokensInMessage: aMessage + "Approximation! Adopted from https://github.com/hmarr/openai-chat-tokens." 
+ + ^ self + countTokensInMessage: aMessage + hasMultipleToolMessages: (aMessage conversation messages count: [:ea | ea isToolMessage]) > 1 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage.hasMultipleToolMessages..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage.hasMultipleToolMessages..st new file mode 100644 index 0000000..9632890 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage.hasMultipleToolMessages..st @@ -0,0 +1,59 @@ +private - tokens +countTokensInMessage: aMessage hasMultipleToolMessages: hasMultipleToolMessages + "Approximation! Adopted from https://github.com/hmarr/openai-chat-tokens." + + | compiledContent tokens | + compiledContent := String streamContents: [:stream | + (aMessage isToolMessage and: [hasMultipleToolMessages]) + ifFalse: + [aMessage basicContent ifNotNil: [:content | + stream nextPutAll: content]] + ifTrue: + [[| object | + object := aMessage basicContent parseAsJson. + object isDictionary ifFalse: [self error: 'content does not represent a JSON object']. + self + printFunctionCallArguments: object + on: stream] + on: Error do: + [stream nextPutAll: aMessage basicContent]]. + + "not yet implemented" + "aMessage messageName ifNotNil: [:messageName | + stream nextPutAll: messageName]."]. + + tokens := self countTokensIn: compiledContent. + + "special tokens per input message" + aMessage isToolMessage + ifFalse: [tokens := tokens + 3] + ifTrue: + [| toolContent | + tokens := tokens + 2. + toolContent := [aMessage basicContent parseAsJson] ifError: [nil]. + (hasMultipleToolMessages and: [toolContent isDictionary]) ifTrue: + [tokens := tokens - toolContent keys size]]. + + "not yet implemented" + "(aMessage messageName notNil and: [aMessage isToolMessage not]) ifTrue: + [tokens := tokens + 1]." 
+ aMessage toolCalls ifNotEmpty: [:toolCalls | + tokens := tokens + + (toolCalls detectSum: [:toolCall | + | toolCallTokens | + toolCallTokens := 3. + toolCallTokens := toolCallTokens + (self countTokensIn: toolCall type). + toolCall type = #function ifTrue: + [toolCallTokens := toolCallTokens + ((self countTokensIn: toolCall toolName) * 2). + toolCall arguments ifNotEmpty: [:arguments | + toolCallTokens := toolCallTokens + + (self countTokensIn: + (String streamContents: [:stream | + self printFunctionCallArguments: arguments on: stream]))]]. + toolCallTokens]). + tokens := tokens + + (toolCalls size > 1 + ifTrue: [15 - (toolCalls size * 5 - 6)] + ifFalse: [-2])]. + + ^ tokens \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInToolSpec..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInToolSpec..st new file mode 100644 index 0000000..b6ebfeb --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInToolSpec..st @@ -0,0 +1,9 @@ +private - tokens +countTokensInToolSpec: aToolSpec + "Approximation! Adopted from https://github.com/forestwanglin/openai-java/blob/main/jtokkit/src/main/java/xyz/felh/openai/jtokkit/utils/ToolContentFormat.java and https://github.com/hmarr/openai-chat-tokens." + + | compiledContent | + compiledContent := String streamContents: [:stream | + self printToolSpec: aToolSpec on: stream]. 
+ + ^ (self countTokensIn: compiledContent) + 9 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultConfig.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultConfig.st new file mode 100644 index 0000000..42ab2b1 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultConfig.st @@ -0,0 +1,4 @@ +initialize-release +defaultConfig + + ^ SemanticConversationConfig new \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultName.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultName.st new file mode 100644 index 0000000..afa7acb --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultName.st @@ -0,0 +1,4 @@ +initialize-release +defaultName + + ^ self class defaultModelNames first \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForReplies.after..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForReplies.after..st new file mode 100644 index 0000000..c674801 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForReplies.after..st @@ -0,0 +1,12 @@ +private +expenseForReplies: messages after: promptTokens + + | expenseForPrompt expenseForReplies | + expenseForPrompt := (OpenAIAmount approximateCents: self centsPerPromptToken) + * promptTokens. + expenseForReplies := (OpenAIAmount approximateCents: self centsPerCompletionToken) + * (messages detectSum: [:message | + message basicContent + ifNil: [0] + ifNotNil: [:content | self countTokensIn: content]]). 
+ ^ expenseForPrompt + expenseForReplies \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForUsage..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForUsage..st new file mode 100644 index 0000000..a4eb5b6 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForUsage..st @@ -0,0 +1,17 @@ +private +expenseForUsage: usage + + | inputTokens outputTokens cacheCreationInputTokens cacheReadInputTokens | + inputTokens := (usage at: 'input_tokens') ifNil: 0. + outputTokens := (usage at: 'output_tokens') ifNil: 0. + cacheCreationInputTokens := (usage at: 'cache_creation_input_tokens') ifNil: 0. + cacheReadInputTokens := (usage at: 'cache_read_input_tokens') ifNil: 0. + + "- Cache write tokens are 25% more expensive than base input tokens + - Cache read tokens are 90% cheaper than base input tokens + - Regular input and output tokens are priced at standard rates" + + ^ ((OpenAIAmount exactCents: self centsPerPromptToken) * inputTokens) + + ((OpenAIAmount exactCents: self centsPerCompletionToken) * outputTokens) + + ((OpenAIAmount exactCents: self centsPerPromptToken) * cacheCreationInputTokens * 1.25) + + ((OpenAIAmount exactCents: self centsPerPromptToken) * cacheReadInputTokens * 0.1) \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor..st new file mode 100644 index 0000000..2c401c9 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor..st @@ -0,0 +1,4 @@ +service +getAnswerFor: aConversation + + ^ self getAnswers: 1 for: aConversation \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor.config..st 
b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor.config..st new file mode 100644 index 0000000..ae8b36e --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor.config..st @@ -0,0 +1,5 @@ +service +getAnswerFor: aConversation config: aConfigOrNil + "Generate an assistant reply in response to aConversation. Answer a new SemanticMessage for the new reply. If #shouldStream is set to true, the answer will be SemanticStreamingMessage that is completed in the background." + + ^ (self getAnswers: 1 for: aConversation config: aConfigOrNil) first \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for..st new file mode 100644 index 0000000..69389c5 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for..st @@ -0,0 +1,4 @@ +service +getAnswers: number for: aConversation + + ^ self getAnswers: number for: aConversation config: nil \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st new file mode 100644 index 0000000..1267e33 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st @@ -0,0 +1,50 @@ +service +getAnswers: number for: aConversation config: aConfigOrNil + "Generate assistant replies in response to aConversation. Answer a collection of new SemanticMessages for each new reply. If #shouldStream is set to true, the answers will be SemanticStreamingMessages that are completed in the background." 
+ + | chatCompletion logRawOutput stream resumeStream | + + "Note: multiple responses via `n` is not supported by the Anthropic API" + number ~= 1 ifTrue: [self error: 'anthropic models do not support requesting multiple assistant replies']. + + chatCompletion := self + invokeWithConfig: aConfigOrNil + editInput: [:input :config | + config user ifNotNil: [:user | + input user: user]. + + config maxTokens ifNotNil: [:maxTokens | + input max_tokens: maxTokens]. + + config temperature ifNotNil: [:temperature | + input temperature: temperature]. + config nucleusSamplingMass ifNotNil: [:p | + input top_p: p]. + + config shouldLogProbabilities ifNotNil: [:shouldLogProbabilities | + input logprobs: shouldLogProbabilities]. + logRawOutput := false. + config shouldLogRawOutput ifNotNil: [:shouldLogRawOutput | + logRawOutput := shouldLogRawOutput]. + + stream := false. + config shouldStream ifNotNil: [:shouldStream | + stream := shouldStream. + input stream: stream]. + + input messages: + (aConversation messages collect: [:message | message asOpenAIObject]). + + aConversation activeToolSpec ifNotNil: [:toolSpec | + self addToolSpec: toolSpec toInput: input]. + + (stream and: [self shouldStreamRequests]) ifTrue: + ["For faster feedback, send even the request asynchronously." + ^ self handleAsyncReplies: number for: aConversation config: config logRawOutput: logRawOutput deferStreaming: + [:resumeBlock | resumeStream := resumeBlock]]] + handleResponse: [:response :config | + (stream and: [response isSuccess]) ifTrue: + [resumeStream ifNotNil: [resumeStream value: response]. + ^ self handleStreamingReplies: number for: aConversation from: response config: config logRawOutput: logRawOutput]]. 
+ + ^ self parseMessagesFrom: chatCompletion for: aConversation logRawOutput: logRawOutput \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleAsyncReplies.for.config.logRawOutput.deferStreaming..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleAsyncReplies.for.config.logRawOutput.deferStreaming..st new file mode 100644 index 0000000..76b7633 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleAsyncReplies.for.config.logRawOutput.deferStreaming..st @@ -0,0 +1,34 @@ +private +handleAsyncReplies: number for: aConversation config: aConfig logRawOutput: logRawOutput deferStreaming: deferBlock + "Black magic that esssentially implements a coroutine to answer a streaming conversation BEFORE returning control to the sender's sender (#invokeWithConfig:editInput:handleResponse:) and only then resumes the invocation and request processing from the background streaming process." + + | context home continue sem result | + "Preserve the remaining invocation stack..." + context := thisContext sender. + home := context home. + continue := context sender cut: home. + context privSender: home. + sem := Semaphore new. + + "...directly return the streaming answers..." + result := self + handleStreamingReplies: number + for: aConversation + from: + ["...and continue the invocation stack from the requestBlock." + | streamContext | + streamContext := thisContext. + sem wait. + self assert: home isDead. + home privSender: streamContext; pc: home endPC. + deferBlock value: [:response | streamContext push: response; jump]. + continue privSender: streamContext. + continue push: nil; jump] + config: aConfig + logRawOutput: logRawOutput. + + "If the activeProcess has a lower priority than the default streaming priority, the streaming process must wait for the activeProcess to return from home." 
+ home insertSender: + (Context contextEnsure: [sem signal]). + + ^ result \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st new file mode 100644 index 0000000..29bab39 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st @@ -0,0 +1,61 @@ +private +handleStreamingReplies: number for: aConversation from: responseBlock config: aConfig logRawOutput: logRawOutput + + | promptTokens toolSpec | + promptTokens := self countTokensInConversation: aConversation. + toolSpec := aConversation activeToolSpec. + + ^ SemanticStreamingMessage + conversation: aConversation + array: number + role: #assistant + inBackgroundDo: [:messages | + | expense | + [| dataStream data | + dataStream := self streamEventDataFrom: responseBlock value. + + logRawOutput ifTrue: + [messages do: [:message | + message rawOutput: + (JsonObject new + chatCompletionChunks: OrderedCollection new; + chatCompletionChunkChoices: OrderedCollection new; + yourself)]]. + + "[DONE] is not a thing for Anthropic. The equivalent is: + event: message_stop + data: {'type:' 'message_stop'}" + self flag: #todo. + [#('[DONE]' nil) includes: (data := dataStream next)] whileFalse: + [| chunk msg | + chunk := data utf8ToSqueak parseAsJson openAIWithSqueakLineEndings. + msg := messages last. + (chunk at: #error) ifNotNil: [:error | + OpenAIError + signalForType: error type + parameter: error param + code: error code + message: error message]. + logRawOutput ifTrue: + [messages do: [:message | + message rawOutput chatCompletionChunks addLast: chunk]]. 
+ + "TODO: Lets do caseOf:otherwise:" + + (chunk type = 'content_block_delta') ifTrue: [ + self parseStreamedChunk: chunk toolSpec: toolSpec addTo: msg. + ]. + (chunk type = 'message_delta') ifTrue: [ + chunk usage ifNotNil: [:usage | + expense := self expenseForUsage: usage. + self assignExpense: expense toMessages: messages] + ]. + (chunk type = 'message_stop') ifTrue: [msg beComplete]]. + self assert: dataStream next isNil] + + ensure: + [self account + noteExpense: + (expense ifNil: [self expenseForReplies: messages after: promptTokens]) + forUser: aConfig user + model: self name]] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/isLegacy.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/isLegacy.st new file mode 100644 index 0000000..8b1f1dc --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/isLegacy.st @@ -0,0 +1,6 @@ +testing +isLegacy + + self flag: #modelConstants. + + ^ false \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxCompletionTokens.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxCompletionTokens.st new file mode 100644 index 0000000..d895c4f --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxCompletionTokens.st @@ -0,0 +1,11 @@ +accessing +maxCompletionTokens + + self flag: #modelConstants. + + "https://docs.anthropic.com/en/docs/about-claude/models" + "Claude 3.5 family all have 8192 max tokens output, Claude 3 family have 4096." + + (self resolvedName beginsWith: 'claude-3-5-') + ifTrue: [^ 8192] + ifFalse: [^ 4096]. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxPromptTokens.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxPromptTokens.st new file mode 100644 index 0000000..caf2f78 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxPromptTokens.st @@ -0,0 +1,4 @@ +accessing +maxPromptTokens + + ^ self maxTokens \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxTokens.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxTokens.st new file mode 100644 index 0000000..bcd9a9d --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxTokens.st @@ -0,0 +1,9 @@ +accessing +maxTokens + + self flag: #modelConstants. + + "https://docs.anthropic.com/en/docs/about-claude/models" + "200K context window for all models across the board." + + ^ 200000 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor..st new file mode 100644 index 0000000..2de9846 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor..st @@ -0,0 +1,7 @@ +pricing +maximumPriceFor: aConversation + "Note: Prices are hardcoded and might not encompass any recent pricing updates by OpenAI. Token counts are (upper) approximations." 
+ + ^ self + maximumPriceFor: aConversation + answers: 1 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor.answers..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor.answers..st new file mode 100644 index 0000000..09cfed7 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor.answers..st @@ -0,0 +1,8 @@ +pricing +maximumPriceFor: aConversation answers: number + "Note: Prices are hardcoded and might not encompass any recent pricing updates by OpenAI. Token counts are (upper) approximations." + + ^ self + priceFor: aConversation + answers: number + completionSize: nil \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor..st new file mode 100644 index 0000000..a8e7ee6 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor..st @@ -0,0 +1,7 @@ +pricing +minimumPriceFor: aConversation + "Note: Prices are hardcoded and might not encompass any recent pricing updates by OpenAI. Token counts are (upper) approximations." + + ^ self + minimumPriceFor: aConversation + answers: 1 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor.answers..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor.answers..st new file mode 100644 index 0000000..4bd59a8 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor.answers..st @@ -0,0 +1,8 @@ +pricing +minimumPriceFor: aConversation answers: number + "Note: Prices are hardcoded and might not encompass any recent pricing updates by OpenAI. Token counts are (upper) approximations." 
+ + ^ self + priceFor: aConversation + answers: number + completionSize: 0 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/nameForRateLimits.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/nameForRateLimits.st new file mode 100644 index 0000000..3028978 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/nameForRateLimits.st @@ -0,0 +1,5 @@ +private +nameForRateLimits + + ({self class gpt4_0125Name. self class gpt4_1106Name} includes: self resolvedName) ifTrue: [^ self class gpt4TurboName]. + ^ self resolvedName \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st new file mode 100644 index 0000000..52d50a7 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st @@ -0,0 +1,25 @@ +private - requests +parseMessageFrom: chatCompletionChoice for: aConversation logRawOutput: logRawOutput + + | messageResult message | + messageResult := chatCompletionChoice. + message := SemanticMessage + conversation: aConversation + role: messageResult role + content: (messageResult content first type = 'text' ifTrue: [messageResult content first text] ifFalse: [messageResult content]). + + logRawOutput ifTrue: + [message rawOutput: + (JsonObject new + chatCompletionChoice: chatCompletionChoice; + yourself)]. + + messageResult tool_calls ifNotNil: [:toolCalls | + message toolCalls: + (self parseToolCallsFrom: toolCalls toolSpec: aConversation activeToolSpec)]. + + chatCompletionChoice logprobs ifNotNil: [:logprobs | + message tokenProbabilities: + (self parseTokenProbabilitiesFrom: logprobs)]. 
+ + ^ message \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st new file mode 100644 index 0000000..ea15d19 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st @@ -0,0 +1,21 @@ +private - requests +parseMessagesFrom: chatCompletion for: aConversation logRawOutput: logRawOutput + + | messages | + + messages := + {self + parseMessageFrom: chatCompletion + for: aConversation + logRawOutput: logRawOutput}. + + logRawOutput ifTrue: + [messages do: [:message | + message rawOutput chatCompletion: chatCompletion]]. + + chatCompletion usage ifNotNil: [:usage | + | expense | + expense := self expenseForUsage: usage. + self assignExpense: expense toMessages: messages]. + + ^ messages \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedChunk.toolSpec.addTo..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedChunk.toolSpec.addTo..st new file mode 100644 index 0000000..9b93abc --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedChunk.toolSpec.addTo..st @@ -0,0 +1,27 @@ +private - requests +parseStreamedChunk: chatCompletionChoice toolSpec: aToolSpecOrNil addTo: aStreamingMessage + + | chunkText chunkToolCalls chunkTokenProbabilities | + self flag: #todo. + chatCompletionChoice type ~= 'content_block_delta' ifTrue: [ + self error: 'unimplemented: did not expect something other than content_block_delta in here' + ]. + chatCompletionChoice delta type = 'text_delta' ifTrue: [ + chunkText := chatCompletionChoice delta text. 
+ ] ifFalse: [ + self error: 'unimplemented: did not expect something other than text_delta for content_block_delta' + ]. + + aStreamingMessage rawOutput ifNotNil: [:rawOutput | + rawOutput chatCompletionChunkChoices ifNotNil: [:rawChoices | + rawChoices addLast: chatCompletionChoice]]. + + self flag: #todo. + "chunkToolCalls := chatCompletionChoice delta tool_calls ifNotNil: [:toolCalls | + self parseStreamedToolCallChunksFrom: toolCalls toolSpec: aToolSpecOrNil message: aStreamingMessage]." + + "chunkTokenProbabilities := chatCompletionChoice logprobs ifNotNil: [:logprobs | + self parseTokenProbabilitiesFrom: logprobs]." + + (chunkText isEmptyOrNil and: [chunkToolCalls isNil] and: [chunkTokenProbabilities isEmptyOrNil]) ifFalse: + [aStreamingMessage addChunk: chunkText toolCalls: chunkToolCalls tokenProbabilities: chunkTokenProbabilities]. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunkFrom.toolSpec.addTo..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunkFrom.toolSpec.addTo..st new file mode 100644 index 0000000..a9ec60d --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunkFrom.toolSpec.addTo..st @@ -0,0 +1,15 @@ +private - requests +parseStreamedToolCallChunkFrom: toolCallChunk toolSpec: aToolSpecOrNil addTo: aStreamingToolCall + + | argumentsChunk functionNameChunk keyChunk | + keyChunk := toolCallChunk id. + functionNameChunk := toolCallChunk function ifNotNil: #name. + argumentsChunk := toolCallChunk function ifNotNil: #arguments. + + aStreamingToolCall addChunkKey: keyChunk tool: functionNameChunk arguments: argumentsChunk do: [:streamingToolCall | + (streamingToolCall tool isString and: [aToolSpecOrNil notNil]) ifTrue: + [aToolSpecOrNil toolNamed: streamingToolCall tool ifPresent: [:tool | + streamingToolCall tool: tool]]. 
+ streamingToolCall arguments isString ifTrue: + [streamingToolCall arguments: + ([streamingToolCall arguments parseAsOrderedJson] ifError: [streamingToolCall arguments])]]. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunksFrom.toolSpec.message..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunksFrom.toolSpec.message..st new file mode 100644 index 0000000..19685c7 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunksFrom.toolSpec.message..st @@ -0,0 +1,13 @@ +private - requests +parseStreamedToolCallChunksFrom: toolCallChunks toolSpec: aToolSpecOrNil message: aStreamingMessage + + | newToolCalls | + newToolCalls := Dictionary new: toolCallChunks size. + toolCallChunks do: [:toolCallChunk | + | index toolCall | + index := toolCallChunk index + 1. + toolCall := aStreamingMessage toolCalls at: index ifAbsent: + [newToolCalls at: index put: + (SemanticStreamingToolCall key: '' tool: '' arguments: '')]. + self parseStreamedToolCallChunkFrom: toolCallChunk toolSpec: aToolSpecOrNil addTo: toolCall]. 
+ ^ newToolCalls semanticWithKeysSorted \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseTokenProbabilitiesFrom..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseTokenProbabilitiesFrom..st new file mode 100644 index 0000000..55cb2bf --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseTokenProbabilitiesFrom..st @@ -0,0 +1,4 @@ +private - requests +parseTokenProbabilitiesFrom: logprobs + + ^ logprobs content \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallFrom.toolSpec..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallFrom.toolSpec..st new file mode 100644 index 0000000..fd3dadf --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallFrom.toolSpec..st @@ -0,0 +1,15 @@ +private - requests +parseToolCallFrom: toolCall toolSpec: aToolSpecOrNil + + | arguments function functionName | + self assert: toolCall type = #function. + functionName := toolCall function name. + arguments := toolCall function arguments. + + function := aToolSpecOrNil ifNotNil: + [aToolSpecOrNil toolNamed: functionName ifAbsent: []]. 
+ + ^ SemanticToolCall + key: toolCall id + tool: (function ifNil: [functionName]) + arguments: ([arguments parseAsOrderedJson] ifError: [arguments]) \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallsFrom.toolSpec..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallsFrom.toolSpec..st new file mode 100644 index 0000000..fe0c843 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallsFrom.toolSpec..st @@ -0,0 +1,5 @@ +private - requests +parseToolCallsFrom: toolCalls toolSpec: aToolSpecOrNil + + ^ toolCalls collect: [:toolCall | + self parseToolCallFrom: toolCall toolSpec: aToolSpecOrNil] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/pathToEndpoint.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/pathToEndpoint.st new file mode 100644 index 0000000..05a717f --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/pathToEndpoint.st @@ -0,0 +1,4 @@ +private +pathToEndpoint + + ^ '/v1/messages' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor..st new file mode 100644 index 0000000..a9aff8c --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor..st @@ -0,0 +1,5 @@ +pricing +priceFor: aConversation + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations."
+ + ^ self maximumPriceFor: aConversation \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers..st new file mode 100644 index 0000000..96ebe45 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers..st @@ -0,0 +1,5 @@ +pricing +priceFor: aConversation answers: number + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." + + ^ self maximumPriceFor: aConversation answers: number \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers.completionSize..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers.completionSize..st new file mode 100644 index 0000000..75b7e3a --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers.completionSize..st @@ -0,0 +1,16 @@ +pricing +priceFor: aConversation answers: number completionSize: completionSizeOrNil + "If completionSizeOrNil is nil, the maximum possible size will be assumed. + Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." + + | promptTokens completionTokens priceForPrompt priceForCompletion | + promptTokens := aConversation estimatePromptTokens. + completionTokens := aConversation maxCompletionTokens. "this is indeed an upper approximation!" + completionSizeOrNil ifNotNil: + [completionTokens := completionTokens clampHigh: (self maxTokensInStringOfSize: completionSizeOrNil)]. + + priceForPrompt := (OpenAIAmount approximateCents: self centsPerPromptToken) + * promptTokens. + priceForCompletion := (OpenAIAmount approximateCents: self centsPerCompletionToken) + * completionTokens.
+ ^ priceForPrompt + (priceForCompletion * number) \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionMessage..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionMessage..st new file mode 100644 index 0000000..f93a622 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionMessage..st @@ -0,0 +1,13 @@ +pricing +priceFor: aConversation completionMessage: aMessage + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." + + | promptTokens completionTokens priceForPrompt priceForCompletion | + promptTokens := aConversation estimatePromptTokens. + completionTokens := self countTokensInMessage: aMessage. + + priceForPrompt := (OpenAIAmount approximateCents: self centsPerPromptToken) + * promptTokens. + priceForCompletion := (OpenAIAmount approximateCents: self centsPerCompletionToken) + * completionTokens. + ^ priceForPrompt + priceForCompletion \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionSize..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionSize..st new file mode 100644 index 0000000..7b7fee5 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionSize..st @@ -0,0 +1,8 @@ +pricing +priceFor: aConversation completionSize: completionSize + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations."
+ + ^ self + priceFor: aConversation + answers: 1 + completionSize: completionSize \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceForPrompt..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceForPrompt..st new file mode 100644 index 0000000..301b6b5 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceForPrompt..st @@ -0,0 +1,6 @@ +pricing +priceForPrompt: aString + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." + + ^ (OpenAIAmount approximateCents: self centsPerPromptToken) + * (self countTokensIn: aString) \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunction.on..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunction.on..st new file mode 100644 index 0000000..68cab27 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunction.on..st @@ -0,0 +1,23 @@ +private - tokens +printFunction: aFunction on: aStream + + aFunction description isEmptyOrNil ifFalse: + [aStream + nextPutAll: '// '; + nextPutAll: aFunction description; + cr]. + + aFunction name isEmptyOrNil ifFalse: + [aStream + nextPutAll: 'type '; + nextPutAll: aFunction name; + nextPutAll: ' = (_: {'; + cr]. + + self + printFunctionParameterSchema: aFunction asOpenAIObject function parameters + on: aStream. + + aStream + nextPutAll: '}) => any;'; + cr.
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionCallArguments.on..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionCallArguments.on..st new file mode 100644 index 0000000..58252f9 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionCallArguments.on..st @@ -0,0 +1,12 @@ +private - tokens +printFunctionCallArguments: arguments on: aStream + + aStream nextPut: ${; cr. + arguments keysAndValuesDo: [:argumentName :argumentValue | + aStream + print: argumentName; + nextPut: $:; + print: argumentValue; + nextPut: $,; + cr]. + aStream nextPut: $}; cr. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on..st new file mode 100644 index 0000000..439d89c --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on..st @@ -0,0 +1,7 @@ +private - tokens +printFunctionParameterSchema: schema on: aStream + + ^ self + printFunctionParameterSchema: schema + on: aStream + indent: 0 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on.indent..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on.indent..st new file mode 100644 index 0000000..9c3403a --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on.indent..st @@ -0,0 +1,22 @@ +private - tokens +printFunctionParameterSchema: schema on: aStream indent: indent + + | required | + required := schema required. 
+ schema properties keysAndValuesDo: [:key :value | + value description isEmptyOrNil ifFalse: + [aStream + space: indent; + nextPutAll: '// '; + nextPutAll: value description; + cr]. + aStream + space: indent; + nextPutAll: key. + (required includes: key) ifFalse: + [aStream nextPut: $?]. + aStream nextPutAll: ': '. + self printFunctionParameterType: value on: aStream indent: indent. + aStream + nextPut: $,; + cr]. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterType.on.indent..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterType.on.indent..st new file mode 100644 index 0000000..842b300 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterType.on.indent..st @@ -0,0 +1,50 @@ +private - tokens +printFunctionParameterType: schema on: aStream indent: indent + + true + caseOf: + {[schema type = #string] -> + [schema enum ifNotNil: [:enum | + enum + do: [:value | + aStream + nextPut: $"; + nextPutAll: value; + nextPut: $"] + separatedBy: + [aStream nextPutAll: ' | ']. + ^ self]. + aStream nextPutAll: #string]. + [schema type = #array] -> + [schema items + ifNotNil: [:items | + self printFunctionParameterType: items on: aStream indent: indent] + ifNil: + [aStream nextPutAll: 'any']. + aStream nextPutAll: '[]']. + [schema type = #object] -> + [aStream + nextPut: ${; + cr. + self printFunctionParameterSchema: schema on: aStream indent: indent + 2. + aStream + cr; + nextPut: $}]. + [#(integer number) includes: schema type] -> + [schema enum ifNotNil: [:enum | + enum + do: [:value | + aStream + nextPut: $"; + nextPutAll: value; + nextPut: $"] + separatedBy: + [aStream nextPutAll: ' | ']. + ^ self]. + aStream nextPutAll: 'number']. + [schema type = #boolean] -> + [aStream nextPutAll: 'boolean']. + [schema type = #null] -> + [aStream nextPutAll: 'null']} + otherwise: + []. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printOn..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printOn..st new file mode 100644 index 0000000..6f45c1b --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printOn..st @@ -0,0 +1,16 @@ +printing +printOn: aStream + + super printOn: aStream. + + self resolvedName = self name ifFalse: + [aStream + nextPutAll: ' ('; + nextPutAll: self resolvedName; + nextPut: $)]. + + aStream nextPutAll: + (' - {1} tokens, {2} in/{3} out' format: + {(self maxTokens // 1000) asString , 'K'. + OpenAIAmount exactCents: self centsPerPromptToken. + OpenAIAmount exactCents: self centsPerCompletionToken}). \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printToolSpec.on..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printToolSpec.on..st new file mode 100644 index 0000000..55233ea --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printToolSpec.on..st @@ -0,0 +1,12 @@ +private - tokens +printToolSpec: aToolSpec on: aStream + + aStream + nextPutAll: 'namespace functions {'; cr; + cr. + aToolSpec tools do: [:tool | + self assert: tool type = #function. + self printFunction: tool on: aStream]. + aStream + cr; + nextPutAll: '} // namespace functions'. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/releaseDate.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/releaseDate.st new file mode 100644 index 0000000..0e71cc7 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/releaseDate.st @@ -0,0 +1,21 @@ +accessing +releaseDate + + self flag: #modelConstants. 
+ + ^ self name + caseOf: { + "Latest versions - handle specially since date is not in name" + [self class claude35SonnetLatestName] -> [Date year: 2024 month: 10 day: 22]. + [self class claude35HaikuLatestName] -> [Date year: 2024 month: 10 day: 22]. + [self class claude3OpusLatestName] -> [Date year: 2024 month: 2 day: 29]} + otherwise: + [(self name includesSubstring: 'claude') + ifTrue: [ + "Extract date from model name (format: yyyymmdd)" + | dateStr | + dateStr := (self name last: 8). + Date + year: (dateStr first: 4) asNumber + month: ((dateStr copyFrom: 5 to: 6) asNumber) + day: ((dateStr copyFrom: 7 to: 8) asNumber)]] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/resolvedName.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/resolvedName.st new file mode 100644 index 0000000..7774509 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/resolvedName.st @@ -0,0 +1,6 @@ +accessing +resolvedName + + self flag: #modelConstants. + "Anthropic define '-latest' models directly, for which we already have definitions. No need to resolve anything." + ^ self name \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/shouldStreamRequests.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/shouldStreamRequests.st new file mode 100644 index 0000000..9710aac --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/shouldStreamRequests.st @@ -0,0 +1,5 @@ +service +shouldStreamRequests + "Send even requests asynchronously to reduce the latency of initial #getAnswers: send. Experimental!" 
+ + ^ true \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/sortKey.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/sortKey.st new file mode 100644 index 0000000..ca23dcd --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/sortKey.st @@ -0,0 +1,11 @@ +comparing +sortKey + + "#releaseDate descending , #name ascending" + | recommendationIndex age maxAge | + recommendationIndex := 2 - ({self class bestName. self class cheapestName} indexOf: self name). + age := (Date today - (self releaseDate ifNil: [Date today])) days. + maxAge := 100 "years" * 365 "days". + ^ recommendationIndex asString + , (age printStringPadded: (maxAge log: 10) ceiling) + , self name \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/streamEventDataFrom..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/streamEventDataFrom..st new file mode 100644 index 0000000..2f73285 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/streamEventDataFrom..st @@ -0,0 +1,46 @@ +private - network +streamEventDataFrom: aWebResponse + "Parse server-sent events (SSE) and stream them onto a new generator." + + | responseStream | + self flag: #moveUpstream. "and add support for named events" + responseStream := Generator on: [:gen | + [aWebResponse streamFrom: aWebResponse stream to: gen size: nil progress: nil] + ensure: [aWebResponse close]]. + + ^ Generator on: [:datasStream | + [| data | + data := String streamContents: [:dataStream | + [responseStream atEnd + ifFalse: + [| line | + line := String streamContents: [:lineStream | + [responseStream atEnd + ifFalse: + [(CharacterSet crlf includes: responseStream peek) + ifTrue: [responseStream next] + ifFalse: [lineStream nextPut: responseStream next]; + yourself] + ifTrue: [true]] + whileFalse]. 
+ line isEmpty + ifFalse: + [Transcript show: line; cr. + self flag: #todo. + "(line beginsWith: 'event: ') ifTrue: [ + dataStream nextPutAll: (line allButFirst: 'event: ' size). + dataStream cr + ]." + (line beginsWith: 'data: ') ifTrue: [ + dataStream nextPutAll: (line allButFirst: 'data: ' size). + dataStream cr + ]. + ]; + yourself] + ifTrue: [true]] + whileFalse]. + data isEmptyOrNil + ifFalse: + [datasStream nextPut: data allButLast]; + yourself] + whileFalse] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json new file mode 100644 index 0000000..077df34 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json @@ -0,0 +1,71 @@ +{ + "class" : { + "bestName" : "zakkor 1/10/2025 16:05", + "cheapestName" : "zakkor 1/10/2025 16:05", + "claude35Haiku20241022Name" : "zakkor 1/10/2025 16:02", + "claude35HaikuLatestName" : "zakkor 1/10/2025 16:03", + "claude35Sonnet20240620Name" : "zakkor 1/10/2025 15:58", + "claude35Sonnet20241022Name" : "zakkor 1/10/2025 15:58", + "claude35SonnetLatestName" : "zakkor 1/10/2025 15:59", + "claude3Haiku20240307Name" : "zakkor 1/10/2025 16:04", + "claude3Opus20240229Name" : "zakkor 1/10/2025 16:03", + "claude3OpusLatestName" : "zakkor 1/10/2025 16:03", + "claude3Sonnet20240229Name" : "zakkor 1/10/2025 16:04", + "defaultModelNames" : "zakkor 1/10/2025 16:08", + "initialize" : "ct 2/4/2024 19:49", + "unload" : "ct 8/17/2023 19:28" }, + "instance" : { + "addToolSpec:toInput:" : "ct 5/25/2024 22:01", + "assignExpense:toMessages:" : "ct 5/25/2024 23:42", + "centsPerCompletionToken" : "zakkor 1/10/2025 19:19", + "centsPerPromptToken" : "zakkor 1/10/2025 19:18", + "countTokensInConversation:" : "ct 6/19/2024 21:04", + "countTokensInMessage:" : "ct 4/30/2024 01:33", + "countTokensInMessage:hasMultipleToolMessages:" : "ct 4/30/2024 18:36", 
+ "countTokensInToolSpec:" : "ct 4/29/2024 22:55", + "defaultConfig" : "ct 8/17/2023 17:49", + "defaultName" : "ct 2/4/2024 19:52", + "expenseForReplies:after:" : "ct 1/16/2024 16:11", + "expenseForUsage:" : "zakkor 1/11/2025 21:25", + "getAnswerFor:" : "ct 1/9/2024 00:14", + "getAnswerFor:config:" : "ct 1/8/2024 23:52", + "getAnswers:for:" : "ct 1/8/2024 22:28", + "getAnswers:for:config:" : "zakkor 1/11/2025 21:13", + "handleAsyncReplies:for:config:logRawOutput:deferStreaming:" : "ct 5/2/2024 15:52", + "handleStreamingReplies:for:from:config:logRawOutput:" : "zakkor 1/11/2025 12:53", + "isLegacy" : "zakkor 1/10/2025 19:02", + "maxCompletionTokens" : "zakkor 1/10/2025 17:18", + "maxPromptTokens" : "ct 2/5/2024 20:48", + "maxTokens" : "zakkor 1/10/2025 17:19", + "maximumPriceFor:" : "zakkor 1/10/2025 20:12", + "maximumPriceFor:answers:" : "zakkor 1/10/2025 20:12", + "minimumPriceFor:" : "zakkor 1/10/2025 20:12", + "minimumPriceFor:answers:" : "zakkor 1/10/2025 20:12", + "nameForRateLimits" : "ct 2/5/2024 20:07", + "parseMessageFrom:for:logRawOutput:" : "zakkor 1/10/2025 18:32", + "parseMessagesFrom:for:logRawOutput:" : "zakkor 1/10/2025 23:48", + "parseStreamedChunk:toolSpec:addTo:" : "zakkor 1/11/2025 12:48", + "parseStreamedToolCallChunkFrom:toolSpec:addTo:" : "ct 1/16/2024 20:52", + "parseStreamedToolCallChunksFrom:toolSpec:message:" : "ct 2/13/2024 00:17", + "parseTokenProbabilitiesFrom:" : "ct 1/15/2024 19:37", + "parseToolCallFrom:toolSpec:" : "ct 1/16/2024 18:39", + "parseToolCallsFrom:toolSpec:" : "ct 1/16/2024 00:47", + "pathToEndpoint" : "zakkor 1/10/2025 16:36", + "priceFor:" : "zakkor 1/10/2025 20:12", + "priceFor:answers:" : "zakkor 1/10/2025 20:12", + "priceFor:answers:completionSize:" : "ct 11/17/2024 19:31", + "priceFor:completionMessage:" : "ct 4/30/2024 01:35", + "priceFor:completionSize:" : "ct 2/3/2024 21:24", + "priceForPrompt:" : "ct 2/3/2024 21:23", + "printFunction:on:" : "ct 2/13/2024 02:25", + "printFunctionCallArguments:on:" : "ct 
2/13/2024 01:20", + "printFunctionParameterSchema:on:" : "ct 2/13/2024 02:10", + "printFunctionParameterSchema:on:indent:" : "ct 4/29/2024 21:56", + "printFunctionParameterType:on:indent:" : "ct 2/13/2024 02:23", + "printOn:" : "ct 2/10/2024 00:04", + "printToolSpec:on:" : "ct 4/29/2024 21:44", + "releaseDate" : "zakkor 1/10/2025 19:02", + "resolvedName" : "zakkor 1/10/2025 16:43", + "shouldStreamRequests" : "ct 5/2/2024 01:50", + "sortKey" : "ct 7/31/2024 22:30", + "streamEventDataFrom:" : "zakkor 1/11/2025 00:48" } } diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/properties.json b/packages/SemanticText.package/AnthropicConversationModel.class/properties.json new file mode 100644 index 0000000..fc316fd --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/properties.json @@ -0,0 +1,14 @@ +{ + "category" : "SemanticText-Providers-Anthropic", + "classinstvars" : [ + ], + "classvars" : [ + ], + "commentStamp" : "", + "instvars" : [ + ], + "name" : "AnthropicConversationModel", + "pools" : [ + ], + "super" : "AnthropicModel", + "type" : "normal" } diff --git a/packages/SemanticText.package/AnthropicModel.class/README.md b/packages/SemanticText.package/AnthropicModel.class/README.md new file mode 100644 index 0000000..d77432d --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/README.md @@ -0,0 +1 @@ +I am the abstract superclass for all models from the Anthropic API (https://docs.anthropic.com/en/api). \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/class/updateModels.st b/packages/SemanticText.package/AnthropicModel.class/class/updateModels.st new file mode 100644 index 0000000..f3f0fc0 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/class/updateModels.st @@ -0,0 +1,8 @@ +support +updateModels + + "There's currently no API for this..."
+ "See: + * https://openai.com/pricing + * https://platform.openai.com/docs/guides/rate-limits/usage-tiers" + self systemNavigation browseAllCallsOn: #modelConstants localToPackage: self packageInfo name. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/account..st b/packages/SemanticText.package/AnthropicModel.class/instance/account..st new file mode 100644 index 0000000..65b4a66 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/account..st @@ -0,0 +1,4 @@ +accessing +account: anAccount + + account := anAccount. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/account.st b/packages/SemanticText.package/AnthropicModel.class/instance/account.st new file mode 100644 index 0000000..9b69e52 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/account.st @@ -0,0 +1,4 @@ +accessing +account + + ^ account \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/asStringOrText.st b/packages/SemanticText.package/AnthropicModel.class/instance/asStringOrText.st new file mode 100644 index 0000000..51d5b5a --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/asStringOrText.st @@ -0,0 +1,10 @@ +printing +asStringOrText + + | result | + result := super asStringOrText. + self isLegacy ifFalse: [^ result]. + + ^ result asText + addAttributesForDeprecation; + yourself \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/assureAvailableOr..st b/packages/SemanticText.package/AnthropicModel.class/instance/assureAvailableOr..st new file mode 100644 index 0000000..96532ca --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/assureAvailableOr..st @@ -0,0 +1,12 @@ +support +assureAvailableOr: requirementsBlock + + self account ifNil: + [^ requirementsBlock value: + {{#openAIAccount. 
'provide an Anthropic account'. [self inspect]}}]. + + self account hasApiKey ifFalse: + [^ requirementsBlock value: + {{#openAIAccount. 'provide an Anthropic API key'. [PreferenceBrowser open searchPattern: 'Anthropic API']}}]. + + ^ nil \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/baseConfig..st b/packages/SemanticText.package/AnthropicModel.class/instance/baseConfig..st new file mode 100644 index 0000000..2c52bfd --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/baseConfig..st @@ -0,0 +1,4 @@ +accessing +baseConfig: aConfig + + baseConfig := aConfig. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/baseConfig.st b/packages/SemanticText.package/AnthropicModel.class/instance/baseConfig.st new file mode 100644 index 0000000..be773c3 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/baseConfig.st @@ -0,0 +1,4 @@ +accessing +baseConfig + + ^ baseConfig \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/baseUrl.st b/packages/SemanticText.package/AnthropicModel.class/instance/baseUrl.st new file mode 100644 index 0000000..1cbf068 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/baseUrl.st @@ -0,0 +1,4 @@ +private +baseUrl + + ^ self account baseUrl \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/countTokensIn..st b/packages/SemanticText.package/AnthropicModel.class/instance/countTokensIn..st new file mode 100644 index 0000000..807b4e6 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/countTokensIn..st @@ -0,0 +1,6 @@ +service +countTokensIn: aString + + self flag: #approximation. "Upper approximation! Could use something like OpenAI's tiktoken for more precise counts. Should honor different tokenizers!" 
+ + ^ self maxTokensInStringOfSize: aString size \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/defaultAccount.st b/packages/SemanticText.package/AnthropicModel.class/instance/defaultAccount.st new file mode 100644 index 0000000..9844fbc --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/defaultAccount.st @@ -0,0 +1,4 @@ +initialize-release +defaultAccount + + ^ AnthropicAccount defaultAccount \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/defaultConfig.st b/packages/SemanticText.package/AnthropicModel.class/instance/defaultConfig.st new file mode 100644 index 0000000..958bee1 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/defaultConfig.st @@ -0,0 +1,4 @@ +initialize-release +defaultConfig + + ^ SemanticConfig new \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/defaultName.st b/packages/SemanticText.package/AnthropicModel.class/instance/defaultName.st new file mode 100644 index 0000000..09a9775 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/defaultName.st @@ -0,0 +1,4 @@ +initialize-release +defaultName + + ^ nil \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/expenseForUsage..st b/packages/SemanticText.package/AnthropicModel.class/instance/expenseForUsage..st new file mode 100644 index 0000000..35b9a35 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/expenseForUsage..st @@ -0,0 +1,4 @@ +private +expenseForUsage: usage + + ^ self subclassResponsibility \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/handleTransientServerErrorsDuring..st b/packages/SemanticText.package/AnthropicModel.class/instance/handleTransientServerErrorsDuring..st new file mode 100644 index 0000000..2a4291c 
--- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/handleTransientServerErrorsDuring..st @@ -0,0 +1,17 @@ +private +handleTransientServerErrorsDuring: requestBlock + + | pendingRetries success result | + pendingRetries := 3. + success := false. + [[result := requestBlock value. success := true] + on: ConnectionClosed , ConnectionTimedOut do: [:ex | + (pendingRetries := pendingRetries - 1) <= 0 ifTrue: [ex pass]. + Transcript showln: 'retry: ' , ex] + on: OpenAIError do: [:ex | + ex code = 503 ifFalse: [ex pass]. + (pendingRetries := pendingRetries - 1) <= 0 ifTrue: [ex pass]. + Transcript showln: 'retry: ' , ex]. + success] + whileFalse: [success]. + ^ result \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/initialize.st b/packages/SemanticText.package/AnthropicModel.class/instance/initialize.st new file mode 100644 index 0000000..1bdfca8 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/initialize.st @@ -0,0 +1,8 @@ +initialize-release +initialize + + super initialize. + + self name: self defaultName. + self account: self defaultAccount. + self baseConfig: self defaultConfig. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput..st b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput..st new file mode 100644 index 0000000..a7aa99e --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput..st @@ -0,0 +1,8 @@ +private +invokeWithConfig: aConfigOrNil documents: documentsOrNil editInput: inputBlock + + ^ self + invokeWithConfig: aConfigOrNil + documents: documentsOrNil + editInput: inputBlock + handleResponse: [:response |] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput.handleResponse..st b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput.handleResponse..st new file mode 100644 index 0000000..eddd8c2 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput.handleResponse..st @@ -0,0 +1,73 @@ +private +invokeWithConfig: aConfigOrNil documents: documentsOrNil editInput: inputBlock handleResponse: responseBlock + + | config input output response | + config := self baseConfig. + aConfigOrNil ifNotNil: + [config := config updatedWith: aConfigOrNil]. + + input := JsonObject new + model: self name; + "max_tokens is required" + max_tokens: self maxCompletionTokens; + yourself. + + inputBlock ifNotNil: + [inputBlock cull: input cull: config]. + config openAIRawConfig ifNotNil: [:rawConfig | + rawConfig keysAndValuesDo: [:key :value | + input at: key put: value]]. + + self handleTransientServerErrorsDuring: + [response := self account controlConnectionDuring: + [| client authorizeBlock | + client := WebClient new. + authorizeBlock := [:request | + self account apiKey ifNotNil: [:apiKey | + request headerAt: 'x-api-key' put: apiKey. 
+ request headerAt: 'anthropic-version' put: '2023-06-01']]. + documentsOrNil + ifNil: + [client + httpPost: self url + content: input openAIWithUnixLineEndings asJsonString squeakToUtf8 + type: 'application/json' + do: authorizeBlock] + ifNotNil: + [| inputFields documentFields | + inputFields := input openAIWithUnixLineEndings associations gather: [:assoc | + assoc value isArray + ifFalse: + [{assoc key squeakToUtf8 -> assoc value squeakToUtf8}] + ifTrue: + [assoc value collect: [:ea | assoc key squeakToUtf8 -> ea squeakToUtf8]]]. + documentFields := (documentsOrNil as: OrderedDictionary) associations collect: [:assoc | + assoc key squeakToUtf8 -> + (assoc value copy + in: [:copy | + copy content isString ifTrue: + [copy content: assoc value content squeakToUtf8]]; + yourself)]. + client + openAIHttpPost: self url + multipartFields: inputFields , documentFields + do: authorizeBlock]]. + + output := (responseBlock ifNotNil: [responseBlock cull: response cull: config]) + ifNil: [response content utf8ToSqueak withoutTrailingBlanks parseAsJson openAIWithSqueakLineEndings]. + + response isSuccess ifFalse: + [| error | + error := output at: #error. + ^ OpenAIError + signalForType: error type + parameter: error param + code: error code + message: error message]]. + + output usage ifNotNil: [:usage | + | expense | + expense := self expenseForUsage: usage. + self account noteExpense: expense forUser: config user model: self name]. 
+ + ^ output \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput..st b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput..st new file mode 100644 index 0000000..a4efa82 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput..st @@ -0,0 +1,7 @@ +private +invokeWithConfig: aConfigOrNil editInput: inputBlock + + ^ self + invokeWithConfig: aConfigOrNil + editInput: inputBlock + handleResponse: [:response |] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput.handleResponse..st b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput.handleResponse..st new file mode 100644 index 0000000..1b95a45 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput.handleResponse..st @@ -0,0 +1,8 @@ +private +invokeWithConfig: aConfigOrNil editInput: inputBlock handleResponse: responseBlock + + ^ self + invokeWithConfig: aConfigOrNil + documents: nil + editInput: inputBlock + handleResponse: responseBlock \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/isLegacy.st b/packages/SemanticText.package/AnthropicModel.class/instance/isLegacy.st new file mode 100644 index 0000000..89b9cd1 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/isLegacy.st @@ -0,0 +1,4 @@ +testing +isLegacy + + ^ false \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/maxCharactersInTokens..st b/packages/SemanticText.package/AnthropicModel.class/instance/maxCharactersInTokens..st new file mode 100644 index 0000000..d3a0cb8 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/maxCharactersInTokens..st @@ -0,0 +1,4 @@ +service 
+maxCharactersInTokens: numberOfTokens + + ^ numberOfTokens * 4 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/maxTokensInStringOfSize..st b/packages/SemanticText.package/AnthropicModel.class/instance/maxTokensInStringOfSize..st new file mode 100644 index 0000000..9c83400 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/maxTokensInStringOfSize..st @@ -0,0 +1,5 @@ +service +maxTokensInStringOfSize: stringSize + + stringSize = 1 ifTrue: [^ 1]. + ^ stringSize // 2 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/name..st b/packages/SemanticText.package/AnthropicModel.class/instance/name..st new file mode 100644 index 0000000..540ea67 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/name..st @@ -0,0 +1,4 @@ +accessing +name: aString + + name := aString. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/name.st b/packages/SemanticText.package/AnthropicModel.class/instance/name.st new file mode 100644 index 0000000..4b7f350 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/name.st @@ -0,0 +1,4 @@ +accessing +name + + ^ name \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/nameForRateLimits.st b/packages/SemanticText.package/AnthropicModel.class/instance/nameForRateLimits.st new file mode 100644 index 0000000..6df4575 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/nameForRateLimits.st @@ -0,0 +1,4 @@ +private +nameForRateLimits + + ^ self name \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/pathToEndpoint.st b/packages/SemanticText.package/AnthropicModel.class/instance/pathToEndpoint.st new file mode 100644 index 0000000..8f8fd10 --- /dev/null +++ 
b/packages/SemanticText.package/AnthropicModel.class/instance/pathToEndpoint.st @@ -0,0 +1,4 @@ +private +pathToEndpoint + + ^ self subclassResponsibility \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/printOn..st b/packages/SemanticText.package/AnthropicModel.class/instance/printOn..st new file mode 100644 index 0000000..53f98e6 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/printOn..st @@ -0,0 +1,4 @@ +printing +printOn: aStream + + aStream nextPutAll: self name. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/requestsPerMinute.st b/packages/SemanticText.package/AnthropicModel.class/instance/requestsPerMinute.st new file mode 100644 index 0000000..8bdf1b2 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/requestsPerMinute.st @@ -0,0 +1,4 @@ +rate limits +requestsPerMinute + + ^ self account requestsPerMinuteForModel: self nameForRateLimits ifUnknown: [nil] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/tokensPerMinute.st b/packages/SemanticText.package/AnthropicModel.class/instance/tokensPerMinute.st new file mode 100644 index 0000000..4a135c2 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/tokensPerMinute.st @@ -0,0 +1,4 @@ +rate limits +tokensPerMinute + + ^ self account tokensPerMinuteForModel: self nameForRateLimits ifUnknown: [nil] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/url.st b/packages/SemanticText.package/AnthropicModel.class/instance/url.st new file mode 100644 index 0000000..dd5fd56 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/url.st @@ -0,0 +1,4 @@ +private +url + + ^ self baseUrl , self pathToEndpoint \ No newline at end of file diff --git 
a/packages/SemanticText.package/AnthropicModel.class/methodProperties.json b/packages/SemanticText.package/AnthropicModel.class/methodProperties.json new file mode 100644 index 0000000..a260d01 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/methodProperties.json @@ -0,0 +1,33 @@ +{ + "class" : { + "updateModels" : "ct 11/28/2023 12:44" }, + "instance" : { + "account" : "ct 8/17/2023 17:29", + "account:" : "ct 8/17/2023 17:29", + "asStringOrText" : "ct 2/4/2024 20:15", + "assureAvailableOr:" : "zakkor 1/10/2025 20:13", + "baseConfig" : "ct 8/17/2023 17:47", + "baseConfig:" : "ct 8/17/2023 17:48", + "baseUrl" : "ct 12/1/2023 23:28", + "countTokensIn:" : "ct 5/25/2024 22:20", + "defaultAccount" : "zakkor 1/10/2025 15:50", + "defaultConfig" : "ct 8/17/2023 17:48", + "defaultName" : "ct 8/17/2023 18:16", + "expenseForUsage:" : "ct 12/11/2023 15:59", + "handleTransientServerErrorsDuring:" : "ct 2/9/2024 20:21", + "initialize" : "ct 8/17/2023 19:27", + "invokeWithConfig:documents:editInput:" : "ct 6/12/2024 00:38", + "invokeWithConfig:documents:editInput:handleResponse:" : "zakkor 1/11/2025 00:29", + "invokeWithConfig:editInput:" : "ct 8/20/2023 19:43", + "invokeWithConfig:editInput:handleResponse:" : "ct 6/18/2024 21:44", + "isLegacy" : "ct 2/7/2024 17:47", + "maxCharactersInTokens:" : "ct 2/3/2024 21:17", + "maxTokensInStringOfSize:" : "ct 2/7/2024 17:34", + "name" : "ct 8/17/2023 17:25", + "name:" : "ct 8/17/2023 17:25", + "nameForRateLimits" : "ct 2/5/2024 20:07", + "pathToEndpoint" : "ct 8/17/2023 18:08", + "printOn:" : "ct 8/17/2023 20:05", + "requestsPerMinute" : "ct 2/5/2024 20:07", + "tokensPerMinute" : "ct 2/5/2024 20:08", + "url" : "ct 12/1/2023 23:28" } } diff --git a/packages/SemanticText.package/AnthropicModel.class/properties.json b/packages/SemanticText.package/AnthropicModel.class/properties.json new file mode 100644 index 0000000..4fe7b33 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/properties.json @@ 
-0,0 +1,16 @@ +{ + "category" : "SemanticText-Providers-Anthropic", + "classinstvars" : [ + ], + "classvars" : [ + ], + "commentStamp" : "", + "instvars" : [ + "name", + "account", + "baseConfig" ], + "name" : "AnthropicModel", + "pools" : [ + ], + "super" : "Object", + "type" : "normal" } diff --git a/packages/SemanticText.package/SemanticConversationEditor.class/instance/addModelItemsToWindowMenu..st b/packages/SemanticText.package/SemanticConversationEditor.class/instance/addModelItemsToWindowMenu..st index 582de40..7e213ed 100644 --- a/packages/SemanticText.package/SemanticConversationEditor.class/instance/addModelItemsToWindowMenu..st +++ b/packages/SemanticText.package/SemanticConversationEditor.class/instance/addModelItemsToWindowMenu..st @@ -28,6 +28,11 @@ addModelItemsToWindowMenu: menu add: 'open OpenAI expense watcher' target: accountClass action: #openExpenseWatcher]. + (Smalltalk classNamed: #AnthropicAccount) ifNotNil: [:accountClass | + menu + add: 'open Anthropic expense watcher' + target: accountClass + action: #openExpenseWatcher]. menu add: 'edit preferences' action: #openPreferences. 
diff --git a/packages/SemanticText.package/SemanticConversationEditor.class/methodProperties.json b/packages/SemanticText.package/SemanticConversationEditor.class/methodProperties.json index e7798d6..a6d56c8 100644 --- a/packages/SemanticText.package/SemanticConversationEditor.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticConversationEditor.class/methodProperties.json @@ -23,7 +23,7 @@ "addDo:" : "ct 2/15/2024 00:57", "addDo:asPlaceholder:" : "ct 2/15/2024 00:58", "addMessage:" : "ct 4/30/2024 23:55", - "addModelItemsToWindowMenu:" : "ct 11/23/2024 22:05", + "addModelItemsToWindowMenu:" : "zakkor 1/10/2025 23:52", "addRoleMessage:" : "ct 10/15/2023 18:05", "addSystemMessage" : "ct 8/12/2023 21:01", "addToolCall" : "ct 4/30/2024 23:50", diff --git a/packages/SemanticText.package/SemanticMessage.class/instance/asOpenAIObject.st b/packages/SemanticText.package/SemanticMessage.class/instance/asOpenAIObject.st index dcdfdc6..2ef82f2 100644 --- a/packages/SemanticText.package/SemanticMessage.class/instance/asOpenAIObject.st +++ b/packages/SemanticText.package/SemanticMessage.class/instance/asOpenAIObject.st @@ -4,7 +4,7 @@ asOpenAIObject | jsonObject | jsonObject := JsonObject new role: self role; - content: self basicContent; + content: self content; yourself. 
self basicToolCalls ifNotNil: [:calls | jsonObject tool_calls: diff --git a/packages/SemanticText.package/SemanticMessage.class/instance/basicContent.st b/packages/SemanticText.package/SemanticMessage.class/instance/basicContent.st index e55ce28..b5358f9 100644 --- a/packages/SemanticText.package/SemanticMessage.class/instance/basicContent.st +++ b/packages/SemanticText.package/SemanticMessage.class/instance/basicContent.st @@ -1,4 +1,31 @@ accessing basicContent - + "Both OpenAI and Anthropic messages' content can either take the form of a plain string, or in more special cases, an array of objects, for example:" + " + ""content"": [ + {""type"": ""text"", ""text"": ""What's in this image?""}, + { + ""type"": ""image_url"", + ""image_url"": { + ""url"": ""https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"", + }, + }, + ] + " + "If in array of objects format like above, we'll collect the text contents of the objects" + self flag: #todo. "Maybe show something nicer for images and cache_control points" + content isArray ifTrue: [ + | textParts textContent | + textParts := content select: [:message | message type = 'text'] + thenCollect: [:message | message text]. + textContent := ''. + textParts do: [:part | + textContent ifEmpty: [ + textContent := textContent , part + ] ifNotEmpty: [ + textContent := textContent , Character cr , part + ] + ]. + ^ textContent. + ]. 
^ content \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticMessage.class/methodProperties.json b/packages/SemanticText.package/SemanticMessage.class/methodProperties.json index a447a9a..1b21148 100644 --- a/packages/SemanticText.package/SemanticMessage.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticMessage.class/methodProperties.json @@ -3,8 +3,8 @@ "conversation:role:content:" : "ct 2/10/2024 20:35", "role:content:" : "ct 6/22/2023 17:28" }, "instance" : { - "asOpenAIObject" : "ct 1/16/2024 20:41", - "basicContent" : "ct 8/13/2023 18:48", + "asOpenAIObject" : "zakkor 1/11/2025 21:10", + "basicContent" : "zakkor 1/11/2025 21:05", "basicToolCalls" : "ct 1/16/2024 20:40", "content" : "ct 6/22/2023 17:28", "content:" : "ct 6/22/2023 17:28", diff --git a/packages/SemanticText.package/monticello.meta/categories.st b/packages/SemanticText.package/monticello.meta/categories.st index b077e98..bf424ed 100644 --- a/packages/SemanticText.package/monticello.meta/categories.st +++ b/packages/SemanticText.package/monticello.meta/categories.st @@ -1,10 +1,11 @@ +SystemOrganization addCategory: #'SemanticText-Help'! SystemOrganization addCategory: #'SemanticText-Model'! SystemOrganization addCategory: #'SemanticText-Model-Agents'! SystemOrganization addCategory: #'SemanticText-Model-Conversation'! SystemOrganization addCategory: #'SemanticText-Model-Conversation-Tests'! SystemOrganization addCategory: #'SemanticText-Model-Search'! SystemOrganization addCategory: #'SemanticText-Model-Speech'! -SystemOrganization addCategory: #'SemanticText-Help'! +SystemOrganization addCategory: #'SemanticText-Providers-Anthropic'! SystemOrganization addCategory: #'SemanticText-Providers-Mocks'! SystemOrganization addCategory: #'SemanticText-Providers-OpenAI'! SystemOrganization addCategory: #'SemanticText-Tools-Conversation'! 
From 9d77ea4118479d69b14c3be717eb2f6ef87eff08 Mon Sep 17 00:00:00 2001 From: zakkor Date: Mon, 13 Jan 2025 23:11:00 +0200 Subject: [PATCH 2/7] Add `asAnthropicObject` extensions in Semantic* classes, and use this in the Anthropic classes instead of asOpenAIObject; Remove rate limit data from Anthropic models and add a note that they're unimplemented; Make `getAnswers: number` with a number > 1 return an array of completions instead of throwing an error (I am not sure if number=1 should return an array of size 1 or just the completion itself). --- .../class/defaultRateLimitsPerTier.st | 97 +------------------ .../instance/defaultRateLimits.st | 6 +- .../methodProperties.json | 4 +- .../instance/addToolSpec.toInput..st | 4 +- .../instance/getAnswers.for.config..st | 8 +- .../instance/maximumPriceFor..st | 2 +- .../instance/maximumPriceFor.answers..st | 2 +- .../instance/nameForRateLimits.st | 5 +- .../parseMessagesFrom.for.logRawOutput..st | 2 +- .../priceFor.answers.completionSize..st | 2 +- .../methodProperties.json | 14 +-- ...fig.documents.editInput.handleResponse..st | 2 +- .../methodProperties.json | 2 +- .../instance/anthropicRawConfig.st | 5 + .../methodProperties.json | 1 + .../instance/asAnthropicObject.st | 19 ++++ .../instance/asAnthropicToolChoiceObject.st | 9 ++ .../methodProperties.json | 2 + .../instance/asAnthropicObject.st | 12 +++ .../methodProperties.json | 1 + .../instance/asAnthropicObject.st | 12 +++ .../methodProperties.json | 1 + .../instance/asAnthropicObject.st | 12 +++ .../methodProperties.json | 1 + .../instance/asAnthropicObject.st | 8 ++ .../methodProperties.json | 1 + 26 files changed, 114 insertions(+), 120 deletions(-) create mode 100644 packages/SemanticText.package/SemanticConfig.class/instance/anthropicRawConfig.st create mode 100644 packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicObject.st create mode 100644 packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicToolChoiceObject.st 
create mode 100644 packages/SemanticText.package/SemanticFunctionParameter.class/instance/asAnthropicObject.st create mode 100644 packages/SemanticText.package/SemanticMessage.class/instance/asAnthropicObject.st create mode 100644 packages/SemanticText.package/SemanticToolCall.class/instance/asAnthropicObject.st create mode 100644 packages/SemanticText.package/SemanticToolMessage.class/instance/asAnthropicObject.st diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st index e5421d0..e1fbf0b 100644 --- a/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st @@ -1,98 +1,7 @@ constants defaultRateLimitsPerTier - "See: https://platform.openai.com/docs/guides/rate-limits/usage-tiers" + "Not implemented yet" - | flat | self flag: #modelConstants. 
- flat := Dictionary new - at: #free put: - (Dictionary new - at: 'gpt-3.5-turbo' put: #(3 200 40000 nil 200000); - at: 'text-embedding-3-large' put: #(3000 200 1000000 nil 3000000); - at: 'text-embedding-3-small' put: #(3000 200 1000000 nil 3000000); - at: 'text-embedding-ada-002' put: #(3000 200 1000000 nil 3000000); - at: 'whisper-1' put: #(3 200 nil nil nil); - at: 'tts-1' put: #(3 200 nil nil nil); - yourself); - at: 1 put: - (Dictionary new - at: 'gpt-4o' put: #(500 nil 30000 nil 90000); - at: 'gpt-4o-mini' put: #(500 10000 200000 nil 2000000); - at: 'gpt-4-turbo' put: #(500 nil 30000 nil 90000); - at: 'gpt-4' put: #(500 10000 10000 nil 100000); - at: 'gpt-3.5-turbo' put: #(3500 10000 200000 nil 2000000); - at: 'text-embedding-3-large' put: #(3000 nil 1000000 nil 3000000); - at: 'text-embedding-3-small' put: #(3000 nil 1000000 nil 3000000); - at: 'text-embedding-ada-002' put: #(3000 nil 1000000 nil 3000000); - at: 'whisper-1' put: #(500 nil nil nil nil); - at: 'tts-1' put: #(500 nil nil nil nil); - at: 'tts-1-hd' put: #(500 nil nil nil nil); - yourself); - at: 2 put: - (Dictionary new - at: 'gpt-4o' put: #(5000 nil 450000 nil 1350000); - at: 'gpt-4o-mini' put: #(5000 nil 2000000 nil 20000000); - at: 'gpt-4-turbo' put: #(5000 nil 450000 nil 1350000); - at: 'gpt-4' put: #(5000 nil 40000 nil 200000); - at: 'gpt-3.5-turbo' put: #(3500 nil 2000000 nil 5000000); - at: 'text-embedding-3-large' put: #(5000 nil 1000000 nil 20000000); - at: 'text-embedding-3-small' put: #(5000 nil 1000000 nil 20000000); - at: 'text-embedding-ada-002' put: #(5000 nil 1000000 nil 20000000); - at: 'whisper-1' put: #(2500 nil nil nil nil); - at: 'tts-1' put: #(2500 nil nil nil nil); - at: 'tts-1-hd' put: #(2500 nil nil nil nil); - yourself); - at: 3 put: - (Dictionary new - at: 'gpt-4o' put: #(5000 nil 800000 nil 50000000); - at: 'gpt-4o-mini' put: #(5000 nil 4000000 nil 40000000); - at: 'gpt-4-turbo' put: #(5000 nil 600000 nil 40000000); - at: 'gpt-4' put: #(5000 nil 80000 nil 5000000); - 
at: 'gpt-3.5-turbo' put: #(3500 nil 4000000 nil 100000000); - at: 'text-embedding-3-large' put: #(5000 nil 5000000 nil 100000000); - at: 'text-embedding-3-small' put: #(5000 nil 5000000 nil 100000000); - at: 'text-embedding-ada-002' put: #(5000 nil 5000000 nil 100000000); - at: 'whisper-1' put: #(5000 nil nil nil nil); - at: 'tts-1' put: #(5000 nil nil nil nil); - at: 'tts-1-hd' put: #(5000 nil nil nil nil); - yourself); - at: 4 put: - (Dictionary new - at: 'gpt-4o' put: #(10000 nil 2000000 nil 200000000); - at: 'gpt-4o-mini' put: #(10000 nil 10000000 nil 1000000000); - at: 'gpt-4-turbo' put: #(10000 nil 800000 nil 80000000); - at: 'gpt-4' put: #(10000 nil 300000 nil 30000000); - at: 'gpt-3.5-turbo' put: #(10000 nil 10000000 nil 1000000000); - at: 'text-embedding-3-large' put: #(10000 nil 5000000 nil 500000000); - at: 'text-embedding-3-small' put: #(10000 nil 5000000 nil 500000000); - at: 'text-embedding-ada-002' put: #(10000 nil 5000000 nil 500000000); - at: 'whisper-1' put: #(7500 nil nil nil nil); - at: 'tts-1' put: #(7500 nil nil nil nil); - at: 'tts-1-hd' put: #(7500 nil nil nil nil); - yourself); - at: 5 put: - (Dictionary new - at: 'gpt-4o' put: #(10000 nil 30000000 nil 5000000000); - at: 'gpt-4o-mini' put: #(30000 nil 150000000 nil 15000000000); - at: 'gpt-4-turbo' put: #(10000 nil 2000000 nil 300000000); - at: 'gpt-4' put: #(10000 nil 1000000 nil 150000000); - at: 'gpt-3.5-turbo' put: #(10000 nil 50000000 nil 10000000000); - at: 'text-embedding-3-large' put: #(10000 nil 10000000 nil 4000000000); - at: 'text-embedding-3-small' put: #(10000 nil 10000000 nil 4000000000); - at: 'text-embedding-ada-002' put: #(10000 nil 10000000 nil 4000000000); - at: 'whisper-1' put: #(10000 nil nil nil nil); - at: 'tts-1' put: #(10000 nil nil nil nil); - at: 'tts-1-hd' put: #(10000 nil nil nil nil); - yourself); - yourself. 
- ^ flat collect: [:modelRateLimits | - modelRateLimits collect: [:rateLimits | - (self rateLimitsFromSpec: rateLimits) - collect: [:limit | limit ifNil: [Float infinity]]]] - -"update and extend these constants based on the pasted tables below. do not change the formatting of the source code unless required (also keep the tab indentations) -the task is: output a new version of defaultRateLimitsPerTier based on the numbers provided in the tables. I'm sure you can find out the format on the numbers in the method yourself based on the tables. e.g., 3 rpm for gpt-3.5-turbo in tier free and so on. most numbers may not have changed, but some may have been added, changed, or removed in the screenshots, and I would like you to create a new version of the method that contains all numbers from the tables in the same format, and no other numbers that are not present in the tables. -exclude the dall-e models. - -the tables: -" \ No newline at end of file + self flag: #todo. + ^ nil \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/defaultRateLimits.st b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultRateLimits.st index 65d08f6..7ffa45f 100644 --- a/packages/SemanticText.package/AnthropicAccount.class/instance/defaultRateLimits.st +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultRateLimits.st @@ -1,5 +1,5 @@ accessing - rate limits defaultRateLimits - - ^ self class defaultRateLimitsPerTier at: - (self usageTier ifNil: [^ Dictionary new]) \ No newline at end of file + "Not implemented" + self flag: #todo. 
+ ^ Dictionary new \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json b/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json index 3ac2ef9..23c1d43 100644 --- a/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json +++ b/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json @@ -7,7 +7,7 @@ "defaultApiKey:" : "ct 10/15/2023 22:20", "defaultBaseUrl" : "zakkor 1/10/2025 12:49", "defaultBaseUrl:" : "ct 12/1/2023 23:30", - "defaultRateLimitsPerTier" : "ct 11/8/2024 00:23", + "defaultRateLimitsPerTier" : "zakkor 1/13/2025 22:32", "isValidUsageTier:" : "ct 11/28/2023 12:39", "openExpenseWatcher" : "zakkor 1/10/2025 12:50", "rateLimitsFromSpec:" : "ct 6/20/2024 00:00" }, @@ -19,7 +19,7 @@ "controlConnectionDuring:" : "ct 12/11/2023 18:35", "customRateLimits" : "ct 11/28/2023 13:27", "defaultBaseUrl" : "zakkor 1/10/2025 12:54", - "defaultRateLimits" : "ct 11/28/2023 13:16", + "defaultRateLimits" : "zakkor 1/13/2025 22:33", "defaultUsageTier" : "ct 11/28/2023 14:54", "expensesPerModel" : "ct 8/20/2023 20:15", "expensesPerUser" : "ct 8/27/2023 20:57", diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/addToolSpec.toInput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/addToolSpec.toInput..st index 00d3281..cc455cb 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/addToolSpec.toInput..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/addToolSpec.toInput..st @@ -2,7 +2,7 @@ private - requests addToolSpec: aToolSpec toInput: input input tools: - (aToolSpec tools asArray collect: [:tool | tool asOpenAIObject]). + (aToolSpec tools asArray collect: [:tool | tool asAnthropicObject]). 
aToolSpec forcedTools ifNotNil: [:forcedTools | input tool_choice: @@ -17,4 +17,4 @@ addToolSpec: aToolSpec toInput: input forcedTool := forcedTools anyOne. (forcedTool isString or: [forcedTool isText]) ifTrue: [forcedTool := aToolSpec toolNamed: forcedTool]. - forcedTool asOpenAIToolChoiceObject]])] \ No newline at end of file + forcedTool asAnthropicToolChoiceObject]])] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st index 1267e33..8d0bcf6 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st @@ -4,8 +4,10 @@ getAnswers: number for: aConversation config: aConfigOrNil | chatCompletion logRawOutput stream resumeStream | - "Note: multiple responses via `n` is not supported by the Anthropic API" - number ~= 1 ifTrue: [self error: 'anthropic models do not support requesting multiple assistant replies']. + "Multiple responses via `n` are not directly supported by the Anthropic API" + number > 1 ifTrue: [ + ^ (1 to: number) collect: [:i | self getAnswers: 1 for: aConversation config: aConfigOrNil] + ]. chatCompletion := self invokeWithConfig: aConfigOrNil @@ -33,7 +35,7 @@ getAnswers: number for: aConversation config: aConfigOrNil input stream: stream]. input messages: - (aConversation messages collect: [:message | message asOpenAIObject]). + (aConversation messages collect: [:message | message asAnthropicObject]). aConversation activeToolSpec ifNotNil: [:toolSpec | self addToolSpec: toolSpec toInput: input]. 
diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor..st index 2de9846..0d58857 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor..st @@ -1,6 +1,6 @@ pricing maximumPriceFor: aConversation - "Note: Prices are hardcoded and might not encompass any recent pricing updates by OpenAI. Token counts are (upper) approximations." + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." ^ self maximumPriceFor: aConversation diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor.answers..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor.answers..st index 09cfed7..14ee997 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor.answers..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor.answers..st @@ -1,6 +1,6 @@ pricing maximumPriceFor: aConversation answers: number - "Note: Prices are hardcoded and might not encompass any recent pricing updates by OpenAI. Token counts are (upper) approximations." + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." 
^ self priceFor: aConversation diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/nameForRateLimits.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/nameForRateLimits.st index 3028978..49ad7c2 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/nameForRateLimits.st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/nameForRateLimits.st @@ -1,5 +1,4 @@ private nameForRateLimits - - ({self class gpt4_0125Name. self class gpt4_1106Name} includes: self resolvedName) ifTrue: [^ self class gpt4TurboName]. - ^ self resolvedName \ No newline at end of file + + ^ self name \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st index ea15d19..8cd95c3 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st @@ -11,7 +11,7 @@ parseMessagesFrom: chatCompletion for: aConversation logRawOutput: logRawOutput logRawOutput ifTrue: [messages do: [:message | - message rawOutput chatCompletion: chatCompletion]]. + message rawOutput: chatCompletion]]. 
chatCompletion usage ifNotNil: [:usage | | expense | diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers.completionSize..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers.completionSize..st index 75b7e3a..5abfef8 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers.completionSize..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers.completionSize..st @@ -1,7 +1,7 @@ pricing priceFor: aConversation answers: number completionSize: completionSizeOrNil "If completionSizeOrNil is nil, the maximum possible size will be assumed. - Note: Prices are hardcoded and might not encompass any recent pricing updates by OpenAI. Token counts are (upper) approximations." + Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." | promptTokens completionTokens priceForPrompt priceForCompletion | promptTokens := aConversation estimatePromptTokens. 
diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json index 077df34..873506d 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json +++ b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json @@ -15,7 +15,7 @@ "initialize" : "ct 2/4/2024 19:49", "unload" : "ct 8/17/2023 19:28" }, "instance" : { - "addToolSpec:toInput:" : "ct 5/25/2024 22:01", + "addToolSpec:toInput:" : "zakkor 1/13/2025 18:51", "assignExpense:toMessages:" : "ct 5/25/2024 23:42", "centsPerCompletionToken" : "zakkor 1/10/2025 19:19", "centsPerPromptToken" : "zakkor 1/10/2025 19:18", @@ -30,20 +30,20 @@ "getAnswerFor:" : "ct 1/9/2024 00:14", "getAnswerFor:config:" : "ct 1/8/2024 23:52", "getAnswers:for:" : "ct 1/8/2024 22:28", - "getAnswers:for:config:" : "zakkor 1/11/2025 21:13", + "getAnswers:for:config:" : "zakkor 1/13/2025 22:53", "handleAsyncReplies:for:config:logRawOutput:deferStreaming:" : "ct 5/2/2024 15:52", "handleStreamingReplies:for:from:config:logRawOutput:" : "zakkor 1/11/2025 12:53", "isLegacy" : "zakkor 1/10/2025 19:02", "maxCompletionTokens" : "zakkor 1/10/2025 17:18", "maxPromptTokens" : "ct 2/5/2024 20:48", "maxTokens" : "zakkor 1/10/2025 17:19", - "maximumPriceFor:" : "zakkor 1/10/2025 20:12", - "maximumPriceFor:answers:" : "zakkor 1/10/2025 20:12", + "maximumPriceFor:" : "zakkor 1/13/2025 22:36", + "maximumPriceFor:answers:" : "zakkor 1/13/2025 22:36", "minimumPriceFor:" : "zakkor 1/10/2025 20:12", "minimumPriceFor:answers:" : "zakkor 1/10/2025 20:12", - "nameForRateLimits" : "ct 2/5/2024 20:07", + "nameForRateLimits" : "zakkor 1/13/2025 22:29", "parseMessageFrom:for:logRawOutput:" : "zakkor 1/10/2025 18:32", - "parseMessagesFrom:for:logRawOutput:" : "zakkor 1/10/2025 23:48", + "parseMessagesFrom:for:logRawOutput:" : "zakkor 1/13/2025 22:59", "parseStreamedChunk:toolSpec:addTo:" 
: "zakkor 1/11/2025 12:48", "parseStreamedToolCallChunkFrom:toolSpec:addTo:" : "ct 1/16/2024 20:52", "parseStreamedToolCallChunksFrom:toolSpec:message:" : "ct 2/13/2024 00:17", @@ -53,7 +53,7 @@ "pathToEndpoint" : "zakkor 1/10/2025 16:36", "priceFor:" : "zakkor 1/10/2025 20:12", "priceFor:answers:" : "zakkor 1/10/2025 20:12", - "priceFor:answers:completionSize:" : "ct 11/17/2024 19:31", + "priceFor:answers:completionSize:" : "zakkor 1/13/2025 22:37", "priceFor:completionMessage:" : "ct 4/30/2024 01:35", "priceFor:completionSize:" : "ct 2/3/2024 21:24", "priceForPrompt:" : "ct 2/3/2024 21:23", diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput.handleResponse..st b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput.handleResponse..st index eddd8c2..01be36c 100644 --- a/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput.handleResponse..st +++ b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput.handleResponse..st @@ -14,7 +14,7 @@ invokeWithConfig: aConfigOrNil documents: documentsOrNil editInput: inputBlock h inputBlock ifNotNil: [inputBlock cull: input cull: config]. - config openAIRawConfig ifNotNil: [:rawConfig | + config anthropicRawConfig ifNotNil: [:rawConfig | rawConfig keysAndValuesDo: [:key :value | input at: key put: value]]. 
diff --git a/packages/SemanticText.package/AnthropicModel.class/methodProperties.json b/packages/SemanticText.package/AnthropicModel.class/methodProperties.json index a260d01..059d3b3 100644 --- a/packages/SemanticText.package/AnthropicModel.class/methodProperties.json +++ b/packages/SemanticText.package/AnthropicModel.class/methodProperties.json @@ -17,7 +17,7 @@ "handleTransientServerErrorsDuring:" : "ct 2/9/2024 20:21", "initialize" : "ct 8/17/2023 19:27", "invokeWithConfig:documents:editInput:" : "ct 6/12/2024 00:38", - "invokeWithConfig:documents:editInput:handleResponse:" : "zakkor 1/11/2025 00:29", + "invokeWithConfig:documents:editInput:handleResponse:" : "zakkor 1/13/2025 18:35", "invokeWithConfig:editInput:" : "ct 8/20/2023 19:43", "invokeWithConfig:editInput:handleResponse:" : "ct 6/18/2024 21:44", "isLegacy" : "ct 2/7/2024 17:47", diff --git a/packages/SemanticText.package/SemanticConfig.class/instance/anthropicRawConfig.st b/packages/SemanticText.package/SemanticConfig.class/instance/anthropicRawConfig.st new file mode 100644 index 0000000..961b38a --- /dev/null +++ b/packages/SemanticText.package/SemanticConfig.class/instance/anthropicRawConfig.st @@ -0,0 +1,5 @@ +*SemanticText-Providers-Anthropic-accessing +anthropicRawConfig + "Parameters that are directly added to the Anthropic model request. This will override any conflicting other properties of the config." 
+ + ^ self argumentAt: #anthropicRawConfig ifAbsentPut: [JsonObject new] \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticConfig.class/methodProperties.json b/packages/SemanticText.package/SemanticConfig.class/methodProperties.json index 669a0a2..3acc640 100644 --- a/packages/SemanticText.package/SemanticConfig.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticConfig.class/methodProperties.json @@ -2,6 +2,7 @@ "class" : { }, "instance" : { + "anthropicRawConfig" : "zakkor 1/13/2025 18:33", "argumentAt:" : "ct 8/17/2023 19:43", "argumentAt:ifAbsentPut:" : "ct 1/8/2024 16:36", "argumentAt:put:" : "ct 8/17/2023 17:44", diff --git a/packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicObject.st b/packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicObject.st new file mode 100644 index 0000000..e8a7eef --- /dev/null +++ b/packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicObject.st @@ -0,0 +1,19 @@ +*SemanticText-Providers-Anthropic-converting +asAnthropicObject + + ^ OrderedJsonObject new + name: self name; + in: [:json | + self description ifNotNil: + [json description: self description]]; + input_schema: + (JsonObject new + type: #object; + properties: + (self parameters collect: [:parameter | parameter asAnthropicObject]); + required: + (self parameters asArray + select: [:parameter | parameter required] + thenCollect: [:parameter | parameter name]); + yourself); + yourself \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicToolChoiceObject.st b/packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicToolChoiceObject.st new file mode 100644 index 0000000..638dd7a --- /dev/null +++ b/packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicToolChoiceObject.st @@ -0,0 +1,9 @@ +*SemanticText-Providers-Anthropic-converting +asAnthropicToolChoiceObject + + ^ 
JsonObject new + type: self type; + function: + (JsonObject new + name: self name; + yourself) \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticFunction.class/methodProperties.json b/packages/SemanticText.package/SemanticFunction.class/methodProperties.json index 74b8282..419cd1f 100644 --- a/packages/SemanticText.package/SemanticFunction.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticFunction.class/methodProperties.json @@ -18,6 +18,8 @@ "addParameter:type:" : "ct 1/16/2024 00:30", "addParameter:type:required:" : "ct 1/16/2024 00:30", "argsAction:" : "ct 11/7/2024 23:16", + "asAnthropicObject" : "zakkor 1/13/2025 18:55", + "asAnthropicToolChoiceObject" : "zakkor 1/13/2025 18:42", "asOpenAIObject" : "ct 2/13/2024 02:02", "asOpenAIToolChoiceObject" : "ct 1/16/2024 00:19", "asToolCall" : "ct 4/30/2024 21:52", diff --git a/packages/SemanticText.package/SemanticFunctionParameter.class/instance/asAnthropicObject.st b/packages/SemanticText.package/SemanticFunctionParameter.class/instance/asAnthropicObject.st new file mode 100644 index 0000000..0e724a2 --- /dev/null +++ b/packages/SemanticText.package/SemanticFunctionParameter.class/instance/asAnthropicObject.st @@ -0,0 +1,12 @@ +*SemanticText-Providers-Anthropic-converting +asAnthropicObject + "Note: this is exactly the same as SemanticFunctionParameter>>asOpenAIObject" + ^ (true + caseOf: + {[self type isNil] -> [JsonObject new]. 
+ [self type isString] -> [JsonObject new type: self type; yourself]} + otherwise: [self type]) + in: [:json | + self description ifNotNil: + [json description: self description]]; + yourself \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticFunctionParameter.class/methodProperties.json b/packages/SemanticText.package/SemanticFunctionParameter.class/methodProperties.json index c0cb284..14a01e4 100644 --- a/packages/SemanticText.package/SemanticFunctionParameter.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticFunctionParameter.class/methodProperties.json @@ -3,6 +3,7 @@ "name:description:type:required:" : "ct 10/13/2023 23:30", "name:type:required:" : "ct 1/15/2024 23:31" }, "instance" : { + "asAnthropicObject" : "zakkor 1/13/2025 18:55", "asOpenAIObject" : "ct 1/17/2024 02:32", "asSignatureString" : "ct 1/15/2024 23:10", "defaultRequired" : "ct 10/13/2023 23:27", diff --git a/packages/SemanticText.package/SemanticMessage.class/instance/asAnthropicObject.st b/packages/SemanticText.package/SemanticMessage.class/instance/asAnthropicObject.st new file mode 100644 index 0000000..160ce68 --- /dev/null +++ b/packages/SemanticText.package/SemanticMessage.class/instance/asAnthropicObject.st @@ -0,0 +1,12 @@ +*SemanticText-Providers-Anthropic-converting +asAnthropicObject + + | jsonObject | + jsonObject := JsonObject new + role: self role; + content: self content; + yourself. + self basicToolCalls ifNotNil: [:calls | + jsonObject tool_calls: + (calls collect: [:toolCall | toolCall asAnthropicObject])]. 
+ ^ jsonObject \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticMessage.class/methodProperties.json b/packages/SemanticText.package/SemanticMessage.class/methodProperties.json index 1b21148..efd0034 100644 --- a/packages/SemanticText.package/SemanticMessage.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticMessage.class/methodProperties.json @@ -3,6 +3,7 @@ "conversation:role:content:" : "ct 2/10/2024 20:35", "role:content:" : "ct 6/22/2023 17:28" }, "instance" : { + "asAnthropicObject" : "zakkor 1/13/2025 18:56", "asOpenAIObject" : "zakkor 1/11/2025 21:10", "basicContent" : "zakkor 1/11/2025 21:05", "basicToolCalls" : "ct 1/16/2024 20:40", diff --git a/packages/SemanticText.package/SemanticToolCall.class/instance/asAnthropicObject.st b/packages/SemanticText.package/SemanticToolCall.class/instance/asAnthropicObject.st new file mode 100644 index 0000000..2ee301d --- /dev/null +++ b/packages/SemanticText.package/SemanticToolCall.class/instance/asAnthropicObject.st @@ -0,0 +1,12 @@ +*SemanticText-Providers-Anthropic-converting +asAnthropicObject + + self assert: (self type ifNil: [#function]) = #function. 
+ ^ JsonObject new + id: self key; + type: (self type ifNil: [#function]); + function: + (JsonObject new + name: self toolName; + arguments: self argumentsJsonString); + yourself \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticToolCall.class/methodProperties.json b/packages/SemanticText.package/SemanticToolCall.class/methodProperties.json index f124173..29c9c50 100644 --- a/packages/SemanticText.package/SemanticToolCall.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticToolCall.class/methodProperties.json @@ -7,6 +7,7 @@ "arguments" : "ct 1/17/2024 01:20", "arguments:" : "ct 1/17/2024 01:19", "argumentsJsonString" : "ct 1/16/2024 18:18", + "asAnthropicObject" : "zakkor 1/13/2025 18:45", "asOpenAIObject" : "ct 1/16/2024 16:17", "asToolMessage" : "ct 1/16/2024 13:05", "asToolMessageWithResult" : "ct 1/16/2024 20:36", diff --git a/packages/SemanticText.package/SemanticToolMessage.class/instance/asAnthropicObject.st b/packages/SemanticText.package/SemanticToolMessage.class/instance/asAnthropicObject.st new file mode 100644 index 0000000..3cebca0 --- /dev/null +++ b/packages/SemanticText.package/SemanticToolMessage.class/instance/asAnthropicObject.st @@ -0,0 +1,8 @@ +*SemanticText-Providers-Anthropic-converting +asAnthropicObject + + | jsonObject | + jsonObject := super asAnthropicObject. + jsonObject tool_call_id: self key. + jsonObject content: self basicContent. 
+ ^ jsonObject \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticToolMessage.class/methodProperties.json b/packages/SemanticText.package/SemanticToolMessage.class/methodProperties.json index 52111c9..3c40223 100644 --- a/packages/SemanticText.package/SemanticToolMessage.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticToolMessage.class/methodProperties.json @@ -3,6 +3,7 @@ "toolCall:" : "ct 1/16/2024 13:05", "toolCall:content:" : "ct 1/16/2024 01:39" }, "instance" : { + "asAnthropicObject" : "zakkor 1/13/2025 18:56", "asOpenAIObject" : "ct 1/16/2024 20:05", "basicContent" : "ct 1/16/2024 20:06", "content" : "ct 1/16/2024 20:09", From bcd9c2ce67b710a313f048e2d4bdf36d85f52f72 Mon Sep 17 00:00:00 2001 From: zakkor Date: Mon, 13 Jan 2025 23:13:12 +0200 Subject: [PATCH 3/7] Update packages/SemanticText.package/AnthropicConversationModel.class/instance/maxCompletionTokens.st Co-authored-by: Christoph Thiede <38782922+LinqLover@users.noreply.github.com> --- .../instance/maxCompletionTokens.st | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxCompletionTokens.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxCompletionTokens.st index d895c4f..02975f7 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxCompletionTokens.st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxCompletionTokens.st @@ -6,6 +6,6 @@ maxCompletionTokens "https://docs.anthropic.com/en/docs/about-claude/models" "Claude 3.5 family all have 8192 max tokens output, Claude 3 family have 4096." - (self resolvedName beginsWith: 'claude-3-5-') - ifTrue: [^ 8192] - ifFalse: [^ 4096]. 
\ No newline at end of file + ^ (self resolvedName beginsWith: 'claude-3-5-') + ifTrue: [8192] + ifFalse: [4096] \ No newline at end of file From 661619cb8b6950d72528925965ada46e2ec63b04 Mon Sep 17 00:00:00 2001 From: zakkor Date: Wed, 15 Jan 2025 15:55:09 +0200 Subject: [PATCH 4/7] Make tool calls (non-streamed) work --- .../parseMessageFrom.for.logRawOutput..st | 7 ++++--- .../instance/parseToolCallFrom.toolSpec..st | 16 +++++++++------- .../methodProperties.json | 4 ++-- .../instance/asAnthropicObject.st | 10 ++++++++-- .../SemanticMessage.class/methodProperties.json | 2 +- .../instance/asAnthropicObject.st | 9 +++------ .../SemanticToolCall.class/methodProperties.json | 2 +- .../instance/asAnthropicObject.st | 10 ++++++++-- .../methodProperties.json | 2 +- 9 files changed, 37 insertions(+), 25 deletions(-) diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st index 52d50a7..0ff6d65 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st @@ -1,7 +1,7 @@ private - requests parseMessageFrom: chatCompletionChoice for: aConversation logRawOutput: logRawOutput - | messageResult message | + | messageResult message toolUseBlocks | messageResult := chatCompletionChoice. message := SemanticMessage conversation: aConversation @@ -14,9 +14,10 @@ parseMessageFrom: chatCompletionChoice for: aConversation logRawOutput: logRawOu chatCompletionChoice: chatCompletionChoice; yourself)]. - messageResult tool_calls ifNotNil: [:toolCalls | + toolUseBlocks := messageResult content select: [:contentBlock | contentBlock type = 'tool_use']. 
+ toolUseBlocks size > 0 ifTrue: [ message toolCalls: - (self parseToolCallsFrom: toolCalls toolSpec: aConversation activeToolSpec)]. + (self parseToolCallsFrom: toolUseBlocks toolSpec: aConversation activeToolSpec)]. chatCompletionChoice logprobs ifNotNil: [:logprobs | message tokenProbabilities: diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallFrom.toolSpec..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallFrom.toolSpec..st index fd3dadf..f478fa6 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallFrom.toolSpec..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallFrom.toolSpec..st @@ -1,15 +1,17 @@ private - requests parseToolCallFrom: toolCall toolSpec: aToolSpecOrNil - | arguments function functionName | - self assert: toolCall type = #function. - functionName := toolCall function name. - arguments := toolCall function arguments. + | name input function | + name := toolCall name. + input := toolCall input. function := aToolSpecOrNil ifNotNil: - [aToolSpecOrNil toolNamed: functionName ifAbsent: []]. + [aToolSpecOrNil toolNamed: name ifAbsent: []]. + "Below was `input parseAsOrderedJson`, but I don't know how to convert from JsonObject to OrderedJsonObject. I just left it as JsonObject for now." + self flag: #todo. 
^ SemanticToolCall key: toolCall id - tool: (function ifNil: [functionName]) - arguments: ([arguments parseAsOrderedJson] ifError: [arguments]) \ No newline at end of file + tool: (function ifNil: [name]) + "It seems like OpenAI gives us the input as a plain string, whereas Anthropic gives us a JSON object" + arguments: input \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json index 873506d..3d31e63 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json +++ b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json @@ -42,13 +42,13 @@ "minimumPriceFor:" : "zakkor 1/10/2025 20:12", "minimumPriceFor:answers:" : "zakkor 1/10/2025 20:12", "nameForRateLimits" : "zakkor 1/13/2025 22:29", - "parseMessageFrom:for:logRawOutput:" : "zakkor 1/10/2025 18:32", + "parseMessageFrom:for:logRawOutput:" : "zakkor 1/15/2025 14:21", "parseMessagesFrom:for:logRawOutput:" : "zakkor 1/13/2025 22:59", "parseStreamedChunk:toolSpec:addTo:" : "zakkor 1/11/2025 12:48", "parseStreamedToolCallChunkFrom:toolSpec:addTo:" : "ct 1/16/2024 20:52", "parseStreamedToolCallChunksFrom:toolSpec:message:" : "ct 2/13/2024 00:17", "parseTokenProbabilitiesFrom:" : "ct 1/15/2024 19:37", - "parseToolCallFrom:toolSpec:" : "ct 1/16/2024 18:39", + "parseToolCallFrom:toolSpec:" : "zakkor 1/15/2025 14:33", "parseToolCallsFrom:toolSpec:" : "ct 1/16/2024 00:47", "pathToEndpoint" : "zakkor 1/10/2025 16:36", "priceFor:" : "zakkor 1/10/2025 20:12", diff --git a/packages/SemanticText.package/SemanticMessage.class/instance/asAnthropicObject.st b/packages/SemanticText.package/SemanticMessage.class/instance/asAnthropicObject.st index 160ce68..8eb7f65 100644 --- a/packages/SemanticText.package/SemanticMessage.class/instance/asAnthropicObject.st +++ 
b/packages/SemanticText.package/SemanticMessage.class/instance/asAnthropicObject.st @@ -6,7 +6,13 @@ asAnthropicObject role: self role; content: self content; yourself. + "If any tool calls are present, we'll have to return an array of content blocks, not a plain string" self basicToolCalls ifNotNil: [:calls | - jsonObject tool_calls: - (calls collect: [:toolCall | toolCall asAnthropicObject])]. + | textContent contentBlocks | + textContent := self content isString ifTrue: [self content] ifFalse: [self content first text]. + contentBlocks := + {JsonObject new type: 'text'; text: textContent; yourself} , + (calls collect: [:toolCall | toolCall asAnthropicObject]). + jsonObject content: contentBlocks + ]. ^ jsonObject \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticMessage.class/methodProperties.json b/packages/SemanticText.package/SemanticMessage.class/methodProperties.json index efd0034..3355c26 100644 --- a/packages/SemanticText.package/SemanticMessage.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticMessage.class/methodProperties.json @@ -3,7 +3,7 @@ "conversation:role:content:" : "ct 2/10/2024 20:35", "role:content:" : "ct 6/22/2023 17:28" }, "instance" : { - "asAnthropicObject" : "zakkor 1/13/2025 18:56", + "asAnthropicObject" : "zakkor 1/15/2025 15:39", "asOpenAIObject" : "zakkor 1/11/2025 21:10", "basicContent" : "zakkor 1/11/2025 21:05", "basicToolCalls" : "ct 1/16/2024 20:40", diff --git a/packages/SemanticText.package/SemanticToolCall.class/instance/asAnthropicObject.st b/packages/SemanticText.package/SemanticToolCall.class/instance/asAnthropicObject.st index 2ee301d..b3e148d 100644 --- a/packages/SemanticText.package/SemanticToolCall.class/instance/asAnthropicObject.st +++ b/packages/SemanticText.package/SemanticToolCall.class/instance/asAnthropicObject.st @@ -1,12 +1,9 @@ *SemanticText-Providers-Anthropic-converting asAnthropicObject - self assert: (self type ifNil: [#function]) = #function. 
^ JsonObject new id: self key; - type: (self type ifNil: [#function]); - function: - (JsonObject new - name: self toolName; - arguments: self argumentsJsonString); + type: 'tool_use'; + name: self toolName; + input: self arguments; yourself \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticToolCall.class/methodProperties.json b/packages/SemanticText.package/SemanticToolCall.class/methodProperties.json index 29c9c50..f79661b 100644 --- a/packages/SemanticText.package/SemanticToolCall.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticToolCall.class/methodProperties.json @@ -7,7 +7,7 @@ "arguments" : "ct 1/17/2024 01:20", "arguments:" : "ct 1/17/2024 01:19", "argumentsJsonString" : "ct 1/16/2024 18:18", - "asAnthropicObject" : "zakkor 1/13/2025 18:45", + "asAnthropicObject" : "zakkor 1/15/2025 15:43", "asOpenAIObject" : "ct 1/16/2024 16:17", "asToolMessage" : "ct 1/16/2024 13:05", "asToolMessageWithResult" : "ct 1/16/2024 20:36", diff --git a/packages/SemanticText.package/SemanticToolMessage.class/instance/asAnthropicObject.st b/packages/SemanticText.package/SemanticToolMessage.class/instance/asAnthropicObject.st index 3cebca0..0b77e59 100644 --- a/packages/SemanticText.package/SemanticToolMessage.class/instance/asAnthropicObject.st +++ b/packages/SemanticText.package/SemanticToolMessage.class/instance/asAnthropicObject.st @@ -3,6 +3,12 @@ asAnthropicObject | jsonObject | jsonObject := super asAnthropicObject. - jsonObject tool_call_id: self key. - jsonObject content: self basicContent. + jsonObject role: 'user'. + jsonObject content: { + (JsonObject new + type: 'tool_result'; + tool_use_id: self key; + content: self basicContent; + yourself) + }. 
^ jsonObject \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticToolMessage.class/methodProperties.json b/packages/SemanticText.package/SemanticToolMessage.class/methodProperties.json index 3c40223..c6af88b 100644 --- a/packages/SemanticText.package/SemanticToolMessage.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticToolMessage.class/methodProperties.json @@ -3,7 +3,7 @@ "toolCall:" : "ct 1/16/2024 13:05", "toolCall:content:" : "ct 1/16/2024 01:39" }, "instance" : { - "asAnthropicObject" : "zakkor 1/13/2025 18:56", + "asAnthropicObject" : "zakkor 1/15/2025 15:24", "asOpenAIObject" : "ct 1/16/2024 20:05", "basicContent" : "ct 1/16/2024 20:06", "content" : "ct 1/16/2024 20:09", From 193631aaaf3518f4dd0daaa8c1d8e7f633ee47e3 Mon Sep 17 00:00:00 2001 From: zakkor Date: Thu, 16 Jan 2025 00:14:45 +0200 Subject: [PATCH 5/7] Add `topKSampling` (Anthropic only). Cleanup, rename things to match more closely what the Anthropic API sends us, handle more streaming events in preparation for streaming tool calls. Fix rawOutput tracking for Anthropic models. Fix an issue with getting multiple assistant replies. 
--- .../class/defaultRateLimitsPerTier.st | 2 +- .../methodProperties.json | 2 +- .../instance/getAnswers.for.config..st | 9 ++-- ...ngReplies.for.from.config.logRawOutput..st | 45 ++++++++++--------- .../parseMessageFrom.for.logRawOutput..st | 21 +++++---- .../parseMessagesFrom.for.logRawOutput..st | 19 +++----- .../parseStreamedChunk.toolSpec.addTo..st | 35 +++++++-------- .../instance/streamEventDataFrom..st | 10 ++--- .../methodProperties.json | 12 ++--- .../instance/nucleusSamplingMass..st | 2 +- .../instance/nucleusSamplingMass.st | 2 +- .../instance/topKSampling..st | 5 +++ .../instance/topKSampling.st | 5 +++ .../methodProperties.json | 8 ++-- 14 files changed, 92 insertions(+), 85 deletions(-) create mode 100644 packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling..st create mode 100644 packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling.st diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st index e1fbf0b..fd65bd9 100644 --- a/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st @@ -1,6 +1,6 @@ constants defaultRateLimitsPerTier - "Not implemented yet" + "Not implemented" self flag: #modelConstants. self flag: #todo. 
diff --git a/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json b/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json index 23c1d43..a6f4b83 100644 --- a/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json +++ b/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json @@ -7,7 +7,7 @@ "defaultApiKey:" : "ct 10/15/2023 22:20", "defaultBaseUrl" : "zakkor 1/10/2025 12:49", "defaultBaseUrl:" : "ct 12/1/2023 23:30", - "defaultRateLimitsPerTier" : "zakkor 1/13/2025 22:32", + "defaultRateLimitsPerTier" : "zakkor 1/15/2025 21:54", "isValidUsageTier:" : "ct 11/28/2023 12:39", "openExpenseWatcher" : "zakkor 1/10/2025 12:50", "rateLimitsFromSpec:" : "ct 6/20/2024 00:00" }, diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st index 8d0bcf6..5a0b10d 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st @@ -6,8 +6,7 @@ getAnswers: number for: aConversation config: aConfigOrNil "Multiple responses via `n` are not directly supported by the Anthropic API" number > 1 ifTrue: [ - ^ (1 to: number) collect: [:i | self getAnswers: 1 for: aConversation config: aConfigOrNil] - ]. + ^ ((1 to: number) collect: [:i | self getAnswers: 1 for: aConversation config: aConfigOrNil]) flattened]. chatCompletion := self invokeWithConfig: aConfigOrNil @@ -20,11 +19,13 @@ getAnswers: number for: aConversation config: aConfigOrNil config temperature ifNotNil: [:temperature | input temperature: temperature]. + config nucleusSamplingMass ifNotNil: [:p | input top_p: p]. - config shouldLogProbabilities ifNotNil: [:shouldLogProbabilities | - input logprobs: shouldLogProbabilities]. 
+ config topKSampling ifNotNil: [:k | + input top_k: k]. + logRawOutput := false. config shouldLogRawOutput ifNotNil: [:shouldLogRawOutput | logRawOutput := shouldLogRawOutput]. diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st index 29bab39..8d5a9a9 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st @@ -1,7 +1,7 @@ private handleStreamingReplies: number for: aConversation from: responseBlock config: aConfig logRawOutput: logRawOutput - | promptTokens toolSpec | + | promptTokens toolSpec trackExpense | promptTokens := self countTokensInConversation: aConversation. toolSpec := aConversation activeToolSpec. @@ -18,15 +18,16 @@ handleStreamingReplies: number for: aConversation from: responseBlock config: aC [messages do: [:message | message rawOutput: (JsonObject new - chatCompletionChunks: OrderedCollection new; - chatCompletionChunkChoices: OrderedCollection new; + messageChunks: OrderedCollection new; yourself)]]. + + trackExpense := [:usage | + usage ifNotNil: [:u | + expense := self expenseForUsage: u. + self assignExpense: expense toMessages: messages]]. - "[DONE] is not a thing for Anthropic. The equivalent is: - event: message_stop - data: {'type:' 'message_stop'}" self flag: #todo. - [#('[DONE]' nil) includes: (data := dataStream next)] whileFalse: + [(data := dataStream next) = nil] whileFalse: [| chunk msg | chunk := data utf8ToSqueak parseAsJson openAIWithSqueakLineEndings. msg := messages last. @@ -38,21 +39,25 @@ handleStreamingReplies: number for: aConversation from: responseBlock config: aC message: error message]. 
logRawOutput ifTrue: [messages do: [:message | - message rawOutput chatCompletionChunks addLast: chunk]]. + message rawOutput messageChunks addLast: chunk]]. - "TODO: Lets do caseOf:otherwise:" - - (chunk type = 'content_block_delta') ifTrue: [ - self parseStreamedChunk: chunk toolSpec: toolSpec addTo: msg. - ]. - (chunk type = 'message_delta') ifTrue: [ - chunk usage ifNotNil: [:usage | - expense := self expenseForUsage: usage. - self assignExpense: expense toMessages: messages] - ]. - (chunk type = 'message_stop') ifTrue: [msg beComplete]]. + chunk type caseOf: { + "Is fired at the very beginning. Only useful because it contains a `usage` field with the initial input tokens consumed (and a single output token)." + ['message_start'] -> [trackExpense value: chunk message usage]. + "Signals the start of a new content block (either text or tool_use). The tool_use one contains the name and ID of the function being called." + ['content_block_start'] -> [ + self flag: #todo. + ]. + "This contains the actual message text deltas" + ['content_block_delta'] -> [self parseStreamedChunk: chunk toolSpec: toolSpec addTo: msg]. + "Fired near the end, contains only `usage`" + ['message_delta'] -> [trackExpense value: chunk usage]. + "Fired when message is complete" + ['message_stop'] -> [msg beComplete] + } otherwise: ["Some events are not relevant and can be safely ignored"]]. 
+ self assert: dataStream next isNil] - + ensure: [self account noteExpense: diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st index 0ff6d65..a82f868 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st @@ -1,26 +1,25 @@ private - requests -parseMessageFrom: chatCompletionChoice for: aConversation logRawOutput: logRawOutput +parseMessageFrom: messageCompletion for: aConversation logRawOutput: logRawOutput - | messageResult message toolUseBlocks | - messageResult := chatCompletionChoice. + | message toolUseBlocks | message := SemanticMessage conversation: aConversation - role: messageResult role - content: (messageResult content first type = 'text' ifTrue: [messageResult content first text] ifFalse: [messageResult content]). + role: messageCompletion role + content: (messageCompletion content first type = 'text' + ifTrue: [ + messageCompletion content first text] + ifFalse: [ + messageCompletion content]). logRawOutput ifTrue: [message rawOutput: (JsonObject new - chatCompletionChoice: chatCompletionChoice; + messageCompletion: messageCompletion; yourself)]. - toolUseBlocks := messageResult content select: [:contentBlock | contentBlock type = 'tool_use']. + toolUseBlocks := messageCompletion content select: [:contentBlock | contentBlock type = 'tool_use']. toolUseBlocks size > 0 ifTrue: [ message toolCalls: (self parseToolCallsFrom: toolUseBlocks toolSpec: aConversation activeToolSpec)]. - chatCompletionChoice logprobs ifNotNil: [:logprobs | - message tokenProbabilities: - (self parseTokenProbabilitiesFrom: logprobs)]. 
- ^ message \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st index 8cd95c3..9063e9b 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st @@ -1,19 +1,14 @@ private - requests -parseMessagesFrom: chatCompletion for: aConversation logRawOutput: logRawOutput +parseMessagesFrom: messageCompletion for: aConversation logRawOutput: logRawOutput - | messages | + | message messages | - messages := - {self - parseMessageFrom: chatCompletion - for: aConversation - logRawOutput: logRawOutput}. + message := self parseMessageFrom: messageCompletion + for: aConversation + logRawOutput: logRawOutput. - logRawOutput ifTrue: - [messages do: [:message | - message rawOutput: chatCompletion]]. - - chatCompletion usage ifNotNil: [:usage | + messages := {message}. + messageCompletion usage ifNotNil: [:usage | | expense | expense := self expenseForUsage: usage. self assignExpense: expense toMessages: messages]. 
diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedChunk.toolSpec.addTo..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedChunk.toolSpec.addTo..st index 9b93abc..d9f1e01 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedChunk.toolSpec.addTo..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedChunk.toolSpec.addTo..st @@ -1,27 +1,26 @@ private - requests -parseStreamedChunk: chatCompletionChoice toolSpec: aToolSpecOrNil addTo: aStreamingMessage +parseStreamedChunk: chunk toolSpec: aToolSpecOrNil addTo: aStreamingMessage - | chunkText chunkToolCalls chunkTokenProbabilities | - self flag: #todo. - chatCompletionChoice type ~= 'content_block_delta' ifTrue: [ - self error: 'unimplemented: did not expect something other than content_block_delta in here' - ]. - chatCompletionChoice delta type = 'text_delta' ifTrue: [ - chunkText := chatCompletionChoice delta text. - ] ifFalse: [ - self error: 'unimplemented: did not expect something other than text_delta for content_block_delta' - ]. + | chunkText chunkToolCalls | + + chunk type caseOf: { + ['content_block_delta'] -> [ + chunk delta type caseOf: { + ['text_delta'] -> [chunkText := chunk delta text]. + ['input_json_delta'] -> [chunkToolCalls := self parseStreamedToolCallChunksFrom: chunk delta partial_json + toolSpec: aToolSpecOrNil + message: aStreamingMessage] + } otherwise: [:deltaType | self error: 'got unknown content_block_delta type: ' , deltaType]. + ] + } otherwise: [:type | self error: 'got unknown chunk type: ' , type]. aStreamingMessage rawOutput ifNotNil: [:rawOutput | - rawOutput chatCompletionChunkChoices ifNotNil: [:rawChoices | - rawChoices addLast: chatCompletionChoice]]. + rawOutput messageChunks ifNotNil: [:rawChunks | + rawChunks addLast: chunk]]. self flag: #todo. 
"chunkToolCalls := chatCompletionChoice delta tool_calls ifNotNil: [:toolCalls | self parseStreamedToolCallChunksFrom: toolCalls toolSpec: aToolSpecOrNil message: aStreamingMessage]." - "chunkTokenProbabilities := chatCompletionChoice logprobs ifNotNil: [:logprobs | - self parseTokenProbabilitiesFrom: logprobs]." - - (chunkText isEmptyOrNil and: [chunkToolCalls isNil] and: [chunkTokenProbabilities isEmptyOrNil]) ifFalse: - [aStreamingMessage addChunk: chunkText toolCalls: chunkToolCalls tokenProbabilities: chunkTokenProbabilities]. \ No newline at end of file + (chunkText isEmptyOrNil and: [chunkToolCalls isNil]) ifFalse: + [aStreamingMessage addChunk: chunkText toolCalls: chunkToolCalls tokenProbabilities: nil]. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/streamEventDataFrom..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/streamEventDataFrom..st index 2f73285..e9b3402 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/streamEventDataFrom..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/streamEventDataFrom..st @@ -24,13 +24,9 @@ streamEventDataFrom: aWebResponse ifTrue: [true]] whileFalse]. line isEmpty - ifFalse: - [Transcript show: line; cr. - self flag: #todo. - "(line beginsWith: 'event: ') ifTrue: [ - dataStream nextPutAll: (line allButFirst: 'event: ' size). - dataStream cr - ]." + ifFalse: [ + "Each event is a pair of 'event: ...' and 'data: ...' separated by a newline." + "Since the event type itself is included in the 'data:' section, we just ignore the 'event:' lines entirely." (line beginsWith: 'data: ') ifTrue: [ dataStream nextPutAll: (line allButFirst: 'data: ' size). 
dataStream cr diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json index 3d31e63..2cb036b 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json +++ b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json @@ -30,9 +30,9 @@ "getAnswerFor:" : "ct 1/9/2024 00:14", "getAnswerFor:config:" : "ct 1/8/2024 23:52", "getAnswers:for:" : "ct 1/8/2024 22:28", - "getAnswers:for:config:" : "zakkor 1/13/2025 22:53", + "getAnswers:for:config:" : "zakkor 1/15/2025 23:02", "handleAsyncReplies:for:config:logRawOutput:deferStreaming:" : "ct 5/2/2024 15:52", - "handleStreamingReplies:for:from:config:logRawOutput:" : "zakkor 1/11/2025 12:53", + "handleStreamingReplies:for:from:config:logRawOutput:" : "zakkor 1/16/2025 00:09", "isLegacy" : "zakkor 1/10/2025 19:02", "maxCompletionTokens" : "zakkor 1/10/2025 17:18", "maxPromptTokens" : "ct 2/5/2024 20:48", @@ -42,9 +42,9 @@ "minimumPriceFor:" : "zakkor 1/10/2025 20:12", "minimumPriceFor:answers:" : "zakkor 1/10/2025 20:12", "nameForRateLimits" : "zakkor 1/13/2025 22:29", - "parseMessageFrom:for:logRawOutput:" : "zakkor 1/15/2025 14:21", - "parseMessagesFrom:for:logRawOutput:" : "zakkor 1/13/2025 22:59", - "parseStreamedChunk:toolSpec:addTo:" : "zakkor 1/11/2025 12:48", + "parseMessageFrom:for:logRawOutput:" : "zakkor 1/15/2025 23:16", + "parseMessagesFrom:for:logRawOutput:" : "zakkor 1/15/2025 23:23", + "parseStreamedChunk:toolSpec:addTo:" : "zakkor 1/15/2025 23:53", "parseStreamedToolCallChunkFrom:toolSpec:addTo:" : "ct 1/16/2024 20:52", "parseStreamedToolCallChunksFrom:toolSpec:message:" : "ct 2/13/2024 00:17", "parseTokenProbabilitiesFrom:" : "ct 1/15/2024 19:37", @@ -68,4 +68,4 @@ "resolvedName" : "zakkor 1/10/2025 16:43", "shouldStreamRequests" : "ct 5/2/2024 01:50", "sortKey" : "ct 7/31/2024 22:30", - "streamEventDataFrom:" : 
"zakkor 1/11/2025 00:48" } } + "streamEventDataFrom:" : "zakkor 1/15/2025 21:42" } } diff --git a/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass..st b/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass..st index 1d6c59d..12ffda5 100644 --- a/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass..st +++ b/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass..st @@ -1,5 +1,5 @@ accessing nucleusSamplingMass: aNumber - "Probabilitiy mass for nucleus sampling in (0, 1] (defaults to 1). Controls the coherence of outputs. A lower sampling mass excludes less probable words in the output, resulting in higher coherence and reduced creativity. Also referred to as top-p sampling. See also #temperature. It is disrecommended to modify both temperature and nucleusSamplingMass. For deciding between both parameters, experimentation with either is adviced." + "Probability mass for nucleus sampling in (0, 1] (defaults to 1). Controls the coherence of outputs. A lower sampling mass excludes less probable words in the output, resulting in higher coherence and reduced creativity. Also referred to as top-p sampling. See also #temperature. It is not recommended to modify both temperature and nucleusSamplingMass. For deciding between both parameters, experimentation with either is advised." self argumentAt: #nucleusSamplingMass put: aNumber. 
\ No newline at end of file diff --git a/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass.st b/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass.st index 1558b7e..0263792 100644 --- a/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass.st +++ b/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass.st @@ -1,5 +1,5 @@ accessing nucleusSamplingMass - "Probabilitiy mass for nucleus sampling in (0, 1] (defaults to 1). Controls the coherence of outputs. A lower sampling mass excludes less probable words in the output, resulting in higher coherence and reduced creativity. Also referred to as top-p sampling. See also #temperature. It is disrecommended to modify both temperature and nucleusSamplingMass. For deciding between both parameters, experimentation with either is adviced." + "Probability mass for nucleus sampling in (0, 1] (defaults to 1). Controls the coherence of outputs. A lower sampling mass excludes less probable words in the output, resulting in higher coherence and reduced creativity. Also referred to as top-p sampling. See also #temperature. It is not recommended to modify both temperature and nucleusSamplingMass. For deciding between both parameters, experimentation with either is advised." ^ self argumentAt: #nucleusSamplingMass \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling..st b/packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling..st new file mode 100644 index 0000000..94db2b3 --- /dev/null +++ b/packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling..st @@ -0,0 +1,5 @@ +accessing +topKSampling: aNumber + "Only available for Anthropic models. Only sample from the top K options for each subsequent token. 
Used to remove ""long tail"" low probability responses. Learn more technical details here: https://towardsdatascience.com/how-to-sample-from-language-models-682bceb97277. Recommended for advanced use cases only. You usually only need to use temperature. Required range: aNumber > 0" + + self argumentAt: #topKSampling put: aNumber. \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling.st b/packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling.st new file mode 100644 index 0000000..95423d9 --- /dev/null +++ b/packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling.st @@ -0,0 +1,5 @@ +accessing +topKSampling + "Only available for Anthropic models. Only sample from the top K options for each subsequent token. Used to remove ""long tail"" low probability responses. Learn more technical details here: https://towardsdatascience.com/how-to-sample-from-language-models-682bceb97277. Recommended for advanced use cases only. You usually only need to use temperature. 
Required range: aNumber > 0" + + ^ self argumentAt: #topKSampling \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticConversationConfig.class/methodProperties.json b/packages/SemanticText.package/SemanticConversationConfig.class/methodProperties.json index 0cbba8b..b582121 100644 --- a/packages/SemanticText.package/SemanticConversationConfig.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticConversationConfig.class/methodProperties.json @@ -4,11 +4,13 @@ "instance" : { "maxTokens" : "ct 2/5/2024 16:20", "maxTokens:" : "ct 2/7/2024 21:50", - "nucleusSamplingMass" : "ct 1/15/2024 17:05", - "nucleusSamplingMass:" : "ct 1/15/2024 17:05", + "nucleusSamplingMass" : "zakkor 1/15/2025 21:47", + "nucleusSamplingMass:" : "zakkor 1/15/2025 21:47", "shouldLogProbabilities" : "ct 1/15/2024 20:25", "shouldLogProbabilities:" : "ct 1/15/2024 20:25", "shouldStream" : "ct 8/19/2023 21:37", "shouldStream:" : "ct 8/19/2023 21:37", "temperature" : "ct 1/5/2024 20:57", - "temperature:" : "ct 1/5/2024 20:57" } } + "temperature:" : "ct 1/5/2024 20:57", + "topKSampling" : "zakkor 1/15/2025 22:46", + "topKSampling:" : "zakkor 1/15/2025 22:46" } } From 5d13cdefaa4cb1d342be503c30ed1b882f2196ea Mon Sep 17 00:00:00 2001 From: zakkor Date: Thu, 16 Jan 2025 00:40:16 +0200 Subject: [PATCH 6/7] Add support for system messages --- .../instance/getAnswers.for.config..st | 8 +++++++- .../methodProperties.json | 2 +- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st index 5a0b10d..69266d7 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st @@ -35,8 +35,14 @@ getAnswers: number for: 
aConversation config: aConfigOrNil stream := shouldStream. input stream: stream]. + aConversation messages first role = 'system' ifTrue: [ + input system: (aConversation messages first content)]. + input messages: - (aConversation messages collect: [:message | message asAnthropicObject]). + (aConversation messages + "System messages are specified on the top-level `input.system` field" + select: [:message | message role ~= 'system'] + thenCollect: [:message | message asAnthropicObject]). aConversation activeToolSpec ifNotNil: [:toolSpec | self addToolSpec: toolSpec toInput: input]. diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json index 2cb036b..18fd6ef 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json +++ b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json @@ -30,7 +30,7 @@ "getAnswerFor:" : "ct 1/9/2024 00:14", "getAnswerFor:config:" : "ct 1/8/2024 23:52", "getAnswers:for:" : "ct 1/8/2024 22:28", - "getAnswers:for:config:" : "zakkor 1/15/2025 23:02", + "getAnswers:for:config:" : "zakkor 1/16/2025 00:34", "handleAsyncReplies:for:config:logRawOutput:deferStreaming:" : "ct 5/2/2024 15:52", "handleStreamingReplies:for:from:config:logRawOutput:" : "zakkor 1/16/2025 00:09", "isLegacy" : "zakkor 1/10/2025 19:02", From d397fcb7ac95c53d44fdaa903eefedb2327db4bd Mon Sep 17 00:00:00 2001 From: zakkor Date: Tue, 21 Jan 2025 15:19:46 +0200 Subject: [PATCH 7/7] Add streaming tool call implementation for Anthropic models --- ...ngReplies.for.from.config.logRawOutput..st | 47 ++++++++++++++++--- .../parseStreamedChunk.toolSpec.addTo..st | 26 ---------- ...reamedToolCallChunkFrom.toolSpec.addTo..st | 15 ------ ...medToolCallChunksFrom.toolSpec.message..st | 13 ----- .../instance/parseTokenProbabilitiesFrom..st | 4 -- .../methodProperties.json | 6 +-- 6 files changed, 
42 insertions(+), 69 deletions(-) delete mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedChunk.toolSpec.addTo..st delete mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunkFrom.toolSpec.addTo..st delete mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunksFrom.toolSpec.message..st delete mode 100644 packages/SemanticText.package/AnthropicConversationModel.class/instance/parseTokenProbabilitiesFrom..st diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st index 8d5a9a9..999dd81 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st @@ -26,9 +26,8 @@ handleStreamingReplies: number for: aConversation from: responseBlock config: aC expense := self expenseForUsage: u. self assignExpense: expense toMessages: messages]]. - self flag: #todo. [(data := dataStream next) = nil] whileFalse: - [| chunk msg | + [| chunk msg chunkText chunkToolCall | chunk := data utf8ToSqueak parseAsJson openAIWithSqueakLineEndings. msg := messages last. (chunk at: #error) ifNotNil: [:error | @@ -46,15 +45,51 @@ handleStreamingReplies: number for: aConversation from: responseBlock config: aC ['message_start'] -> [trackExpense value: chunk message usage]. "Signals the start of a new content block (either text or tool_use). The tool_use one contains the name and ID of the function being called." ['content_block_start'] -> [ - self flag: #todo. 
+ "Create new toolcall" + chunk content_block type = 'tool_use' ifTrue: [ + chunkToolCall := (SemanticStreamingToolCall + key: chunk content_block id + tool: chunk content_block name + arguments: chunk content_block input)] + ]. + "This contains the actual message text deltas or tool use JSON input deltas" + ['content_block_delta'] -> [ + chunk delta type caseOf: { + ['text_delta'] -> [chunkText := chunk delta text]. + ['input_json_delta'] -> [ + | aStreamingToolCall argumentsChunk | + "This should be getting the tool call using `chunk index`, but `chunk index` represents the index of the content block (so the first tool call would have an index of 1 (with 0-based indexing)), since the content_block at index 0 is a text content block." + self flag: #todo. + aStreamingToolCall := msg toolCalls last. + argumentsChunk := chunk delta partial_json. + argumentsChunk ifNotEmpty: [ + aStreamingToolCall arguments: + (aStreamingToolCall arguments isString ifTrue: [aStreamingToolCall arguments , argumentsChunk] + ifFalse: [argumentsChunk])] + ] + } otherwise: [:deltaType | self error: 'got unknown content_block_delta type: ' , deltaType]. + ]. + "Useful for knowing when a tool call has finished streaming so we can parse its input JSON" + ['content_block_stop'] -> [ + | toolCall | + self flag: #todo. "See above note" + toolCall := msg toolCalls ifNotEmpty: [msg toolCalls last] ifEmpty: [nil]. + toolCall ifNotNil: [ + (toolCall tool isString and: [toolSpec notNil]) ifTrue: + [toolSpec toolNamed: toolCall tool ifPresent: [:tool | + toolCall tool: tool]]. + toolCall arguments: + ([toolCall arguments parseAsOrderedJson] ifError: [toolCall arguments]) + ] ]. - "This contains the actual message text deltas" - ['content_block_delta'] -> [self parseStreamedChunk: chunk toolSpec: toolSpec addTo: msg]. "Fired near the end, contains only `usage`" ['message_delta'] -> [trackExpense value: chunk usage]. 
"Fired when message is complete" ['message_stop'] -> [msg beComplete] - } otherwise: ["Some events are not relevant and can be safely ignored"]]. + } otherwise: ["Some events are not relevant and can be safely ignored"]. + + (chunkText isEmptyOrNil and: [chunkToolCall isNil]) ifFalse: + [msg addChunk: chunkText toolCalls: (chunkToolCall ifNotNil: [{chunkToolCall}]) tokenProbabilities: nil]]. self assert: dataStream next isNil] diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedChunk.toolSpec.addTo..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedChunk.toolSpec.addTo..st deleted file mode 100644 index d9f1e01..0000000 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedChunk.toolSpec.addTo..st +++ /dev/null @@ -1,26 +0,0 @@ -private - requests -parseStreamedChunk: chunk toolSpec: aToolSpecOrNil addTo: aStreamingMessage - - | chunkText chunkToolCalls | - - chunk type caseOf: { - ['content_block_delta'] -> [ - chunk delta type caseOf: { - ['text_delta'] -> [chunkText := chunk delta text]. - ['input_json_delta'] -> [chunkToolCalls := self parseStreamedToolCallChunksFrom: chunk delta partial_json - toolSpec: aToolSpecOrNil - message: aStreamingMessage] - } otherwise: [:deltaType | self error: 'got unknown content_block_delta type: ' , deltaType]. - ] - } otherwise: [:type | self error: 'got unknown chunk type: ' , type]. - - aStreamingMessage rawOutput ifNotNil: [:rawOutput | - rawOutput messageChunks ifNotNil: [:rawChunks | - rawChunks addLast: chunk]]. - - self flag: #todo. - "chunkToolCalls := chatCompletionChoice delta tool_calls ifNotNil: [:toolCalls | - self parseStreamedToolCallChunksFrom: toolCalls toolSpec: aToolSpecOrNil message: aStreamingMessage]." - - (chunkText isEmptyOrNil and: [chunkToolCalls isNil]) ifFalse: - [aStreamingMessage addChunk: chunkText toolCalls: chunkToolCalls tokenProbabilities: nil]. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunkFrom.toolSpec.addTo..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunkFrom.toolSpec.addTo..st deleted file mode 100644 index a9ec60d..0000000 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunkFrom.toolSpec.addTo..st +++ /dev/null @@ -1,15 +0,0 @@ -private - requests -parseStreamedToolCallChunkFrom: toolCallChunk toolSpec: aToolSpecOrNil addTo: aStreamingToolCall - - | argumentsChunk functionNameChunk keyChunk | - keyChunk := toolCallChunk id. - functionNameChunk := toolCallChunk function ifNotNil: #name. - argumentsChunk := toolCallChunk function ifNotNil: #arguments. - - aStreamingToolCall addChunkKey: keyChunk tool: functionNameChunk arguments: argumentsChunk do: [:streamingToolCall | - (streamingToolCall tool isString and: [aToolSpecOrNil notNil]) ifTrue: - [aToolSpecOrNil toolNamed: streamingToolCall tool ifPresent: [:tool | - streamingToolCall tool: tool]]. - streamingToolCall arguments isString ifTrue: - [streamingToolCall arguments: - ([streamingToolCall arguments parseAsOrderedJson] ifError: [streamingToolCall arguments])]]. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunksFrom.toolSpec.message..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunksFrom.toolSpec.message..st deleted file mode 100644 index 19685c7..0000000 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseStreamedToolCallChunksFrom.toolSpec.message..st +++ /dev/null @@ -1,13 +0,0 @@ -private - requests -parseStreamedToolCallChunksFrom: toolCallChunks toolSpec: aToolSpecOrNil message: aStreamingMessage - - | newToolCalls | - newToolCalls := Dictionary new: toolCallChunks size. 
- toolCallChunks do: [:toolCallChunk | - | index toolCall | - index := toolCallChunk index + 1. - toolCall := aStreamingMessage toolCalls at: index ifAbsent: - [newToolCalls at: index put: - (SemanticStreamingToolCall key: '' tool: '' arguments: '')]. - self parseStreamedToolCallChunkFrom: toolCallChunk toolSpec: aToolSpecOrNil addTo: toolCall]. - ^ newToolCalls semanticWithKeysSorted \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseTokenProbabilitiesFrom..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseTokenProbabilitiesFrom..st deleted file mode 100644 index 55cb2bf..0000000 --- a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseTokenProbabilitiesFrom..st +++ /dev/null @@ -1,4 +0,0 @@ -private - requests -parseTokenProbabilitiesFrom: logprobs - - ^ logprobs content \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json index 18fd6ef..a864e18 100644 --- a/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json +++ b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json @@ -32,7 +32,7 @@ "getAnswers:for:" : "ct 1/8/2024 22:28", "getAnswers:for:config:" : "zakkor 1/16/2025 00:34", "handleAsyncReplies:for:config:logRawOutput:deferStreaming:" : "ct 5/2/2024 15:52", - "handleStreamingReplies:for:from:config:logRawOutput:" : "zakkor 1/16/2025 00:09", + "handleStreamingReplies:for:from:config:logRawOutput:" : "zakkor 1/21/2025 15:12", "isLegacy" : "zakkor 1/10/2025 19:02", "maxCompletionTokens" : "zakkor 1/10/2025 17:18", "maxPromptTokens" : "ct 2/5/2024 20:48", @@ -44,10 +44,6 @@ "nameForRateLimits" : "zakkor 1/13/2025 22:29", "parseMessageFrom:for:logRawOutput:" : "zakkor 1/15/2025 23:16", "parseMessagesFrom:for:logRawOutput:" 
: "zakkor 1/15/2025 23:23", - "parseStreamedChunk:toolSpec:addTo:" : "zakkor 1/15/2025 23:53", - "parseStreamedToolCallChunkFrom:toolSpec:addTo:" : "ct 1/16/2024 20:52", - "parseStreamedToolCallChunksFrom:toolSpec:message:" : "ct 2/13/2024 00:17", - "parseTokenProbabilitiesFrom:" : "ct 1/15/2024 19:37", "parseToolCallFrom:toolSpec:" : "zakkor 1/15/2025 14:33", "parseToolCallsFrom:toolSpec:" : "ct 1/16/2024 00:47", "pathToEndpoint" : "zakkor 1/10/2025 16:36",