diff --git a/packages/SemanticText.package/AnthropicAccount.class/README.md b/packages/SemanticText.package/AnthropicAccount.class/README.md new file mode 100644 index 0000000..0b5945b --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/README.md @@ -0,0 +1 @@ +I represent an account for the Anthropic Platform (https://console.anthropic.com). I hold information about the subscription state, account-specific rate limits, and track expenses from API calls. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/cleanUp..st b/packages/SemanticText.package/AnthropicAccount.class/class/cleanUp..st new file mode 100644 index 0000000..8ba8c51 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/cleanUp..st @@ -0,0 +1,7 @@ +initialize-release +cleanUp: aggressive + + aggressive ifTrue: [ + self allSubInstancesDo: [:account | + account apiKey: nil]. + self defaultAccount: nil]. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount..st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount..st new file mode 100644 index 0000000..08da088 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount..st @@ -0,0 +1,4 @@ +accessing +defaultAccount: anAccount + + DefaultAccount := anAccount. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount.st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount.st new file mode 100644 index 0000000..95ab19d --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultAccount.st @@ -0,0 +1,4 @@ +accessing +defaultAccount + + ^ DefaultAccount ifNil: [DefaultAccount := self new] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey..st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey..st new file mode 100644 index 0000000..6d25afb --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey..st @@ -0,0 +1,4 @@ +preferences +defaultApiKey: aString + + self defaultAccount apiKey: (aString ifEmpty: []). \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey.st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey.st new file mode 100644 index 0000000..2306aed --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultApiKey.st @@ -0,0 +1,5 @@ +preferences +defaultApiKey + + + ^ (DefaultAccount ifNotNil: [:account | account apiKey]) ifNil: [''] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl..st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl..st new file mode 100644 index 0000000..802099e --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl..st @@ -0,0 +1,4 @@ +preferences +defaultBaseUrl: aString + + self defaultAccount baseUrl: (aString ifEmpty: []). 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl.st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl.st new file mode 100644 index 0000000..9b0e31c --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultBaseUrl.st @@ -0,0 +1,5 @@ +preferences +defaultBaseUrl + + + ^ (DefaultAccount ifNotNil: [:account | account baseUrl]) ifNil: [''] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st b/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st new file mode 100644 index 0000000..fd65bd9 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/defaultRateLimitsPerTier.st @@ -0,0 +1,7 @@ +constants +defaultRateLimitsPerTier + "Not implemented" + + self flag: #modelConstants. + self flag: #todo. + ^ nil \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/isValidUsageTier..st b/packages/SemanticText.package/AnthropicAccount.class/class/isValidUsageTier..st new file mode 100644 index 0000000..b4775ac --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/isValidUsageTier..st @@ -0,0 +1,5 @@ +constants +isValidUsageTier: tier + + ^ tier = #free or: + [tier isInteger and: [tier strictlyPositive]] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/class/openExpenseWatcher.st b/packages/SemanticText.package/AnthropicAccount.class/class/openExpenseWatcher.st new file mode 100644 index 0000000..c067965 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/openExpenseWatcher.st @@ -0,0 +1,7 @@ +support +openExpenseWatcher + " + AnthropicAccount openExpenseWatcher + " + + ^ self defaultAccount openExpenseWatcher \ No newline at end of file diff --git 
a/packages/SemanticText.package/AnthropicAccount.class/class/rateLimitsFromSpec..st b/packages/SemanticText.package/AnthropicAccount.class/class/rateLimitsFromSpec..st new file mode 100644 index 0000000..a247865 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/class/rateLimitsFromSpec..st @@ -0,0 +1,7 @@ +support +rateLimitsFromSpec: spec + "{requestsPerMinute. requestsPerDay. tokensPerMinute. tokensPerDay. batchQueueLimit}" + + ^ (#(rpm rpd tpm tpd batchQueueLimit) with: spec collect: [:key :limit | + key -> limit]) + as: Dictionary \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/apiKey..st b/packages/SemanticText.package/AnthropicAccount.class/instance/apiKey..st new file mode 100644 index 0000000..49d303f --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/apiKey..st @@ -0,0 +1,4 @@ +accessing +apiKey: aString + + apiKey := aString. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/apiKey.st b/packages/SemanticText.package/AnthropicAccount.class/instance/apiKey.st new file mode 100644 index 0000000..f7400f9 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/apiKey.st @@ -0,0 +1,4 @@ +accessing +apiKey + + ^ apiKey \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl..st b/packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl..st new file mode 100644 index 0000000..12be3db --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl..st @@ -0,0 +1,4 @@ +accessing +baseUrl: aStringOrNil + + baseUrl := aStringOrNil. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl.st b/packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl.st new file mode 100644 index 0000000..e9c03f0 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/baseUrl.st @@ -0,0 +1,4 @@ +accessing +baseUrl + + ^ baseUrl ifNil: [self defaultBaseUrl] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/controlConnectionDuring..st b/packages/SemanticText.package/AnthropicAccount.class/instance/controlConnectionDuring..st new file mode 100644 index 0000000..1cd49ec --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/controlConnectionDuring..st @@ -0,0 +1,6 @@ +connection +controlConnectionDuring: aBlock + + self ignoreCertificate ifFalse: [^ aBlock value]. + + ^ aBlock on: SqueakSSLCertificateError do: [:ex | ex resume] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/customRateLimits.st b/packages/SemanticText.package/AnthropicAccount.class/instance/customRateLimits.st new file mode 100644 index 0000000..9f31706 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/customRateLimits.st @@ -0,0 +1,4 @@ +accessing - rate limits +customRateLimits + + ^ customRateLimits \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/defaultBaseUrl.st b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultBaseUrl.st new file mode 100644 index 0000000..48f2125 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultBaseUrl.st @@ -0,0 +1,4 @@ +initialize-release +defaultBaseUrl + + ^ 'https://api.anthropic.com' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/defaultRateLimits.st 
b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultRateLimits.st new file mode 100644 index 0000000..7ffa45f --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultRateLimits.st @@ -0,0 +1,5 @@ +accessing - rate limits +defaultRateLimits + "Not implemented" + self flag: #todo. + ^ Dictionary new \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/defaultUsageTier.st b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultUsageTier.st new file mode 100644 index 0000000..bd058b1 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/defaultUsageTier.st @@ -0,0 +1,4 @@ +initialize-release +defaultUsageTier + + ^ #free \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerModel.st b/packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerModel.st new file mode 100644 index 0000000..b1ce7f5 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerModel.st @@ -0,0 +1,10 @@ +accessing - expenses +expensesPerModel + + | expensesPerModel | + expensesPerModel := Dictionary new. + self expensesPerUser keysAndValuesDo: [:user :expensePerModel | + expensePerModel keysAndValuesDo: [:model :expense | + (expensesPerModel at: model ifAbsentPut: [Dictionary new]) + at: user put: expense]]. 
+	^ expensesPerModel \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerUser.st b/packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerUser.st new file mode 100644 index 0000000..4237bea --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/expensesPerUser.st @@ -0,0 +1,5 @@ +accessing - expenses +expensesPerUser + "See https://www.anthropic.com/pricing for current prices and https://console.anthropic.com/settings/usage for your current usage." + + ^ expensesPerUser \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/hasApiKey.st b/packages/SemanticText.package/AnthropicAccount.class/instance/hasApiKey.st new file mode 100644 index 0000000..10f8203 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/hasApiKey.st @@ -0,0 +1,4 @@ +testing +hasApiKey + + ^ self apiKey notNil \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate..st b/packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate..st new file mode 100644 index 0000000..4b2b32e --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate..st @@ -0,0 +1,5 @@ +accessing +ignoreCertificate: aBoolean + "If set to true, SSL certificates for the baseUrl will be ignored. WARNING: While this still ensures privacy (encryption), this opens the door to man-in-the-middle attacks (i.e., you cannot be sure that the remote host is what it says it is.)! Thus, use with caution." + + ignoreCertificate := aBoolean. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate.st b/packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate.st new file mode 100644 index 0000000..886a039 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/ignoreCertificate.st @@ -0,0 +1,5 @@ +accessing +ignoreCertificate + "If set to true, SSL certificates for the baseUrl will be ignored. WARNING: While this still ensures privacy (encryption), this opens the door to man-in-the-middle attacks (i.e., you cannot be sure that the remote host is what he say he is.)! Thus, use with caution." + + ^ ignoreCertificate \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/initialize.st b/packages/SemanticText.package/AnthropicAccount.class/instance/initialize.st new file mode 100644 index 0000000..a3dd161 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/initialize.st @@ -0,0 +1,9 @@ +initialize-release +initialize + + super initialize. + + ignoreCertificate := false. + usageTier := self defaultUsageTier. + customRateLimits := Dictionary new. + self resetExpenses. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/noteExpense.forUser.model..st b/packages/SemanticText.package/AnthropicAccount.class/instance/noteExpense.forUser.model..st new file mode 100644 index 0000000..9abf757 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/noteExpense.forUser.model..st @@ -0,0 +1,8 @@ +accessing - expenses +noteExpense: expense forUser: userName model: modelName + + | expensesPerModel | + expensesPerModel := expensesPerUser at: userName ifAbsentPut: [Dictionary new]. + expensesPerModel at: modelName put: + (expensesPerModel at: modelName ifAbsent: [OpenAIAmount zero]) + + expense. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/openExpenseWatcher.st b/packages/SemanticText.package/AnthropicAccount.class/instance/openExpenseWatcher.st new file mode 100644 index 0000000..9bc81bd --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/openExpenseWatcher.st @@ -0,0 +1,13 @@ +ui +openExpenseWatcher + "For the actual usage without estimations, see: https://platform.openai.com/account/usage" + + | field inspector | + inspector := Inspector on: self. + field := inspector newCustomField valueGetter: [:account | account totalExpense]. + inspector addCustomField: field. + field rememberInspector. + ^ (World dropInspectorField: field event: self currentEvent) + in: [:answer | + answer = World ifFalse: "Morphic-ct.2143" + [answer openAsTool]] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitForModel.type.ifUnknown..st b/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitForModel.type.ifUnknown..st new file mode 100644 index 0000000..557af84 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitForModel.type.ifUnknown..st @@ -0,0 +1,6 @@ +accessing - rate limits +rateLimitForModel: modelName type: type ifUnknown: aBlock + + | rateLimit | + rateLimit := self rateLimitsForModel: modelName ifUnknown: [^ aBlock value]. + ^ rateLimit at: type ifAbsent: aBlock \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimits.st b/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimits.st new file mode 100644 index 0000000..12858de --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimits.st @@ -0,0 +1,14 @@ +accessing - rate limits +rateLimits + + | rateLimits | + rateLimits := self defaultRateLimits copy. 
+ "merge" + self customRateLimits keysAndValuesDo: [:modelName :modelLimit | + rateLimits at: modelName put: + (rateLimits + at: modelName + ifPresent: [:defaultModelLimit | + defaultModelLimit , modelLimit] + ifAbsent: [modelLimit])]. + ^ rateLimits \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitsForModel.ifUnknown..st b/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitsForModel.ifUnknown..st new file mode 100644 index 0000000..f88d738 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/rateLimitsForModel.ifUnknown..st @@ -0,0 +1,13 @@ +accessing - rate limits +rateLimitsForModel: modelName ifUnknown: aBlock + "See: https://platform.openai.com/account/rate-limits. Hypothetically we could scrape this, in practice, users may enter relevant rate limits manually." + + ^ self rateLimits at: modelName ifAbsent: + ["search for rate limits for more general model name (prefix, e.g., without version number/context size)" + | parts | + parts := modelName ifNotEmpty: [modelName splitBy: '-']. + parts ifNotEmpty: + [^ self + rateLimitsForModel: (parts allButLast joinSeparatedBy: '-') + ifUnknown: aBlock]. 
+ aBlock value] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerDayForModel.ifUnknown..st b/packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerDayForModel.ifUnknown..st new file mode 100644 index 0000000..65d8701 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerDayForModel.ifUnknown..st @@ -0,0 +1,4 @@ +accessing - rate limits +requestsPerDayForModel: modelName ifUnknown: aBlock + + ^ self rateLimitForModel: modelName type: #rpd ifUnknown: aBlock \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerMinuteForModel.ifUnknown..st b/packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerMinuteForModel.ifUnknown..st new file mode 100644 index 0000000..c20c563 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/requestsPerMinuteForModel.ifUnknown..st @@ -0,0 +1,4 @@ +accessing - rate limits +requestsPerMinuteForModel: modelName ifUnknown: aBlock + + ^ self rateLimitForModel: modelName type: #rpm ifUnknown: aBlock \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/resetExpenses.st b/packages/SemanticText.package/AnthropicAccount.class/instance/resetExpenses.st new file mode 100644 index 0000000..08050c8 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/resetExpenses.st @@ -0,0 +1,4 @@ +accessing - expenses +resetExpenses + + expensesPerUser := Dictionary new. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/setCustomRateLimitsForModel.to..st b/packages/SemanticText.package/AnthropicAccount.class/instance/setCustomRateLimitsForModel.to..st new file mode 100644 index 0000000..399a414 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/setCustomRateLimitsForModel.to..st @@ -0,0 +1,7 @@ +accessing - rate limits +setCustomRateLimitsForModel: modelName to: rateLimitsSpec + "rateLimitsSpec: {requestsPerMinute. requestsPerDay. tokensPerMinute. tokensPerDay. batchQueueLimit} + See: https://platform.openai.com/account/limits. Hypothetically we could scrape this, in practice, users may enter relevant rate limits manually." + + self customRateLimits at: modelName put: + (self class rateLimitsFromSpec: rateLimitsSpec). \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerDayForModel.ifUnknown..st b/packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerDayForModel.ifUnknown..st new file mode 100644 index 0000000..aeb6ea9 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerDayForModel.ifUnknown..st @@ -0,0 +1,4 @@ +accessing - rate limits +tokensPerDayForModel: modelName ifUnknown: aBlock + + ^ self rateLimitForModel: modelName type: #tpd ifUnknown: aBlock \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerMinuteForModel.ifUnknown..st b/packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerMinuteForModel.ifUnknown..st new file mode 100644 index 0000000..1ff707f --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/tokensPerMinuteForModel.ifUnknown..st @@ -0,0 +1,4 @@ +accessing - rate limits +tokensPerMinuteForModel: modelName ifUnknown: aBlock + + ^ self rateLimitForModel: modelName type: #tpm ifUnknown: aBlock \ No newline at end of 
file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpense.st b/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpense.st new file mode 100644 index 0000000..68de177 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpense.st @@ -0,0 +1,5 @@ +accessing - expenses +totalExpense + + ^ self expensesPerUser inject: OpenAIAmount zero into: [:sum :expenses | + expenses inject: sum into: [:userSum :expense | userSum + expense]] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerModel.st b/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerModel.st new file mode 100644 index 0000000..61d03e5 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerModel.st @@ -0,0 +1,4 @@ +accessing - expenses +totalExpensePerModel + + ^ self expensesPerModel collect: [:expenses | expenses sum] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerUser.st b/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerUser.st new file mode 100644 index 0000000..1fb0428 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/totalExpensePerUser.st @@ -0,0 +1,4 @@ +accessing - expenses +totalExpensePerUser + + ^ self expensesPerUser collect: [:expenses | expenses sum] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/usageTier..st b/packages/SemanticText.package/AnthropicAccount.class/instance/usageTier..st new file mode 100644 index 0000000..43e4c3c --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/usageTier..st @@ -0,0 +1,5 @@ +accessing +usageTier: tier + "Rate and usage limits depend on the usage tier of your account, which is assigned based on the duration of 
your subscription and the magnitude of your payments. Must be #free or a positive integer. You can find your current tier here: https://console.anthropic.com/settings/limits" + + usageTier := tier. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/instance/usageTier.st b/packages/SemanticText.package/AnthropicAccount.class/instance/usageTier.st new file mode 100644 index 0000000..1b30a9d --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/instance/usageTier.st @@ -0,0 +1,5 @@ +accessing +usageTier + "Rate and usage limits depend on the usage tier of your account, which is assigned based on the duration of your subscription and the magnitude of your payments. Must be #free or a positive integer. You can find your current tier here: https://console.anthropic.com/settings/limits" + + ^ usageTier \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json b/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json new file mode 100644 index 0000000..a6f4b83 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/methodProperties.json @@ -0,0 +1,45 @@ +{ + "class" : { + "cleanUp:" : "ct 8/20/2023 12:56", + "defaultAccount" : "ct 8/20/2023 12:52", + "defaultAccount:" : "ct 8/20/2023 12:52", + "defaultApiKey" : "zakkor 1/10/2025 12:49", + "defaultApiKey:" : "ct 10/15/2023 22:20", + "defaultBaseUrl" : "zakkor 1/10/2025 12:49", + "defaultBaseUrl:" : "ct 12/1/2023 23:30", + "defaultRateLimitsPerTier" : "zakkor 1/15/2025 21:54", + "isValidUsageTier:" : "ct 11/28/2023 12:39", + "openExpenseWatcher" : "zakkor 1/10/2025 12:50", + "rateLimitsFromSpec:" : "ct 6/20/2024 00:00" }, + "instance" : { + "apiKey" : "ct 8/20/2023 12:55", + "apiKey:" : "ct 8/20/2023 12:55", + "baseUrl" : "ct 12/1/2023 23:29", + "baseUrl:" : "ct 12/1/2023 23:31", + "controlConnectionDuring:" : "ct 12/11/2023 18:35", + "customRateLimits" : "ct 11/28/2023 
13:27", + "defaultBaseUrl" : "zakkor 1/10/2025 12:54", + "defaultRateLimits" : "zakkor 1/13/2025 22:33", + "defaultUsageTier" : "ct 11/28/2023 14:54", + "expensesPerModel" : "ct 8/20/2023 20:15", + "expensesPerUser" : "ct 8/27/2023 20:57", + "hasApiKey" : "ct 10/15/2023 21:44", + "ignoreCertificate" : "ct 12/11/2023 18:44", + "ignoreCertificate:" : "ct 12/11/2023 18:44", + "initialize" : "ct 12/11/2023 18:33", + "noteExpense:forUser:model:" : "ct 8/20/2023 19:11", + "openExpenseWatcher" : "ct 11/26/2023 21:56", + "rateLimitForModel:type:ifUnknown:" : "ct 11/28/2023 18:46", + "rateLimits" : "ct 2/21/2024 15:12", + "rateLimitsForModel:ifUnknown:" : "ct 2/21/2024 15:12", + "requestsPerDayForModel:ifUnknown:" : "ct 11/28/2023 18:46", + "requestsPerMinuteForModel:ifUnknown:" : "ct 11/28/2023 13:33", + "resetExpenses" : "ct 8/20/2023 21:04", + "setCustomRateLimitsForModel:to:" : "ct 4/27/2024 22:13", + "tokensPerDayForModel:ifUnknown:" : "ct 11/28/2023 13:33", + "tokensPerMinuteForModel:ifUnknown:" : "ct 11/28/2023 13:33", + "totalExpense" : "ct 8/20/2023 20:17", + "totalExpensePerModel" : "ct 8/20/2023 19:03", + "totalExpensePerUser" : "ct 8/20/2023 18:52", + "usageTier" : "ct 11/28/2023 14:56", + "usageTier:" : "ct 11/28/2023 14:56" } } diff --git a/packages/SemanticText.package/AnthropicAccount.class/properties.json b/packages/SemanticText.package/AnthropicAccount.class/properties.json new file mode 100644 index 0000000..0c114c8 --- /dev/null +++ b/packages/SemanticText.package/AnthropicAccount.class/properties.json @@ -0,0 +1,19 @@ +{ + "category" : "SemanticText-Providers-Anthropic", + "classinstvars" : [ + ], + "classvars" : [ + "DefaultAccount" ], + "commentStamp" : "Ed 1/10/2025 12:45", + "instvars" : [ + "baseUrl", + "apiKey", + "ignoreCertificate", + "usageTier", + "customRateLimits", + "expensesPerUser" ], + "name" : "AnthropicAccount", + "pools" : [ + ], + "super" : "Object", + "type" : "normal" } diff --git 
a/packages/SemanticText.package/AnthropicConversationModel.class/README.md b/packages/SemanticText.package/AnthropicConversationModel.class/README.md new file mode 100644 index 0000000..9259a15 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/README.md @@ -0,0 +1 @@ +I generate answers for a SemanticConversation using a large language model (LLM) from the Claude family, accessed through the Anthropic API (https://docs.anthropic.com). \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/bestName.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/bestName.st new file mode 100644 index 0000000..72d296f --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/bestName.st @@ -0,0 +1,3 @@ +constants +bestName + ^ self claude35SonnetLatestName \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/cheapestName.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/cheapestName.st new file mode 100644 index 0000000..5ffad18 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/cheapestName.st @@ -0,0 +1,3 @@ +constants +cheapestName + ^ self claude35HaikuLatestName \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Haiku20241022Name.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Haiku20241022Name.st new file mode 100644 index 0000000..89e4b2e --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Haiku20241022Name.st @@ -0,0 +1,3 @@ +constants +claude35Haiku20241022Name + ^ 'claude-3-5-haiku-20241022' \ No newline at end of file diff --git 
a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35HaikuLatestName.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35HaikuLatestName.st new file mode 100644 index 0000000..3fef37b --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35HaikuLatestName.st @@ -0,0 +1,3 @@ +constants +claude35HaikuLatestName + ^ 'claude-3-5-haiku-latest' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20240620Name.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20240620Name.st new file mode 100644 index 0000000..c746d8f --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20240620Name.st @@ -0,0 +1,3 @@ +constants +claude35Sonnet20240620Name + ^ 'claude-3-5-sonnet-20240620' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20241022Name.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20241022Name.st new file mode 100644 index 0000000..a7e2933 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35Sonnet20241022Name.st @@ -0,0 +1,3 @@ +constants +claude35Sonnet20241022Name + ^ 'claude-3-5-sonnet-20241022' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35SonnetLatestName.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35SonnetLatestName.st new file mode 100644 index 0000000..14b84e6 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude35SonnetLatestName.st @@ -0,0 +1,3 @@ +constants +claude35SonnetLatestName + ^ 'claude-3-5-sonnet-latest' \ No newline at end of file diff --git 
a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Haiku20240307Name.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Haiku20240307Name.st new file mode 100644 index 0000000..c0e7afc --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Haiku20240307Name.st @@ -0,0 +1,3 @@ +constants +claude3Haiku20240307Name + ^ 'claude-3-haiku-20240307' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Opus20240229Name.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Opus20240229Name.st new file mode 100644 index 0000000..4d9a6fb --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Opus20240229Name.st @@ -0,0 +1,3 @@ +constants +claude3Opus20240229Name + ^ 'claude-3-opus-20240229' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3OpusLatestName.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3OpusLatestName.st new file mode 100644 index 0000000..99fb8d3 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3OpusLatestName.st @@ -0,0 +1,3 @@ +constants +claude3OpusLatestName + ^ 'claude-3-opus-latest' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Sonnet20240229Name.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Sonnet20240229Name.st new file mode 100644 index 0000000..57a3218 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/claude3Sonnet20240229Name.st @@ -0,0 +1,3 @@ +constants +claude3Sonnet20240229Name + ^ 'claude-3-sonnet-20240229' \ No newline at end of file diff --git 
a/packages/SemanticText.package/AnthropicConversationModel.class/class/defaultModelNames.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/defaultModelNames.st new file mode 100644 index 0000000..711673f --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/defaultModelNames.st @@ -0,0 +1,18 @@ +constants +defaultModelNames + + self flag: #modelConstants. + + "first is default" + ^ {"recommended" + self claude35SonnetLatestName. + self claude35HaikuLatestName. + self claude3OpusLatestName. + + "all other versions (pinned), ordered by version descending, date descending" + self claude35Sonnet20241022Name. + self claude35Sonnet20240620Name. + self claude35Haiku20241022Name. + self claude3Opus20240229Name. + self claude3Sonnet20240229Name. + self claude3Haiku20240307Name} \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/initialize.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/initialize.st new file mode 100644 index 0000000..19cbcd7 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/initialize.st @@ -0,0 +1,12 @@ +initialize-release +initialize + + self defaultModelNames do: [:modelName | + | model | + model := self new name: modelName. + SemanticText registeredConversationModels + detect: [:ea | ea name = model name] + ifNone: + [SemanticText registerConversationModel: model. + SemanticText defaultConversationModelOrNil ifNil: + [SemanticText defaultConversationModel: model]]]. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/class/unload.st b/packages/SemanticText.package/AnthropicConversationModel.class/class/unload.st new file mode 100644 index 0000000..94f056e --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/class/unload.st @@ -0,0 +1,4 @@ +initialize-release +unload + + SemanticText unregisterConversationModelsOf: self \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/addToolSpec.toInput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/addToolSpec.toInput..st new file mode 100644 index 0000000..cc455cb --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/addToolSpec.toInput..st @@ -0,0 +1,20 @@ +private - requests +addToolSpec: aToolSpec toInput: input + + input tools: + (aToolSpec tools asArray collect: [:tool | tool asAnthropicObject]). + + aToolSpec forcedTools ifNotNil: [:forcedTools | + input tool_choice: + (forcedTools = #any + ifTrue: [#required] + ifFalse: + [forcedTools + ifEmpty: [#none] + ifNotEmpty: + [| forcedTool | + forcedTools size > 1 ifTrue: [^ self error: 'cannot force multiple tools']. + forcedTool := forcedTools anyOne. + (forcedTool isString or: [forcedTool isText]) + ifTrue: [forcedTool := aToolSpec toolNamed: forcedTool]. 
+ forcedTool asAnthropicToolChoiceObject]])] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/assignExpense.toMessages..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/assignExpense.toMessages..st new file mode 100644 index 0000000..0a8f69c --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/assignExpense.toMessages..st @@ -0,0 +1,16 @@ +private +assignExpense: expense toMessages: messages + + messages size = 1 + ifTrue: + [messages first expense: expense] + ifFalse: + [| estimatedTokenCounts | + estimatedTokenCounts := messages collect: [:message | + self countTokensInMessage: message]. + messages + with: + (expense asApproximated "because token counts are estimated" + distributeBulkToShares: estimatedTokenCounts) + do: [:message :unitPrice | + message expense: unitPrice]]. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerCompletionToken.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerCompletionToken.st new file mode 100644 index 0000000..993823d --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerCompletionToken.st @@ -0,0 +1,22 @@ +accessing +centsPerCompletionToken + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic." + + self flag: #modelConstants. + + ^ self resolvedName caseOf: { + "Claude 3.5 Latest" + [self class claude35SonnetLatestName] -> [0.0015s]. "Output: $15/MTok" + [self class claude35HaikuLatestName] -> [0.0004s]. "Output: $4/MTok" + [self class claude3OpusLatestName] -> [0.0075s]. "Output: $75/MTok" + + "Claude 3.5 Pinned Versions" + [self class claude35Sonnet20241022Name] -> [0.0015s]. "Output: $15/MTok" + [self class claude35Sonnet20240620Name] -> [0.0015s]. 
"Output: $15/MTok" + [self class claude35Haiku20241022Name] -> [0.0004s]. "Output: $4/MTok" + + "Claude 3 Pinned Versions" + [self class claude3Opus20240229Name] -> [0.0075s]. "Output: $75/MTok" + [self class claude3Sonnet20240229Name] -> [0.0015s]. "Output: $15/MTok" + [self class claude3Haiku20240307Name] -> [0.000125s] "Output: $1.25/MTok" + } \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerPromptToken.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerPromptToken.st new file mode 100644 index 0000000..9696c93 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/centsPerPromptToken.st @@ -0,0 +1,22 @@ +accessing +centsPerPromptToken + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic." + + self flag: #modelConstants. + + ^ self resolvedName caseOf: { + "Claude 3.5 Latest" + [self class claude35SonnetLatestName] -> [0.0003s]. "Input: $3/MTok" + [self class claude35HaikuLatestName] -> [0.00008s]. "Input: $0.80/MTok" + [self class claude3OpusLatestName] -> [0.0015s]. "Input: $15/MTok" + + "Claude 3.5 Pinned Versions" + [self class claude35Sonnet20241022Name] -> [0.0003s]. "Input: $3/MTok" + [self class claude35Sonnet20240620Name] -> [0.0003s]. "Input: $3/MTok" + [self class claude35Haiku20241022Name] -> [0.00008s]. "Input: $0.80/MTok" + + "Claude 3 Pinned Versions" + [self class claude3Opus20240229Name] -> [0.0015s]. "Input: $15/MTok" + [self class claude3Sonnet20240229Name] -> [0.0003s]. 
"Input: $3/MTok" + [self class claude3Haiku20240307Name] -> [0.000025s] "Input: $0.25/MTok" + } \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInConversation..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInConversation..st new file mode 100644 index 0000000..77e9078 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInConversation..st @@ -0,0 +1,54 @@ +service +countTokensInConversation: aConversation + "Note: (Upper) approximation! Token counts are approximated (see #countTokensIn:), and the precise tokens generated for tool specs, tool calls, and tool messages are unknown and have been reverse-engineered but not been comprehensively tested. + Adopted from https://github.com/forestwanglin/openai-java/blob/main/jtokkit/src/main/java/xyz/felh/openai/jtokkit/utils/ToolContentFormat.java and https://github.com/hmarr/openai-chat-tokens." + + | tools pendingTools toolMessages hasMultipleToolMessages tokens | + tools := aConversation activeToolSpec ifNotNil: [:toolSpec | toolSpec tools]. + pendingTools := tools. + toolMessages := aConversation messages select: [:message | message role = #tool]. + hasMultipleToolMessages := toolMessages size > 1. + tokens := aConversation messages detectSum: [:message | + | compiledContent | + compiledContent := message basicContent ifNil: ['']. + (pendingTools isEmptyOrNil not and: [message role = #system]) ifTrue: + [compiledContent := compiledContent , String cr. + pendingTools := nil]. + self + countTokensInMessage: + (message shallowCopy + content: compiledContent; + yourself) + hasMultipleToolMessages: hasMultipleToolMessages]. + + tools isEmptyOrNil ifFalse: + [tokens := tokens + (self countTokensInToolSpec: aConversation activeToolSpec). 
+ + (aConversation messages anySatisfy: [:message | message role = #system]) + ifTrue: + ["'Tools typically add a system message, but reuse the first one if it's already there. This offsets the extra 9 tokens added by the tool definitions.'" + tokens := tokens - 4]]. + + hasMultipleToolMessages ifTrue: + [| toolMessagesWithContentCount | + tokens := tokens + (toolMessages size * 2 + 1). + (toolMessagesWithContentCount := toolMessages count: [:message | message contentString isEmptyOrNil not]) > 0 ifTrue: + [tokens := tokens + 1 - toolMessagesWithContentCount]]. + + (aConversation activeToolSpec ifNotNil: [:toolSpec | toolSpec forcedTools]) ifNotNil: [:forcedTools | + tokens := tokens + + (forcedTools isCollection + ifTrue: + [forcedTools + ifEmpty: [1] + ifNotEmpty: + [forcedTools detectSum: [:tool | + | toolName | + toolName := (tool isString or: [tool isText]) + ifTrue: [tool asString] + ifFalse: [tool asOpenAIToolChoiceObject function name]. + (self countTokensIn: toolName) + 4]]] + ifFalse: + [1 flag: #assumption "i don't know"])]. + + ^ tokens \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage..st new file mode 100644 index 0000000..682d5e3 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage..st @@ -0,0 +1,7 @@ +private - tokens +countTokensInMessage: aMessage + "Approximation! Adopted from https://github.com/hmarr/openai-chat-tokens." 
+ + ^ self + countTokensInMessage: aMessage + hasMultipleToolMessages: (aMessage conversation messages count: [:ea | ea isToolMessage]) > 1 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage.hasMultipleToolMessages..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage.hasMultipleToolMessages..st new file mode 100644 index 0000000..9632890 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInMessage.hasMultipleToolMessages..st @@ -0,0 +1,59 @@ +private - tokens +countTokensInMessage: aMessage hasMultipleToolMessages: hasMultipleToolMessages + "Approximation! Adopted from https://github.com/hmarr/openai-chat-tokens." + + | compiledContent tokens | + compiledContent := String streamContents: [:stream | + (aMessage isToolMessage and: [hasMultipleToolMessages]) + ifFalse: + [aMessage basicContent ifNotNil: [:content | + stream nextPutAll: content]] + ifTrue: + [[| object | + object := aMessage basicContent parseAsJson. + object isDictionary ifFalse: [self error: 'content does not represent a JSON object']. + self + printFunctionCallArguments: object + on: stream] + on: Error do: + [stream nextPutAll: aMessage basicContent]]. + + "not yet implemented" + "aMessage messageName ifNotNil: [:messageName | + stream nextPutAll: messageName]."]. + + tokens := self countTokensIn: compiledContent. + + "special tokens per input message" + aMessage isToolMessage + ifFalse: [tokens := tokens + 3] + ifTrue: + [| toolContent | + tokens := tokens + 2. + toolContent := [aMessage basicContent parseAsJson] ifError: [nil]. + (hasMultipleToolMessages and: [toolContent isDictionary]) ifTrue: + [tokens := tokens - toolContent keys size]]. + + "not yet implemented" + "(aMessage messageName notNil and: [aMessage isToolMessage not]) ifTrue: + [tokens := tokens + 1]." 
+ aMessage toolCalls ifNotEmpty: [:toolCalls | + tokens := tokens + + (toolCalls detectSum: [:toolCall | + | toolCallTokens | + toolCallTokens := 3. + toolCallTokens := toolCallTokens + (self countTokensIn: toolCall type). + toolCall type = #function ifTrue: + [toolCallTokens := toolCallTokens + ((self countTokensIn: toolCall toolName) * 2). + toolCall arguments ifNotEmpty: [:arguments | + toolCallTokens := toolCallTokens + + (self countTokensIn: + (String streamContents: [:stream | + self printFunctionCallArguments: arguments on: stream]))]]. + toolCallTokens]). + tokens := tokens + + (toolCalls size > 1 + ifTrue: [15 - (toolCalls size * 5 - 6)] + ifFalse: [-2])]. + + ^ tokens \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInToolSpec..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInToolSpec..st new file mode 100644 index 0000000..b6ebfeb --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/countTokensInToolSpec..st @@ -0,0 +1,9 @@ +private - tokens +countTokensInToolSpec: aToolSpec + "Approximation! Adopted from https://github.com/forestwanglin/openai-java/blob/main/jtokkit/src/main/java/xyz/felh/openai/jtokkit/utils/ToolContentFormat.java and https://github.com/hmarr/openai-chat-tokens." + + | compiledContent | + compiledContent := String streamContents: [:stream | + self printToolSpec: aToolSpec on: stream]. 
+ + ^ (self countTokensIn: compiledContent) + 9 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultConfig.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultConfig.st new file mode 100644 index 0000000..42ab2b1 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultConfig.st @@ -0,0 +1,4 @@ +initialize-release +defaultConfig + + ^ SemanticConversationConfig new \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultName.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultName.st new file mode 100644 index 0000000..afa7acb --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/defaultName.st @@ -0,0 +1,4 @@ +initialize-release +defaultName + + ^ self class defaultModelNames first \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForReplies.after..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForReplies.after..st new file mode 100644 index 0000000..c674801 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForReplies.after..st @@ -0,0 +1,12 @@ +private +expenseForReplies: messages after: promptTokens + + | expenseForPrompt expenseForReplies | + expenseForPrompt := (OpenAIAmount approximateCents: self centsPerPromptToken) + * promptTokens. + expenseForReplies := (OpenAIAmount approximateCents: self centsPerCompletionToken) + * (messages detectSum: [:message | + message basicContent + ifNil: [0] + ifNotNil: [:content | self countTokensIn: content]]). 
+ ^ expenseForPrompt + expenseForReplies \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForUsage..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForUsage..st new file mode 100644 index 0000000..a4eb5b6 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/expenseForUsage..st @@ -0,0 +1,19 @@ +private +expenseForUsage: usage + "Answer the total expense for one API call described by the `usage` JSON object, pricing regular input/output tokens as well as cache-write and cache-read tokens." + + | inputTokens outputTokens cacheCreationInputTokens cacheReadInputTokens | + "Missing usage fields count as 0. NOTE(review): assumes `usage at:` answers nil for absent keys (JsonObject behavior) -- confirm this is never a plain Dictionary." + inputTokens := (usage at: 'input_tokens') ifNil: [0]. + outputTokens := (usage at: 'output_tokens') ifNil: [0]. + cacheCreationInputTokens := (usage at: 'cache_creation_input_tokens') ifNil: [0]. + cacheReadInputTokens := (usage at: 'cache_read_input_tokens') ifNil: [0]. + + "- Cache write tokens are 25% more expensive than base input tokens + - Cache read tokens are 90% cheaper than base input tokens + - Regular input and output tokens are priced at standard rates" + + ^ ((OpenAIAmount exactCents: self centsPerPromptToken) * inputTokens) + + ((OpenAIAmount exactCents: self centsPerCompletionToken) * outputTokens) + + ((OpenAIAmount exactCents: self centsPerPromptToken) * cacheCreationInputTokens * 1.25) + + ((OpenAIAmount exactCents: self centsPerPromptToken) * cacheReadInputTokens * 0.1) \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor..st new file mode 100644 index 0000000..2c401c9 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor..st @@ -0,0 +1,4 @@ +service +getAnswerFor: aConversation + + ^ self getAnswers: 1 for: aConversation \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor.config..st
b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor.config..st new file mode 100644 index 0000000..ae8b36e --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswerFor.config..st @@ -0,0 +1,5 @@ +service +getAnswerFor: aConversation config: aConfigOrNil + "Generate an assistant reply in response to aConversation. Answer a new SemanticMessage for the new reply. If #shouldStream is set to true, the answer will be SemanticStreamingMessage that is completed in the background." + + ^ (self getAnswers: 1 for: aConversation config: aConfigOrNil) first \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for..st new file mode 100644 index 0000000..69389c5 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for..st @@ -0,0 +1,4 @@ +service +getAnswers: number for: aConversation + + ^ self getAnswers: number for: aConversation config: nil \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st new file mode 100644 index 0000000..69266d7 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/getAnswers.for.config..st @@ -0,0 +1,59 @@ +service +getAnswers: number for: aConversation config: aConfigOrNil + "Generate assistant replies in response to aConversation. Answer a collection of new SemanticMessages for each new reply. If #shouldStream is set to true, the answers will be SemanticStreamingMessages that are completed in the background." 
+ + | chatCompletion logRawOutput stream resumeStream | + + "Multiple responses via `n` are not directly supported by the Anthropic API" + number > 1 ifTrue: [ + ^ ((1 to: number) collect: [:i | self getAnswers: 1 for: aConversation config: aConfigOrNil]) flattened]. + + chatCompletion := self + invokeWithConfig: aConfigOrNil + editInput: [:input :config | + config user ifNotNil: [:user | + input user: user]. + + config maxTokens ifNotNil: [:maxTokens | + input max_tokens: maxTokens]. + + config temperature ifNotNil: [:temperature | + input temperature: temperature]. + + config nucleusSamplingMass ifNotNil: [:p | + input top_p: p]. + + config topKSampling ifNotNil: [:k | + input top_k: k]. + + logRawOutput := false. + config shouldLogRawOutput ifNotNil: [:shouldLogRawOutput | + logRawOutput := shouldLogRawOutput]. + + stream := false. + config shouldStream ifNotNil: [:shouldStream | + stream := shouldStream. + input stream: stream]. + + aConversation messages first role = 'system' ifTrue: [ + input system: (aConversation messages first content)]. + + input messages: + (aConversation messages + "System messages are specified on the top-level `input.system` field" + select: [:message | message role ~= 'system'] + thenCollect: [:message | message asAnthropicObject]). + + aConversation activeToolSpec ifNotNil: [:toolSpec | + self addToolSpec: toolSpec toInput: input]. + + (stream and: [self shouldStreamRequests]) ifTrue: + ["For faster feedback, send even the request asynchronously." + ^ self handleAsyncReplies: number for: aConversation config: config logRawOutput: logRawOutput deferStreaming: + [:resumeBlock | resumeStream := resumeBlock]]] + handleResponse: [:response :config | + (stream and: [response isSuccess]) ifTrue: + [resumeStream ifNotNil: [resumeStream value: response]. + ^ self handleStreamingReplies: number for: aConversation from: response config: config logRawOutput: logRawOutput]]. 
+ + ^ self parseMessagesFrom: chatCompletion for: aConversation logRawOutput: logRawOutput \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleAsyncReplies.for.config.logRawOutput.deferStreaming..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleAsyncReplies.for.config.logRawOutput.deferStreaming..st new file mode 100644 index 0000000..76b7633 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleAsyncReplies.for.config.logRawOutput.deferStreaming..st @@ -0,0 +1,34 @@ +private +handleAsyncReplies: number for: aConversation config: aConfig logRawOutput: logRawOutput deferStreaming: deferBlock + "Black magic that esssentially implements a coroutine to answer a streaming conversation BEFORE returning control to the sender's sender (#invokeWithConfig:editInput:handleResponse:) and only then resumes the invocation and request processing from the background streaming process." + + | context home continue sem result | + "Preserve the remaining invocation stack..." + context := thisContext sender. + home := context home. + continue := context sender cut: home. + context privSender: home. + sem := Semaphore new. + + "...directly return the streaming answers..." + result := self + handleStreamingReplies: number + for: aConversation + from: + ["...and continue the invocation stack from the requestBlock." + | streamContext | + streamContext := thisContext. + sem wait. + self assert: home isDead. + home privSender: streamContext; pc: home endPC. + deferBlock value: [:response | streamContext push: response; jump]. + continue privSender: streamContext. + continue push: nil; jump] + config: aConfig + logRawOutput: logRawOutput. + + "If the activeProcess has a lower priority than the default streaming priority, the streaming process must wait for the activeProcess to return from home." 
+ home insertSender: + (Context contextEnsure: [sem signal]). + + ^ result \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st new file mode 100644 index 0000000..999dd81 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/handleStreamingReplies.for.from.config.logRawOutput..st @@ -0,0 +1,101 @@ +private +handleStreamingReplies: number for: aConversation from: responseBlock config: aConfig logRawOutput: logRawOutput + + | promptTokens toolSpec trackExpense | + promptTokens := self countTokensInConversation: aConversation. + toolSpec := aConversation activeToolSpec. + + ^ SemanticStreamingMessage + conversation: aConversation + array: number + role: #assistant + inBackgroundDo: [:messages | + | expense | + [| dataStream data | + dataStream := self streamEventDataFrom: responseBlock value. + + logRawOutput ifTrue: + [messages do: [:message | + message rawOutput: + (JsonObject new + messageChunks: OrderedCollection new; + yourself)]]. + + trackExpense := [:usage | + usage ifNotNil: [:u | + expense := self expenseForUsage: u. + self assignExpense: expense toMessages: messages]]. + + [(data := dataStream next) = nil] whileFalse: + [| chunk msg chunkText chunkToolCall | + chunk := data utf8ToSqueak parseAsJson openAIWithSqueakLineEndings. + msg := messages last. + (chunk at: #error) ifNotNil: [:error | + OpenAIError + signalForType: error type + parameter: error param + code: error code + message: error message]. + logRawOutput ifTrue: + [messages do: [:message | + message rawOutput messageChunks addLast: chunk]]. + + chunk type caseOf: { + "Is fired at the very beginning. Only useful because it contains a `usage` field with the initial input tokens consumed (and a single output token)." 
+ ['message_start'] -> [trackExpense value: chunk message usage]. + "Signals the start of a new content block (either text or tool_use). The tool_use one contains the name and ID of the function being called." + ['content_block_start'] -> [ + "Create new toolcall" + chunk content_block type = 'tool_use' ifTrue: [ + chunkToolCall := (SemanticStreamingToolCall + key: chunk content_block id + tool: chunk content_block name + arguments: chunk content_block input)] + ]. + "This contains the actual message text deltas or tool use JSON input deltas" + ['content_block_delta'] -> [ + chunk delta type caseOf: { + ['text_delta'] -> [chunkText := chunk delta text]. + ['input_json_delta'] -> [ + | aStreamingToolCall argumentsChunk | + "This should be getting the tool call using `chunk index`, but `chunk index` represents the index of the content block (so the first tool call would have an index of 1 (with 0-based indexing)), since the content_block at index 0 is a text content block." + self flag: #todo. + aStreamingToolCall := msg toolCalls last. + argumentsChunk := chunk delta partial_json. + argumentsChunk ifNotEmpty: [ + aStreamingToolCall arguments: + (aStreamingToolCall arguments isString ifTrue: [aStreamingToolCall arguments , argumentsChunk] + ifFalse: [argumentsChunk])] + ] + } otherwise: [:deltaType | self error: 'got unknown content_block_delta type: ' , deltaType]. + ]. + "Useful for knowing when a tool call has finished streaming so we can parse its input JSON" + ['content_block_stop'] -> [ + | toolCall | + self flag: #todo. "See above note" + toolCall := msg toolCalls ifNotEmpty: [msg toolCalls last] ifEmpty: [nil]. + toolCall ifNotNil: [ + (toolCall tool isString and: [toolSpec notNil]) ifTrue: + [toolSpec toolNamed: toolCall tool ifPresent: [:tool | + toolCall tool: tool]]. + toolCall arguments: + ([toolCall arguments parseAsOrderedJson] ifError: [toolCall arguments]) + ] + ]. 
+ "Fired near the end, contains only `usage`" + ['message_delta'] -> [trackExpense value: chunk usage]. + "Fired when message is complete" + ['message_stop'] -> [msg beComplete] + } otherwise: ["Some events are not relevant and can be safely ignored"]. + + (chunkText isEmptyOrNil and: [chunkToolCall isNil]) ifFalse: + [msg addChunk: chunkText toolCalls: (chunkToolCall ifNotNil: [{chunkToolCall}]) tokenProbabilities: nil]]. + + self assert: dataStream next isNil] + + ensure: + [self account + noteExpense: + (expense ifNil: [self expenseForReplies: messages after: promptTokens]) + forUser: aConfig user + model: self name]] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/isLegacy.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/isLegacy.st new file mode 100644 index 0000000..8b1f1dc --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/isLegacy.st @@ -0,0 +1,6 @@ +testing +isLegacy + + self flag: #modelConstants. + + ^ false \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxCompletionTokens.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxCompletionTokens.st new file mode 100644 index 0000000..02975f7 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxCompletionTokens.st @@ -0,0 +1,11 @@ +accessing +maxCompletionTokens + + self flag: #modelConstants. + + "https://docs.anthropic.com/en/docs/about-claude/models" + "Claude 3.5 family all have 8192 max tokens output, Claude 3 family have 4096." 
+ + ^ (self resolvedName beginsWith: 'claude-3-5-') + ifTrue: [8192] + ifFalse: [4096] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxPromptTokens.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxPromptTokens.st new file mode 100644 index 0000000..caf2f78 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxPromptTokens.st @@ -0,0 +1,4 @@ +accessing +maxPromptTokens + + ^ self maxTokens \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxTokens.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxTokens.st new file mode 100644 index 0000000..bcd9a9d --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maxTokens.st @@ -0,0 +1,9 @@ +accessing +maxTokens + + self flag: #modelConstants. + + "https://docs.anthropic.com/en/docs/about-claude/models" + "200K context window for all models across the board." + + ^ 200000 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor..st new file mode 100644 index 0000000..0d58857 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor..st @@ -0,0 +1,7 @@ +pricing +maximumPriceFor: aConversation + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." 
+ + ^ self + maximumPriceFor: aConversation + answers: 1 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor.answers..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor.answers..st new file mode 100644 index 0000000..14ee997 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/maximumPriceFor.answers..st @@ -0,0 +1,8 @@ +pricing +maximumPriceFor: aConversation answers: number + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." + + ^ self + priceFor: aConversation + answers: number + completionSize: nil \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor..st new file mode 100644 index 0000000..a8e7ee6 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor..st @@ -0,0 +1,7 @@ +pricing +minimumPriceFor: aConversation + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." + + ^ self + minimumPriceFor: aConversation + answers: 1 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor.answers..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor.answers..st new file mode 100644 index 0000000..4bd59a8 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/minimumPriceFor.answers..st @@ -0,0 +1,8 @@ +pricing +minimumPriceFor: aConversation answers: number + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations."
+ + ^ self + priceFor: aConversation + answers: number + completionSize: 0 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/nameForRateLimits.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/nameForRateLimits.st new file mode 100644 index 0000000..49ad7c2 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/nameForRateLimits.st @@ -0,0 +1,4 @@ +private +nameForRateLimits + + ^ self name \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st new file mode 100644 index 0000000..a82f868 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessageFrom.for.logRawOutput..st @@ -0,0 +1,25 @@ +private - requests +parseMessageFrom: messageCompletion for: aConversation logRawOutput: logRawOutput + + | message toolUseBlocks | + message := SemanticMessage + conversation: aConversation + role: messageCompletion role + content: (messageCompletion content first type = 'text' + ifTrue: [ + messageCompletion content first text] + ifFalse: [ + messageCompletion content]). + + logRawOutput ifTrue: + [message rawOutput: + (JsonObject new + messageCompletion: messageCompletion; + yourself)]. + + toolUseBlocks := messageCompletion content select: [:contentBlock | contentBlock type = 'tool_use']. + toolUseBlocks size > 0 ifTrue: [ + message toolCalls: + (self parseToolCallsFrom: toolUseBlocks toolSpec: aConversation activeToolSpec)]. 
+ + ^ message \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st new file mode 100644 index 0000000..9063e9b --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseMessagesFrom.for.logRawOutput..st @@ -0,0 +1,16 @@ +private - requests +parseMessagesFrom: messageCompletion for: aConversation logRawOutput: logRawOutput + + | message messages | + + message := self parseMessageFrom: messageCompletion + for: aConversation + logRawOutput: logRawOutput. + + messages := {message}. + messageCompletion usage ifNotNil: [:usage | + | expense | + expense := self expenseForUsage: usage. + self assignExpense: expense toMessages: messages]. + + ^ messages \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallFrom.toolSpec..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallFrom.toolSpec..st new file mode 100644 index 0000000..f478fa6 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallFrom.toolSpec..st @@ -0,0 +1,17 @@ +private - requests +parseToolCallFrom: toolCall toolSpec: aToolSpecOrNil + + | name input function | + name := toolCall name. + input := toolCall input. + + function := aToolSpecOrNil ifNotNil: + [aToolSpecOrNil toolNamed: name ifAbsent: []]. + + "Below was `input parseAsOrderedJson`, but I don't know how to convert from JsonObject to OrderedJsonObject. I just left it as JsonObject for now." + self flag: #todo. 
+ ^ SemanticToolCall + key: toolCall id + tool: (function ifNil: [name]) + "It seems like OpenAI gives us the input as a plain string, whereas Anthropic gives us a JSON object" + arguments: input \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallsFrom.toolSpec..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallsFrom.toolSpec..st new file mode 100644 index 0000000..fe0c843 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/parseToolCallsFrom.toolSpec..st @@ -0,0 +1,5 @@ +private - requests +parseToolCallsFrom: toolCalls toolSpec: aToolSpecOrNil + + ^ toolCalls collect: [:toolCall | + self parseToolCallFrom: toolCall toolSpec: aToolSpecOrNil] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/pathToEndpoint.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/pathToEndpoint.st new file mode 100644 index 0000000..05a717f --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/pathToEndpoint.st @@ -0,0 +1,4 @@ +private +pathToEndpoint + + ^ '/v1/messages' \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor..st new file mode 100644 index 0000000..a9aff8c --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor..st @@ -0,0 +1,5 @@ +pricing +priceFor: aConversation + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations."
+ + ^ self maximumPriceFor: aConversation \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers..st new file mode 100644 index 0000000..96ebe45 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers..st @@ -0,0 +1,5 @@ +pricing +priceFor: aConversation answers: number + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." + + ^ self maximumPriceFor: aConversation answers: number \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers.completionSize..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers.completionSize..st new file mode 100644 index 0000000..5abfef8 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.answers.completionSize..st @@ -0,0 +1,16 @@ +pricing +priceFor: aConversation answers: number completionSize: completionSizeOrNil + "If completionSizeOrNil is nil, the maximum possible size will be assumed. + Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." + + | promptTokens completionTokens priceForPrompt priceForCompletion | + promptTokens := aConversation estimatePromptTokens. + completionTokens := aConversation maxCompletionTokens. "this is indeed an upper approximation!" + completionSizeOrNil ifNotNil: + [completionTokens := completionTokens clampHigh: (self maxTokensInStringOfSize: completionSizeOrNil)]. + + priceForPrompt := (OpenAIAmount approximateCents: self centsPerPromptToken) + * promptTokens. + priceForCompletion := (OpenAIAmount approximateCents: self centsPerCompletionToken) + * completionTokens.
+ ^ priceForPrompt + (priceForCompletion * number) \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionMessage..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionMessage..st new file mode 100644 index 0000000..f93a622 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionMessage..st @@ -0,0 +1,13 @@ +pricing +priceFor: aConversation completionMessage: aMessage + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." + + | promptTokens completionTokens priceForPrompt priceForCompletion | + promptTokens := aConversation estimatePromptTokens. + completionTokens := self countTokensInMessage: aMessage. + + priceForPrompt := (OpenAIAmount approximateCents: self centsPerPromptToken) + * promptTokens. + priceForCompletion := (OpenAIAmount approximateCents: self centsPerCompletionToken) + * completionTokens. + ^ priceForPrompt + priceForCompletion \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionSize..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionSize..st new file mode 100644 index 0000000..7b7fee5 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceFor.completionSize..st @@ -0,0 +1,8 @@ +pricing +priceFor: aConversation completionSize: completionSize + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations."
+ + ^ self + priceFor: aConversation + answers: 1 + completionSize: completionSize \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceForPrompt..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceForPrompt..st new file mode 100644 index 0000000..301b6b5 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/priceForPrompt..st @@ -0,0 +1,6 @@ +pricing +priceForPrompt: aString + "Note: Prices are hardcoded and might not encompass any recent pricing updates by Anthropic. Token counts are (upper) approximations." + + ^ (OpenAIAmount approximateCents: self centsPerPromptToken) + * (self countTokensIn: aString) \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunction.on..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunction.on..st new file mode 100644 index 0000000..68cab27 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunction.on..st @@ -0,0 +1,23 @@ +private - tokens +printFunction: aFunction on: aStream + + aFunction description isEmptyOrNil ifFalse: + [aStream + nextPutAll: '// '; + nextPutAll: aFunction description; + cr]. + + aFunction name isEmptyOrNil ifFalse: + [aStream + nextPutAll: 'type '; + nextPutAll: aFunction name; + nextPutAll: ' = (_: {'; + cr]. + + self + printFunctionParameterSchema: aFunction asOpenAIObject function parameters + on: aStream. + + aStream + nextPutAll: '}) => any;'; + cr.
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionCallArguments.on..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionCallArguments.on..st new file mode 100644 index 0000000..58252f9 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionCallArguments.on..st @@ -0,0 +1,12 @@ +private - tokens +printFunctionCallArguments: arguments on: aStream + + aStream nextPut: ${; cr. + arguments keysAndValuesDo: [:argumentName :argumentValue | + aStream + print: argumentName; + nextPut: $:; + print: argumentValue; + nextPut: $,; + cr]. + aStream nextPut: $}; cr. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on..st new file mode 100644 index 0000000..439d89c --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on..st @@ -0,0 +1,7 @@ +private - tokens +printFunctionParameterSchema: schema on: aStream + + ^ self + printFunctionParameterSchema: schema + on: aStream + indent: 0 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on.indent..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on.indent..st new file mode 100644 index 0000000..9c3403a --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterSchema.on.indent..st @@ -0,0 +1,22 @@ +private - tokens +printFunctionParameterSchema: schema on: aStream indent: indent + + | required | + required := schema required. 
+ schema properties keysAndValuesDo: [:key :value | + value description isEmptyOrNil ifFalse: + [aStream + space: indent; + nextPutAll: '// '; + nextPutAll: value description; + cr]. + aStream + space: indent; + nextPutAll: key. + (required includes: key) ifFalse: + [aStream nextPut: $?]. + aStream nextPutAll: ': '. + self printFunctionParameterType: value on: aStream indent: indent. + aStream + nextPut: $,; + cr]. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterType.on.indent..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterType.on.indent..st new file mode 100644 index 0000000..842b300 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printFunctionParameterType.on.indent..st @@ -0,0 +1,50 @@ +private - tokens +printFunctionParameterType: schema on: aStream indent: indent + + true + caseOf: + {[schema type = #string] -> + [schema enum ifNotNil: [:enum | + enum + do: [:value | + aStream + nextPut: $"; + nextPutAll: value; + nextPut: $"] + separatedBy: + [aStream nextPutAll: ' | ']. + ^ self]. + aStream nextPutAll: #string]. + [schema type = #array] -> + [schema items + ifNotNil: [:items | + self printFunctionParameterType: items on: aStream indent: indent] + ifNil: + [aStream nextPutAll: 'any']. + aStream nextPutAll: '[]']. + [schema type = #object] -> + [aStream + nextPut: ${; + cr. + self printFunctionParameterSchema: schema on: aStream indent: indent + 2. + aStream + cr; + nextPut: $}]. + [#(integer number) includes: schema type] -> + [schema enum ifNotNil: [:enum | + enum + do: [:value | + aStream + nextPut: $"; + nextPutAll: value; + nextPut: $"] + separatedBy: + [aStream nextPutAll: ' | ']. + ^ self]. + aStream nextPutAll: 'number']. + [schema type = #boolean] -> + [aStream nextPutAll: 'boolean']. + [schema type = #null] -> + [aStream nextPutAll: 'null']} + otherwise: + []. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printOn..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printOn..st new file mode 100644 index 0000000..6f45c1b --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printOn..st @@ -0,0 +1,16 @@ +printing +printOn: aStream + + super printOn: aStream. + + self resolvedName = self name ifFalse: + [aStream + nextPutAll: ' ('; + nextPutAll: self resolvedName; + nextPut: $)]. + + aStream nextPutAll: + (' - {1} tokens, {2} in/{3} out' format: + {(self maxTokens // 1000) asString , 'K'. + OpenAIAmount exactCents: self centsPerPromptToken. + OpenAIAmount exactCents: self centsPerCompletionToken}). \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/printToolSpec.on..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printToolSpec.on..st new file mode 100644 index 0000000..55233ea --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/printToolSpec.on..st @@ -0,0 +1,12 @@ +private - tokens +printToolSpec: aToolSpec on: aStream + + aStream + nextPutAll: 'namespace functions {'; cr; + cr. + aToolSpec tools do: [:tool | + self assert: tool type = #function. + self printFunction: tool on: aStream]. + aStream + cr; + nextPutAll: '} // namespace functions'. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/releaseDate.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/releaseDate.st new file mode 100644 index 0000000..0e71cc7 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/releaseDate.st @@ -0,0 +1,21 @@ +accessing +releaseDate + + self flag: #modelConstants. 
+ + ^ self name + caseOf: { + "Latest versions - handle specially since date is not in name" + [self class claude35SonnetLatestName] -> [Date year: 2024 month: 10 day: 22]. + [self class claude35HaikuLatestName] -> [Date year: 2024 month: 10 day: 22]. + [self class claude3OpusLatestName] -> [Date year: 2024 month: 2 day: 29]} + otherwise: + [(self name includesSubstring: 'claude') + ifTrue: [ + "Extract date from model name (format: yyyymmdd)" + | dateStr | + dateStr := (self name last: 8). + Date + year: (dateStr first: 4) asNumber + month: ((dateStr copyFrom: 5 to: 6) asNumber) + day: ((dateStr copyFrom: 7 to: 8) asNumber)]] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/resolvedName.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/resolvedName.st new file mode 100644 index 0000000..7774509 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/resolvedName.st @@ -0,0 +1,6 @@ +accessing +resolvedName + + self flag: #modelConstants. + "Anthropic define '-latest' models directly, for which we already have definitions. No need to resolve anything." + ^ self name \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/shouldStreamRequests.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/shouldStreamRequests.st new file mode 100644 index 0000000..9710aac --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/shouldStreamRequests.st @@ -0,0 +1,5 @@ +service +shouldStreamRequests + "Send even requests asynchronously to reduce the latency of initial #getAnswers: send. Experimental!" 
+ + ^ true \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/sortKey.st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/sortKey.st new file mode 100644 index 0000000..ca23dcd --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/sortKey.st @@ -0,0 +1,11 @@ +comparing +sortKey + + "#releaseDate descending , #name ascending" + | recommendationIndex age maxAge | + recommendationIndex := 2 - ({self class bestName. self class cheapestName} indexOf: self name). + age := (Date today - (self releaseDate ifNil: [Date today])) days. + maxAge := 100 "years" * 365 "days". + ^ recommendationIndex asString + , (age printStringPadded: (maxAge log: 10) ceiling) + , self name \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/instance/streamEventDataFrom..st b/packages/SemanticText.package/AnthropicConversationModel.class/instance/streamEventDataFrom..st new file mode 100644 index 0000000..e9b3402 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/instance/streamEventDataFrom..st @@ -0,0 +1,42 @@ +private - network +streamEventDataFrom: aWebResponse + "Parse server-sent events (SSE) and stream them onto a new generator." + + | responseStream | + self flag: #moveUpstream. "and add support for named events" + responseStream := Generator on: [:gen | + [aWebResponse streamFrom: aWebResponse stream to: gen size: nil progress: nil] + ensure: [aWebResponse close]]. + + ^ Generator on: [:datasStream | + [| data | + data := String streamContents: [:dataStream | + [responseStream atEnd + ifFalse: + [| line | + line := String streamContents: [:lineStream | + [responseStream atEnd + ifFalse: + [(CharacterSet crlf includes: responseStream peek) + ifTrue: [responseStream next] + ifFalse: [lineStream nextPut: responseStream next]; + yourself] + ifTrue: [true]] + whileFalse]. 
+ line isEmpty + ifFalse: [ + "Each event is a pair of 'event: ...' and 'data: ...' separated by a newline." + "Since the event type itself is included in the 'data:' section, we just ignore the 'event:' lines entirely." + (line beginsWith: 'data: ') ifTrue: [ + dataStream nextPutAll: (line allButFirst: 'data: ' size). + dataStream cr + ]. + ]; + yourself] + ifTrue: [true]] + whileFalse]. + data isEmptyOrNil + ifFalse: + [datasStream nextPut: data allButLast]; + yourself] + whileFalse] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json new file mode 100644 index 0000000..a864e18 --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/methodProperties.json @@ -0,0 +1,67 @@ +{ + "class" : { + "bestName" : "zakkor 1/10/2025 16:05", + "cheapestName" : "zakkor 1/10/2025 16:05", + "claude35Haiku20241022Name" : "zakkor 1/10/2025 16:02", + "claude35HaikuLatestName" : "zakkor 1/10/2025 16:03", + "claude35Sonnet20240620Name" : "zakkor 1/10/2025 15:58", + "claude35Sonnet20241022Name" : "zakkor 1/10/2025 15:58", + "claude35SonnetLatestName" : "zakkor 1/10/2025 15:59", + "claude3Haiku20240307Name" : "zakkor 1/10/2025 16:04", + "claude3Opus20240229Name" : "zakkor 1/10/2025 16:03", + "claude3OpusLatestName" : "zakkor 1/10/2025 16:03", + "claude3Sonnet20240229Name" : "zakkor 1/10/2025 16:04", + "defaultModelNames" : "zakkor 1/10/2025 16:08", + "initialize" : "ct 2/4/2024 19:49", + "unload" : "ct 8/17/2023 19:28" }, + "instance" : { + "addToolSpec:toInput:" : "zakkor 1/13/2025 18:51", + "assignExpense:toMessages:" : "ct 5/25/2024 23:42", + "centsPerCompletionToken" : "zakkor 1/10/2025 19:19", + "centsPerPromptToken" : "zakkor 1/10/2025 19:18", + "countTokensInConversation:" : "ct 6/19/2024 21:04", + "countTokensInMessage:" : "ct 4/30/2024 01:33", + "countTokensInMessage:hasMultipleToolMessages:" 
: "ct 4/30/2024 18:36", + "countTokensInToolSpec:" : "ct 4/29/2024 22:55", + "defaultConfig" : "ct 8/17/2023 17:49", + "defaultName" : "ct 2/4/2024 19:52", + "expenseForReplies:after:" : "ct 1/16/2024 16:11", + "expenseForUsage:" : "zakkor 1/11/2025 21:25", + "getAnswerFor:" : "ct 1/9/2024 00:14", + "getAnswerFor:config:" : "ct 1/8/2024 23:52", + "getAnswers:for:" : "ct 1/8/2024 22:28", + "getAnswers:for:config:" : "zakkor 1/16/2025 00:34", + "handleAsyncReplies:for:config:logRawOutput:deferStreaming:" : "ct 5/2/2024 15:52", + "handleStreamingReplies:for:from:config:logRawOutput:" : "zakkor 1/21/2025 15:12", + "isLegacy" : "zakkor 1/10/2025 19:02", + "maxCompletionTokens" : "zakkor 1/10/2025 17:18", + "maxPromptTokens" : "ct 2/5/2024 20:48", + "maxTokens" : "zakkor 1/10/2025 17:19", + "maximumPriceFor:" : "zakkor 1/13/2025 22:36", + "maximumPriceFor:answers:" : "zakkor 1/13/2025 22:36", + "minimumPriceFor:" : "zakkor 1/10/2025 20:12", + "minimumPriceFor:answers:" : "zakkor 1/10/2025 20:12", + "nameForRateLimits" : "zakkor 1/13/2025 22:29", + "parseMessageFrom:for:logRawOutput:" : "zakkor 1/15/2025 23:16", + "parseMessagesFrom:for:logRawOutput:" : "zakkor 1/15/2025 23:23", + "parseToolCallFrom:toolSpec:" : "zakkor 1/15/2025 14:33", + "parseToolCallsFrom:toolSpec:" : "ct 1/16/2024 00:47", + "pathToEndpoint" : "zakkor 1/10/2025 16:36", + "priceFor:" : "zakkor 1/10/2025 20:12", + "priceFor:answers:" : "zakkor 1/10/2025 20:12", + "priceFor:answers:completionSize:" : "zakkor 1/13/2025 22:37", + "priceFor:completionMessage:" : "ct 4/30/2024 01:35", + "priceFor:completionSize:" : "ct 2/3/2024 21:24", + "priceForPrompt:" : "ct 2/3/2024 21:23", + "printFunction:on:" : "ct 2/13/2024 02:25", + "printFunctionCallArguments:on:" : "ct 2/13/2024 01:20", + "printFunctionParameterSchema:on:" : "ct 2/13/2024 02:10", + "printFunctionParameterSchema:on:indent:" : "ct 4/29/2024 21:56", + "printFunctionParameterType:on:indent:" : "ct 2/13/2024 02:23", + "printOn:" : "ct 2/10/2024 00:04", 
+ "printToolSpec:on:" : "ct 4/29/2024 21:44", + "releaseDate" : "zakkor 1/10/2025 19:02", + "resolvedName" : "zakkor 1/10/2025 16:43", + "shouldStreamRequests" : "ct 5/2/2024 01:50", + "sortKey" : "ct 7/31/2024 22:30", + "streamEventDataFrom:" : "zakkor 1/15/2025 21:42" } } diff --git a/packages/SemanticText.package/AnthropicConversationModel.class/properties.json b/packages/SemanticText.package/AnthropicConversationModel.class/properties.json new file mode 100644 index 0000000..fc316fd --- /dev/null +++ b/packages/SemanticText.package/AnthropicConversationModel.class/properties.json @@ -0,0 +1,14 @@ +{ + "category" : "SemanticText-Providers-Anthropic", + "classinstvars" : [ + ], + "classvars" : [ + ], + "commentStamp" : "", + "instvars" : [ + ], + "name" : "AnthropicConversationModel", + "pools" : [ + ], + "super" : "AnthropicModel", + "type" : "normal" } diff --git a/packages/SemanticText.package/AnthropicModel.class/README.md b/packages/SemanticText.package/AnthropicModel.class/README.md new file mode 100644 index 0000000..d77432d --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/README.md @@ -0,0 +1 @@ +I am the abstract superclass for all models from the Anthropic API (https://docs.anthropic.com/en/api). \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/class/updateModels.st b/packages/SemanticText.package/AnthropicModel.class/class/updateModels.st new file mode 100644 index 0000000..f3f0fc0 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/class/updateModels.st @@ -0,0 +1,8 @@ +support +updateModels + + "There's currently no API for this..." + "See: + * https://www.anthropic.com/pricing + * https://docs.anthropic.com/en/api/rate-limits" + self systemNavigation browseAllCallsOn: #modelConstants localToPackage: self packageInfo name.
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/account..st b/packages/SemanticText.package/AnthropicModel.class/instance/account..st new file mode 100644 index 0000000..65b4a66 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/account..st @@ -0,0 +1,4 @@ +accessing +account: anAccount + + account := anAccount. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/account.st b/packages/SemanticText.package/AnthropicModel.class/instance/account.st new file mode 100644 index 0000000..9b69e52 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/account.st @@ -0,0 +1,4 @@ +accessing +account + + ^ account \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/asStringOrText.st b/packages/SemanticText.package/AnthropicModel.class/instance/asStringOrText.st new file mode 100644 index 0000000..51d5b5a --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/asStringOrText.st @@ -0,0 +1,10 @@ +printing +asStringOrText + + | result | + result := super asStringOrText. + self isLegacy ifFalse: [^ result]. + + ^ result asText + addAttributesForDeprecation; + yourself \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/assureAvailableOr..st b/packages/SemanticText.package/AnthropicModel.class/instance/assureAvailableOr..st new file mode 100644 index 0000000..96532ca --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/assureAvailableOr..st @@ -0,0 +1,12 @@ +support +assureAvailableOr: requirementsBlock + + self account ifNil: + [^ requirementsBlock value: + {{#openAIAccount. 'provide an Anthropic account'. [self inspect]}}]. + + self account hasApiKey ifFalse: + [^ requirementsBlock value: + {{#openAIAccount. 'provide an Anthropic API key'. 
[PreferenceBrowser open searchPattern: 'Anthropic API']}}]. + + ^ nil \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/baseConfig..st b/packages/SemanticText.package/AnthropicModel.class/instance/baseConfig..st new file mode 100644 index 0000000..2c52bfd --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/baseConfig..st @@ -0,0 +1,4 @@ +accessing +baseConfig: aConfig + + baseConfig := aConfig. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/baseConfig.st b/packages/SemanticText.package/AnthropicModel.class/instance/baseConfig.st new file mode 100644 index 0000000..be773c3 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/baseConfig.st @@ -0,0 +1,4 @@ +accessing +baseConfig + + ^ baseConfig \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/baseUrl.st b/packages/SemanticText.package/AnthropicModel.class/instance/baseUrl.st new file mode 100644 index 0000000..1cbf068 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/baseUrl.st @@ -0,0 +1,4 @@ +private +baseUrl + + ^ self account baseUrl \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/countTokensIn..st b/packages/SemanticText.package/AnthropicModel.class/instance/countTokensIn..st new file mode 100644 index 0000000..807b4e6 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/countTokensIn..st @@ -0,0 +1,6 @@ +service +countTokensIn: aString + + self flag: #approximation. "Upper approximation! Could use something like OpenAI's tiktoken for more precise counts. Should honor different tokenizers!" 
+ + ^ self maxTokensInStringOfSize: aString size \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/defaultAccount.st b/packages/SemanticText.package/AnthropicModel.class/instance/defaultAccount.st new file mode 100644 index 0000000..9844fbc --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/defaultAccount.st @@ -0,0 +1,4 @@ +initialize-release +defaultAccount + + ^ AnthropicAccount defaultAccount \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/defaultConfig.st b/packages/SemanticText.package/AnthropicModel.class/instance/defaultConfig.st new file mode 100644 index 0000000..958bee1 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/defaultConfig.st @@ -0,0 +1,4 @@ +initialize-release +defaultConfig + + ^ SemanticConfig new \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/defaultName.st b/packages/SemanticText.package/AnthropicModel.class/instance/defaultName.st new file mode 100644 index 0000000..09a9775 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/defaultName.st @@ -0,0 +1,4 @@ +initialize-release +defaultName + + ^ nil \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/expenseForUsage..st b/packages/SemanticText.package/AnthropicModel.class/instance/expenseForUsage..st new file mode 100644 index 0000000..35b9a35 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/expenseForUsage..st @@ -0,0 +1,4 @@ +private +expenseForUsage: usage + + ^ self subclassResponsibility \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/handleTransientServerErrorsDuring..st b/packages/SemanticText.package/AnthropicModel.class/instance/handleTransientServerErrorsDuring..st new file mode 100644 index 0000000..2a4291c 
--- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/handleTransientServerErrorsDuring..st @@ -0,0 +1,17 @@ +private +handleTransientServerErrorsDuring: requestBlock + + | pendingRetries success result | + pendingRetries := 3. + success := false. + [[result := requestBlock value. success := true] + on: ConnectionClosed , ConnectionTimedOut do: [:ex | + (pendingRetries := pendingRetries - 1) <= 0 ifTrue: [ex pass]. + Transcript showln: 'retry: ' , ex] + on: OpenAIError do: [:ex | + ex code = 503 ifFalse: [ex pass]. + (pendingRetries := pendingRetries - 1) <= 0 ifTrue: [ex pass]. + Transcript showln: 'retry: ' , ex]. + success] + whileFalse: [success]. + ^ result \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/initialize.st b/packages/SemanticText.package/AnthropicModel.class/instance/initialize.st new file mode 100644 index 0000000..1bdfca8 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/initialize.st @@ -0,0 +1,8 @@ +initialize-release +initialize + + super initialize. + + self name: self defaultName. + self account: self defaultAccount. + self baseConfig: self defaultConfig. 
\ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput..st b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput..st new file mode 100644 index 0000000..a7aa99e --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput..st @@ -0,0 +1,8 @@ +private +invokeWithConfig: aConfigOrNil documents: documentsOrNil editInput: inputBlock + + ^ self + invokeWithConfig: aConfigOrNil + documents: documentsOrNil + editInput: inputBlock + handleResponse: [:response |] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput.handleResponse..st b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput.handleResponse..st new file mode 100644 index 0000000..01be36c --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.documents.editInput.handleResponse..st @@ -0,0 +1,73 @@ +private +invokeWithConfig: aConfigOrNil documents: documentsOrNil editInput: inputBlock handleResponse: responseBlock + + | config input output response | + config := self baseConfig. + aConfigOrNil ifNotNil: + [config := config updatedWith: aConfigOrNil]. + + input := JsonObject new + model: self name; + "max_tokens is required" + max_tokens: self maxCompletionTokens; + yourself. + + inputBlock ifNotNil: + [inputBlock cull: input cull: config]. + config anthropicRawConfig ifNotNil: [:rawConfig | + rawConfig keysAndValuesDo: [:key :value | + input at: key put: value]]. + + self handleTransientServerErrorsDuring: + [response := self account controlConnectionDuring: + [| client authorizeBlock | + client := WebClient new. + authorizeBlock := [:request | + self account apiKey ifNotNil: [:apiKey | + request headerAt: 'x-api-key' put: apiKey. 
+ request headerAt: 'anthropic-version' put: '2023-06-01']]. + documentsOrNil + ifNil: + [client + httpPost: self url + content: input openAIWithUnixLineEndings asJsonString squeakToUtf8 + type: 'application/json' + do: authorizeBlock] + ifNotNil: + [| inputFields documentFields | + inputFields := input openAIWithUnixLineEndings associations gather: [:assoc | + assoc value isArray + ifFalse: + [{assoc key squeakToUtf8 -> assoc value squeakToUtf8}] + ifTrue: + [assoc value collect: [:ea | assoc key squeakToUtf8 -> ea squeakToUtf8]]]. + documentFields := (documentsOrNil as: OrderedDictionary) associations collect: [:assoc | + assoc key squeakToUtf8 -> + (assoc value copy + in: [:copy | + copy content isString ifTrue: + [copy content: assoc value content squeakToUtf8]]; + yourself)]. + client + openAIHttpPost: self url + multipartFields: inputFields , documentFields + do: authorizeBlock]]. + + output := (responseBlock ifNotNil: [responseBlock cull: response cull: config]) + ifNil: [response content utf8ToSqueak withoutTrailingBlanks parseAsJson openAIWithSqueakLineEndings]. + + response isSuccess ifFalse: + [| error | + error := output at: #error. + ^ OpenAIError + signalForType: error type + parameter: error param + code: error code + message: error message]]. + + output usage ifNotNil: [:usage | + | expense | + expense := self expenseForUsage: usage. + self account noteExpense: expense forUser: config user model: self name]. 
+ + ^ output \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput..st b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput..st new file mode 100644 index 0000000..a4efa82 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput..st @@ -0,0 +1,7 @@ +private +invokeWithConfig: aConfigOrNil editInput: inputBlock + + ^ self + invokeWithConfig: aConfigOrNil + editInput: inputBlock + handleResponse: [:response |] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput.handleResponse..st b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput.handleResponse..st new file mode 100644 index 0000000..1b95a45 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/invokeWithConfig.editInput.handleResponse..st @@ -0,0 +1,8 @@ +private +invokeWithConfig: aConfigOrNil editInput: inputBlock handleResponse: responseBlock + + ^ self + invokeWithConfig: aConfigOrNil + documents: nil + editInput: inputBlock + handleResponse: responseBlock \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/isLegacy.st b/packages/SemanticText.package/AnthropicModel.class/instance/isLegacy.st new file mode 100644 index 0000000..89b9cd1 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/isLegacy.st @@ -0,0 +1,4 @@ +testing +isLegacy + + ^ false \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/maxCharactersInTokens..st b/packages/SemanticText.package/AnthropicModel.class/instance/maxCharactersInTokens..st new file mode 100644 index 0000000..d3a0cb8 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/maxCharactersInTokens..st @@ -0,0 +1,4 @@ +service 
+maxCharactersInTokens: numberOfTokens + + ^ numberOfTokens * 4 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/maxTokensInStringOfSize..st b/packages/SemanticText.package/AnthropicModel.class/instance/maxTokensInStringOfSize..st new file mode 100644 index 0000000..9c83400 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/maxTokensInStringOfSize..st @@ -0,0 +1,5 @@ +service +maxTokensInStringOfSize: stringSize + + stringSize = 1 ifTrue: [^ 1]. + ^ stringSize // 2 \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/name..st b/packages/SemanticText.package/AnthropicModel.class/instance/name..st new file mode 100644 index 0000000..540ea67 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/name..st @@ -0,0 +1,4 @@ +accessing +name: aString + + name := aString. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/name.st b/packages/SemanticText.package/AnthropicModel.class/instance/name.st new file mode 100644 index 0000000..4b7f350 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/name.st @@ -0,0 +1,4 @@ +accessing +name + + ^ name \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/nameForRateLimits.st b/packages/SemanticText.package/AnthropicModel.class/instance/nameForRateLimits.st new file mode 100644 index 0000000..6df4575 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/nameForRateLimits.st @@ -0,0 +1,4 @@ +private +nameForRateLimits + + ^ self name \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/pathToEndpoint.st b/packages/SemanticText.package/AnthropicModel.class/instance/pathToEndpoint.st new file mode 100644 index 0000000..8f8fd10 --- /dev/null +++ 
b/packages/SemanticText.package/AnthropicModel.class/instance/pathToEndpoint.st @@ -0,0 +1,4 @@ +private +pathToEndpoint + + ^ self subclassResponsibility \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/printOn..st b/packages/SemanticText.package/AnthropicModel.class/instance/printOn..st new file mode 100644 index 0000000..53f98e6 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/printOn..st @@ -0,0 +1,4 @@ +printing +printOn: aStream + + aStream nextPutAll: self name. \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/requestsPerMinute.st b/packages/SemanticText.package/AnthropicModel.class/instance/requestsPerMinute.st new file mode 100644 index 0000000..8bdf1b2 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/requestsPerMinute.st @@ -0,0 +1,4 @@ +rate limits +requestsPerMinute + + ^ self account requestsPerMinuteForModel: self nameForRateLimits ifUnknown: [nil] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/tokensPerMinute.st b/packages/SemanticText.package/AnthropicModel.class/instance/tokensPerMinute.st new file mode 100644 index 0000000..4a135c2 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/tokensPerMinute.st @@ -0,0 +1,4 @@ +rate limits +tokensPerMinute + + ^ self account tokensPerMinuteForModel: self nameForRateLimits ifUnknown: [nil] \ No newline at end of file diff --git a/packages/SemanticText.package/AnthropicModel.class/instance/url.st b/packages/SemanticText.package/AnthropicModel.class/instance/url.st new file mode 100644 index 0000000..dd5fd56 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/instance/url.st @@ -0,0 +1,4 @@ +private +url + + ^ self baseUrl , self pathToEndpoint \ No newline at end of file diff --git 
a/packages/SemanticText.package/AnthropicModel.class/methodProperties.json b/packages/SemanticText.package/AnthropicModel.class/methodProperties.json new file mode 100644 index 0000000..059d3b3 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/methodProperties.json @@ -0,0 +1,33 @@ +{ + "class" : { + "updateModels" : "ct 11/28/2023 12:44" }, + "instance" : { + "account" : "ct 8/17/2023 17:29", + "account:" : "ct 8/17/2023 17:29", + "asStringOrText" : "ct 2/4/2024 20:15", + "assureAvailableOr:" : "zakkor 1/10/2025 20:13", + "baseConfig" : "ct 8/17/2023 17:47", + "baseConfig:" : "ct 8/17/2023 17:48", + "baseUrl" : "ct 12/1/2023 23:28", + "countTokensIn:" : "ct 5/25/2024 22:20", + "defaultAccount" : "zakkor 1/10/2025 15:50", + "defaultConfig" : "ct 8/17/2023 17:48", + "defaultName" : "ct 8/17/2023 18:16", + "expenseForUsage:" : "ct 12/11/2023 15:59", + "handleTransientServerErrorsDuring:" : "ct 2/9/2024 20:21", + "initialize" : "ct 8/17/2023 19:27", + "invokeWithConfig:documents:editInput:" : "ct 6/12/2024 00:38", + "invokeWithConfig:documents:editInput:handleResponse:" : "zakkor 1/13/2025 18:35", + "invokeWithConfig:editInput:" : "ct 8/20/2023 19:43", + "invokeWithConfig:editInput:handleResponse:" : "ct 6/18/2024 21:44", + "isLegacy" : "ct 2/7/2024 17:47", + "maxCharactersInTokens:" : "ct 2/3/2024 21:17", + "maxTokensInStringOfSize:" : "ct 2/7/2024 17:34", + "name" : "ct 8/17/2023 17:25", + "name:" : "ct 8/17/2023 17:25", + "nameForRateLimits" : "ct 2/5/2024 20:07", + "pathToEndpoint" : "ct 8/17/2023 18:08", + "printOn:" : "ct 8/17/2023 20:05", + "requestsPerMinute" : "ct 2/5/2024 20:07", + "tokensPerMinute" : "ct 2/5/2024 20:08", + "url" : "ct 12/1/2023 23:28" } } diff --git a/packages/SemanticText.package/AnthropicModel.class/properties.json b/packages/SemanticText.package/AnthropicModel.class/properties.json new file mode 100644 index 0000000..4fe7b33 --- /dev/null +++ b/packages/SemanticText.package/AnthropicModel.class/properties.json @@ 
-0,0 +1,16 @@ +{ + "category" : "SemanticText-Providers-Anthropic", + "classinstvars" : [ + ], + "classvars" : [ + ], + "commentStamp" : "", + "instvars" : [ + "name", + "account", + "baseConfig" ], + "name" : "AnthropicModel", + "pools" : [ + ], + "super" : "Object", + "type" : "normal" } diff --git a/packages/SemanticText.package/SemanticConfig.class/instance/anthropicRawConfig.st b/packages/SemanticText.package/SemanticConfig.class/instance/anthropicRawConfig.st new file mode 100644 index 0000000..961b38a --- /dev/null +++ b/packages/SemanticText.package/SemanticConfig.class/instance/anthropicRawConfig.st @@ -0,0 +1,5 @@ +*SemanticText-Providers-Anthropic-accessing +anthropicRawConfig + "Parameters that are directly added to the Anthropic model request. This will override any conflicting other properties of the config." + + ^ self argumentAt: #anthropicRawConfig ifAbsentPut: [JsonObject new] \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticConfig.class/methodProperties.json b/packages/SemanticText.package/SemanticConfig.class/methodProperties.json index 669a0a2..3acc640 100644 --- a/packages/SemanticText.package/SemanticConfig.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticConfig.class/methodProperties.json @@ -2,6 +2,7 @@ "class" : { }, "instance" : { + "anthropicRawConfig" : "zakkor 1/13/2025 18:33", "argumentAt:" : "ct 8/17/2023 19:43", "argumentAt:ifAbsentPut:" : "ct 1/8/2024 16:36", "argumentAt:put:" : "ct 8/17/2023 17:44", diff --git a/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass..st b/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass..st index 1d6c59d..12ffda5 100644 --- a/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass..st +++ b/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass..st @@ -1,5 +1,5 @@ accessing nucleusSamplingMass: 
aNumber - "Probabilitiy mass for nucleus sampling in (0, 1] (defaults to 1). Controls the coherence of outputs. A lower sampling mass excludes less probable words in the output, resulting in higher coherence and reduced creativity. Also referred to as top-p sampling. See also #temperature. It is disrecommended to modify both temperature and nucleusSamplingMass. For deciding between both parameters, experimentation with either is adviced." + "Probability mass for nucleus sampling in (0, 1] (defaults to 1). Controls the coherence of outputs. A lower sampling mass excludes less probable words in the output, resulting in higher coherence and reduced creativity. Also referred to as top-p sampling. See also #temperature. It is not recommended to modify both temperature and nucleusSamplingMass. For deciding between both parameters, experimentation with either is advised." self argumentAt: #nucleusSamplingMass put: aNumber. \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass.st b/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass.st index 1558b7e..0263792 100644 --- a/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass.st +++ b/packages/SemanticText.package/SemanticConversationConfig.class/instance/nucleusSamplingMass.st @@ -1,5 +1,5 @@ accessing nucleusSamplingMass - "Probabilitiy mass for nucleus sampling in (0, 1] (defaults to 1). Controls the coherence of outputs. A lower sampling mass excludes less probable words in the output, resulting in higher coherence and reduced creativity. Also referred to as top-p sampling. See also #temperature. It is disrecommended to modify both temperature and nucleusSamplingMass. For deciding between both parameters, experimentation with either is adviced." + "Probability mass for nucleus sampling in (0, 1] (defaults to 1). Controls the coherence of outputs. 
A lower sampling mass excludes less probable words in the output, resulting in higher coherence and reduced creativity. Also referred to as top-p sampling. See also #temperature. It is not recommended to modify both temperature and nucleusSamplingMass. For deciding between both parameters, experimentation with either is advised." ^ self argumentAt: #nucleusSamplingMass \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling..st b/packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling..st new file mode 100644 index 0000000..94db2b3 --- /dev/null +++ b/packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling..st @@ -0,0 +1,5 @@ +accessing +topKSampling: aNumber + "Only available for Anthropic models. Only sample from the top K options for each subsequent token. Used to remove ""long tail"" low probability responses. Learn more technical details here: https://towardsdatascience.com/how-to-sample-from-language-models-682bceb97277. Recommended for advanced use cases only. You usually only need to use temperature. Required range: aNumber > 0" + + self argumentAt: #topKSampling put: aNumber. \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling.st b/packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling.st new file mode 100644 index 0000000..95423d9 --- /dev/null +++ b/packages/SemanticText.package/SemanticConversationConfig.class/instance/topKSampling.st @@ -0,0 +1,5 @@ +accessing +topKSampling + "Only available for Anthropic models. Only sample from the top K options for each subsequent token. Used to remove ""long tail"" low probability responses. Learn more technical details here: https://towardsdatascience.com/how-to-sample-from-language-models-682bceb97277. Recommended for advanced use cases only. You usually only need to use temperature. 
Required range: aNumber > 0" + + ^ self argumentAt: #topKSampling \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticConversationConfig.class/methodProperties.json b/packages/SemanticText.package/SemanticConversationConfig.class/methodProperties.json index 0cbba8b..b582121 100644 --- a/packages/SemanticText.package/SemanticConversationConfig.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticConversationConfig.class/methodProperties.json @@ -4,11 +4,13 @@ "instance" : { "maxTokens" : "ct 2/5/2024 16:20", "maxTokens:" : "ct 2/7/2024 21:50", - "nucleusSamplingMass" : "ct 1/15/2024 17:05", - "nucleusSamplingMass:" : "ct 1/15/2024 17:05", + "nucleusSamplingMass" : "zakkor 1/15/2025 21:47", + "nucleusSamplingMass:" : "zakkor 1/15/2025 21:47", "shouldLogProbabilities" : "ct 1/15/2024 20:25", "shouldLogProbabilities:" : "ct 1/15/2024 20:25", "shouldStream" : "ct 8/19/2023 21:37", "shouldStream:" : "ct 8/19/2023 21:37", "temperature" : "ct 1/5/2024 20:57", - "temperature:" : "ct 1/5/2024 20:57" } } + "temperature:" : "ct 1/5/2024 20:57", + "topKSampling" : "zakkor 1/15/2025 22:46", + "topKSampling:" : "zakkor 1/15/2025 22:46" } } diff --git a/packages/SemanticText.package/SemanticConversationEditor.class/instance/addModelItemsToWindowMenu..st b/packages/SemanticText.package/SemanticConversationEditor.class/instance/addModelItemsToWindowMenu..st index 582de40..7e213ed 100644 --- a/packages/SemanticText.package/SemanticConversationEditor.class/instance/addModelItemsToWindowMenu..st +++ b/packages/SemanticText.package/SemanticConversationEditor.class/instance/addModelItemsToWindowMenu..st @@ -28,6 +28,11 @@ addModelItemsToWindowMenu: menu add: 'open OpenAI expense watcher' target: accountClass action: #openExpenseWatcher]. + (Smalltalk classNamed: #AnthropicAccount) ifNotNil: [:accountClass | + menu + add: 'open Anthropic expense watcher' + target: accountClass + action: #openExpenseWatcher]. 
menu add: 'edit preferences' action: #openPreferences. diff --git a/packages/SemanticText.package/SemanticConversationEditor.class/methodProperties.json b/packages/SemanticText.package/SemanticConversationEditor.class/methodProperties.json index e7798d6..a6d56c8 100644 --- a/packages/SemanticText.package/SemanticConversationEditor.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticConversationEditor.class/methodProperties.json @@ -23,7 +23,7 @@ "addDo:" : "ct 2/15/2024 00:57", "addDo:asPlaceholder:" : "ct 2/15/2024 00:58", "addMessage:" : "ct 4/30/2024 23:55", - "addModelItemsToWindowMenu:" : "ct 11/23/2024 22:05", + "addModelItemsToWindowMenu:" : "zakkor 1/10/2025 23:52", "addRoleMessage:" : "ct 10/15/2023 18:05", "addSystemMessage" : "ct 8/12/2023 21:01", "addToolCall" : "ct 4/30/2024 23:50", diff --git a/packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicObject.st b/packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicObject.st new file mode 100644 index 0000000..e8a7eef --- /dev/null +++ b/packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicObject.st @@ -0,0 +1,19 @@ +*SemanticText-Providers-Anthropic-converting +asAnthropicObject + + ^ OrderedJsonObject new + name: self name; + in: [:json | + self description ifNotNil: + [json description: self description]]; + input_schema: + (JsonObject new + type: #object; + properties: + (self parameters collect: [:parameter | parameter asAnthropicObject]); + required: + (self parameters asArray + select: [:parameter | parameter required] + thenCollect: [:parameter | parameter name]); + yourself); + yourself \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicToolChoiceObject.st b/packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicToolChoiceObject.st new file mode 100644 index 0000000..638dd7a --- /dev/null +++ 
b/packages/SemanticText.package/SemanticFunction.class/instance/asAnthropicToolChoiceObject.st @@ -0,0 +1,9 @@ +*SemanticText-Providers-Anthropic-converting +asAnthropicToolChoiceObject + + ^ JsonObject new + type: self type; + function: + (JsonObject new + name: self name; + yourself) \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticFunction.class/methodProperties.json b/packages/SemanticText.package/SemanticFunction.class/methodProperties.json index 74b8282..419cd1f 100644 --- a/packages/SemanticText.package/SemanticFunction.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticFunction.class/methodProperties.json @@ -18,6 +18,8 @@ "addParameter:type:" : "ct 1/16/2024 00:30", "addParameter:type:required:" : "ct 1/16/2024 00:30", "argsAction:" : "ct 11/7/2024 23:16", + "asAnthropicObject" : "zakkor 1/13/2025 18:55", + "asAnthropicToolChoiceObject" : "zakkor 1/13/2025 18:42", "asOpenAIObject" : "ct 2/13/2024 02:02", "asOpenAIToolChoiceObject" : "ct 1/16/2024 00:19", "asToolCall" : "ct 4/30/2024 21:52", diff --git a/packages/SemanticText.package/SemanticFunctionParameter.class/instance/asAnthropicObject.st b/packages/SemanticText.package/SemanticFunctionParameter.class/instance/asAnthropicObject.st new file mode 100644 index 0000000..0e724a2 --- /dev/null +++ b/packages/SemanticText.package/SemanticFunctionParameter.class/instance/asAnthropicObject.st @@ -0,0 +1,12 @@ +*SemanticText-Providers-Anthropic-converting +asAnthropicObject + "Note: this is exactly the same as SemanticFunctionParameter>>asOpenAIObject" + ^ (true + caseOf: + {[self type isNil] -> [JsonObject new]. 
+ [self type isString] -> [JsonObject new type: self type; yourself]} + otherwise: [self type]) + in: [:json | + self description ifNotNil: + [json description: self description]]; + yourself \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticFunctionParameter.class/methodProperties.json b/packages/SemanticText.package/SemanticFunctionParameter.class/methodProperties.json index c0cb284..14a01e4 100644 --- a/packages/SemanticText.package/SemanticFunctionParameter.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticFunctionParameter.class/methodProperties.json @@ -3,6 +3,7 @@ "name:description:type:required:" : "ct 10/13/2023 23:30", "name:type:required:" : "ct 1/15/2024 23:31" }, "instance" : { + "asAnthropicObject" : "zakkor 1/13/2025 18:55", "asOpenAIObject" : "ct 1/17/2024 02:32", "asSignatureString" : "ct 1/15/2024 23:10", "defaultRequired" : "ct 10/13/2023 23:27", diff --git a/packages/SemanticText.package/SemanticMessage.class/instance/asAnthropicObject.st b/packages/SemanticText.package/SemanticMessage.class/instance/asAnthropicObject.st new file mode 100644 index 0000000..8eb7f65 --- /dev/null +++ b/packages/SemanticText.package/SemanticMessage.class/instance/asAnthropicObject.st @@ -0,0 +1,18 @@ +*SemanticText-Providers-Anthropic-converting +asAnthropicObject + + | jsonObject | + jsonObject := JsonObject new + role: self role; + content: self content; + yourself. + "If any tool calls are present, we'll have to return an array of content blocks, not a plain string" + self basicToolCalls ifNotNil: [:calls | + | textContent contentBlocks | + textContent := self content isString ifTrue: [self content] ifFalse: [self content first text]. + contentBlocks := + {JsonObject new type: 'text'; text: textContent; yourself} , + (calls collect: [:toolCall | toolCall asAnthropicObject]). + jsonObject content: contentBlocks + ]. 
+ ^ jsonObject \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticMessage.class/instance/asOpenAIObject.st b/packages/SemanticText.package/SemanticMessage.class/instance/asOpenAIObject.st index dcdfdc6..2ef82f2 100644 --- a/packages/SemanticText.package/SemanticMessage.class/instance/asOpenAIObject.st +++ b/packages/SemanticText.package/SemanticMessage.class/instance/asOpenAIObject.st @@ -4,7 +4,7 @@ asOpenAIObject | jsonObject | jsonObject := JsonObject new role: self role; - content: self basicContent; + content: self content; yourself. self basicToolCalls ifNotNil: [:calls | jsonObject tool_calls: diff --git a/packages/SemanticText.package/SemanticMessage.class/instance/basicContent.st b/packages/SemanticText.package/SemanticMessage.class/instance/basicContent.st index e55ce28..b5358f9 100644 --- a/packages/SemanticText.package/SemanticMessage.class/instance/basicContent.st +++ b/packages/SemanticText.package/SemanticMessage.class/instance/basicContent.st @@ -1,4 +1,31 @@ accessing basicContent - + "Both OpenAI and Anthropic messages' content can either take the form of a plain string, or in more special cases, an array of objects, for example:" + " + ""content"": [ + {""type"": ""text"", ""text"": ""What's in this image?""}, + { + ""type"": ""image_url"", + ""image_url"": { + ""url"": ""https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"", + }, + }, + ] + " + "If in array of objects format like above, we'll collect the text contents of the objects" + self flag: #todo. "Maybe show something nicer for images and cache_control points" + content isArray ifTrue: [ + | textParts textContent | + textParts := content select: [:message | message type = 'text'] + thenCollect: [:message | message text]. + textContent := ''. 
+ textParts do: [:part | + textContent ifEmpty: [ + textContent := textContent , part + ] ifNotEmpty: [ + textContent := textContent , Character cr , part + ] + ]. + ^ textContent. + ]. ^ content \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticMessage.class/methodProperties.json b/packages/SemanticText.package/SemanticMessage.class/methodProperties.json index a447a9a..3355c26 100644 --- a/packages/SemanticText.package/SemanticMessage.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticMessage.class/methodProperties.json @@ -3,8 +3,9 @@ "conversation:role:content:" : "ct 2/10/2024 20:35", "role:content:" : "ct 6/22/2023 17:28" }, "instance" : { - "asOpenAIObject" : "ct 1/16/2024 20:41", - "basicContent" : "ct 8/13/2023 18:48", + "asAnthropicObject" : "zakkor 1/15/2025 15:39", + "asOpenAIObject" : "zakkor 1/11/2025 21:10", + "basicContent" : "zakkor 1/11/2025 21:05", "basicToolCalls" : "ct 1/16/2024 20:40", "content" : "ct 6/22/2023 17:28", "content:" : "ct 6/22/2023 17:28", diff --git a/packages/SemanticText.package/SemanticToolCall.class/instance/asAnthropicObject.st b/packages/SemanticText.package/SemanticToolCall.class/instance/asAnthropicObject.st new file mode 100644 index 0000000..b3e148d --- /dev/null +++ b/packages/SemanticText.package/SemanticToolCall.class/instance/asAnthropicObject.st @@ -0,0 +1,9 @@ +*SemanticText-Providers-Anthropic-converting +asAnthropicObject + + ^ JsonObject new + id: self key; + type: 'tool_use'; + name: self toolName; + input: self arguments; + yourself \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticToolCall.class/methodProperties.json b/packages/SemanticText.package/SemanticToolCall.class/methodProperties.json index f124173..f79661b 100644 --- a/packages/SemanticText.package/SemanticToolCall.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticToolCall.class/methodProperties.json @@ -7,6 +7,7 @@ "arguments" : "ct 1/17/2024 
01:20", "arguments:" : "ct 1/17/2024 01:19", "argumentsJsonString" : "ct 1/16/2024 18:18", + "asAnthropicObject" : "zakkor 1/15/2025 15:43", "asOpenAIObject" : "ct 1/16/2024 16:17", "asToolMessage" : "ct 1/16/2024 13:05", "asToolMessageWithResult" : "ct 1/16/2024 20:36", diff --git a/packages/SemanticText.package/SemanticToolMessage.class/instance/asAnthropicObject.st b/packages/SemanticText.package/SemanticToolMessage.class/instance/asAnthropicObject.st new file mode 100644 index 0000000..0b77e59 --- /dev/null +++ b/packages/SemanticText.package/SemanticToolMessage.class/instance/asAnthropicObject.st @@ -0,0 +1,14 @@ +*SemanticText-Providers-Anthropic-converting +asAnthropicObject + + | jsonObject | + jsonObject := super asAnthropicObject. + jsonObject role: 'user'. + jsonObject content: { + (JsonObject new + type: 'tool_result'; + tool_use_id: self key; + content: self basicContent; + yourself) + }. + ^ jsonObject \ No newline at end of file diff --git a/packages/SemanticText.package/SemanticToolMessage.class/methodProperties.json b/packages/SemanticText.package/SemanticToolMessage.class/methodProperties.json index 52111c9..c6af88b 100644 --- a/packages/SemanticText.package/SemanticToolMessage.class/methodProperties.json +++ b/packages/SemanticText.package/SemanticToolMessage.class/methodProperties.json @@ -3,6 +3,7 @@ "toolCall:" : "ct 1/16/2024 13:05", "toolCall:content:" : "ct 1/16/2024 01:39" }, "instance" : { + "asAnthropicObject" : "zakkor 1/15/2025 15:24", "asOpenAIObject" : "ct 1/16/2024 20:05", "basicContent" : "ct 1/16/2024 20:06", "content" : "ct 1/16/2024 20:09", diff --git a/packages/SemanticText.package/monticello.meta/categories.st b/packages/SemanticText.package/monticello.meta/categories.st index b077e98..bf424ed 100644 --- a/packages/SemanticText.package/monticello.meta/categories.st +++ b/packages/SemanticText.package/monticello.meta/categories.st @@ -1,10 +1,11 @@ +SystemOrganization addCategory: #'SemanticText-Help'! 
SystemOrganization addCategory: #'SemanticText-Model'! SystemOrganization addCategory: #'SemanticText-Model-Agents'! SystemOrganization addCategory: #'SemanticText-Model-Conversation'! SystemOrganization addCategory: #'SemanticText-Model-Conversation-Tests'! SystemOrganization addCategory: #'SemanticText-Model-Search'! SystemOrganization addCategory: #'SemanticText-Model-Speech'! -SystemOrganization addCategory: #'SemanticText-Help'! +SystemOrganization addCategory: #'SemanticText-Providers-Anthropic'! SystemOrganization addCategory: #'SemanticText-Providers-Mocks'! SystemOrganization addCategory: #'SemanticText-Providers-OpenAI'! SystemOrganization addCategory: #'SemanticText-Tools-Conversation'!