2 files changed: +3 −5 lines changed
  Sources/AnyLanguageModel/Models
  Tests/AnyLanguageModelTests

Sources/AnyLanguageModel/Models

@@ -166,14 +166,12 @@ import Foundation
     }
 
     /// Default llama.cpp options used when none are provided at runtime.
-    public static func defaults(
-        seed: UInt32 = UInt32.random(in: 0...UInt32.max)
-    ) -> Self {
+    public static var `default`: Self {
         .init(
             contextSize: 2048,
             batchSize: 512,
             threads: Int32(ProcessInfo.processInfo.processorCount),
-            seed: seed,
+            seed: UInt32.random(in: 0...UInt32.max),
             temperature: 0.8,
             topK: 40,
             topP: 0.95,

Tests/AnyLanguageModelTests

@@ -65,7 +65,7 @@ import Testing
     }
 
     @Test func customGenerationOptionsDefaults() {
-        let defaults = LlamaLanguageModel.CustomGenerationOptions.defaults()
+        let defaults = LlamaLanguageModel.CustomGenerationOptions.default
         #expect(defaults.contextSize == 2048)
         #expect(defaults.batchSize == 512)
         #expect(defaults.temperature == 0.8)
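
With this change, `default` always draws a random seed internally, so callers that previously passed a fixed seed to `defaults(seed:)` need another way to pin it. A minimal migration sketch, assuming the module is named AnyLanguageModel and that `seed` remains a settable property on `CustomGenerationOptions`:

import AnyLanguageModel

// Before this change, a reproducible seed could be injected via the factory:
//   let options = LlamaLanguageModel.CustomGenerationOptions.defaults(seed: 42)

// After this change, `default` picks a random seed on each access.
var options = LlamaLanguageModel.CustomGenerationOptions.default

// For reproducible generation, override the seed afterwards
// (hypothetical usage; assumes `seed` is a mutable stored property).
options.seed = 42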