@@ -76,27 +76,27 @@ def _validate_encoding_model(self) -> None:
         if self.encoding_model.strip() == "":
             self.encoding_model = tiktoken.encoding_name_for_model(self.model)

-    max_tokens: int | None = Field(
+    max_tokens: int = Field(
         description="The maximum number of tokens to generate.",
         default=defs.LLM_MAX_TOKENS,
     )
-    temperature: float | None = Field(
+    temperature: float = Field(
         description="The temperature to use for token generation.",
         default=defs.LLM_TEMPERATURE,
     )
-    top_p: float | None = Field(
+    top_p: float = Field(
         description="The top-p value to use for token generation.",
         default=defs.LLM_TOP_P,
     )
-    n: int | None = Field(
+    n: int = Field(
         description="The number of completions to generate.",
         default=defs.LLM_N,
     )
-    frequency_penalty: float | None = Field(
+    frequency_penalty: float = Field(
         description="The frequency penalty to use for token generation.",
         default=defs.LLM_FREQUENCY_PENALTY,
     )
-    presence_penalty: float | None = Field(
+    presence_penalty: float = Field(
         description="The presence penalty to use for token generation.",
         default=defs.LLM_PRESENCE_PENALTY,
     )
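The hunk above tightens the field annotations from optional to required-with-default. As a minimal sketch of the resulting behavior (not part of this PR: _SketchParams and the literal 0.0 default are placeholders for the real config class and the defs constants), a Pydantic field declared without the None union but with a default can still be omitted by callers, while an explicit None now fails validation:

# Hypothetical sketch -- _SketchParams stands in for the real config model;
# 0.0 stands in for defs.LLM_TEMPERATURE.
from pydantic import BaseModel, Field, ValidationError


class _SketchParams(BaseModel):
    temperature: float = Field(
        description="The temperature to use for token generation.",
        default=0.0,
    )


print(_SketchParams().temperature)   # field omitted -> default 0.0 is used

try:
    _SketchParams(temperature=None)  # explicit None is now rejected
except ValidationError as exc:
    print(exc.errors()[0]["type"])   # e.g. "float_type" under pydantic v2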