File tree: 1 file changed, +6 −12 lines changed

@@ -131,18 +131,12 @@ def get_role_end(self, name):
131131
132132 def _generator (self , prompt , temperature ):
133133 # start the new stream
134- prompt_end = prompt .find (b'<|endofprompt|>' )
135- if prompt_end >= 0 :
136- stripped_prompt = prompt [:prompt_end ]
137- else :
138- raise Exception ("This model cannot handle prompts that don't match the instruct format!" )
139-
140- # make sure you don't try and instruct the same model twice
141- if b'<|endofprompt|>' in prompt [prompt_end + len (b'<|endofprompt|>' ):]:
142- raise Exception ("This model has been given two separate instruct blocks, but this is not allowed!" )
143-
144- # update our shared data state
145- self ._reset_shared_data (stripped_prompt + b'<|endofprompt|>' , temperature )
134+ eop_count = prompt .count (b'<|endofprompt|>' )
135+ if eop_count > 1 :
136+ raise Exception ("This model has been given multiple instruct blocks or <|endofprompt|> tokens, but this is not allowed!" )
137+ updated_prompt = prompt + b'<|endofprompt|>' if eop_count == 0 else prompt
138+
139+ self ._reset_shared_data (updated_prompt , temperature )
146140
147141 try :
148142 generator = self .client .completions .create (
You can’t perform that action at this time.
0 commit comments