1 file changed: +6 -7 lines changed
@@ -176,7 +176,6 @@ def __init__(
 
         if self.verbose:
             print(llama_cpp.llama_print_system_info().decode("utf-8"), file=sys.stderr)
-
 
         n_vocab = self.n_vocab()
         n_ctx = self.n_ctx()
@@ -575,9 +574,9 @@ def create_embedding(
         else:
             inputs = input
 
-        data = []
+        data: List[EmbeddingData] = []
         total_tokens = 0
-        for input in inputs:
+        for index, input in enumerate(inputs):
             tokens = self.tokenize(input.encode("utf-8"))
             self.reset()
             self.eval(tokens)
@@ -587,20 +586,20 @@ def create_embedding(
                 : llama_cpp.llama_n_embd(self.ctx)
             ]
 
-            if self.verbose:
-                llama_cpp.llama_print_timings(self.ctx)
             data.append(
                 {
                     "object": "embedding",
                     "embedding": embedding,
-                    "index": 0,
+                    "index": index,
                 }
             )
+        if self.verbose:
+            llama_cpp.llama_print_timings(self.ctx)
 
         return {
             "object": "list",
             "data": data,
-            "model": self.model_path,
+            "model": model_name,
             "usage": {
                 "prompt_tokens": total_tokens,
                 "total_tokens": total_tokens,
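
For context, a rough usage sketch of the embedding path touched by this diff, using llama-cpp-python's high-level Llama API; the model path and input strings below are placeholders, and `model_name` is presumably resolved earlier in the method (outside this hunk). With the change above, each input in a batch receives its own "index" in the returned data, and `llama_print_timings` is called once after the whole batch rather than once per input.

```python
from llama_cpp import Llama

# Placeholder model path; any local model loaded with embedding=True would do.
llm = Llama(model_path="./models/ggml-model.bin", embedding=True)

# A batch of inputs: each result entry now carries its own "index",
# matching its position in the input list.
result = llm.create_embedding(["first sentence", "second sentence"])
for item in result["data"]:
    print(item["index"], len(item["embedding"]))

print(result["model"], result["usage"]["total_tokens"])
```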