2 files changed: +7 -0 lines changed

@@ -309,6 +309,7 @@ def _cachable_default_model(backend: Backend) -> str:
     :rtype: str
     :raises ValueError: If no models are available.
     """
+
     logger.debug("Getting default model for backend: {}", backend)
     models = backend.available_models()
     if models:

@@ -103,6 +103,7 @@ async def make_request(

         request_args.update(self._request_args)

+        print(">>> Creating stream object for OpenAI server")
         stream = await self._async_client.chat.completions.create(
             model=self.model,
             messages=[
@@ -111,8 +112,10 @@ async def make_request(
             stream=True,
             **request_args,
         )
+
         token_count = 0
         async for chunk in stream:
+            print(f"Getting chunk: {chunk}")
             choice = chunk.choices[0]
             token = choice.delta.content or ""

@@ -145,6 +148,9 @@ def available_models(self) -> List[str]:
         :raises openai.OpenAIError: If an error occurs while retrieving models.
         """

+        # TODO: Remove this line
+        return ["Meta-Llama-3-8B.Q4_K_M.gguf"]
+
         try:
             return [model.id for model in self._client.models.list().data]
         except Exception as error: