2 files changed: +24 -14 lines changed

supporting-blog-content/github-assistant
@@ -23,25 +23,35 @@
 parser = argparse.ArgumentParser(
     description="Process documents and questions for evaluation."
 )
-parser.add_argument("--num_documents",
+parser.add_argument(
+    "--num_documents",
     type=int,
     default=None,
-    help="Number of documents to process (default: all)")
-parser.add_argument("--skip_documents",
+    help="Number of documents to process (default: all)"
+)
+parser.add_argument(
+    "--skip_documents",
     type=int,
     default=0,
-    help="Number of documents to skip at the beginning (default: 0)")
-parser.add_argument("--num_questions",
+    help="Number of documents to skip at the beginning (default: 0)"
+)
+parser.add_argument(
+    "--num_questions",
     type=int,
     default=None,
-    help="Number of questions to process (default: all)")
-parser.add_argument("--skip_questions",
+    help="Number of questions to process (default: all)"
+)
+parser.add_argument(
+    "--skip_questions",
     type=int,
     default=0,
-    help="Number of questions to skip at the beginning (default: 0)")
-parser.add_argument("--process_last_questions",
+    help="Number of questions to skip at the beginning (default: 0)"
+)
+parser.add_argument(
+    "--process_last_questions",
     action="store_true",
-    help="Process last N questions instead of first N")
+    help="Process last N questions instead of first N"
+)
 args = parser.parse_args()

 load_dotenv(".env")
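This hunk is a pure reflow of the argparse calls: each flag name moves to its own line and the closing parenthesis gets its own line, with no change in behavior. As a quick sanity check, a trimmed, self-contained sketch of how the reflowed flags parse is shown below; it keeps only the question-related flags, and the sample argv is illustrative rather than taken from the repository.

import argparse

# Trimmed sketch: only the question-related flags from the hunk above; the
# document flags follow the same pattern. The sample argv is illustrative.
parser = argparse.ArgumentParser(
    description="Process documents and questions for evaluation."
)
parser.add_argument(
    "--num_questions",
    type=int,
    default=None,
    help="Number of questions to process (default: all)"
)
parser.add_argument(
    "--skip_questions",
    type=int,
    default=0,
    help="Number of questions to skip at the beginning (default: 0)"
)
parser.add_argument(
    "--process_last_questions",
    action="store_true",
    help="Process last N questions instead of first N"
)

args = parser.parse_args(["--num_questions", "10", "--process_last_questions"])
assert args.num_questions == 10
assert args.skip_questions == 0              # default: skip none
assert args.process_last_questions is True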
@@ -20,24 +20,24 @@ def run_query_sync():
         query_engine = index.as_query_engine(
             llm=openai_llm,
             similarity_top_k=3,
-            streaming=False,
+            streaming=False,
             response_mode="tree_summarize",
         )

         bundle = QueryBundle(query, embedding=embed_model.get_query_embedding(query))

         result = query_engine.query(bundle)
-        return result.response
+        return result.response
     except Exception as e:
         print(f"An error occurred while running the query: {e}")
     finally:
         if hasattr(openai_llm, "client") and isinstance(
             openai_llm.client, httpx.Client
-        ):
+        ):
             openai_llm.client.close()
         if hasattr(embed_model, "client") and isinstance(
             embed_model.client, httpx.Client
-        ):
+        ):
             embed_model.client.close()
         if hasattr(es_vector_store, "close"):
             es_vector_store.close()
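The second hunk appears to be whitespace-only: the visible content of the removed and added lines is identical. For orientation, a hedged sketch of how run_query_sync() is typically wired up with LlamaIndex and an Elasticsearch-backed index follows. The import paths, model names, index name, and the construction of openai_llm, embed_model, es_vector_store, index, and query are assumptions; none of that setup is visible in this diff.

# Assumed surroundings for run_query_sync(); only the function body mirrors
# the hunk above, everything else is a plausible setup, not the repo's code.
import httpx
from llama_index.core import QueryBundle, VectorStoreIndex
from llama_index.embeddings.openai import OpenAIEmbedding
from llama_index.llms.openai import OpenAI
from llama_index.vector_stores.elasticsearch import ElasticsearchStore

openai_llm = OpenAI(model="gpt-4o")                             # assumed model
embed_model = OpenAIEmbedding(model="text-embedding-3-small")   # assumed model
es_vector_store = ElasticsearchStore(                           # assumed connection
    index_name="github-assistant",
    es_url="http://localhost:9200",
)
index = VectorStoreIndex.from_vector_store(es_vector_store, embed_model=embed_model)
query = "What does this repository do?"                         # assumed sample query


def run_query_sync():
    try:
        query_engine = index.as_query_engine(
            llm=openai_llm,
            similarity_top_k=3,
            streaming=False,
            response_mode="tree_summarize",
        )
        bundle = QueryBundle(query, embedding=embed_model.get_query_embedding(query))
        result = query_engine.query(bundle)
        return result.response
    except Exception as e:
        print(f"An error occurred while running the query: {e}")
    finally:
        # Close the underlying httpx clients and the Elasticsearch store so a
        # one-off script exits cleanly without leaking connections.
        if hasattr(openai_llm, "client") and isinstance(
            openai_llm.client, httpx.Client
        ):
            openai_llm.client.close()
        if hasattr(embed_model, "client") and isinstance(
            embed_model.client, httpx.Client
        ):
            embed_model.client.close()
        if hasattr(es_vector_store, "close"):
            es_vector_store.close()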