 import pandas as pd
 from langchain_openai.chat_models import ChatOpenAI
 from langchain_openai.embeddings import OpenAIEmbeddings
-from llama_index.readers.schema import Document as LlamaindexDocument
 
 from ragas._analytics import TesetGenerationEvent, track
 from ragas.embeddings import BaseRagasEmbeddings
 from ragas.testset.evolutions import ComplexEvolution, CurrentNodes, DataRow
 from ragas.testset.filters import EvolutionFilter, NodeFilter, QuestionFilter
 
-logger = logging.getLogger(__name__)
+if t.TYPE_CHECKING:
+    from llama_index.readers.schema import Document as LlamaindexDocument
+    from langchain_core.documents import Document as LCDocument
+
 Distributions = t.Dict[t.Any, float]
 
+logger = logging.getLogger(__name__)
+
 
 @dataclass
 class TestDataset:
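
With this change the llama-index and langchain document classes are imported only under `t.TYPE_CHECKING`, so they are visible to type checkers but never imported at runtime; that normally requires the annotations referencing them to be strings (or `from __future__ import annotations`). A minimal sketch of the pattern, with a hypothetical optional dependency standing in for the real packages:

import typing as t

if t.TYPE_CHECKING:
    # only evaluated by type checkers, never at runtime
    from heavy_optional_package import Document  # hypothetical package/class

def count_documents(docs: "t.Sequence[Document]") -> int:
    # the quoted annotation keeps Document out of runtime evaluation
    return len(docs)
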
@@ -79,12 +83,14 @@ def with_openai(
             docstore=docstore,
         )
 
+    # if you add any arguments to this function, make sure to add them to
+    # generate_with_langchain_docs as well
     def generate_with_llamaindex_docs(
         self,
         documents: t.Sequence[LlamaindexDocument],
         test_size: int,
         distributions: Distributions = {},
-        show_debug_logs=False,
+        with_debugging_logs=False,
     ):
         # chunk documents and add to docstore
         self.docstore.add_documents(
@@ -94,11 +100,34 @@ def generate_with_llamaindex_docs(
         return self.generate(
             test_size=test_size,
             distributions=distributions,
-            show_debug_logs=show_debug_logs,
+            with_debugging_logs=with_debugging_logs,
+        )
+
+    # if you add any arguments to this function, make sure to add them to
+    # generate_with_llamaindex_docs as well
+    def generate_with_langchain_docs(
+        self,
+        documents: t.Sequence[LCDocument],
+        test_size: int,
+        distributions: Distributions = {},
+        with_debugging_logs=False,
+    ):
+        # chunk documents and add to docstore
+        self.docstore.add_documents(
+            [Document.from_langchain_document(doc) for doc in documents]
+        )
+
+        return self.generate(
+            test_size=test_size,
+            distributions=distributions,
+            with_debugging_logs=with_debugging_logs,
         )
 
     def generate(
-        self, test_size: int, distributions: Distributions = {}, show_debug_logs=False
+        self,
+        test_size: int,
+        distributions: Distributions = {},
+        with_debugging_logs=False,
     ):
         # init filters and evolutions
         for evolution in distributions:
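
The two `generate_with_*` entry points now share the same signature and differ only in the document type they chunk into the docstore. A usage sketch, assuming a `TestsetGenerator` built via `TestsetGenerator.with_openai()`; the sample document and the pandas export are illustrative:

from langchain_core.documents import Document as LCDocument

generator = TestsetGenerator.with_openai()  # assumes OPENAI_API_KEY is configured
docs = [LCDocument(page_content="Ragas builds synthetic test sets for RAG evaluation.")]

testset = generator.generate_with_langchain_docs(
    documents=docs,
    test_size=5,
)
df = testset.to_pandas()  # TestDataset is assumed to expose a pandas export
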
@@ -116,7 +145,7 @@ def generate(
             evolution.init_evolution()
             if evolution.evolution_filter is None:
                 evolution.evolution_filter = EvolutionFilter(llm=self.critic_llm)
-        if show_debug_logs:
+        if with_debugging_logs:
             from ragas.utils import patch_logger
 
             patch_logger("ragas.testset.evolutions", logging.DEBUG)
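
Passing `with_debugging_logs=True` raises the `ragas.testset.evolutions` logger to DEBUG through `ragas.utils.patch_logger`; roughly the same effect can be had from calling code with the standard library (the handler setup below is an assumption, not part of this change):

import logging

logging.basicConfig(level=logging.DEBUG)  # make DEBUG records visible somewhere
logging.getLogger("ragas.testset.evolutions").setLevel(logging.DEBUG)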