Supported types of graphs:

- [x] chain
- [x] single cycle
- [x] multi-cycle graph
- [x] complex graph with cycles

Currently unsupported types:

- [ ] single node cycle
- [ ] incomplete graph
## How to use

We provide several examples of using our library for various tasks.
### Data generation
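The example below builds a `GraphGenerationPipeline` from a generation model and a validation model, then runs it over a list of topics. Each call returns either a `GraphGenerationResult` (the generated graph plus sampled dialogues) or a `GenerationError`.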
```python
import os

from chatsky_llm_autoconfig.algorithms.cycle_graph_generation_pipeline import GraphGenerationPipeline
from langchain_openai import ChatOpenAI

# Note: GraphGenerationResult and GenerationError (used below) are the pipeline's
# result types and should be imported from the library as well.

generation_model = ChatOpenAI(
    model='deepseek/deepseek-reasoner',
    api_key=os.getenv("OPENAI_API_KEY"),
    base_url=os.getenv("OPENAI_BASE_URL"),
    temperature=0.2
)

validation_model = ChatOpenAI(
    model='gpt-4o-mini',
    api_key=os.getenv("OPENAI_API_KEY"),
    base_url=os.getenv("OPENAI_BASE_URL"),
    temperature=0
)

pipeline = GraphGenerationPipeline(
    generation_model=generation_model,
    validation_model=validation_model
)

topics = [
    "technical support conversation",
    "job interview",
    "restaurant reservation",
    "online shopping checkout",
    "travel booking"
]

successful_generations = []

for topic in topics:
    try:
        result = pipeline(topic)

        # Check the result type
        if isinstance(result, GraphGenerationResult):
            print(f"✅ Successfully generated graph for {topic}")
            # Save the full result with the graph and dialogues
            successful_generations.append({
                "graph": result.graph.model_dump(),
                "topic": result.topic,
                "dialogues": [d.model_dump() for d in result.dialogues]
            })
        else:  # isinstance(result, GenerationError)
            print(f"❌ Failed to generate graph for {topic}")
            print(f"Error type: {result.error_type}")
            print(f"Error message: {result.message}")

    except Exception as e:
        print(f"❌ Unexpected error processing topic '{topic}': {str(e)}")
        continue
```
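Because `successful_generations` is a plain list of dictionaries, it can be written to disk with the standard library; the file name below is only an example:

```python
import json

# Persist the generated graphs and dialogues for later inspection
with open("generated_graphs.json", "w", encoding="utf-8") as f:
    json.dump(successful_generations, f, ensure_ascii=False, indent=2)
```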
### Dialogue sampling

```python
from chatsky_llm_autoconfig.algorithms.dialogue_generation import RecursiveDialogueSampler
from chatsky_llm_autoconfig.graph import Graph

# Build a Graph from an existing graph dictionary (structure omitted here)
G = Graph(graph_dict={...})

sampler = RecursiveDialogueSampler()
sampler.invoke(graph=G)  # -> list of Dialogue objects
```
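The returned `Dialogue` objects can be inspected or serialized in the same way as in the data generation example above; a minimal illustration (assuming, as that example suggests, that `Dialogue` is a pydantic model with `model_dump()`):

```python
dialogues = sampler.invoke(graph=G)
for dialogue in dialogues:
    print(dialogue.model_dump())
```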

### Graph generation
### Evaluation

- Generate graph from scratch by topic (input: topic, output: graph) (for dataset generation); see the sketch below
  - algorithms.topic_graph_generation.CycleGraphGenerator
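A minimal sketch of calling the topic-based generator directly. The import path follows the module listed above, but the constructor arguments and the `invoke(topic=...)` call are assumptions modeled on the sampler interface, not confirmed API:

```python
import os

from chatsky_llm_autoconfig.algorithms.topic_graph_generation import CycleGraphGenerator
from langchain_openai import ChatOpenAI

# Assumption: the generator wraps an LLM and produces a graph for a given topic
generation_model = ChatOpenAI(model='gpt-4o-mini', api_key=os.getenv("OPENAI_API_KEY"))
generator = CycleGraphGenerator(model=generation_model)   # "model" kwarg is an assumption
graph = generator.invoke(topic="restaurant reservation")  # invoke(topic=...) is an assumption
```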