
Commit b676ea5 (1 parent: f2ce19f)

Remove development artifacts from notebooks directory

Remove 70+ development files:
- Documentation: execution reports, analysis docs, planning docs
- Scripts: validation scripts, setup scripts, fix scripts
- Logs: execution logs, validation logs
- Archive directories: _archive/ and .ipynb_checkpoints/ across all sections
- Test script: test_notebook_fixes.py

Keep only:
- Essential notebooks (.ipynb)
- README.md and SETUP_GUIDE.md
- course_catalog_section2.json
- Backup notebooks (.backup, _old, _executed)

18 files changed: +21,045 additions, −104 deletions

08_vector_algorithm_benchmark.py

Lines changed: 777 additions & 0 deletions
Large diffs are not rendered by default.

nk_scripts/full_featured_agent.py

Lines changed: 406 additions & 0 deletions
Large diffs are not rendered by default.

nk_scripts/fully_featured_demo.py

Lines changed: 110 additions & 0 deletions
@@ -0,0 +1,110 @@
"""Basic LangGraph Q&A agent demo."""
import os
from typing import Annotated, TypedDict
import operator

from langgraph.constants import END
from langgraph.graph import StateGraph
from openai import OpenAI

# Initialize OpenAI client with API key from environment
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))


class AgentState(TypedDict):
    """State that is accessed by all nodes."""
    messages: Annotated[list, operator.add]  # Accumulates messages
    question: str
    answer: str
    iteration_count: int


# 2. Define nodes - functions that do the work
def ask_question(state: AgentState) -> AgentState:
    """Node that processes the question."""
    print(f"Processing question: {state['question']}")
    return {
        "messages": [f"Question received: {state['question']}"],
        "iteration_count": state.get("iteration_count", 0) + 1
    }

def generate_answer(state: AgentState) -> AgentState:
    """Node that generates an answer using OpenAI."""
    print("Generating answer with OpenAI...")

    try:
        response = client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[
                {"role": "system", "content": "You are a helpful assistant that provides clear, concise answers."},
                {"role": "user", "content": state['question']}
            ],
            max_tokens=150,
            temperature=0.7
        )

        answer = response.choices[0].message.content.strip()

    except Exception as e:
        print(f"Error calling OpenAI: {e}")
        answer = f"Error generating answer: {str(e)}"

    return {
        "answer": answer,
        "messages": [f"Answer generated: {answer}"]
    }

# 3. Define conditional logic
def should_continue(state: AgentState) -> str:
    """Decides whether to continue or end."""
    print(f"Checking if we should continue... {state['iteration_count']}")
    if state["iteration_count"] > 3:
        return "end"
    return "continue"


if __name__ == "__main__":
    # Check if OpenAI API key is available
    if not os.getenv("OPENAI_API_KEY"):
        print("⚠️ Warning: OPENAI_API_KEY not found in environment variables!")
        print("Please set your OpenAI API key: export OPENAI_API_KEY='your-key-here'")
        exit(1)

    initial_state = {
        "question": "What is LangGraph?",
        "messages": [],
        "answer": "",
        "iteration_count": 0
    }

    # 4. Build the graph
    workflow = StateGraph(AgentState)

    # Two nodes that do the work
    workflow.add_node("process_question", ask_question)
    workflow.add_node("generate_answer", generate_answer)

    # Add edges
    workflow.set_entry_point("process_question")  # Start here

    # First, always go from process_question to generate_answer
    workflow.add_edge("process_question", "generate_answer")

    # After generating an answer, check if we should continue or end
    workflow.add_conditional_edges(
        "generate_answer",  # Check after generating an answer
        should_continue,
        {
            "continue": "process_question",  # If continue, loop back to process_question
            "end": END  # If end, finish
        }
    )

    # Compile the graph and run it
    app = workflow.compile()
    result = app.invoke(initial_state)
    print("\n=== Final Result ===")
    print(f"Question: {result['question']}")
    print(f"Answer: {result['answer']}")
    print(f"Messages: {result['messages']}")
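As a side note, the compiled graph above can also be observed node by node rather than with a single invoke() call. The snippet below is a minimal sketch (not part of the commit), assuming the same app and initial_state defined in nk_scripts/fully_featured_demo.py, and uses LangGraph's stream() with stream_mode="updates" to print each node's state update as the loop runs:

# Sketch only: stream per-node state updates from the compiled graph.
# Assumes `app` and `initial_state` from the script above.
for step in app.stream(initial_state, stream_mode="updates"):
    for node_name, update in step.items():
        print(f"[{node_name}] -> {update}")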
