"""
GraphRAG Implementation using Llama-Index and NetworkX
Issue #103 for king04aman/All-In-One-Python-Projects
"""
import networkx as nx
from llama_index.core import Document, KnowledgeGraphIndex, Settings
from llama_index.core.node_parser import SentenceSplitter
from llama_index.llms.openai import OpenAI
from llama_index.embeddings.openai import OpenAIEmbedding
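
# Note: the OpenAI LLM and embedding classes used below read the API key from
# the OPENAI_API_KEY environment variable, which must be set before running.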

# Example documents
DOCUMENTS = [
    Document(text="Alice is a data scientist. She works at Acme Corp."),
    Document(text="Bob is a software engineer. He collaborates with Alice on ML projects."),
    Document(text="Acme Corp is a tech company based in New York."),
]

# Step 1: Parse documents into nodes (text chunks)
parser = SentenceSplitter()
nodes = parser.get_nodes_from_documents(DOCUMENTS)

# Step 2: Configure models and build the Knowledge Graph Index
# The LLM and embedding model must be set before construction, because the
# index extracts (subject, predicate, object) triplets with the LLM at build time.
Settings.llm = OpenAI(model="gpt-3.5-turbo")
Settings.embed_model = OpenAIEmbedding(model="text-embedding-ada-002")
kg_index = KnowledgeGraphIndex(nodes=nodes)

# Step 3: Create a query engine over the knowledge graph
query_engine = kg_index.as_query_engine(include_text=True)

# Step 4: Example query
query = "Who works at Acme Corp?"
response = query_engine.query(query)
print("Query:", query)
print("Response:", response)

# Step 5: Export the knowledge graph for visualization
# KnowledgeGraphIndex can return its triplets as a NetworkX graph directly;
# nodes are entities and edges carry the extracted relations.
G = kg_index.get_networkx_graph()

nx.write_gml(G, "knowledge_graph.gml")
print("Knowledge graph saved as knowledge_graph.gml")
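
# Optional: a minimal rendering sketch (assumes matplotlib is installed; it is
# not part of the original requirements, so the import is guarded).
try:
    import matplotlib.pyplot as plt

    pos = nx.spring_layout(G, seed=42)  # deterministic layout
    nx.draw(G, pos, with_labels=True, node_color="lightblue", node_size=1500)
    edge_labels = nx.get_edge_attributes(G, "title")  # relation text, if present
    nx.draw_networkx_edge_labels(G, pos, edge_labels=edge_labels)
    plt.savefig("knowledge_graph.png", bbox_inches="tight")
    print("Knowledge graph rendered to knowledge_graph.png")
except ImportError:
    print("matplotlib not installed; skipping PNG rendering")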