#!/usr/bin/env python3
"""
Example usage of the RAG-based Prompt Reconstructor

This example demonstrates how to use the new RAG-based reconstruction method
as an alternative to the traditional content reference approach.
"""
from agentgraph.reconstruction import (
    RagPromptReconstructor,
    enrich_knowledge_graph_with_prompts_rag,
    reconstruct_prompts_from_knowledge_graph_rag,
)
def example_usage():
    """Demonstrate the three entry points of the RAG-based prompt reconstructor.

    Builds a small in-memory knowledge graph plus a sample trace, then shows:
      1. the pure-function API (``reconstruct_prompts_from_knowledge_graph_rag``),
      2. the ``RagPromptReconstructor`` class for a single relation, and
      3. whole-graph enrichment (``enrich_knowledge_graph_with_prompts_rag``).

    Prints results to stdout; returns None.
    """
    # Sample knowledge graph: two entities joined by one PERFORMS relation.
    knowledge_graph = {
        "entities": [
            {
                "id": "user_001",
                "name": "User inquiry about document loader",
                "type": "Input",
                "raw_prompt": "What is a document loader?"
            },
            {
                "id": "agent_001",
                "name": "Stereotypical Robot Named Robbie",
                "type": "Agent",
                "raw_prompt": "You are a stereotypical robot named Robbie..."
            }
        ],
        "relations": [
            {
                "id": "rel_001",
                "source": "user_001",
                "target": "agent_001",
                "type": "PERFORMS",
                "interaction_prompt": "What is a document loader?"
            }
        ]
    }

    # Sample original trace content the reconstructor retrieves from.
    original_trace = """
User: What is a document loader?
Agent: BEEP BOOP! Hello human! A document loader is a component in
LangChain that helps load documents from various sources. BEEP!
There are many types like TextLoader, PDFLoader, CSVLoader, etc.
Each one is designed for specific file formats. BOOP BEEP!
Would you like me to explain any specific type? BEEP BOOP!
"""

    # Method 1: pure function over the whole graph.
    print("=== Using Pure Function ===")
    reconstructed_relations = reconstruct_prompts_from_knowledge_graph_rag(
        knowledge_graph=knowledge_graph,
        original_trace=original_trace,
        llm_config={"model": "gpt-5-mini", "temperature": 0.1}
    )
    for relation in reconstructed_relations:
        print(f"Relation: {relation['id']}")
        print(f"Type: {relation['type']}")
        # No placeholders here, so a plain string (not an f-string) is correct.
        print("Reconstructed Prompt:")
        print(relation['prompt'])
        print(f"Search Queries Used: {relation.get('search_queries_used', [])}")
        print("-" * 50)

    # Method 2: the class API, reconstructing one specific relation by id.
    print("\n=== Using RagPromptReconstructor Class ===")
    reconstructor = RagPromptReconstructor(
        knowledge_graph=knowledge_graph,
        original_trace=original_trace,
        llm_config={"model": "gpt-5-mini", "temperature": 0.1}
    )
    specific_reconstruction = reconstructor.reconstruct_relation_prompt("rel_001")
    print("Specific Reconstruction:")
    print(f"Prompt: {specific_reconstruction['reconstructed_prompt']}")
    print(f"Method: {specific_reconstruction['reconstruction_method']}")

    # Method 3: enrich the entire knowledge graph in one call.
    print("\n=== Enriching Knowledge Graph ===")
    enriched_kg = enrich_knowledge_graph_with_prompts_rag(
        knowledge_graph=knowledge_graph,
        original_trace=original_trace
    )
    print(f"Original KG had {len(knowledge_graph.get('relations', []))} relations")
    print(f"Enriched KG has {len(enriched_kg.get('prompt_reconstructions', []))} reconstructed prompts")
    print(f"Reconstruction metadata: {enriched_kg.get('reconstruction_metadata', {})}")
# Run the demo only when executed as a script, not on import.
if __name__ == "__main__":
    example_usage()