"""Prompt-engineering module: JSON-backed prompt templates plus AutoGen/Llama response generation."""
import json
import os
from pathlib import Path
from typing import Any, Dict

import autogen
class PromptEngineeringModule:
    """Manage reusable prompt templates and generate LLM responses via AutoGen.

    Templates are persisted as JSON at ``templates/prompt_templates.json`` and
    seeded with defaults on first use. Responses are produced by a one-shot
    chat between an AutoGen ``UserProxyAgent`` and an ``AssistantAgent``.
    """

    def __init__(self):
        # JSON file where prompt templates are persisted.
        self.templates_path = Path("templates/prompt_templates.json")
        # Read the API key from the environment instead of hard-coding it in
        # source; the placeholder default preserves the original fallback.
        self.config_list = [
            {
                'model': 'llama-3.2',
                'api_key': os.environ.get('LLM_API_KEY', 'your-api-key'),
            }
        ]
        # Initialize AutoGen assistant
        self.llm_config = {
            "config_list": self.config_list,
            "temperature": 0.7,
        }
        self.assistant = autogen.AssistantAgent(
            name="llama_assistant",
            llm_config=self.llm_config,
        )
        # Fully automated proxy: never prompts a human, replies at most once.
        self.user_proxy = autogen.UserProxyAgent(
            name="user_proxy",
            human_input_mode="NEVER",
            max_consecutive_auto_reply=1,
        )

    def load_templates(self) -> Dict[str, Any]:
        """Load prompt templates, seeding the file with defaults on first use.

        Returns:
            Mapping of template id to a dict with ``name``, ``prompt``
            and ``description`` keys.
        """
        if not self.templates_path.exists():
            default_templates = {
                "general_query": {
                    "name": "General Query",
                    "prompt": "Please provide information about {topic}",
                    "description": "A general purpose query template"
                },
                "data_analysis": {
                    "name": "Data Analysis",
                    "prompt": "Analyze the following data and provide insights: {data}",
                    "description": "Template for data analysis tasks"
                },
                "code_generation": {
                    "name": "Code Generation",
                    "prompt": "Generate {language} code for: {requirement}",
                    "description": "Template for code generation tasks"
                }
            }
            # parents=True: the "templates" directory itself may not exist yet;
            # exist_ok alone would raise FileNotFoundError on a clean checkout.
            self.templates_path.parent.mkdir(parents=True, exist_ok=True)
            with open(self.templates_path, 'w') as f:
                json.dump(default_templates, f, indent=2)
            # No need to re-read the file we just wrote.
            return default_templates
        with open(self.templates_path, 'r') as f:
            return json.load(f)

    def save_template(self, template_name: str, template_data: Dict[str, Any]):
        """Add or overwrite a prompt template and persist the full set to disk.

        Args:
            template_name: Key under which the template is stored.
            template_data: Template dict (``name``/``prompt``/``description``).
        """
        templates = self.load_templates()
        templates[template_name] = template_data
        with open(self.templates_path, 'w') as f:
            json.dump(templates, f, indent=2)

    def generate_response(self, prompt: str) -> str:
        """Generate a response for *prompt* via a one-shot AutoGen chat.

        Returns:
            The assistant's last message content, or "" if no reply exists.
        """
        # Initiate a chat between the proxy and the assistant agent.
        self.user_proxy.initiate_chat(
            self.assistant,
            message=prompt
        )
        # Return the last message recorded for the assistant.
        chat_history = self.user_proxy.chat_messages[self.assistant.name]
        return chat_history[-1]["content"] if chat_history else ""

    def format_prompt(self, template_name: str, **kwargs) -> str:
        """Format a stored template with the provided variables.

        Args:
            template_name: Key of the template to use.
            **kwargs: Values substituted into the template's ``{placeholders}``.

        Raises:
            ValueError: If the template name is unknown.
            KeyError: If a required placeholder is missing from *kwargs*.
        """
        templates = self.load_templates()
        if template_name not in templates:
            raise ValueError(f"Template '{template_name}' not found")
        template = templates[template_name]
        return template["prompt"].format(**kwargs)