# NOTE(review): the following provenance metadata was scraped page chrome that
# broke the file as Python; preserved here as a comment.
# Exported from a hosted Space — file size: 3,247 bytes, revision eca89a1.
import json
import os
from pathlib import Path
from typing import Any, Dict

import autogen
class PromptEngineeringModule:
    """Manage reusable prompt templates and generate responses via AutoGen.

    Templates are persisted as JSON under ``templates/prompt_templates.json``;
    each entry maps a template name to a dict with ``name``, ``prompt``
    (a ``str.format`` pattern), and ``description`` keys.
    """

    def __init__(self):
        # Location of the JSON file holding named prompt templates.
        self.templates_path = Path("templates/prompt_templates.json")
        # Read the key from the environment rather than hard-coding a secret;
        # the old placeholder remains the fallback for backward compatibility
        # (it will not authenticate against a real endpoint).
        self.config_list = [
            {
                'model': 'llama-3.2',
                'api_key': os.environ.get('LLAMA_API_KEY', 'your-api-key'),
            }
        ]
        # Shared LLM configuration for the assistant agent.
        self.llm_config = {
            "config_list": self.config_list,
            "temperature": 0.7,
        }
        self.assistant = autogen.AssistantAgent(
            name="llama_assistant",
            llm_config=self.llm_config,
        )
        # NEVER + max_consecutive_auto_reply=1 yields exactly one
        # request/response turn with no human in the loop.
        self.user_proxy = autogen.UserProxyAgent(
            name="user_proxy",
            human_input_mode="NEVER",
            max_consecutive_auto_reply=1,
        )

    def load_templates(self) -> Dict[str, Any]:
        """Load prompt templates from the JSON file, seeding defaults if absent.

        Returns:
            Mapping of template name to its template dict.
        """
        if not self.templates_path.exists():
            default_templates = {
                "general_query": {
                    "name": "General Query",
                    "prompt": "Please provide information about {topic}",
                    "description": "A general purpose query template"
                },
                "data_analysis": {
                    "name": "Data Analysis",
                    "prompt": "Analyze the following data and provide insights: {data}",
                    "description": "Template for data analysis tasks"
                },
                "code_generation": {
                    "name": "Code Generation",
                    "prompt": "Generate {language} code for: {requirement}",
                    "description": "Template for code generation tasks"
                }
            }
            # parents=True so a missing intermediate directory does not raise.
            self.templates_path.parent.mkdir(parents=True, exist_ok=True)
            with open(self.templates_path, 'w', encoding='utf-8') as f:
                json.dump(default_templates, f, indent=2)
            # The defaults are already in memory; no need to re-read the file.
            return default_templates
        with open(self.templates_path, 'r', encoding='utf-8') as f:
            return json.load(f)

    def save_template(self, template_name: str, template_data: Dict[str, Any]):
        """Add or overwrite a prompt template and persist the full set to disk.

        Args:
            template_name: Key under which the template is stored.
            template_data: Template dict (``name``/``prompt``/``description``).
        """
        templates = self.load_templates()
        templates[template_name] = template_data
        with open(self.templates_path, 'w', encoding='utf-8') as f:
            json.dump(templates, f, indent=2)

    def generate_response(self, prompt: str) -> str:
        """Run a one-turn chat with the assistant and return its last reply.

        Args:
            prompt: The fully formatted prompt to send.

        Returns:
            The assistant's final message content, or "" if no reply exists.
        """
        self.user_proxy.initiate_chat(
            self.assistant,
            message=prompt
        )
        # BUG FIX: chat_messages is keyed by the Agent object, not its name;
        # indexing by name hit the defaultdict and always returned "".
        chat_history = self.user_proxy.chat_messages[self.assistant]
        return chat_history[-1]["content"] if chat_history else ""

    def format_prompt(self, template_name: str, **kwargs) -> str:
        """Fill a named template's ``prompt`` pattern with keyword values.

        Args:
            template_name: Name of a stored template.
            **kwargs: Values for the placeholders in the template's pattern.

        Returns:
            The formatted prompt string.

        Raises:
            ValueError: If ``template_name`` is not a stored template.
            KeyError: If a placeholder in the pattern is missing from kwargs.
        """
        templates = self.load_templates()
        if template_name not in templates:
            raise ValueError(f"Template '{template_name}' not found")
        template = templates[template_name]
        return template["prompt"].format(**kwargs)