Ashar086 committed on
Commit
eca89a1
·
verified ·
1 Parent(s): e1dd5ab

Create modules/prompt_engineering.py

Browse files
Files changed (1) hide show
  1. modules/prompt_engineering.py +85 -0
modules/prompt_engineering.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ from pathlib import Path
3
+ import autogen
4
+ from typing import Dict, Any
5
+
6
class PromptEngineeringModule:
    """Manage JSON-backed prompt templates and generate responses via AutoGen.

    Templates live in ``templates/prompt_templates.json`` as a mapping of
    template id -> {"name", "prompt", "description"}; the ``prompt`` value is a
    ``str.format`` template. Responses are produced by a one-shot chat between
    a UserProxyAgent and an AssistantAgent backed by a Llama model.
    """

    def __init__(self):
        # Local import: only needed here for API-key lookup.
        import os

        self.templates_path = Path("templates/prompt_templates.json")
        # Read the key from the environment rather than hard-coding it in
        # source; fall back to the old placeholder so template-only use
        # (no LLM calls) keeps working without configuration.
        self.config_list = [
            {
                'model': 'llama-3.2',
                'api_key': os.environ.get('LLAMA_API_KEY', 'your-api-key'),
            }
        ]

        # Initialize AutoGen assistant/user-proxy pair.
        self.llm_config = {
            "config_list": self.config_list,
            "temperature": 0.7,
        }
        self.assistant = autogen.AssistantAgent(
            name="llama_assistant",
            llm_config=self.llm_config,
        )
        self.user_proxy = autogen.UserProxyAgent(
            name="user_proxy",
            human_input_mode="NEVER",           # fully automated: no console prompts
            max_consecutive_auto_reply=1,       # single exchange per initiate_chat
        )

    def load_templates(self) -> Dict[str, Any]:
        """Load prompt templates from the JSON file, seeding defaults on first use.

        Returns:
            Mapping of template id -> template dict with keys
            ``name``, ``prompt``, ``description``.
        """
        if not self.templates_path.exists():
            default_templates = {
                "general_query": {
                    "name": "General Query",
                    "prompt": "Please provide information about {topic}",
                    "description": "A general purpose query template"
                },
                "data_analysis": {
                    "name": "Data Analysis",
                    "prompt": "Analyze the following data and provide insights: {data}",
                    "description": "Template for data analysis tasks"
                },
                "code_generation": {
                    "name": "Code Generation",
                    "prompt": "Generate {language} code for: {requirement}",
                    "description": "Template for code generation tasks"
                }
            }
            # parents=True: create any missing intermediate directories too
            # (exist_ok alone raises FileNotFoundError if e.g. "templates/"
            # has a missing parent).
            self.templates_path.parent.mkdir(parents=True, exist_ok=True)
            with open(self.templates_path, 'w') as f:
                json.dump(default_templates, f, indent=2)
            # We just wrote these defaults; no need to re-read the file.
            return default_templates

        with open(self.templates_path, 'r') as f:
            return json.load(f)

    def save_template(self, template_name: str, template_data: Dict[str, Any]):
        """Add or overwrite a prompt template and persist the full set to disk.

        Args:
            template_name: Template id used as the JSON key.
            template_data: Template dict (``name``/``prompt``/``description``).
        """
        # load_templates() also guarantees the templates directory exists.
        templates = self.load_templates()
        templates[template_name] = template_data

        with open(self.templates_path, 'w') as f:
            json.dump(templates, f, indent=2)

    def generate_response(self, prompt: str) -> str:
        """Run a one-shot agent chat for *prompt* and return the assistant's reply.

        Returns:
            The content of the last message from the assistant, or "" if the
            chat produced no messages.
        """
        # Initialize a chat between agents.
        self.user_proxy.initiate_chat(
            self.assistant,
            message=prompt
        )

        # BUGFIX: ConversableAgent.chat_messages is keyed by the Agent
        # *object*, not by its name — indexing with self.assistant.name
        # raised KeyError. (Verify against the installed autogen version.)
        chat_history = self.user_proxy.chat_messages.get(self.assistant, [])
        return chat_history[-1]["content"] if chat_history else ""

    def format_prompt(self, template_name: str, **kwargs) -> str:
        """Fill a template's ``prompt`` string with the given variables.

        Args:
            template_name: Id of the template to use.
            **kwargs: Values for the template's ``{placeholders}``.

        Raises:
            ValueError: If *template_name* is not a known template.
            KeyError: If a placeholder required by the template is missing
                from *kwargs* (propagated from ``str.format``).
        """
        templates = self.load_templates()
        if template_name not in templates:
            raise ValueError(f"Template '{template_name}' not found")

        template = templates[template_name]
        return template["prompt"].format(**kwargs)