File size: 3,338 Bytes
5fffd14
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f37acfa
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5fffd14
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
import os
from typing import Any, Dict, List, Optional

from groq import Groq

class QueryEngine:
    """LLM helper built on the Groq chat-completions API.

    Provides retrieval-augmented question answering, SQLite query
    generation, and SQL-result analysis, all through a single
    configurable chat model.
    """

    def __init__(self, api_key: Optional[str] = None) -> None:
        """Initialize the query engine with Groq API.

        Args:
            api_key: Groq API key. Falls back to the ``GROQ_API_KEY``
                environment variable when omitted.

        Raises:
            ValueError: If no key is supplied and none is found in the
                environment.
        """
        if not api_key:
            api_key = os.getenv("GROQ_API_KEY")
            if not api_key:
                raise ValueError("Groq API key is required")

        self.client = Groq(api_key=api_key)
        self.model = "llama3-70b-8192"  # Default model

    def _chat(self, system: str, prompt: str, *,
              temperature: float, max_tokens: int) -> str:
        """Send one system+user exchange to the chat API; return the reply text."""
        response = self.client.chat.completions.create(
            model=self.model,
            messages=[
                {"role": "system", "content": system},
                {"role": "user", "content": prompt},
            ],
            temperature=temperature,
            max_tokens=max_tokens,
        )
        return response.choices[0].message.content

    def generate_response(self, query: str,
                          context_docs: Optional[Dict[str, Any]] = None) -> str:
        """Generate a response using the LLM.

        Args:
            query: The user's question.
            context_docs: Optional retrieval result in Chroma query-result
                shape — a dict whose ``'documents'`` key holds a list of
                per-query document lists. (The previous ``List[...]``
                annotation was wrong: the body reads it with ``.get``.)

        Returns:
            The model's answer as plain text.
        """
        # Flatten the first query's retrieved documents into a context string.
        context = ""
        if context_docs:
            context = "".join(
                f"{doc}\n\n"
                for doc in context_docs.get('documents', [[]])[0]
            )

        # Create prompt with context
        prompt = f"""You are an AI assistant that helps with document analysis and answering questions.
        
Context information:
{context}

User question: {query}

Please provide a helpful, accurate, and concise answer based on the context information provided. If the context doesn't contain relevant information, say so instead of making up an answer."""

        return self._chat(
            "You are a helpful AI assistant.", prompt,
            temperature=0.2, max_tokens=1024,
        )

    def generate_sql_query(self, question: str, table_info: str) -> str:
        """Generate an SQL query for the given question and table information.

        Args:
            question: Natural-language question to answer with SQL.
            table_info: Schema description of the available table(s).

        Returns:
            The generated SQLite query, stripped of surrounding whitespace.
        """
        prompt = f"""You are an SQL and data analysis expert. Generate an appropriate SQL query using SQLite syntax for the question provided, without any explanations or code comments.
        
Table Information:
{table_info}

User Question: {question}

Generate only the SQL query, nothing else."""

        # Low temperature: SQL generation should be as deterministic as possible.
        return self._chat(
            "You are an SQL expert.", prompt,
            temperature=0.1, max_tokens=512,
        ).strip()

    def analyze_sql_results(self, question: str, sql_query: str,
                            results: str) -> str:
        """Analyze the results of an SQL query.

        Args:
            question: The original user question.
            sql_query: The SQL that was executed.
            results: Stringified query results to interpret.

        Returns:
            A plain-text interpretation of the results.
        """
        prompt = f"""You are a data analyst. Analyze the following SQL query results and provide a clear, concise interpretation.

User Question: {question}
SQL Query: {sql_query}
Query Results:
{results}

Provide a clear analysis of these results that directly answers the user's question."""

        return self._chat(
            "You are a data analyst.", prompt,
            temperature=0.2, max_tokens=1024,
        )