File size: 2,543 Bytes
839817f
 
 
b5f581d
 
7281234
839817f
 
54fc86e
839817f
7281234
839817f
 
 
 
 
 
 
 
7281234
 
 
 
 
 
 
 
 
 
 
 
6911ee7
7281234
 
6911ee7
7281234
 
 
 
 
 
 
 
 
839817f
7281234
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
839817f
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
import os
from dotenv import load_dotenv
from openai import OpenAI
from groq import Groq
import streamlit as st

load_dotenv()

def generate_response(system_prompt: str, user_prompt: str, temp: float = 0.7) -> str:
    """
    Generate a chat completion, trying Gemini first and falling back to Groq.

    Gemini is reached through its OpenAI-compatible endpoint. If anything in
    the Gemini path fails (missing key, network error, API error), the same
    prompt is retried against Groq's llama-3.3-70b-versatile model.

    Args:
        system_prompt (str): The system prompt that sets the model's context.
        user_prompt (str): The user's input/question.
        temp (float): Sampling temperature passed to whichever backend
            answers. Defaults to 0.7.

    Returns:
        str: The model's response text.

    Raises:
        Exception: Re-raises the Groq-path error when BOTH backends fail
            (after reporting both errors via st.error).
    """
    # Build the message list once so both backends send identical prompts.
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_prompt},
    ]

    try:
        # Primary path: Gemini via its OpenAI-compatible API surface.
        api_key = os.getenv("GEMINI_API_KEY")
        if not api_key:
            raise ValueError("GEMINI_API_KEY not found in environment variables")

        client = OpenAI(
            api_key=api_key,
            base_url="https://generativelanguage.googleapis.com/v1beta/openai/"
        )
        # st.info("🤖 Using Gemini model")  # Debug message
        response = client.chat.completions.create(
            model="gemini-2.0-flash",
            messages=messages,
            temperature=temp
        )
        return response.choices[0].message.content

    except Exception as e:
        # Fallback path: any Gemini failure (not just missing key) lands here.
        try:
            groq_key = os.getenv("GROQ_API_KEY")
            if not groq_key:
                raise ValueError("GROQ_API_KEY not found in environment variables")

            groq_client = Groq(api_key=groq_key)
            st.warning("⚠️ Gemini failed, using Groq model")  # Debug message

            response = groq_client.chat.completions.create(
                messages=messages,
                model="llama-3.3-70b-versatile",
                temperature=temp
            )
            return response.choices[0].message.content

        except Exception as groq_error:
            # Surface both failures to the Streamlit UI, then propagate the
            # Groq error so callers can handle/display the hard failure.
            st.error(f"Both models failed. Gemini error: {e}, Groq error: {groq_error}")
            raise

# Quick manual smoke test when the module is run directly.
if __name__ == "__main__":
    demo_system = "You are a helpful assistant."
    demo_question = "Explain to me how AI works"

    print(generate_response(demo_system, demo_question))