Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -143,16 +143,40 @@ def initialize_llm():
|
|
| 143 |
|
| 144 |
llm = ChatGroq(
|
| 145 |
model="meta-llama/llama-4-scout-17b-16e-instruct",
|
| 146 |
-
temperature=0.
|
| 147 |
max_tokens=200,
|
| 148 |
max_retries=0,
|
| 149 |
groq_api_key=groq_api_key
|
| 150 |
)
|
| 151 |
return llm
|
| 152 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 153 |
# Initialize database and LLM
|
| 154 |
db = setup_database()
|
| 155 |
llm = initialize_llm()
|
|
|
|
| 156 |
|
| 157 |
# Database agent setup
|
| 158 |
system_message = """
|
|
@@ -497,7 +521,7 @@ def format_customer_response(input_string: str) -> str:
|
|
| 497 |
user_prompt = f"User Query: {user_query}"
|
| 498 |
|
| 499 |
# 🔹 Generate response using LLM (system + user messages)
|
| 500 |
-
response_msg =
|
| 501 |
SystemMessage(content=system_prompt),
|
| 502 |
HumanMessage(content=user_prompt)
|
| 503 |
])
|
|
|
|
| 143 |
|
| 144 |
llm = ChatGroq(
|
| 145 |
model="meta-llama/llama-4-scout-17b-16e-instruct",
|
| 146 |
+
temperature=0.05,
|
| 147 |
max_tokens=200,
|
| 148 |
max_retries=0,
|
| 149 |
groq_api_key=groq_api_key
|
| 150 |
)
|
| 151 |
return llm
|
| 152 |
|
| 153 |
+
@st.cache_resource
def initialize_llm_high():
    """Initialize a higher-temperature Groq LLM for more varied responses.

    Mirrors ``initialize_llm`` but uses ``temperature=0.8`` instead of the
    near-deterministic setting, for use where creative phrasing is wanted.
    The client is built once per session (``st.cache_resource``). If no API
    key can be found, the Streamlit script is halted with ``st.stop()``.

    Returns:
        ChatGroq: a configured Groq chat-model client.
    """
    # Prefer Streamlit secrets; fall back to the environment variable.
    # Catch Exception rather than a bare `except:` so SystemExit /
    # KeyboardInterrupt are not silently swallowed during the lookup.
    try:
        groq_api_key = st.secrets["GROQ_API_KEY"]
    except Exception:
        groq_api_key = os.getenv("GROQ_API_KEY")

    if not groq_api_key:
        st.error("⚠️ GROQ_API_KEY not found! Please set it in .streamlit/secrets.toml or as an environment variable.")
        st.info("Create a file `.streamlit/secrets.toml` with:\n```\nGROQ_API_KEY = \"your-api-key-here\"\n```")
        st.stop()

    llm = ChatGroq(
        model="meta-llama/llama-4-scout-17b-16e-instruct",
        temperature=0.8,  # higher than initialize_llm's 0.05 — more varied output
        max_tokens=200,
        max_retries=0,
        groq_api_key=groq_api_key
    )
    return llm
|
| 175 |
+
|
| 176 |
# Initialize database and LLM
|
| 177 |
db = setup_database()
|
| 178 |
llm = initialize_llm()
|
| 179 |
+
llmhigh = initialize_llm_high()
|
| 180 |
|
| 181 |
# Database agent setup
|
| 182 |
system_message = """
|
|
|
|
| 521 |
user_prompt = f"User Query: {user_query}"
|
| 522 |
|
| 523 |
# 🔹 Generate response using LLM (system + user messages)
|
| 524 |
+
response_msg = llmhigh.predict_messages([
|
| 525 |
SystemMessage(content=system_prompt),
|
| 526 |
HumanMessage(content=user_prompt)
|
| 527 |
])
|