Spaces:
Build error
Build error
File size: 3,508 Bytes
e094945 bb32f7f e094945 798e85c e094945 bb32f7f e094945 798e85c e094945 37b25b4 e094945 37b25b4 e094945 37b25b4 e094945 37b25b4 e094945 37b25b4 e094945 37b25b4 e094945 37b25b4 e094945 37b25b4 e094945 37b25b4 e094945 37b25b4 e094945 37b25b4 e094945 37b25b4 e094945 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 |
# app.py β Streamlit + LangChain + Groq
import os, asyncio, streamlit as st
from dotenv import load_dotenv
from langchain.schema import SystemMessage, HumanMessage, AIMessage
from langchain_groq import ChatGroq
# ------------------------- bootstrap event-loop ---------------------
# On Windows, switch to the selector policy *before* any loop is created,
# so the replacement loop built below comes from that policy. (The original
# set the policy after creating the loop, so a Proactor loop was already in
# place; it also re-imported asyncio under an alias for no reason.)
if os.name == "nt":
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
try:
    # No-op if Streamlit's runtime already has a loop running.
    asyncio.get_running_loop()
except RuntimeError:
    # No running loop in this thread — install one for libraries that
    # expect asyncio.get_event_loop() to succeed.
    asyncio.set_event_loop(asyncio.new_event_loop())
# --------------------------- UI / SETTINGS --------------------------
st.set_page_config(page_title="Groq Chatbot", page_icon="π€")
st.title("π€ Groqβpowered Advanced Chatbot")
st.caption("DeepSeekβR1βDistillβLlamaβ70B β’ LangChain β’ Streamlit")

# Sidebar: API key entry plus the two sampling knobs read by get_llm().
with st.sidebar:
    st.header("π Groq API Key")
    groq_key = st.text_input("Paste your key here", type="password")
    st.divider()
    temperature = st.slider(
        "Temperature", min_value=0.0, max_value=1.2, value=0.7, step=0.1
    )
    top_p = st.slider(
        "Topβp", min_value=0.0, max_value=1.0, value=1.0, step=0.05
    )
    st.markdown("*All values remain local to your browser.*")

# One rerun per submitted message; None when nothing was typed.
user_q = st.chat_input("Type your messageβ¦")
# -------------------------- LLM (lazy init) -------------------------
MODEL_NAME = "deepseek-r1-distill-llama-70b"

def get_llm():
    """Return a cached ChatGroq client kept in sync with the sidebar.

    The client is created on first use and rebuilt whenever the API key
    changes (the original cached the first key forever, so pasting a new
    key mid-session was silently ignored). Sampling parameters are
    refreshed on every call so slider changes take effect immediately.

    Returns:
        ChatGroq: the shared chat client for this session.

    Raises:
        ValueError: if no key is in the sidebar or GROQ_API_KEY env var.
    """
    key = groq_key or os.getenv("GROQ_API_KEY")
    if not key:
        raise ValueError("Add your Groq key in the sidebar.")
    # (Re)build the client on first call or when the key changed.
    if "llm" not in st.session_state or st.session_state.get("llm_key") != key:
        os.environ["GROQ_API_KEY"] = key  # for the client
        st.session_state.llm = ChatGroq(
            model=MODEL_NAME,
            groq_api_key=key,
            temperature=temperature,
            top_p=top_p,
        )
        st.session_state.llm_key = key
    # Refresh sampling params if the sliders changed since creation.
    llm = st.session_state.llm
    llm.temperature = temperature
    llm.top_p = top_p
    return llm
# ------------------------ conversation memory -----------------------
# Seed the per-session transcript exactly once; the system prompt sits
# at index 0 and is never shown to the user.
if "history" not in st.session_state:
    st.session_state["history"] = [
        SystemMessage(content="You are an advanced, helpful assistant.")
    ]
# ----------------------------- main loop ----------------------------
# Handle a newly submitted message: append it, ask the model, append the
# reply. Rendering is left entirely to the history-replay loop at the
# bottom of the file — the original also rendered the reply inline here,
# which made every assistant answer appear twice per rerun.
if user_q:
    st.session_state.history.append(HumanMessage(content=user_q))
    try:
        with st.spinner("Thinkingβ¦"):
            answer = get_llm().invoke(st.session_state.history).content
        st.session_state.history.append(AIMessage(content=answer))
    except Exception as err:
        # Surface any failure (missing key, network, model error) without
        # crashing the app; the user message stays in history.
        st.error(f"**Error:** {err}")
# ------------------------ display chat history ----------------------
# Replay the whole conversation each rerun, skipping the system prompt.
for msg in st.session_state.history[1:]:
    speaker = "user" if isinstance(msg, HumanMessage) else "assistant"
    st.chat_message(speaker).markdown(msg.content)
|