import streamlit as st
from groq import Groq
from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
import edge_tts
import asyncio
import os
from typing import Optional
# Groq API key, read from the environment; will be None if the variable is unset.
GROQ_API_KEY = os.getenv('GROQ_API_KEY')
class CodeAssistantBot:
    """LLM-backed assistant for a code-review chat panel.

    Answers questions about a user's code (given its run output/error) and
    condenses the running Q/A history, using a Groq-hosted Llama model
    driven through LangChain.
    """

    def __init__(self):
        # NOTE(review): this raw Groq client does not appear to be used in
        # this file — all calls go through the LangChain ChatGroq model.
        # Kept for backward compatibility; confirm before removing.
        self.client = Groq(api_key=GROQ_API_KEY)
        self.model = ChatGroq(model="llama-3.3-70b-versatile", temperature=0.6)

        # Prompt for answering a question about a code snippet plus its
        # captured output/error.
        self.analysis_prompt = ChatPromptTemplate.from_messages([
            ("system",
             """You are an expert code assistant. Analyze the code and context provided,
then give clear, helpful responses. Keep responses concise and focused on the code."""),
            ("user", """Code: {code}
Output: {output}
Error: {error}
Question: {question}"""),
        ])

        # Prompt for condensing the accumulated conversation.
        self.summary_prompt = ChatPromptTemplate.from_messages([
            ("system",
             """Summarize the conversation focusing on key technical points and insights.
Keep it brief and clear."""),
            ("user", "Conversation: {conversation}"),
        ])

    def analyze_code(self, code: str, output: str, error: str,
                     question: str) -> str:
        """Return the model's answer to *question* about *code*.

        On any failure, returns an apologetic error string rather than
        raising, so the UI never crashes on a model error.
        """
        try:
            pipeline = self.analysis_prompt | self.model | StrOutputParser()
            payload = {
                'code': code,
                'output': output,
                'error': error,
                'question': question,
            }
            return pipeline.invoke(payload)
        except Exception as e:
            return f"Sorry, I encountered an error: {str(e)}"

    def summarize_conversation(self, conversation: list) -> str:
        """Summarize a list of (question, answer) pairs.

        On any failure, returns an explanatory string rather than raising.
        """
        try:
            pipeline = self.summary_prompt | self.model | StrOutputParser()
            transcript = "\n".join(f"Q: {q}\nA: {a}" for q, a in conversation)
            return pipeline.invoke({'conversation': transcript})
        except Exception as e:
            return f"Could not generate summary: {str(e)}"
async def text_to_speech(text: str, filename: str,
                         voice: str = "fr-FR-VivienneMultilingualNeural") -> None:
    """Synthesize *text* to an audio file using Microsoft Edge TTS.

    Args:
        text: The text to speak.
        filename: Destination path for the audio. NOTE: edge-tts emits
            MP3-encoded audio by default, regardless of the file extension.
        voice: Edge TTS voice name. The default preserves the previously
            hard-coded voice, so existing callers are unaffected.
    """
    communicate = edge_tts.Communicate(text, voice)
    await communicate.save(filename)
def render_chatbot(code: str, output: str, error: str):
    """Render the chatbot UI as an integrated panel.

    Args:
        code: The user's source code under discussion.
        output: Captured output from running the code.
        error: Captured error text from running the code.

    Side effects: mutates ``st.session_state`` ("conversation",
    "audio_count") and writes summary audio files to the working directory.
    """
    # Session state persists across Streamlit reruns; initialize once.
    if "conversation" not in st.session_state:
        st.session_state.conversation = []
    if "audio_count" not in st.session_state:
        st.session_state.audio_count = 0

    # Create bot instance (rebuilt on each rerun; it holds no chat state).
    bot = CodeAssistantBot()

    # Chat bubble styling.
    st.markdown("""
    <style>
    .chat-message {
        padding: 10px;
        border-radius: 5px;
        margin-bottom: 10px;
    }
    .user-message {
        background-color: #e3f2fd;
    }
    .bot-message {
        background-color: #f5f5f5;
    }
    </style>
    """,
                unsafe_allow_html=True)

    # Input area
    user_input = st.text_input("Ask about your code...",
                               key="chat_input",
                               placeholder="Type your question here...")
    if user_input:
        # Get response and append the Q/A pair to the history.
        response = bot.analyze_code(code, output, error, user_input)
        st.session_state.conversation.append((user_input, response))

        # Generate summary and speech once the conversation is long enough.
        if len(st.session_state.conversation) > 3:
            with st.spinner("Generating conversation summary..."):
                summary = bot.summarize_conversation(
                    st.session_state.conversation)
                # BUGFIX: edge-tts produces MP3 audio by default, but this
                # previously saved the file as ".wav" and played it with
                # format="audio/wav" — a container/extension mismatch.
                audio_file = f"summary_{st.session_state.audio_count}.mp3"
                asyncio.run(text_to_speech(summary, audio_file))
                st.session_state.audio_count += 1

                # Display summary in a collapsible section.
                with st.expander("📝 Conversation Summary", expanded=False):
                    st.markdown(summary)
                    st.audio(audio_file, format="audio/mp3")

    # Chat history in a scrollable container.
    chat_container = st.container()
    with chat_container:
        for q, a in st.session_state.conversation:
            st.markdown(
                f'<div class="chat-message user-message">You:- {q}</div>',
                unsafe_allow_html=True)
            st.markdown(
                f'<div class="chat-message bot-message">Assistant:- {a}</div>',
                unsafe_allow_html=True)