Spaces:
Sleeping
Sleeping
Vlad Bastina
committed on
Commit
·
068665b
1
Parent(s):
74d2816
gemini changed and fool proofing
Browse files
__pycache__/sentiment_analysis.cpython-312.pyc
CHANGED
|
Binary files a/__pycache__/sentiment_analysis.cpython-312.pyc and b/__pycache__/sentiment_analysis.cpython-312.pyc differ
|
|
|
sentiment_analysis.py
CHANGED
|
@@ -8,7 +8,10 @@ def get_analysis(file_path)->str:
|
|
| 8 |
"""Makes the pipeline to perform the sentiment analysis on a .wav file"""
|
| 9 |
transcript = get_transcription_from_sound(file_path)
|
| 10 |
|
| 11 |
-
|
|
|
|
|
|
|
|
|
|
| 12 |
|
| 13 |
return transcript , analysis
|
| 14 |
|
|
|
|
| 8 |
"""Makes the pipeline to perform the sentiment analysis on a .wav file"""
|
| 9 |
transcript = get_transcription_from_sound(file_path)
|
| 10 |
|
| 11 |
+
if len(transcript) > 0:
|
| 12 |
+
analysis = ask_gemini(transcript)
|
| 13 |
+
else :
|
| 14 |
+
analysis = "Did not provide any message in order to be analysed"
|
| 15 |
|
| 16 |
return transcript , analysis
|
| 17 |
|
streamlit_app.py
CHANGED
|
@@ -31,8 +31,8 @@ def display_message(role, content, image_path=None):
|
|
| 31 |
"""Displays the messages on the screen along with the plots"""
|
| 32 |
if role == "user":
|
| 33 |
st.chat_message(role).markdown(f"**User:** {content}")
|
| 34 |
-
elif role == "
|
| 35 |
-
st.chat_message(role).markdown(f"**
|
| 36 |
if image_path:
|
| 37 |
# Display the image if it exists
|
| 38 |
try:
|
|
@@ -70,18 +70,18 @@ def main():
|
|
| 70 |
save_audio(audio["bytes"], save_path)
|
| 71 |
|
| 72 |
time.sleep(0.1)
|
| 73 |
-
with st.spinner('Fetching response from
|
| 74 |
user_message, gemini_response = get_analysis("recorded_audio.wav")
|
| 75 |
|
| 76 |
st.session_state.messages.append({"role": "user", "content": user_message})
|
| 77 |
-
st.session_state.messages.append({"role": "
|
| 78 |
|
| 79 |
# Display the chat history
|
| 80 |
for msg in st.session_state.messages:
|
| 81 |
# Check if the message is from Gemini and display the image accordingly
|
| 82 |
if msg["role"] == "user":
|
| 83 |
display_message(msg["role"], msg["content"])
|
| 84 |
-
elif msg["role"] == "
|
| 85 |
display_message(msg["role"], msg["content"], msg.get("image_url"))
|
| 86 |
|
| 87 |
time.sleep(0.1)
|
|
|
|
| 31 |
"""Displays the messages on the screen along with the plots"""
|
| 32 |
if role == "user":
|
| 33 |
st.chat_message(role).markdown(f"**User:** {content}")
|
| 34 |
+
elif role == "ai":
|
| 35 |
+
st.chat_message(role).markdown(f"**Ai:** {content}")
|
| 36 |
if image_path:
|
| 37 |
# Display the image if it exists
|
| 38 |
try:
|
|
|
|
| 70 |
save_audio(audio["bytes"], save_path)
|
| 71 |
|
| 72 |
time.sleep(0.1)
|
| 73 |
+
with st.spinner('Fetching response from Ai...'):
|
| 74 |
user_message, gemini_response = get_analysis("recorded_audio.wav")
|
| 75 |
|
| 76 |
st.session_state.messages.append({"role": "user", "content": user_message})
|
| 77 |
+
st.session_state.messages.append({"role": "ai", "content": gemini_response, "image_url": "plot.png"})
|
| 78 |
|
| 79 |
# Display the chat history
|
| 80 |
for msg in st.session_state.messages:
|
| 81 |
# Check if the message is from Gemini and display the image accordingly
|
| 82 |
if msg["role"] == "user":
|
| 83 |
display_message(msg["role"], msg["content"])
|
| 84 |
+
elif msg["role"] == "ai":
|
| 85 |
display_message(msg["role"], msg["content"], msg.get("image_url"))
|
| 86 |
|
| 87 |
time.sleep(0.1)
|