File size: 3,373 Bytes
94b7b89
6a70465
 
b67d34a
4f67380
aebfac0
7b463a7
94b7b89
6a70465
 
 
 
 
 
 
 
 
 
662ba2f
6a70465
 
 
 
 
 
 
 
 
 
 
 
 
 
c03f2c4
256796e
 
c03f2c4
94b7b89
 
 
 
 
c03f2c4
6a70465
94b7b89
 
6a70465
d906520
94b7b89
 
6a70465
c2d5266
682a315
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
94b7b89
c2d5266
94b7b89
c2d5266
 
 
 
 
 
 
 
662ba2f
dd49b59
6a70465
c2d5266
4f67380
 
d3b0511
 
7199219
6d03417
a2a550a
 
4f67380
d3b0511
89c7f6e
47b6e5b
91e3aa3
f9c0cce
4f67380
6a70465
c2d5266
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
import streamlit as st
import time
from langchain.schema import HumanMessage, SystemMessage, AIMessage
from langchain.chat_models import ChatOpenAI
import openai
from IPython.display import Audio
import base64

def get_chatmodel_response(question):
    """Send *question* to the chat model and return the reply text.

    The user's message is appended to the session conversation history
    exactly once; the model call is then retried up to ``max_retries``
    times when the provider reports a rate limit.

    Args:
        question: The user's input text.

    Returns:
        The assistant's reply string, or ``None`` if every attempt failed
        or a non-rate-limit error occurred.
    """
    max_retries = 3

    # Record the question ONCE, outside the retry loop. The original code
    # appended it on every attempt, so each rate-limit retry duplicated
    # the user's message in the conversation history sent to the model.
    st.session_state['flowmessages'].append(HumanMessage(content=question))

    for _attempt in range(max_retries):
        try:
            answer = chat(st.session_state['flowmessages'])
            # Only record the assistant turn after a successful call.
            st.session_state['flowmessages'].append(AIMessage(content=answer.content))
            return answer.content
        except Exception as e:
            print(f"Error: {e}")
            if "Rate limit" in str(e):
                print("Rate limit exceeded. Waiting and retrying...")
                time.sleep(5)  # Adjust the waiting time as needed
            else:
                # Anything other than a rate limit is not retryable here.
                print("Unhandled exception. Please try again later.")
                break

    print("Exceeded the maximum number of retries. Please try again later.")
    return None

# --- Streamlit page setup ---------------------------------------------
st.set_page_config(page_title="Sisi Chatbot")
st.snow()      # decorative snow animation on load
st.balloons()  # decorative balloons animation on load
st.header("Hey, I'm Sisi!")

# Pull OPENAI_API_KEY (and any other settings) from a local .env file.
from dotenv import load_dotenv
load_dotenv()
import os

# Chat model shared by every request in this session.
chat = ChatOpenAI(temperature=0.5)

# System prompt that defines Sisi's persona; reproduced verbatim.
_SYSTEM_PROMPT = "You are an AI Friend, your name is Sisi and you was developed by Sailesh on December 5 2023. First get the name of the user and become his friend. then, You have to be a nice friend and an AI assistant to the users and help them with what information they need. It should be short and sharp"

# Seed the conversation history exactly once per browser session.
if 'flowmessages' not in st.session_state:
    st.session_state['flowmessages'] = [SystemMessage(content=_SYSTEM_PROMPT)]

# --- Input form (clears its text box after each submission) ------------
with st.form(key='my_form', clear_on_submit=True):
    # Custom CSS that enlarges and softly shadows the text input box.
    text_input_css = """
        <style>
            .stTextInput {
                border-radius: 15px;
                padding: 12px;
                margin-top: 10px;
                margin-bottom: 10px;
                box-shadow: 2px 2px 5px #888888;
                border: 1px solid #dddddd;
                font-size: 16px;
                width: 100%;  /* Make the input box full width */
                height: 100px;  /* Set the height of the input box */
            }
        </style>
        """
    st.markdown(text_input_css, unsafe_allow_html=True)

    input_question = st.text_input("Type here.", key="input")
    submit = st.form_submit_button("Submit")

# If the "Submit" button is clicked
if submit:

    
    # Display loading message while processing
    with st.spinner("Analyzing..."):
        st.header(":blue[You]", divider=True)
        st.caption(input_question)
        
        st.header("Sisi", divider=True)
        response = get_chatmodel_response(input_question)

    if response is not None:
       
        st.write(response)
        # Text-to-speech
        audio_response = openai.audio.speech.create(
            model="tts-1",
            voice="nova",
            input=response,
            response_format="mp3",
            speed=1.0
        )
        
        # Embed audio in the webpage without saving it
        st.header(':blue[Listen] :loud_sound:')
        st.audio(audio_response.content,format="audio/wav",start_time=0)
          
        
    else:
        st.subheader("Error: Unable to get response. Please try again later.")