File size: 1,391 Bytes
88da5ae
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4baf78f
88da5ae
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
##############################################################################
# Main script that builds the UI & connects the logic for an LLM-driven
# chat frontend ("Hugging Chat"): the user picks a chat model, types a
# question, and the model's answer is rendered in a text area below.
#
# @philmui
# Mon May 1 18:34:45 PDT 2023
##############################################################################


import streamlit as st
from langchain.chat_models import ChatOpenAI
from langchain.schema import HumanMessage

# --- Page chrome & model picker ---------------------------------------------
st.set_page_config(page_title="Chat", page_icon=":chat:")
st.header("💬 Hugging Chat 💬")

# Closed set of chat models the user may choose between.
_MODEL_CHOICES = ('gpt-4', 'gpt-3.5-turbo')

# Two equal-width columns; only the first is used (for the model selector).
col1, col2 = st.columns([1, 1])

with col1:
    # `option_llm` is read later when the ChatOpenAI agent is constructed.
    option_llm = st.selectbox("Model", _MODEL_CHOICES)

def get_question():
    """Render the question input box and return whatever the user typed.

    Returns the current contents of the text area (an empty string until
    the user enters something).
    """
    return st.text_area(
        label="Your question ...",
        placeholder="Ask me anything ...",
        key="question_text",
        label_visibility="collapsed",
    )

# --- Query the selected LLM and render its answer ---------------------------
question_text = get_question()
if question_text and len(question_text) > 1:
    output = ""
    # Build a fresh chat agent each run with the model chosen in the sidebar
    # selectbox; temperature 0.5 keeps answers moderately deterministic.
    agent = ChatOpenAI(model_name=option_llm, temperature=0.5)
    response = agent([HumanMessage(content=question_text)])
    print(f"> {response}")

    if response and response.content:
        output = response.content
        # Scale the answer box with the answer length, but clamp both ways:
        # 280 px ceiling as before, plus a 100 px floor — without it a short
        # answer produces an unusably tiny box, and recent Streamlit versions
        # reject heights below their ~68 px minimum outright.
        height = max(min(2 * len(output), 280), 100)
        st.text_area(label="In response ...",
                     value=output, height=height)