File size: 2,088 Bytes
bdcf6b5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
import logging
from langchain.prompts import ChatPromptTemplate
from langchain.schema.output_parser import StrOutputParser
from langchain_community.chat_models import ChatOpenAI
from config import template
import gradio as gr

# Module-level logger; DEBUG so prompt dumps in dispatch() are visible.
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)

# Dictionary to store conversation history for each user.
# Maps username -> list of turns: user turns are stored as "username: text",
# model replies are stored verbatim with no speaker prefix (see dispatch()).
# NOTE(review): module-level state — history is shared across sessions and
# lost on restart; confirm that is acceptable for this app.
user_conversations = {}

def dispatch(username: str, input_text: str):
    """Handle one chat turn for ``username``.

    Appends the message to that user's history, runs the full history
    through the prompt | model | parser chain, stores the model's reply,
    and returns the entire conversation rendered as Markdown.

    :param username: key identifying the per-user conversation history.
    :param input_text: the user's new message for this turn.
    :returns: Markdown string of the whole conversation so far.
    """
    # The module-level dict is mutated, never rebound, so no `global`
    # statement is needed. setdefault initializes new users in one step.
    history = user_conversations.setdefault(username, [])

    model_name = 'gpt-3.5-turbo-16k'
    model = ChatOpenAI(model_name=model_name, verbose=True)

    # Record the user's turn with a speaker prefix so the prompt (and the
    # Markdown rendering below) shows who spoke.
    history.append(f"{username}: {input_text}")

    # The model sees the entire accumulated conversation, not just this turn.
    full_prompt = "\n".join(history)

    prompt = ChatPromptTemplate.from_template(template)

    # Lazy %s formatting: the string is only built if INFO is emitted.
    log.info("\n\n================ UNFILLED PROMPT:\n%s", template)

    chain = (
        {
            # The chain's invoke() argument is deliberately ignored: the
            # "question" slot is always the full history captured above.
            "question": lambda _ignored: full_prompt,
        }
        | prompt
        | model
        | StrOutputParser()
    )

    result = chain.invoke(input_text)

    # Append model's response to the conversation history without adding
    # a "Model:" prefix, so it is never re-attributed to a speaker.
    history.append(result)

    # Render the history as Markdown. Split only when the "name: text"
    # separator is actually present. The original checked for a bare ':'
    # but split on ': ', which raised IndexError on model replies that
    # contained a colon without a following space, and mislabeled
    # un-prefixed model replies with the *user's* name.
    rendered = []
    for msg in history:
        if ': ' in msg:
            speaker, text = msg.split(': ', 1)
            rendered.append(f"**{speaker}**: {text}")
        else:
            # Model reply with no speaker prefix — show it as-is.
            rendered.append(msg)

    return "\n\n".join(rendered)

# Gradio UI: two text inputs (username + message) feed dispatch(), which
# returns the running conversation rendered as Markdown.
username_box = gr.Textbox(placeholder="Enter your username", label="Username")
message_box = gr.Textbox(placeholder="Enter your message", label="Message")

iface = gr.Interface(
    fn=dispatch,
    inputs=[username_box, message_box],
    outputs=gr.Markdown(),  # Markdown output displays the chat history
)

iface.launch(share=True)