# Gradio chat app: keeps a per-user conversation history and routes each
# turn through a LangChain prompt | model | parser chain.
| import logging | |
| from langchain.prompts import ChatPromptTemplate | |
| from langchain.schema.output_parser import StrOutputParser | |
| from langchain_community.chat_models import ChatOpenAI | |
| from config import template | |
| import gradio as gr | |
# Module-level logger for this app; record everything down to DEBUG.
# NOTE(review): no handler/basicConfig is set up here — output depends on the
# root logger's configuration at runtime; confirm messages actually appear.
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)

# Dictionary to store conversation history for each user.
# Maps username -> list of messages in arrival order; user messages are stored
# as "username: text", model replies are stored verbatim (see dispatch()).
user_conversations = {}
def dispatch(username: str, input_text: str) -> str:
    """Handle one chat turn for *username* and return the transcript as Markdown.

    The user's message is appended to their per-user history, the whole
    history is substituted into the prompt template, and the model's reply
    is appended to the history as well.

    Args:
        username: Key identifying which conversation to extend.
        input_text: The user's new message for this turn.

    Returns:
        The full conversation so far, one "**speaker**: text" paragraph
        per message, suitable for a Markdown component.
    """
    # Start a fresh history for first-time users.  No `global` statement is
    # needed: we only mutate the dict in place, we never rebind the name.
    history = user_conversations.setdefault(username, [])

    # NOTE(review): the model is constructed on every call; hoisting it to
    # module level would avoid repeated construction, but would also move the
    # (possibly failing) client setup to import time — left as-is.
    model = ChatOpenAI(model_name='gpt-3.5-turbo-16k', verbose=True)

    # Append the user's input to the conversation history.
    history.append(f"{username}: {input_text}")

    # The entire transcript becomes the {question} value in the template.
    full_prompt = "\n".join(history)
    prompt = ChatPromptTemplate.from_template(template)
    # Lazy %-args: the message string is only built if the record is emitted.
    log.info("\n\n================ UNFILLED PROMPT:\n%s", template)

    chain = (
        {
            # The chain's runtime input (input_text) is deliberately ignored:
            # the prompt always carries the full conversation history.
            "question": lambda _query: full_prompt,
        }
        | prompt
        | model
        | StrOutputParser()
    )
    result = chain.invoke(input_text)

    # Append the model's response verbatim (no "Model:" speaker prefix).
    history.append(result)

    # Render the history as Markdown.  Guard on ': ' — the exact separator we
    # write — not a bare ':'.  The old check (`':' in msg`) let a message like
    # "12:30" through, where split(': ', 1) yields one element and the [1]
    # index raised IndexError.
    rendered = []
    for msg in history:
        if ': ' in msg:
            speaker, text = msg.split(': ', 1)
            rendered.append(f"**{speaker}**: {text}")
        else:
            rendered.append(f"**{username}**: {msg}")
    return "\n\n".join(rendered)
# ---- Gradio UI -------------------------------------------------------------
# Two text inputs (username + message) feed dispatch(); the Markdown
# transcript it returns is rendered as the single output.
username_box = gr.Textbox(placeholder="Enter your username", label="Username")
message_box = gr.Textbox(placeholder="Enter your message", label="Message")

iface = gr.Interface(
    fn=dispatch,
    inputs=[username_box, message_box],
    outputs=gr.Markdown(),
)
iface.launch(share=True)