Spaces:
Sleeping
Sleeping
File size: 1,157 Bytes
1ce8e5f 7441ead 44dfb88 7441ead 567c17c 7441ead 567c17c 7441ead 567c17c 7441ead 567c17c 5d53914 567c17c 7441ead 567c17c 7441ead 567c17c 7441ead 1ce8e5f 7441ead 1ce8e5f 44dfb88 1ce8e5f 7441ead 1ce8e5f 7441ead 3459a2b |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 |
import huggingface_hub as hf_hub
import gradio as gr
import os
# Shared Hugging Face Inference API client used by all handlers below.
# NOTE(review): reads HUGGINGFACEHUB_API_TOKEN at import time and raises
# KeyError if the environment variable is not set — intentional fail-fast
# for a Space, where the secret is configured in the runtime.
client = hf_hub.InferenceClient(token = os.environ['HUGGINGFACEHUB_API_TOKEN'])
def create_chat_summary(history):
context = '''
Please summarize following conversation so that it become context to further conversation:
Chatbot: I am a chemistry professor. I help student to understand chemistry.
'''
for ques, ans in history:
context += f'''
Human : {ques}
Chatbot : {ans}
'''
return context
def create_prompt(question, history):
    """Compose the final generation prompt for the current question.

    The chat history is first condensed by the remote model (via
    ``create_chat_summary`` + ``client.text_generation``) and the resulting
    summary is embedded as context ahead of the new question.

    Args:
        question: the user's latest message.
        history: list of ``(question, answer)`` pairs from earlier turns.

    Returns:
        str: the prompt to send to the chat model.
    """
    summary_request = create_chat_summary(history)
    # Remote call: summarize the conversation so far into a compact context.
    context = client.text_generation(prompt=summary_request,
                                     model='google/flan-t5-xxl')
    return f'''
You are a good chatbot that talks with human based on following context:
Context :
{context}
Human : {question}
Chatbot :
'''
def chat_interface(question, history):
    """Gradio chat callback: answer *question* given the prior *history*.

    Builds a context-aware prompt and queries the remote model, returning
    the generated text for display in the chat UI.
    """
    full_prompt = create_prompt(question, history)
    return client.text_generation(full_prompt, model='google/flan-t5-xxl')
# Wire the chat callback into a Gradio chat UI and start the server.
# (Launching at module level is the standard Hugging Face Spaces entry point.)
# Fix: removed the stray " |" artifact fused to the launch() call, which made
# the final line a syntax error.
chat_app = gr.ChatInterface(fn = chat_interface)
chat_app.launch()