# NOTE(review): the three lines above/below this file's code were Hugging Face
# Spaces status-banner residue ("Spaces: / Runtime error") captured by the
# scraper — they are not part of the program.
# Standard library
import os

# Third-party
import streamlit as st
from PIL import Image
from streamlit_chat import message

from langchain.chains import ConversationChain
from langchain.chains.conversation.memory import ConversationBufferWindowMemory
from langchain.chat_models import ChatOpenAI
from langchain.prompts import PromptTemplate

#from transformers import AutoModelForCausalLM, AutoTokenizer
st.title("Nexus TCM Chatbot")

#query = st.text_input("Query: ", key="input")
# "Clear the input box after hitting Enter" technique, per:
#https://discuss.streamlit.io/t/clear-input-box-after-hitting-enter/33824/2
#https://discuss.streamlit.io/t/how-do-you-clear-the-input-box-after-hitting-enter/45691/4
#https://stackoverflow.com/questions/77093266/how-to-clear-input-field-after-hitting-enter-in-streamlit
if "my_text" not in st.session_state:
    st.session_state.my_text = ""


def submit():
    """on_change callback for the text input.

    Stashes the typed text into ``st.session_state.my_text`` and blanks the
    widget's own state, so the input box appears empty after Enter while the
    submitted text survives the Streamlit rerun.
    """
    st.session_state.my_text = st.session_state.widget
    st.session_state.widget = ""


st.text_input("Enter text here", key="widget", on_change=submit)

# The user's latest query (empty string until something is submitted).
query = st.session_state.my_text
#st.write(query)
# Sidebar logo. The "resolve/" URL serves the raw JPEG; the commented-out
# "blob/" URL returns an HTML page instead of the image, which is why it was
# replaced.
#logo_url = "https://huggingface.co/spaces/mathslearn/chatbot_test_streamlit/blob/main/logo.jpeg"
logo_url = "https://huggingface.co/spaces/mathslearn/chatbot_test_streamlit/resolve/main/logo.jpeg"

# NOTE(review): logo_height is never passed to st.image / st.sidebar.image —
# it currently has no effect. Kept for backward compatibility.
logo_height = 25

# Display the logo with adjusted column width
#st.image(logo_url, caption="Nexus TCM Chatbot Logo", use_column_width=True)
#st.image(logo_url, use_column_width=True)
st.sidebar.image(logo_url, caption="Disclaimer: The chatbot is not a substitute for medical advice from a qualified healthcare professional.", use_column_width=True)
# Chat transcript kept across Streamlit reruns: parallel lists of bot
# responses and user requests.
if 'responses' not in st.session_state:
    st.session_state['responses'] = []
if 'requests' not in st.session_state:
    st.session_state['requests'] = []

#REPO_ID = "Xenova/gpt-3.5-turbo"
# Load the model and tokenizer from Hugging Face's model hub
#model = AutoModelForCausalLM.from_pretrained(REPO_ID)
#tokenizer = AutoTokenizer.from_pretrained(REPO_ID)
#llm = ChatOpenAI(model=model, tokenizer=tokenizer)

# SECURITY FIX: the OpenAI API key was previously hard-coded in this file.
# A key committed to a public repo must be treated as compromised — revoke
# and rotate it. The key is now read from the environment (on Hugging Face
# Spaces / Streamlit Cloud, set OPENAI_API_KEY as a secret).
llm = ChatOpenAI(
    model_name="gpt-3.5-turbo",
    openai_api_key=os.environ.get("OPENAI_API_KEY", ""),
)

# Rolling conversation memory: keep only the last k exchanges as context.
if 'buffer_memory' not in st.session_state:
    st.session_state.buffer_memory = ConversationBufferWindowMemory(k=8)
    #st.session_state.buffer_memory = ConversationBufferWindowMemory(k=3)

conversation = ConversationChain(
    llm=llm,
    memory=st.session_state.buffer_memory,
    verbose=True,
)
# System-role prompt injected into every request: chatbot persona, NER-style
# tagging examples, the clinic-location flow (Google Maps links) and the
# product-purchase flow (Amazon links). Trailing backslashes are string line
# continuations, so those sentences join into single lines at runtime.
context = """
You act as a chatbot to interact with users on their questions about traditional chinese medicine (TCM). \
Welcome the user in a friendly way.
You respond in a short and friendly style. \
For each text, mark NER tags. \
Tag categories: location, product. \
Text: I want to visit a clinic in [Ang Mo Kio](location). \
If a user says he/she wants to visit a traditional chinese medicine (TCM) clinic, \
ask for which location he/she wants to visit. \
After user replies with location, show in chat window the Google Map from this link \
https://www.google.com/maps/search/tcm+clinics+at+location+name \
For example, if a user wants to visit a clinic in Ang Mo Kio, \
show in chat window the Google Map from this link \
https://www.google.com/maps/search/tcm+clinics+at+ang+mo+kio \
Examples of location names:
North:
Sembawang
Woodlands
Yishun
North-East:
Ang Mo Kio
Hougang
Punggol
Sengkang
Serangoon
East:
Bedok
Pasir Ris
Tampines
West:
Bukit Batok
Bukit Panjang
Choa Chu Kang
Clementi
Jurong East
Jurong West
Tengah
Central:
Bishan
Bukit Merah
Bukit Timah
Central Area
Geylang
Kallang
Whampoa
Marine Parade
Queenstown
Toa Payoh
For each text, mark NER tags. \
Tag categories: location, product. \
Text: I want to buy/get [Po Chai Pills](product). \
If a user wants to buy/get a product, suggest that \
he/she can consider buying/getting from https://www.amazon.sg/s?k=product+name \
For example, if a user wants to buy Po Chai Pills, suggest \
he/she can consider buying/getting from https://www.amazon.sg/s?k=po+chai+pills \
Examples of product names:
Ointment/Hong You/Feng You/Fengyou
Liquorice/Gan cao/Gancao
Chrysanthemum/Ju hua/Juhua
Goji berry/wolfberry/Gou Qi Zi/Gouqizi
Red dates/Jujubes/Hong Zao/Hongzao
"""
# Template combining the system context with the user's query.
# FIX: the role label read "assistance:" — corrected to "assistant:", the
# conventional chat-role name the model is trained to complete after.
prompt_template = PromptTemplate.from_template(
    '''system role :{context} \
user:{query}\
assistant:
''')
# Main interaction (Streamlit reruns this script top-to-bottom on each input).
if query:
    # Build the full prompt, run it through the memory-backed chain, and
    # append the exchange to the transcript.
    formatted_query = prompt_template.format(context=context, query=query)
    response = conversation.run(formatted_query)
    st.session_state.requests.append(query)
    st.session_state.responses.append(response)

# Render the transcript newest-exchange-first; each exchange shows the user
# message followed by the bot reply. Keys must be unique per message widget.
if st.session_state['responses']:
    for i in range(len(st.session_state['responses']) - 1, -1, -1):
        message(st.session_state['requests'][i], is_user=True, key=str(i) + '_user')
        message(st.session_state["responses"][i], key=str(i))

# gr.load("models/ksh-nyp/llama-2-7b-chat-TCMKB").launch()