"""#CodeWars localGPT — a Streamlit chatbot that answers questions about a
scraped phone-comparison web page via a LangChain RetrievalQA chain backed by
a Chroma vector store and a HuggingFace-hosted LLM (flan-ul2)."""

import datetime
import os

import streamlit as st
from langchain.chains import RetrievalQA
from langchain.document_loaders import RecursiveUrlLoader
from langchain.embeddings import HuggingFaceHubEmbeddings
from langchain.llms import HuggingFaceHub
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores import Chroma

# SECURITY: an API token was previously hard-coded here, which leaks the
# credential to anyone with repo access (that token is now compromised and
# must be revoked). Read it from the environment instead and fail fast with
# a clear message if it is missing.
if "HUGGINGFACEHUB_API_TOKEN" not in os.environ:
    raise RuntimeError(
        "Set the HUGGINGFACEHUB_API_TOKEN environment variable before running."
    )

embedding = HuggingFaceHubEmbeddings()

# Scrape the source page. max_depth=1 fetches only this URL, no linked pages.
url = "https://nanoreview.net/en/phone-compare/samsung-galaxy-s9-vs-samsung-galaxy-s10"
loader = RecursiveUrlLoader(url=url, max_depth=1)
docs = loader.load()

# Split the scraped page into ~500-character chunks for embedding/retrieval.
text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=0)
docs = text_splitter.split_documents(docs)

# Embed the chunks into an in-memory Chroma vector store.
vectorstore = Chroma.from_documents(documents=docs, embedding=embedding)

# Conversation memory. NOTE(review): this is created but never passed to the
# RetrievalQA chain below, so chat history is not actually used — wire it in
# (e.g. via ConversationalRetrievalChain) if history-awareness is wanted.
memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)

# Prompt fed to the LLM; {context} is filled with retrieved chunks and
# {question} with the user's query.
prompt_template = """Use the following pieces of context to answer the question at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer. 
{context} Question: {question}"""
PROMPT = PromptTemplate(
    template=prompt_template, input_variables=["context", "question"]
)
chain_type_kwargs = {"prompt": PROMPT}

# Low temperature for factual, deterministic-ish answers.
llm = HuggingFaceHub(
    repo_id="google/flan-ul2",
    model_kwargs={"temperature": 0.1, "max_new_tokens": 250},
)
qachain = RetrievalQA.from_chain_type(
    llm,
    retriever=vectorstore.as_retriever(),
    chain_type_kwargs=chain_type_kwargs,
)

# --- Streamlit UI ---
st.header("#CodeWars localGPT", divider='rainbow')
option = st.selectbox('What is your role?', ('Support', 'Sales'))
st.write('You selected', option)

prompt = st.chat_input("Say something to our #CodeWars bot...")
if prompt:
    with st.chat_message(option):
        # Echo the user's message with a timestamp, then run the QA chain
        # and display its result dict (contains the generated answer).
        st.write(f"{datetime.datetime.now()} :red[{option}:] ", prompt)
        result = qachain({"query": prompt})
        st.write(f"{datetime.datetime.now()}", result)