| import gradio as gr |
| import os |
| import openai |
|
|
|
|
| openai.api_key = os.environ['OPENAI_API_KEY'] |
|
|
| from langchain.chat_models import ChatOpenAI |
| from langchain.schema import HumanMessage |
|
|
|
|
| from langchain.prompts.chat import ( |
| ChatPromptTemplate, |
| SystemMessagePromptTemplate, |
| HumanMessagePromptTemplate |
| ) |
|
|
| from langchain.schema import ( |
| AIMessage, |
| HumanMessage, |
| SystemMessage |
| ) |
|
|
| from langchain.chains import LLMChain |
|
|
| from langchain.text_splitter import CharacterTextSplitter |
|
|
| from langchain.embeddings.openai import OpenAIEmbeddings |
|
|
| from langchain.vectorstores import Chroma |
|
|
| from langchain.chains.question_answering import load_qa_chain |
|
|
| from langchain.llms import OpenAI |
|
|
# Chat model used for the scripted multi-turn demo below.
chat_model = ChatOpenAI(model_name="gpt-3.5-turbo")

# Seed conversation: system persona, a user question, the assistant's reply,
# and a follow-up user question.
system_message = SystemMessage(content="You are a food critic.")

user_message = HumanMessage(content="Do you think Kraft Dinner constitues fine dining?")

assistant_message = AIMessage(content="Egads! No, it most certainly does not!")

# BUG FIX: this was previously bound as `isecond_user_message` (stray leading
# `i`), so the reference in `list_of_prompts` raised NameError at import time.
second_user_message = HumanMessage(content="What about Red Lobster, surely that is fine dining!")

list_of_prompts = [
    system_message,
    user_message,
    assistant_message,
    second_user_message,
]

# Send the scripted conversation to the model (demo call; result is unused).
chat_model(list_of_prompts)
|
|
|
|
| |
# Parameterized system persona: subject-matter expertise plus a mood.
system_prompt_template = SystemMessagePromptTemplate.from_template(
    "You are an expert in {SUBJECT}, and you're currently feeling {MOOD}"
)

# The user turn is passed through verbatim via the CONTENT variable.
user_prompt_template = HumanMessagePromptTemplate.from_template("{CONTENT}")

# Combine both turns into a single chat prompt template.
chat_prompt = ChatPromptTemplate.from_messages(
    [system_prompt_template, user_prompt_template]
)

# Wire the chat model and prompt together into a runnable chain.
chain = LLMChain(llm=chat_model, prompt=chat_prompt)
|
|
# Load the source document for the QA index. Explicit UTF-8 avoids
# platform-dependent default encodings (fixes potential UnicodeDecodeError
# on Windows where the locale encoding may not be UTF-8).
with open("guide1.txt", encoding="utf-8") as f:
    hitchhikersguide = f.read()
|
|
|
|
|
|
# Split the document on newlines into ~1000-character, non-overlapping chunks.
text_splitter = CharacterTextSplitter(
    separator="\n",
    chunk_size=1000,
    chunk_overlap=0,
    length_function=len,
)
texts = text_splitter.split_text(hitchhikersguide)
|
|
# Embed every chunk and index it in Chroma; each chunk's metadata records its
# position as a "source" string so answers can cite where they came from.
embeddings = OpenAIEmbeddings()
chunk_sources = [{"source": str(i)} for i in range(len(texts))]
docsearch = Chroma.from_texts(texts, embeddings, metadatas=chunk_sources).as_retriever()
|
|
|
|
| chain = load_qa_chain(OpenAI(temperature=0), chain_type="stuff") |
|
|
def query(question):
    """Answer *question* using documents retrieved from the Chroma index.

    BUG FIX: the original body referenced an undefined name ``docs`` (raising
    NameError on every call). The relevant documents must first be fetched
    from the ``docsearch`` retriever, then passed to the QA chain. The
    parameter is also renamed so it no longer shadows the function's own name
    (Gradio invokes the handler positionally, so callers are unaffected).
    """
    docs = docsearch.get_relevant_documents(question)
    return chain.run(input_documents=docs, question=question)
|
|
|
|
# Expose the QA function as a simple text-in / text-out Gradio web app.
iface = gr.Interface(
    fn=query,
    inputs="text",
    outputs="text",
)
iface.launch()
|
|