xangma committed on
Commit
ef6c37d
·
1 Parent(s): a9595ad
Files changed (2) hide show
  1. app.py +8 -10
  2. chain.py +4 -3
app.py CHANGED
@@ -100,13 +100,13 @@ def get_docs():
100
  documents.extend(text_splitter.split_documents(load))
101
  return documents
102
 
103
- def set_openai_api_key(api_key, agent):
104
  if api_key:
105
  os.environ["OPENAI_API_KEY"] = api_key
106
- documents = get_docs
107
  embeddings = OpenAIEmbeddings()
108
- vectorstore = CachedChroma.from_documents_with_cache(".persisted_data", documents, embeddings, collection_name="pycbc")
109
- qa_chain = get_new_chain1(vectorstore)
110
  os.environ["OPENAI_API_KEY"] = ""
111
  return qa_chain
112
 
@@ -125,7 +125,6 @@ def chat(inp, history, agent):
125
  print(history)
126
  return history, history
127
 
128
-
129
  block = gr.Blocks(css=".gradio-container {background-color: lightgray}")
130
 
131
  with block:
@@ -138,6 +137,7 @@ with block:
138
  lines=1,
139
  type="password",
140
  )
 
141
 
142
  chatbot = gr.Chatbot()
143
 
@@ -151,16 +151,14 @@ with block:
151
 
152
  gr.Examples(
153
  examples=[
154
- "What are agents?",
155
- "How do I summarize a long document?",
156
- "What types of memory exist?",
157
  ],
158
  inputs=message,
159
  )
160
 
161
  gr.HTML(
162
  """
163
- This simple application is an implementation of ChatGPT but over an external dataset (in this case, the LangChain documentation)."""
164
  )
165
 
166
  gr.HTML(
@@ -175,7 +173,7 @@ with block:
175
 
176
  openai_api_key_textbox.change(
177
  set_openai_api_key,
178
- inputs=[openai_api_key_textbox, agent_state],
179
  outputs=[agent_state],
180
  )
181
 
 
100
  documents.extend(text_splitter.split_documents(load))
101
  return documents
102
 
103
+ def set_openai_api_key(api_key, model_selector, agent):
104
  if api_key:
105
  os.environ["OPENAI_API_KEY"] = api_key
106
+ documents = get_docs()
107
  embeddings = OpenAIEmbeddings()
108
+ vectorstore = CachedChroma.from_documents_with_cache(".persisted_data", documents, embedding=embeddings, collection_name="pycbc")
109
+ qa_chain = get_new_chain1(vectorstore, model_selector)
110
  os.environ["OPENAI_API_KEY"] = ""
111
  return qa_chain
112
 
 
125
  print(history)
126
  return history, history
127
 
 
128
  block = gr.Blocks(css=".gradio-container {background-color: lightgray}")
129
 
130
  with block:
 
137
  lines=1,
138
  type="password",
139
  )
140
+ model_selector = gr.Dropdown(["gpt-3.5-turbo", "gpt-4"], label="Model")
141
 
142
  chatbot = gr.Chatbot()
143
 
 
151
 
152
  gr.Examples(
153
  examples=[
154
+ "What is pycbc?",
 
 
155
  ],
156
  inputs=message,
157
  )
158
 
159
  gr.HTML(
160
  """
161
+ This simple application is an implementation of ChatGPT but over an external dataset (in this case, the pycbc source code)."""
162
  )
163
 
164
  gr.HTML(
 
173
 
174
  openai_api_key_textbox.change(
175
  set_openai_api_key,
176
+ inputs=[openai_api_key_textbox, model_selector, agent_state],
177
  outputs=[agent_state],
178
  )
179
 
chain.py CHANGED
@@ -20,6 +20,7 @@ import langchain
20
  # logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))
21
  from langchain.vectorstores import Chroma
22
  from langchain.llms import OpenAI
 
23
  from langchain.chains.llm import LLMChain
24
  from langchain.chains.question_answering import load_qa_chain
25
  from langchain.prompts import PromptTemplate
@@ -62,7 +63,7 @@ class CustomChain(Chain, BaseModel):
62
  return {"answer": answer}
63
 
64
 
65
- def get_new_chain1(vectorstore) -> Chain:
66
 
67
  _eg_template = """## Example:
68
 
@@ -91,7 +92,7 @@ def get_new_chain1(vectorstore) -> Chain:
91
  example_prompt=_eg_prompt,
92
  input_variables=["question", "chat_history"],
93
  )
94
- llm = OpenAI(temperature=0, model_name="text-davinci-003")
95
  key_word_extractor = LLMChain(llm=llm, prompt=prompt)
96
 
97
  EXAMPLE_PROMPT = PromptTemplate(
@@ -111,7 +112,7 @@ Question: {question}
111
  Answer in Markdown:"""
112
  PROMPT = PromptTemplate(template=template, input_variables=["question", "context"])
113
  doc_chain = load_qa_chain(
114
- OpenAI(temperature=0, model_name="text-davinci-003", max_tokens=-1),
115
  chain_type="stuff",
116
  prompt=PROMPT,
117
  document_prompt=EXAMPLE_PROMPT,
 
20
  # logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))
21
  from langchain.vectorstores import Chroma
22
  from langchain.llms import OpenAI
23
+ from langchain.chat_models import ChatOpenAI
24
  from langchain.chains.llm import LLMChain
25
  from langchain.chains.question_answering import load_qa_chain
26
  from langchain.prompts import PromptTemplate
 
63
  return {"answer": answer}
64
 
65
 
66
+ def get_new_chain1(vectorstore, model_selector) -> Chain:
67
 
68
  _eg_template = """## Example:
69
 
 
92
  example_prompt=_eg_prompt,
93
  input_variables=["question", "chat_history"],
94
  )
95
+ llm = ChatOpenAI(temperature=0, model_name=model_selector)
96
  key_word_extractor = LLMChain(llm=llm, prompt=prompt)
97
 
98
  EXAMPLE_PROMPT = PromptTemplate(
 
112
  Answer in Markdown:"""
113
  PROMPT = PromptTemplate(template=template, input_variables=["question", "context"])
114
  doc_chain = load_qa_chain(
115
+ ChatOpenAI(temperature=0, model_name=model_selector, max_tokens=-1),
116
  chain_type="stuff",
117
  prompt=PROMPT,
118
  document_prompt=EXAMPLE_PROMPT,