Spaces:
Sleeping
Sleeping
File size: 1,330 Bytes
b0d4092 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 |
import os
# Project-local: maps a model name (e.g. "basic", "with_sources") to a
# zero-arg factory that builds the QA chain — see usage below.
from query_data import chain_options
from rich.console import Console
from rich.prompt import Prompt
# NOTE(review): star imports pull unknown names into scope; Constants and
# apiKey are project-local, so they are left as-is here. apiKey is expected
# to define OPENAI_API_KEY (used on the next line) — confirm.
from Constants import *
from apiKey import *
# Expose the key via the environment so the OpenAI client picks it up.
os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY
if __name__ == "__main__":
    # Interactive REPL: pick a QA chain, then answer questions until the
    # user interrupts (Ctrl+C) or closes stdin (Ctrl+D).
    c = Console()
    model = Prompt.ask("Which QA model would you like to work with?",
                       choices=list(chain_options.keys()),
                       default="basic")
    # chain_options maps a model name to a zero-arg chain factory.
    chain = chain_options[model]()
    c.print("[bold]Chat with your docs!")
    c.print("[bold red]---------------")
    try:
        while True:
            default_question = "what did the president say about ketanji brown?"
            question = Prompt.ask("Your Question: ", default=default_question)
            # change this line if you're using RetrievalQA
            # input = query
            # output = result
            result = chain({"question": question})
            c.print("[green]Answer: [/green]" + result['answer'])
            # include a bit more if we're using `with_sources`
            if model == "with_sources" and result.get('source_documents'):
                c.print("[green]Sources: [/green]")
                for doc in result['source_documents']:
                    c.print(f"[bold underline green]{doc.metadata['source']}")
                    c.print("[green]" + doc.page_content)
            c.print("[bold red]---------------")
    except (KeyboardInterrupt, EOFError):
        # Exit cleanly instead of dumping a traceback at the user.
        c.print("\n[bold]Goodbye!")
|