# HuggingFace Space app (the hosted Space listing showed a "Runtime error" status).
# import required functions, classes
from autollm import AutoQueryEngine
from autollm.utils.document_reading import read_github_repo_as_documents, read_files_as_documents
import os

import gradio as gr
import nest_asyncio

# SECURITY FIX: the original source hard-coded a live OpenAI API key on this
# line. A key committed to source control must be considered compromised and
# revoked. Read the key from the environment instead (export OPENAI_API_KEY
# before launching the app); fail fast with a clear message if it is missing.
if not os.environ.get("OPENAI_API_KEY"):
    raise RuntimeError(
        "OPENAI_API_KEY is not set; export it before starting the app."
    )

# 1. Read files as documents, ingesting from a GitHub repository.
git_repo_url = "https://github.com/michelebon/Ago.git"
relative_folder_path = "docs"  # relative path from the repo root to the folder containing documents
required_exts = [".pdf"]  # optional: only read files with these extensions
documents = read_github_repo_as_documents(
    git_repo_url=git_repo_url,
    relative_folder_path=relative_folder_path,
    required_exts=required_exts,
)

# Gradio and the query engine each drive an asyncio event loop; nest_asyncio
# lets the loops nest instead of raising "event loop is already running".
nest_asyncio.apply()
# BUG FIX: the original bound `AutoQueryEngine.from_defaults` (without calling
# it) to both `query_engine` and `app`. Both were dead statements — the real,
# fully configured engine is constructed below — so they are removed here.
# ---- LLM parameters ---------------------------------------------------------
llm_model = "gpt-4"
llm_max_tokens = 512
llm_temperature = 0.1

# ---- service-context parameters ---------------------------------------------
# The system prompt pins the assistant to one Italian white paper on
# antimicrobial resistance and forbids answers from outside that document.
system_prompt = """
You are an expert in antimicrobial resistance, specifically tailored to provide information exclusively from the 'LIBRO BIANCO SULL’ANTIMICROBICO RESISTENZA E LE INFEZIONI CORRELATE ALL’ASSISTENZA IN ITALIA - Una sfida improrogabile.'
Its primary role is to interpret, explain, and discuss content within this document.
You will not provide information from other sources; if a query is outside the scope of the document, it will explicitly state that the information is not found in the document and refrain from giving an answer.
This specialized focus ensures accurate and specific responses related to antimicrobial resistance and related infections in the Italian healthcare context, as detailed in the document.
If a query is unclear, you will ask for clarification to provide the most accurate and relevant response possible based on the document's content.
You will communicate in a scientific yet easy-to-understand tone, making complex information accessible without compromising scientific accuracy.
"""
# Per-query wrapper: retrieved chunks fill {context_str}, the user question
# fills {query_str}.
query_wrapper_prompt = '''
The document information is below.
---------------------
{context_str}
---------------------
Using the document information and mostly relying on it,
answer the query.
Query: {query_str}
Answer:
'''
enable_cost_calculator = True
embed_model = "text-embedding-ada-002"  # ["default", "local"]
chunk_size = 512
context_window = 4096

# ---- vector-store parameters ------------------------------------------------
vector_store_type = "LanceDBVectorStore"
lancedb_uri = "./.lancedb"
lancedb_table_name = "vectors"
enable_metadata_extraction = False

# ---- query-engine parameters ------------------------------------------------
similarity_top_k = 3

# Collect every setting in one mapping, then hand the whole bundle to the
# factory — identical call, easier to scan and extend.
_engine_kwargs = dict(
    documents=documents,
    llm_model=llm_model,
    llm_max_tokens=llm_max_tokens,
    llm_temperature=llm_temperature,
    system_prompt=system_prompt,
    query_wrapper_prompt=query_wrapper_prompt,
    enable_cost_calculator=enable_cost_calculator,
    embed_model=embed_model,
    chunk_size=chunk_size,
    context_window=context_window,
    vector_store_type=vector_store_type,
    lancedb_uri=lancedb_uri,
    lancedb_table_name=lancedb_table_name,
    enable_metadata_extraction=enable_metadata_extraction,
    similarity_top_k=similarity_top_k,
)
query_engine = AutoQueryEngine.from_defaults(**_engine_kwargs)
def greet(query):
    """Answer *query* with the document-grounded engine and return the text reply."""
    result = query_engine.query(query)
    return result.response
# Minimal Gradio UI: one free-text input (the question) mapped through `greet`
# to one text output (the engine's answer); `launch()` starts the web server.
demo = gr.Interface(fn=greet, inputs="text", outputs="text")
demo.launch()