Spaces:
Runtime error
Runtime error
Commit ·
e4352d7
1
Parent(s): fc30648
update chainlit
Browse files
- Dockerfile +5 -1
- app.py +13 -5
Dockerfile
CHANGED
|
@@ -8,7 +8,8 @@ USER user
|
|
| 8 |
|
| 9 |
# Set the home directory and path
|
| 10 |
ENV HOME=/home/user \
|
| 11 |
-     PATH=/home/user/.local/bin:$PATH
|
|
|
|
| 12 |
|
| 13 |
ENV UVICORN_WS_PROTOCOL=websockets
|
| 14 |
|
|
@@ -18,6 +19,9 @@ WORKDIR $HOME/app
|
|
| 18 |
# Copy the app to the container
|
| 19 |
COPY --chown=user . $HOME/app
|
| 20 |
|
|
|
|
|
|
|
|
|
|
| 21 |
# Install the dependencies
|
| 22 |
# RUN uv sync --frozen
|
| 23 |
RUN uv sync
|
|
|
|
| 8 |
|
| 9 |
# Set the home directory and path
|
| 10 |
ENV HOME=/home/user \
|
| 11 |
+     PATH=/home/user/.local/bin:$PATH \
| 12 |
+     CHAINLIT_CONFIG_DIR=/home/user/app
|
| 13 |
|
| 14 |
ENV UVICORN_WS_PROTOCOL=websockets
|
| 15 |
|
|
|
|
| 19 |
# Copy the app to the container
|
| 20 |
COPY --chown=user . $HOME/app
|
| 21 |
|
| 22 |
+ # Ensure proper permissions
| 23 |
+ RUN chmod -R 755 $HOME/app
|
| 24 |
+
|
| 25 |
# Install the dependencies
|
| 26 |
# RUN uv sync --frozen
|
| 27 |
RUN uv sync
|
app.py
CHANGED
|
@@ -43,7 +43,7 @@ HF_TOKEN = os.environ["HF_TOKEN"]
|
|
| 43 |
document_loader = TextLoader("./data/paul_graham_essays.txt")
|
| 44 |
documents = document_loader.load()
|
| 45 |
|
| 46 |
-
text_splitter = RecursiveCharacterTextSplitter(chunk_size=
|
| 47 |
split_documents = text_splitter.split_documents(documents)
|
| 48 |
|
| 49 |
hf_embeddings = HuggingFaceEndpointEmbeddings(
|
|
@@ -82,7 +82,7 @@ async def main():
|
|
| 82 |
pbars.append(pbar)
|
| 83 |
|
| 84 |
if i == 0:
|
| 85 |
-
vectorstore = await
|
| 86 |
else:
|
| 87 |
tasks.append(process_batch(vectorstore, batch, False, pbar))
|
| 88 |
|
|
@@ -94,7 +94,15 @@ async def main():
|
|
| 94 |
|
| 95 |
await process_all_batches()
|
| 96 |
|
| 97 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 98 |
print("\nIndexing complete. Vectorstore is ready for use.")
|
| 99 |
return hf_retriever
|
| 100 |
|
|
@@ -111,7 +119,7 @@ hf_retriever = asyncio.run(run())
|
|
| 111 |
"""
|
| 112 |
RAG_PROMPT_TEMPLATE = """\
|
| 113 |
<|start_header_id|>system<|end_header_id|>
|
| 114 |
- You are a helpful assistant. You answer user questions based on provided context. If you can't answer the question with the provided context, say you don't know.<|eot_id|>
|
| 115 |
|
| 116 |
<|start_header_id|>user<|end_header_id|>
|
| 117 |
User Query:
|
|
@@ -131,7 +139,7 @@ rag_prompt = PromptTemplate.from_template(RAG_PROMPT_TEMPLATE)
|
|
| 131 |
"""
|
| 132 |
hf_llm = HuggingFaceEndpoint(
|
| 133 |
endpoint_url=HF_LLM_ENDPOINT,
|
| 134 |
-
max_new_tokens=
|
| 135 |
top_k=10,
|
| 136 |
top_p=0.95,
|
| 137 |
temperature=0.3,
|
|
|
|
| 43 |
document_loader = TextLoader("./data/paul_graham_essays.txt")
|
| 44 |
documents = document_loader.load()
|
| 45 |
|
| 46 |
+ text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50)
|
| 47 |
split_documents = text_splitter.split_documents(documents)
|
| 48 |
|
| 49 |
hf_embeddings = HuggingFaceEndpointEmbeddings(
|
|
|
|
| 82 |
pbars.append(pbar)
|
| 83 |
|
| 84 |
if i == 0:
|
| 85 |
+ vectorstore = await FAISS.afrom_documents(batch, hf_embeddings)
|
| 86 |
else:
|
| 87 |
tasks.append(process_batch(vectorstore, batch, False, pbar))
|
| 88 |
|
|
|
|
| 94 |
|
| 95 |
await process_all_batches()
|
| 96 |
|
| 97 |
+ # Configure retriever with search parameters
| 98 |
+ hf_retriever = vectorstore.as_retriever(
| 99 |
+     search_kwargs={
| 100 |
+         "k": 3,  # Number of documents to retrieve
| 101 |
+         "fetch_k": 5,  # Number of documents to fetch before filtering
| 102 |
+         "maximal_marginal_relevance": True,  # Use MMR to ensure diversity
| 103 |
+         "filter": None  # No filtering
| 104 |
+     }
| 105 |
+ )
|
| 106 |
print("\nIndexing complete. Vectorstore is ready for use.")
|
| 107 |
return hf_retriever
|
| 108 |
|
|
|
|
| 119 |
"""
|
| 120 |
RAG_PROMPT_TEMPLATE = """\
|
| 121 |
<|start_header_id|>system<|end_header_id|>
|
| 122 |
+ You are a helpful assistant. You answer user questions based on provided context. If you can't answer the question with the provided context, say you don't know. Keep your responses concise and focused.<|eot_id|>
|
| 123 |
|
| 124 |
<|start_header_id|>user<|end_header_id|>
|
| 125 |
User Query:
|
|
|
|
| 139 |
"""
|
| 140 |
hf_llm = HuggingFaceEndpoint(
|
| 141 |
endpoint_url=HF_LLM_ENDPOINT,
|
| 142 |
+ max_new_tokens=256,
|
| 143 |
top_k=10,
|
| 144 |
top_p=0.95,
|
| 145 |
temperature=0.3,
|