Spaces:
Sleeping
Sleeping
Commit Β·
2a14080
1
Parent(s): 29477ed
Updated
Browse files
- Dockerfile +28 -0
- backend/Dockerfile +0 -14
- {backend/data → data}/tesco_faq.csv +0 -0
- frontend/src/components/ChatInterface.jsx +1 -1
- backend/llm.py → llm.py +0 -0
- backend/main.py → main.py +18 -6
- backend/memory.py → memory.py +0 -0
- backend/rag.py → rag.py +0 -0
- backend/requirements.txt → requirements.txt +0 -0
Dockerfile
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Stage 1: Build Frontend
|
| 2 |
+
FROM node:18-alpine as frontend-build
|
| 3 |
+
WORKDIR /app/frontend
|
| 4 |
+
COPY frontend/package*.json ./
|
| 5 |
+
RUN npm install
|
| 6 |
+
COPY frontend/ .
|
| 7 |
+
RUN npm run build
|
| 8 |
+
|
| 9 |
+
# Stage 2: Setup Backend & Serve
|
| 10 |
+
FROM python:3.11-slim
|
| 11 |
+
|
| 12 |
+
WORKDIR /app
|
| 13 |
+
|
| 14 |
+
# Copy Backend Requirements
|
| 15 |
+
COPY requirements.txt .
|
| 16 |
+
RUN pip install --no-cache-dir -r requirements.txt
|
| 17 |
+
|
| 18 |
+
# Copy Backend Code
|
| 19 |
+
COPY . .
|
| 20 |
+
|
| 21 |
+
# Copy Frontend Build from Stage 1
|
| 22 |
+
COPY --from=frontend-build /app/frontend/dist /app/frontend/dist
|
| 23 |
+
|
| 24 |
+
# Expose Port 7860 (Hugging Face Default)
|
| 25 |
+
EXPOSE 7860
|
| 26 |
+
|
| 27 |
+
# Run Application
|
| 28 |
+
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
|
backend/Dockerfile
DELETED
|
@@ -1,14 +0,0 @@
|
|
| 1 |
-
FROM python:3.11-slim
|
| 2 |
-
|
| 3 |
-
WORKDIR /app
|
| 4 |
-
|
| 5 |
-
# Install system dependencies if needed (e.g. for build tools)
|
| 6 |
-
RUN apt-get update && apt-get install -y build-essential && rm -rf /var/lib/apt/lists/*
|
| 7 |
-
|
| 8 |
-
COPY requirements.txt .
|
| 9 |
-
|
| 10 |
-
RUN pip install --no-cache-dir -r requirements.txt
|
| 11 |
-
|
| 12 |
-
COPY . .
|
| 13 |
-
|
| 14 |
-
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
{backend/data → data}/tesco_faq.csv
RENAMED
|
File without changes
|
frontend/src/components/ChatInterface.jsx
CHANGED
|
@@ -30,7 +30,7 @@ const ChatInterface = ({ sessionId }) => {
|
|
| 30 |
setIsLoading(true);
|
| 31 |
|
| 32 |
try {
|
| 33 |
-
const response = await fetch('
|
| 34 |
method: 'POST',
|
| 35 |
headers: { 'Content-Type': 'application/json' },
|
| 36 |
body: JSON.stringify({ session_id: sessionId, message: userMessage }),
|
|
|
|
| 30 |
setIsLoading(true);
|
| 31 |
|
| 32 |
try {
|
| 33 |
+
const response = await fetch('/chat', {
|
| 34 |
method: 'POST',
|
| 35 |
headers: { 'Content-Type': 'application/json' },
|
| 36 |
body: JSON.stringify({ session_id: sessionId, message: userMessage }),
|
backend/llm.py → llm.py
RENAMED
|
File without changes
|
backend/main.py → main.py
RENAMED
|
@@ -1,9 +1,10 @@
|
|
| 1 |
from fastapi import FastAPI, HTTPException
|
| 2 |
-
from fastapi.responses import StreamingResponse
|
|
|
|
| 3 |
from fastapi.middleware.cors import CORSMiddleware
|
| 4 |
from pydantic import BaseModel
|
| 5 |
import os
|
| 6 |
-
from
|
| 7 |
import logging
|
| 8 |
from dotenv import load_dotenv
|
| 9 |
|
|
@@ -18,14 +19,13 @@ app = FastAPI(title="Tesco AI Support API")
|
|
| 18 |
# CORS setup
|
| 19 |
app.add_middleware(
|
| 20 |
CORSMiddleware,
|
| 21 |
-
allow_origins=["*"],
|
| 22 |
allow_credentials=True,
|
| 23 |
allow_methods=["*"],
|
| 24 |
allow_headers=["*"],
|
| 25 |
)
|
| 26 |
|
| 27 |
-
# Initialize LLM Client
|
| 28 |
-
# We do this globally so it persists
|
| 29 |
try:
|
| 30 |
llm_client = LLMClient()
|
| 31 |
except Exception as e:
|
|
@@ -50,6 +50,18 @@ async def chat_endpoint(request: ChatRequest):
|
|
| 50 |
media_type="text/event-stream"
|
| 51 |
)
|
| 52 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 53 |
if __name__ == "__main__":
|
| 54 |
import uvicorn
|
| 55 |
-
uvicorn.run(app, host="0.0.0.0", port=
|
|
|
|
| 1 |
from fastapi import FastAPI, HTTPException
|
| 2 |
+
from fastapi.responses import StreamingResponse, FileResponse
|
| 3 |
+
from fastapi.staticfiles import StaticFiles
|
| 4 |
from fastapi.middleware.cors import CORSMiddleware
|
| 5 |
from pydantic import BaseModel
|
| 6 |
import os
|
| 7 |
+
from llm import LLMClient
|
| 8 |
import logging
|
| 9 |
from dotenv import load_dotenv
|
| 10 |
|
|
|
|
| 19 |
# CORS setup
|
| 20 |
app.add_middleware(
|
| 21 |
CORSMiddleware,
|
| 22 |
+
allow_origins=["*"],
|
| 23 |
allow_credentials=True,
|
| 24 |
allow_methods=["*"],
|
| 25 |
allow_headers=["*"],
|
| 26 |
)
|
| 27 |
|
| 28 |
+
# Initialize LLM Client
|
|
|
|
| 29 |
try:
|
| 30 |
llm_client = LLMClient()
|
| 31 |
except Exception as e:
|
|
|
|
| 50 |
media_type="text/event-stream"
|
| 51 |
)
|
| 52 |
|
| 53 |
+
# Serve Frontend Static Files
|
| 54 |
+
# Mount assets Folder
|
| 55 |
+
if os.path.exists("frontend/dist/assets"):
|
| 56 |
+
app.mount("/assets", StaticFiles(directory="frontend/dist/assets"), name="assets")
|
| 57 |
+
|
| 58 |
+
@app.get("/{full_path:path}")
|
| 59 |
+
async def catch_all(full_path: str):
|
| 60 |
+
# Serve index.html for SPA routing or clean entry
|
| 61 |
+
if os.path.exists("frontend/dist/index.html"):
|
| 62 |
+
return FileResponse("frontend/dist/index.html")
|
| 63 |
+
return {"error": "Frontend not built"}
|
| 64 |
+
|
| 65 |
if __name__ == "__main__":
|
| 66 |
import uvicorn
|
| 67 |
+
uvicorn.run(app, host="0.0.0.0", port=7860)
|
backend/memory.py → memory.py
RENAMED
|
File without changes
|
backend/rag.py → rag.py
RENAMED
|
File without changes
|
backend/requirements.txt → requirements.txt
RENAMED
|
File without changes
|