# Strict PDF Query API — FastAPI app for a Hugging Face Space.
# Answers user questions strictly from the text of an uploaded PDF via OpenAI.
| import os | |
| import PyPDF2 | |
| from fastapi import FastAPI, File, UploadFile, Form | |
| from fastapi.responses import JSONResponse | |
| from openai import OpenAI | |
app = FastAPI()

# Initialize the OpenAI client. The key must come from the environment
# (Hugging Face Space secrets) — never hard-code credentials.
api_key = os.getenv("OPENAI_API_KEY")
if not api_key:
    # Fail fast at import time so a misconfigured Space is obvious immediately.
    raise ValueError("❌ OPENAI_API_KEY not found. Please add it in Hugging Face Space secrets.")
client = OpenAI(api_key=api_key)
# 📄 Extract text from a PDF
def extract_pdf_text(file) -> str:
    """Return the concatenated text of every page in *file*.

    Args:
        file: a binary file-like object containing a PDF document.

    Returns:
        One string with each page's extracted text followed by a newline;
        pages with no extractable text contribute an empty line.

    Raises:
        RuntimeError: if the PDF cannot be parsed or text extraction fails.
    """
    try:
        reader = PyPDF2.PdfReader(file)
        # "".join is O(n); repeated += on a str can be quadratic.
        # extract_text() may return None for image-only pages — coerce to "".
        return "".join((page.extract_text() or "") + "\n" for page in reader.pages)
    except Exception as e:
        # Chain the cause so the original parser error is preserved in logs.
        raise RuntimeError(f"PDF extraction failed: {e}") from e
# Query endpoint: answer a question strictly from an uploaded PDF.
# NOTE: the route decorator was missing, so this handler was never registered.
@app.post("/query")
async def query_pdf(query: str = Form(...), pdf: UploadFile = File(...)):
    """Answer *query* using only the text extracted from the uploaded *pdf*.

    Form fields:
        query: the user's question.
        pdf: the uploaded PDF file.

    Returns:
        JSONResponse {"success": True, "answer": ...} on success, or
        {"success": False, "error": ...} with HTTP 500 on failure.
    """
    try:
        pdf_text = extract_pdf_text(pdf.file)
        system_prompt = (
            "You are a polite and helpful assistant who answers questions strictly using the provided PDF text. "
            "If the information is not in the PDF, respond with: "
            "'I'm sorry, but the answer is not available in the PDF.'"
        )
        chat_completion = client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[
                {"role": "system", "content": system_prompt},
                # Truncate to 15k chars to stay within the model's context window.
                {"role": "user", "content": f"PDF Content:\n{pdf_text[:15000]}\n\nQuery: {query}"},
            ],
            temperature=0.0,  # deterministic, extraction-style answers
            max_tokens=1000,
        )
        answer = chat_completion.choices[0].message.content.strip()
        return JSONResponse({"success": True, "answer": answer})
    except Exception as e:
        # Log the full traceback server-side; return only the message to the client.
        import traceback
        print("ERROR:", traceback.format_exc())
        # Signal failure via the status code as well, not just the body.
        return JSONResponse({"success": False, "error": str(e)}, status_code=500)
# Health-check endpoint.
# NOTE: the route decorator was missing, so this handler was never registered.
@app.get("/")
def home():
    """Root endpoint confirming the service is up."""
    return {"message": "✅ Strict PDF Query API is running!"}
# Entry point for local runs / Hugging Face Spaces (Spaces expose port 7860).
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app=app, host="0.0.0.0", port=7860)