tstech1 committed on
Commit
3f9623f
·
verified ·
1 Parent(s): a60e590

Create main.py

Browse files
Files changed (1) hide show
  1. main.py +65 -0
main.py ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os
import PyPDF2
from fastapi import FastAPI, File, UploadFile, Form
from fastapi.responses import JSONResponse
from openai import OpenAI

# FastAPI application serving the PDF question-answering endpoints below.
app = FastAPI()

# ✅ Initialize OpenAI client (use environment variable for safety)
# NOTE(review): if OPENAI_API_KEY is unset, os.getenv returns None and the
# OpenAI client will fail — confirm the deployment environment sets it.
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
# 📖 Extract text from PDF
def extract_pdf_text(file) -> str:
    """Return the concatenated text of every page in a PDF.

    Args:
        file: A binary file-like object positioned at the start of a PDF
            (e.g. ``UploadFile.file`` from FastAPI).

    Returns:
        The extracted text of all pages, each page's text followed by a
        newline. Pages with no extractable text (e.g. scanned images)
        are skipped, matching ``PdfReader.extract_text`` returning an
        empty string or None for such pages.
    """
    reader = PyPDF2.PdfReader(file)
    # Build the result with a single join() instead of repeated `+=`,
    # which is quadratic in the worst case on large documents. Output is
    # byte-identical: each non-empty page text is suffixed with "\n".
    page_texts = (page.extract_text() for page in reader.pages)
    return "".join(text + "\n" for text in page_texts if text)
@app.post("/generate-questions")
async def generate_questions(query: str = Form(...), pdf: UploadFile = File(...)):
    """Answer ``query`` using the text of the uploaded PDF via GPT-4o-mini.

    Form fields:
        query: The question to answer against the document.
        pdf:   The uploaded PDF whose content grounds the answer.

    Returns:
        HTTP 200 with ``{"success": True, "answer": ...}`` on success;
        HTTP 500 with ``{"success": False, "error": ...}`` on failure.
    """
    try:
        # Read uploaded PDF (pdf.file is the underlying binary file object)
        pdf_text = extract_pdf_text(pdf.file)

        # 🔥 Call OpenAI GPT-4o-mini model. The document is truncated to
        # 15,000 characters so the prompt stays within the model's input
        # limit; the system message instructs the model to stay grounded.
        chat_completion = client.chat.completions.create(
            model="gpt-4o-mini",
            messages=[
                {
                    "role": "system",
                    "content": (
                        "You are a helpful assistant that answers questions based strictly "
                        "on the provided PDF content. If the answer cannot be found, say so."
                    ),
                },
                {
                    "role": "user",
                    "content": f"""
Here is the PDF content (may be truncated):
{pdf_text[:15000]}

Now, answer this query: {query}
""",
                },
            ],
            temperature=0.3,
            max_tokens=1200,
        )

        return JSONResponse({
            "success": True,
            "answer": chat_completion.choices[0].message.content,
        })

    except Exception as e:
        # Bug fix: failures previously returned HTTP 200, so clients could
        # not distinguish errors by status code. Return 500 while keeping
        # the JSON body shape unchanged for backward compatibility.
        # NOTE(review): str(e) may expose internal details to callers —
        # consider logging the traceback and returning a generic message.
        return JSONResponse({"success": False, "error": str(e)}, status_code=500)
@app.get("/")
def home():
    """Health-check endpoint: confirms the API process is up and serving."""
    status_payload = {"message": "✅ PDF Query API is running!"}
    return status_payload