# NOTE(review): this file was recovered from a Hugging Face Spaces "Runtime
# error" page paste; the table-pipe artifacts below should be stripped so the
# module parses as Python.
# app.py — Hugging Face Spaces entry point.
# 1) Force a sane threading environment BEFORE any other imports:
#    BLAS/OpenMP-backed libraries read these variables at import time, and
#    an unset/empty/non-positive value can make them crash or oversubscribe.
import os

for _var in ("OMP_NUM_THREADS", "OPENBLAS_NUM_THREADS", "MKL_NUM_THREADS", "NUMEXPR_MAX_THREADS"):
    _raw = os.environ.get(_var, "")
    try:
        if int(_raw) <= 0:
            os.environ[_var] = "1"
    except ValueError:  # unset, empty, or non-numeric — force single-threaded
        os.environ[_var] = "1"

# HF tokenizers emits warnings (and can misbehave after fork) with
# parallelism enabled; disable it explicitly.
os.environ["TOKENIZERS_PARALLELISM"] = "false"
| # 2) now import the rest | |
| import gradio as gr | |
| from fastapi import FastAPI | |
| from pydantic import BaseModel | |
| from agent_pdfimages import VS # VS ต้องเป็น lazy-load ตามที่แก้ไว้ | |
| from agent_ml import build_agent_multimodal as build_agent | |
| from langchain_openai import ChatOpenAI | |
| # (ตัดของที่ไม่ใช้เพื่อลดโหลด) | |
| # from langchain.agents import AgentExecutor | |
| # from langchain.memory import ConversationBufferMemory | |
# API key comes from the environment (set as a Space secret in production).
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

# FastAPI app — optional under the Gradio SDK on Spaces, but kept so the
# service can also be queried programmatically.
app = FastAPI(title="Breast Cancer RAG Assistant")

# Primary answering path: the multimodal agent over the indexed PDFs/images.
agent = build_agent()

# Fallback LLM for answering directly from retrieved chunks when the agent fails.
llm_direct = ChatOpenAI(model="gpt-4o", temperature=0.2, openai_api_key=OPENAI_API_KEY)
class Ask(BaseModel):
    """Request payload: a single free-text question for the assistant."""

    query: str
@app.post("/ask")  # was never registered on the app — POST /ask 404'd without this
def ask(q: Ask):
    """Answer a question via the agent, with a plain-RAG fallback.

    Args:
        q: Request body containing the user's question.

    Returns:
        dict: ``{"answer": <str>}`` — JSON-serializable response body.
    """
    try:
        return {"answer": agent.run(q.query)}
    except Exception:
        # Best-effort degradation: agent failures (tool errors, output-parse
        # errors) fall back to direct retrieval instead of surfacing a 500.
        return {"answer": direct_rag_answer(q.query)}
def direct_rag_answer(question: str) -> str:
    """Answer *question* straight from the vector store, bypassing the agent.

    Retrieves the top-3 chunks from ``VS``, assembles a prompt with the
    trimmed chunk text plus per-chunk citation tags, and queries the
    fallback LLM directly.

    Args:
        question: The user's free-text question.

    Returns:
        The LLM's text response.
    """
    docs = VS.similarity_search(question, k=3)

    # Cap each chunk at 800 chars to keep the prompt bounded.
    context = "\n\n".join((doc.page_content or "")[:800] for doc in docs)

    # One human-readable citation tag per chunk: "file (year), p.N".
    citations = []
    for doc in docs:
        meta = doc.metadata or {}
        label = meta.get("source_file", "unknown")
        year = meta.get("year")
        if year:
            label += f" ({year})"
        page = meta.get("page")
        if page is not None:  # page 0 is a valid page
            label += f", p.{page}"
        citations.append(label)

    prompt = (
        "You are an assistant specializing in imaging and molecular profiles of breast cancer. "
        "Answer based on the provided text chunks. If info is incomplete, say so.\n\n"
        f"[Retrieved Chunks]\n{context}\n\n[Question]\n{question}\n\n"
        "Answer in bullet points. Sources: " + ";".join(citations)
    )
    return llm_direct.invoke(prompt).content
def chat_fn(message, history):
    """Gradio chat handler: try the agent first, fall back to direct RAG.

    Args:
        message: The user's latest chat message.
        history: Prior turns supplied by Gradio (unused).

    Returns:
        The assistant's reply as a string.
    """
    try:
        reply = agent.run(message)
    except Exception:
        # Any agent failure degrades to a plain retrieval-augmented answer.
        reply = direct_rag_answer(message)
    return reply
# IMPORTANT for Spaces (Gradio SDK): the platform discovers and serves the
# top-level `demo` object itself, so we expose it here and deliberately do
# NOT call demo.launch().
demo = gr.ChatInterface(
    fn=chat_fn,
    title="🔬 Breast Cancer Knowledge RAG Assistant",
    description="Retrieves evidence from local PDFs (and images if indexed) and answers with citations.",
)