usmansafderktk committed on
Commit
e4f36e0
·
1 Parent(s): 863a477

Add backend

Browse files
Files changed (2) hide show
  1. requirements.txt +22 -0
  2. server.py +98 -0
requirements.txt ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ fastapi
2
+ uvicorn[standard]
3
+ pydantic
4
+ openfactcheck
5
+ scikit-learn
6
+ anthropic
7
+ backoff
8
+ boto3
9
+ datasets
10
+ en-core-web-sm @ https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.7.1/en_core_web_sm-3.7.1-py3-none-any.whl#sha256=86cc141f63942d4b2c5fcee06630fd6f904788d2f0ab005cce45aadb8fb73889
11
+ evaluate
12
+ factool @ git+https://github.com/hasaniqbal777/factool
13
+ nltk
14
+ python-dotenv
15
+ python-Levenshtein
16
+ seaborn
17
+ sentence-transformers
18
+ spacy
19
+ streamlit
20
+ streamlit-option-menu
21
+ torch
22
+ transformers
server.py ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""FastAPI backend exposing OpenFactCheck as a local HTTP service."""

from fastapi import FastAPI, HTTPException
from fastapi.concurrency import run_in_threadpool
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from openfactcheck import OpenFactCheck, OpenFactCheckConfig
import uvicorn
import os
from dotenv import load_dotenv

# Pull API keys and other settings out of a local .env file before anything
# else reads the environment.
load_dotenv()

app = FastAPI()

# Let the browser extension call this API from any origin.
# WARNING: In production, replace ["*"] with your actual extension ID (e.g., ["chrome-extension://YOUR_ID"])
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)

# One shared fact-checker instance for the whole process; it is built once at
# import time from the default configuration (which reads the .env variables
# loaded above).
try:
    print("Initializing OpenFactCheck with default configuration...")
    default_config = OpenFactCheckConfig()
    ofc = OpenFactCheck(default_config)
    print("OpenFactCheck initialized successfully.")
except Exception as e:
    # Keep the module importable even when initialization fails; the /check
    # endpoint reports the failure to clients instead of the server crashing.
    print(f"CRITICAL ERROR initializing OpenFactCheck: {e}")
    ofc = None
34
+
35
class CheckRequest(BaseModel):
    """Request body for ``POST /check``."""

    # The raw text whose factual claims should be verified.
    text: str
    # Optional solver names overriding the default factool pipeline; each is
    # presumably a solver name registered with OpenFactCheck — confirm against
    # the installed solver registry.
    claim_processor: str = "factool_claimprocessor"
    retriever: str = "factool_retriever"
    verifier: str = "factool_verifier"
41
+
42
+ @app.post("/check")
43
+ async def check_text(req: CheckRequest):
44
+ if not ofc:
45
+ raise HTTPException(status_code=500, detail="Fact checker not initialized.")
46
+
47
+ print(f"\n>>> Received request: {req.text[:50]}...")
48
+ print(f">>> Pipeline: {req.claim_processor} -> {req.retriever} -> {req.verifier}")
49
+
50
+ try:
51
+ # 1. Configure pipeline dynamically based on request
52
+ ofc.init_pipeline_manually([
53
+ req.claim_processor,
54
+ req.retriever,
55
+ req.verifier
56
+ ])
57
+
58
+ # 2. Define a helper to run synchronously in a separate thread
59
+ def run_full_evaluation(text):
60
+ # evaluate_streaming yields results step-by-step.
61
+ # We list() it to consume the whole stream and get all details.
62
+ return list(ofc.ResponseEvaluator.evaluate_streaming(response=text))
63
+
64
+ # 3. Run it in a thread to avoid blocking the main event loop
65
+ full_report = await run_in_threadpool(run_full_evaluation, req.text)
66
+
67
+ # 4. Format the output for easier use by your extension
68
+ formatted_result = {
69
+ "status": "success",
70
+ "claims": [],
71
+ "evidences": {},
72
+ "verification_details": {},
73
+ "overall_label": "Unknown"
74
+ }
75
+
76
+ for step in full_report:
77
+ solver_name = step.get("solver_name", "")
78
+ output = step.get("output", {})
79
+
80
+ if "claimprocessor" in solver_name:
81
+ formatted_result["claims"] = output.get("claims", [])
82
+ elif "retriever" in solver_name:
83
+ formatted_result["evidences"] = output.get("claims_with_evidences", {})
84
+ elif "verifier" in solver_name:
85
+ formatted_result["overall_label"] = output.get("label", "Unknown")
86
+ formatted_result["verification_details"] = output.get("detail", {})
87
+
88
+ print(">>> Detailed evaluation complete. Sending response.")
89
+ return formatted_result
90
+
91
+ except Exception as e:
92
+ print(f"ERROR during evaluation: {e}")
93
+ # Re-raise as an HTTP exception so the client gets a proper error code
94
+ raise HTTPException(status_code=500, detail=str(e))
95
+
96
+ if __name__ == "__main__":
97
+ print("Starting OpenFactCheck API server on port 8000...")
98
+ uvicorn.run(app, host="127.0.0.1", port=8000)