teoat committed on
Commit
fb031b0
·
verified ·
1 Parent(s): dc1c44f

Upload app/routers/advanced_ai.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. app/routers/advanced_ai.py +12 -4
app/routers/advanced_ai.py CHANGED
@@ -25,14 +25,18 @@ class RedTeamRequest(BaseModel):
25
  @router.post("/advanced-ai/rag/query")
26
  async def local_rag_query(req: RAGQuery):
27
  """Retrieve documents using consolidated AI Service (FAISS/TF-IDF)."""
28
- results = await ai_service.semantic_search(req.query, limit=req.k, filters=req.filters)
 
 
29
  return {"query": req.query, "results": results}
30
 
31
 
32
  @router.post("/advanced-ai/rag/add")
33
  async def local_rag_add(doc_id: str = Form(), text: str = Form()):
34
  """Add a document to the shared vector store."""
35
- success = await ai_service.add_document(doc_id, text, metadata={"source": "user_upload"})
 
 
36
  return {"success": success, "doc_id": doc_id}
37
 
38
 
@@ -51,7 +55,9 @@ async def analyze_image(file: UploadFile = File()):
51
  temp_file_path = temp_file.name
52
 
53
  try:
54
- results = await evidence_processor.process_files_batch([temp_file_path], options={"enable_ocr": True, "enable_forensics": True})
 
 
55
  if not results:
56
  raise HTTPException(status_code=500, detail="Analysis failed")
57
 
@@ -77,7 +83,9 @@ async def analyze_text(text: str = Form()):
77
  import tempfile
78
 
79
  try:
80
- with tempfile.NamedTemporaryFile(delete=False, mode="w", suffix=".txt") as temp_file:
 
 
81
  temp_file.write(text)
82
  temp_file_path = temp_file.name
83
 
 
25
@router.post("/advanced-ai/rag/query")
async def local_rag_query(req: RAGQuery):
    """Retrieve documents using consolidated AI Service (FAISS/TF-IDF)."""
    # Delegate retrieval to the shared AI service; `req.k` bounds the
    # number of hits and `req.filters` narrows the candidate set.
    hits = await ai_service.semantic_search(
        req.query, limit=req.k, filters=req.filters
    )
    return {"query": req.query, "results": hits}
32
 
33
 
34
@router.post("/advanced-ai/rag/add")
async def local_rag_add(doc_id: str = Form(), text: str = Form()):
    """Add a document to the shared vector store."""
    # Tag the document so its provenance (direct user upload) is
    # recorded alongside the stored text.
    stored = await ai_service.add_document(
        doc_id, text, metadata={"source": "user_upload"}
    )
    return {"success": stored, "doc_id": doc_id}
41
 
42
 
 
55
  temp_file_path = temp_file.name
56
 
57
  try:
58
+ results = await evidence_processor.process_files_batch(
59
+ [temp_file_path], options={"enable_ocr": True, "enable_forensics": True}
60
+ )
61
  if not results:
62
  raise HTTPException(status_code=500, detail="Analysis failed")
63
 
 
83
  import tempfile
84
 
85
  try:
86
+ with tempfile.NamedTemporaryFile(
87
+ delete=False, mode="w", suffix=".txt"
88
+ ) as temp_file:
89
  temp_file.write(text)
90
  temp_file_path = temp_file.name
91