Spaces:
Running
Running
Add time
Browse files
- app/face_check/service.py +20 -0
- app/face_check/view.py +14 -6
- note.md +0 -0
app/face_check/service.py
CHANGED
|
@@ -1,18 +1,24 @@
|
|
| 1 |
import httpx
|
| 2 |
from app.config import settings
|
| 3 |
from fastapi import HTTPException
|
|
|
|
| 4 |
|
| 5 |
class AIService:
|
| 6 |
async def call_geolocation(file_bytes, meta_bytes):
|
| 7 |
try:
|
|
|
|
| 8 |
async with httpx.AsyncClient(base_url=settings.SERVICE_GEOLOCATION_URL, timeout=1000) as client:
|
| 9 |
files_payload = [("files", (name, content, ctype)) for name, content, ctype in file_bytes]
|
| 10 |
files_payload.append(("files", (meta_bytes[0], meta_bytes[1], meta_bytes[2])))
|
| 11 |
|
| 12 |
resp = await client.post("/g3/predict", files=files_payload)
|
| 13 |
resp.raise_for_status()
|
|
|
|
|
|
|
|
|
|
| 14 |
return resp.json()
|
| 15 |
except httpx.HTTPError as e:
|
|
|
|
| 16 |
raise HTTPException(
|
| 17 |
status_code=500,
|
| 18 |
detail=f"[call_geolocation] error: {str(e)}"
|
|
@@ -20,14 +26,19 @@ class AIService:
|
|
| 20 |
|
| 21 |
async def call_timestamp(file_bytes, meta_bytes):
|
| 22 |
try:
|
|
|
|
| 23 |
async with httpx.AsyncClient(base_url=settings.SERVICE_TIMESTAMP_URL, timeout=1000) as client:
|
| 24 |
files_payload = [("files", (name, content, ctype)) for name, content, ctype in file_bytes]
|
| 25 |
files_payload.append(("metadata_file", (meta_bytes[0], meta_bytes[1], meta_bytes[2])))
|
| 26 |
|
| 27 |
resp = await client.post("/analyze/", files=files_payload)
|
| 28 |
resp.raise_for_status()
|
|
|
|
|
|
|
|
|
|
| 29 |
return resp.json()
|
| 30 |
except httpx.HTTPError as e:
|
|
|
|
| 31 |
raise HTTPException(
|
| 32 |
status_code=500,
|
| 33 |
detail=f"[call_timestamp] error: {str(e)}"
|
|
@@ -35,6 +46,7 @@ class AIService:
|
|
| 35 |
|
| 36 |
async def call_AIGVDetection(file_bytes):
|
| 37 |
try:
|
|
|
|
| 38 |
async with httpx.AsyncClient(base_url=settings.SERVICE_AIGVDET_URL, timeout=1000) as client:
|
| 39 |
if not file_bytes:
|
| 40 |
raise ValueError("No files provided")
|
|
@@ -45,8 +57,12 @@ class AIService:
|
|
| 45 |
|
| 46 |
resp = await client.post("/predict", files=files_payload)
|
| 47 |
resp.raise_for_status()
|
|
|
|
|
|
|
|
|
|
| 48 |
return resp.json()
|
| 49 |
except httpx.HTTPError as e:
|
|
|
|
| 50 |
raise HTTPException(
|
| 51 |
status_code=500,
|
| 52 |
detail=f"[call_AIGVDetection] error: {str(e)}"
|
|
@@ -54,9 +70,13 @@ class AIService:
|
|
| 54 |
|
| 55 |
async def call_Report(data):
|
| 56 |
try:
|
|
|
|
| 57 |
async with httpx.AsyncClient(base_url=settings.SERVICE_REPORT_URL, timeout=1000) as client:
|
| 58 |
resp = await client.post("/v1/report", json=data)
|
| 59 |
resp.raise_for_status()
|
|
|
|
|
|
|
|
|
|
| 60 |
return resp.json()
|
| 61 |
except httpx.HTTPError as e:
|
| 62 |
raise HTTPException(
|
|
|
|
| 1 |
import httpx
|
| 2 |
from app.config import settings
|
| 3 |
from fastapi import HTTPException
|
| 4 |
+
import time
|
| 5 |
|
| 6 |
class AIService:
|
| 7 |
@staticmethod
async def call_geolocation(file_bytes, meta_bytes):
    """Forward media files plus a metadata file to the geolocation service.

    Args:
        file_bytes: iterable of (filename, content, content_type) triples.
        meta_bytes: one (filename, content, content_type) triple for the
            metadata file; it is uploaded under the same "files" field.

    Returns:
        The decoded JSON body of POST /g3/predict.

    Raises:
        HTTPException: 500 wrapping any httpx transport or status error.
    """
    # NOTE(review): original had no `self`/`@staticmethod` inside
    # `class AIService`, so an instance call would bind `self` to
    # `file_bytes`. `@staticmethod` keeps class-level calls working
    # and fixes instance-level calls.
    try:
        # Wall-clock timing of the whole remote call, for ops visibility.
        start_time = time.perf_counter()
        async with httpx.AsyncClient(
            base_url=settings.SERVICE_GEOLOCATION_URL, timeout=1000
        ) as client:
            files_payload = [
                ("files", (name, content, ctype))
                for name, content, ctype in file_bytes
            ]
            # Metadata travels in the same multipart field as the media;
            # tuple() also tolerates a list-shaped triple.
            files_payload.append(("files", tuple(meta_bytes)))

            resp = await client.post("/g3/predict", files=files_payload)
            resp.raise_for_status()

        elapsed = time.perf_counter() - start_time
        print(f"⏱️ [call_geolocation] Service call took {elapsed:.2f} seconds")
        # Non-streamed httpx responses are read eagerly, so .json() is
        # still valid after the client context has closed.
        return resp.json()
    except httpx.HTTPError as e:
        print(f"❌ [call_geolocation] error: {str(e)}")
        # Chain the original error so tracebacks keep the root cause.
        raise HTTPException(
            status_code=500,
            detail=f"[call_geolocation] error: {str(e)}"
        ) from e
|
|
|
|
| 26 |
|
| 27 |
@staticmethod
async def call_timestamp(file_bytes, meta_bytes):
    """Forward media files plus a metadata file to the timestamp service.

    Args:
        file_bytes: iterable of (filename, content, content_type) triples.
        meta_bytes: one (filename, content, content_type) triple, uploaded
            under the dedicated "metadata_file" multipart field.

    Returns:
        The decoded JSON body of POST /analyze/.

    Raises:
        HTTPException: 500 wrapping any httpx transport or status error.
    """
    # NOTE(review): original had no `self`/`@staticmethod` inside
    # `class AIService`; `@staticmethod` fixes instance-level calls
    # without breaking class-level callers.
    try:
        # Wall-clock timing of the whole remote call, for ops visibility.
        start_time = time.perf_counter()
        async with httpx.AsyncClient(
            base_url=settings.SERVICE_TIMESTAMP_URL, timeout=1000
        ) as client:
            files_payload = [
                ("files", (name, content, ctype))
                for name, content, ctype in file_bytes
            ]
            # Unlike call_geolocation, this service expects the metadata
            # under its own field name; tuple() tolerates a list triple.
            files_payload.append(("metadata_file", tuple(meta_bytes)))

            resp = await client.post("/analyze/", files=files_payload)
            resp.raise_for_status()

        elapsed = time.perf_counter() - start_time
        print(f"⏱️ [call_timestamp] Service call took {elapsed:.2f} seconds")
        # Non-streamed httpx responses are read eagerly, so .json() is
        # still valid after the client context has closed.
        return resp.json()
    except httpx.HTTPError as e:
        print(f"❌ [call_timestamp] error: {str(e)}")
        # Chain the original error so tracebacks keep the root cause.
        raise HTTPException(
            status_code=500,
            detail=f"[call_timestamp] error: {str(e)}"
        ) from e
|
|
|
|
| 46 |
|
| 47 |
async def call_AIGVDetection(file_bytes):
|
| 48 |
try:
|
| 49 |
+
start_time = time.perf_counter()
|
| 50 |
async with httpx.AsyncClient(base_url=settings.SERVICE_AIGVDET_URL, timeout=1000) as client:
|
| 51 |
if not file_bytes:
|
| 52 |
raise ValueError("No files provided")
|
|
|
|
| 57 |
|
| 58 |
resp = await client.post("/predict", files=files_payload)
|
| 59 |
resp.raise_for_status()
|
| 60 |
+
|
| 61 |
+
elapsed = time.perf_counter() - start_time
|
| 62 |
+
print(f"⏱️ [call_AIGVDetection] Service call took {elapsed:.2f} seconds")
|
| 63 |
return resp.json()
|
| 64 |
except httpx.HTTPError as e:
|
| 65 |
+
print(f"❌ [call_AIGVDetection] error: {str(e)}")
|
| 66 |
raise HTTPException(
|
| 67 |
status_code=500,
|
| 68 |
detail=f"[call_AIGVDetection] error: {str(e)}"
|
|
|
|
| 70 |
|
| 71 |
async def call_Report(data):
|
| 72 |
try:
|
| 73 |
+
start_time = time.perf_counter()
|
| 74 |
async with httpx.AsyncClient(base_url=settings.SERVICE_REPORT_URL, timeout=1000) as client:
|
| 75 |
resp = await client.post("/v1/report", json=data)
|
| 76 |
resp.raise_for_status()
|
| 77 |
+
|
| 78 |
+
elapsed = time.perf_counter() - start_time
|
| 79 |
+
print(f"⏱️ [call_Report] Service call took {elapsed:.2f} seconds")
|
| 80 |
return resp.json()
|
| 81 |
except httpx.HTTPError as e:
|
| 82 |
raise HTTPException(
|
app/face_check/view.py
CHANGED
|
@@ -10,11 +10,13 @@ from app.config import settings
|
|
| 10 |
import os
|
| 11 |
from fastapi.responses import FileResponse, JSONResponse
|
| 12 |
import shutil
|
|
|
|
| 13 |
|
| 14 |
class FaceCheckView:
|
| 15 |
async def face_check(self, files: List[UploadFile], metadata_file: UploadFile):
|
| 16 |
try:
|
| 17 |
print("[face_check] start")
|
|
|
|
| 18 |
file_bytes = [(f.filename, await f.read(), f.content_type) for f in files]
|
| 19 |
meta_bytes = (metadata_file.filename, await metadata_file.read(), metadata_file.content_type)
|
| 20 |
save_dir = create_timestamped_folder(settings.SAVE_DATA_PATH)
|
|
@@ -42,6 +44,7 @@ class FaceCheckView:
|
|
| 42 |
|
| 43 |
media_base64_list = await convert_keyframes_to_base64(ts_result)
|
| 44 |
sources = [item.get("source") for item in ts_result.get("results", []) if "source" in item]
|
|
|
|
| 45 |
|
| 46 |
save_json({
|
| 47 |
"media_id": "1",
|
|
@@ -77,16 +80,21 @@ class FaceCheckView:
|
|
| 77 |
"media_url": ts_result.get("results", [])[0].get("source", ""),
|
| 78 |
"timestamp": ts_result["results"][0]["timestamp"],
|
| 79 |
"source_url": ts_result.get("results", [])[0].get("source", ""),
|
| 80 |
-
|
| 81 |
-
|
| 82 |
-
|
| 83 |
-
|
| 84 |
-
|
|
|
|
|
|
|
|
|
|
| 85 |
"media": geo_result.get("media") or media_base64_list
|
| 86 |
})
|
| 87 |
|
| 88 |
save_string_as_md(report_data.get("readme_content", ""), save_dir, "report.md")
|
| 89 |
-
|
|
|
|
|
|
|
| 90 |
return {"save to": f"{save_dir}"}
|
| 91 |
except Exception as e:
|
| 92 |
return JSONResponse(
|
|
|
|
| 10 |
import os
|
| 11 |
from fastapi.responses import FileResponse, JSONResponse
|
| 12 |
import shutil
|
| 13 |
+
import time
|
| 14 |
|
| 15 |
class FaceCheckView:
|
| 16 |
async def face_check(self, files: List[UploadFile], metadata_file: UploadFile):
|
| 17 |
try:
|
| 18 |
print("[face_check] start")
|
| 19 |
+
start_time = time.perf_counter()
|
| 20 |
file_bytes = [(f.filename, await f.read(), f.content_type) for f in files]
|
| 21 |
meta_bytes = (metadata_file.filename, await metadata_file.read(), metadata_file.content_type)
|
| 22 |
save_dir = create_timestamped_folder(settings.SAVE_DATA_PATH)
|
|
|
|
| 44 |
|
| 45 |
media_base64_list = await convert_keyframes_to_base64(ts_result)
|
| 46 |
sources = [item.get("source") for item in ts_result.get("results", []) if "source" in item]
|
| 47 |
+
score = aigv_result.get("authentic_confidence_score")
|
| 48 |
|
| 49 |
save_json({
|
| 50 |
"media_id": "1",
|
|
|
|
| 80 |
"media_url": ts_result.get("results", [])[0].get("source", ""),
|
| 81 |
"timestamp": ts_result["results"][0]["timestamp"],
|
| 82 |
"source_url": ts_result.get("results", [])[0].get("source", ""),
|
| 83 |
+
"media_analysis": (
|
| 84 |
+
{
|
| 85 |
+
"ai_generated": score < 0.5,
|
| 86 |
+
"confidence": score
|
| 87 |
+
}
|
| 88 |
+
if score is not None
|
| 89 |
+
else None
|
| 90 |
+
),
|
| 91 |
"media": geo_result.get("media") or media_base64_list
|
| 92 |
})
|
| 93 |
|
| 94 |
save_string_as_md(report_data.get("readme_content", ""), save_dir, "report.md")
|
| 95 |
+
|
| 96 |
+
elapsed = time.perf_counter() - start_time
|
| 97 |
+
print(f"⏱️ [face_check] Service call took {elapsed:.2f} seconds")
|
| 98 |
return {"save to": f"{save_dir}"}
|
| 99 |
except Exception as e:
|
| 100 |
return JSONResponse(
|
note.md
ADDED
|
File without changes
|