ritesh-torinit committed on
Commit
e77552b
·
verified ·
1 Parent(s): 8da037b

Upload folder using huggingface_hub

Browse files
Files changed (3) hide show
  1. inference.py +10 -2
  2. result.dat +0 -0
  3. test_inference.sh +46 -0
inference.py CHANGED
@@ -30,6 +30,13 @@ API_KEY = os.environ.get("API_KEY", "")
30
  MAX_WAIT_SECONDS = 900 # 15 minutes
31
  POLL_INTERVAL_SECONDS = 5
32
 
 
 
 
 
 
 
 
33
 
34
  def normalize_to_bytes(image_input: str) -> Tuple[bytes, str]:
35
  """
@@ -308,9 +315,10 @@ def inference(image_input: str, parameters: Optional[Dict] = None) -> Dict[str,
308
  logger.info("Uploading to S3...")
309
  upload_to_s3(upload_url, image_bytes)
310
 
311
- # 4. Start detection job
312
  logger.info("Starting detection job...")
313
- start_detection_job(job_id, s3_url, parameters)
 
314
 
315
  # 5. Poll for completion
316
  logger.info("Polling for completion...")
 
30
  MAX_WAIT_SECONDS = 900 # 15 minutes
31
  POLL_INTERVAL_SECONDS = 5
32
 
33
# Default processing parameters, used when the caller supplies none
# (LearningStudio doesn't pass these).
DEFAULT_PARAMS = {
    # Tile size in pixels and fractional overlap between adjacent tiles.
    "tiling": {"tile": 2048, "overlap": 0.30},
    # Flood-fill stage: text erasure off; no minimum fill-vs-text ratio.
    "floodfill": {"erase_text": False, "min_fill_vs_text": 0.0},
    # Pre-cleaning stage: denoise strength/window.
    "preclean": {"denoise_sw": 8},
}
40
 
41
  def normalize_to_bytes(image_input: str) -> Tuple[bytes, str]:
42
  """
 
315
  logger.info("Uploading to S3...")
316
  upload_to_s3(upload_url, image_bytes)
317
 
318
+ # 4. Start detection job (merge user params with defaults)
319
  logger.info("Starting detection job...")
320
+ merged_params = {**DEFAULT_PARAMS, **(parameters or {})}
321
+ start_detection_job(job_id, s3_url, merged_params)
322
 
323
  # 5. Poll for completion
324
  logger.info("Polling for completion...")
result.dat ADDED
The diff for this file is too large to render. See raw diff
 
test_inference.sh ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash

# Test the HuggingFace Inference Endpoint with a local image file
# Usage: ./test_inference.sh <image_file>

# -e: exit on error; -u: error on unset variables; pipefail: a failing
# curl in the final pipeline fails the script instead of being masked
# by python3 -m json.tool succeeding on empty input.
set -euo pipefail

# ${1:-} so the emptiness check itself doesn't trip `set -u`.
if [ -z "${1:-}" ]; then
    echo "Usage: $0 <image_file>"
    exit 1
fi

IMAGE_FILE="$1"

if [ ! -f "$IMAGE_FILE" ]; then
    echo "Error: File not found: $IMAGE_FILE"
    exit 1
fi

# Required environment variables
if [ -z "${HF_ENDPOINT:-}" ]; then
    echo "Error: HF_ENDPOINT not set"
    echo "Export it first: export HF_ENDPOINT=https://xxx.endpoints.huggingface.cloud"
    exit 1
fi

if [ -z "${HF_TOKEN:-}" ]; then
    echo "Error: HF_TOKEN not set"
    echo "Export it first: export HF_TOKEN=hf_xxx"
    exit 1
fi

# Convert image to base64 and create JSON payload in a temp file
# (a file, not a shell variable, so large images don't hit ARG_MAX).
echo "Converting $IMAGE_FILE to base64..."
TEMP_FILE=$(mktemp)
# Single quotes defer expansion to trap time; the inner double quotes
# keep the path intact even if mktemp returns a path with spaces.
trap 'rm -f "$TEMP_FILE"' EXIT

printf '{"inputs": "' > "$TEMP_FILE"
# Read from stdin rather than `base64 -i FILE`: `-i` is the macOS input
# flag, but GNU coreutils interprets it as --ignore-garbage. Redirection
# is portable to both. tr strips the line wrapping both tools emit.
base64 < "$IMAGE_FILE" | tr -d '\n' >> "$TEMP_FILE"
printf '"}' >> "$TEMP_FILE"

echo "Sending request to $HF_ENDPOINT..."
# @"$TEMP_FILE" streams the payload from disk; json.tool pretty-prints
# the response (and fails loudly on a non-JSON error body).
curl -s -X POST "$HF_ENDPOINT" \
    -H "Authorization: Bearer $HF_TOKEN" \
    -H "Content-Type: application/json" \
    -d @"$TEMP_FILE" | python3 -m json.tool