A123123 committed on
Commit
be5a0b9
·
verified ·
1 Parent(s): 820ddf4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -23
app.py CHANGED
@@ -6,30 +6,22 @@ import gradio as gr
6
  from huggingface_hub import hf_hub_download
7
 
8
  REPO_ID = "A123123/AnimeAutoCensor"
9
-
10
  HF_TOKEN = os.getenv("HF_TOKEN")
11
 
12
- try:
13
-
14
- model_path = hf_hub_download(
15
- repo_id=REPO_ID,
16
- filename="model.onnx",
17
- token=HF_TOKEN
18
- )
19
 
20
- hf_hub_download(
21
- repo_id=REPO_ID,
22
- filename="model.onnx_data",
23
- token=HF_TOKEN
24
- )
25
 
26
- # Initialize Inference Engine (Hugging Face Free Tier supports CPU only)
27
  session = ort.InferenceSession(model_path, providers=['CPUExecutionProvider'])
28
  input_name = session.get_inputs()[0].name
29
- target_size = 640
30
  except Exception as e:
31
- print(f"Model loading failed. Please check Token and Repository Path: {e}")
32
-
33
 
34
  def apply_mosaic_mask(image_rgb, mask, mosaic_level=16):
35
  h, w = image_rgb.shape[:2]
@@ -41,8 +33,8 @@ def apply_mosaic_mask(image_rgb, mask, mosaic_level=16):
41
  return output_image
42
 
43
  def process_image(input_img):
44
- if input_img is None:
45
- return None
46
 
47
  h_orig, w_orig = input_img.shape[:2]
48
 
@@ -71,7 +63,7 @@ def process_image(input_img):
71
 
72
  with gr.Blocks(title="AI Anime Auto-Censor") as demo:
73
  gr.Markdown("# 🎨 AI Anime Auto-Censor (Trial Version)")
74
- gr.Markdown("This tool uses AI to automatically detect and censor specific content. The model weights are protected and not accessible to the public.")
75
 
76
  with gr.Row():
77
  with gr.Column():
@@ -81,9 +73,6 @@ with gr.Blocks(title="AI Anime Auto-Censor") as demo:
81
  output_i = gr.Image(type="numpy", label="Censored Result")
82
 
83
  run_btn.click(fn=process_image, inputs=input_i, outputs=output_i)
84
-
85
- gr.Markdown("---")
86
- gr.Markdown("### Instructions:\n1. Upload an image.\n2. Click 'Start Processing'.\n3. Download the result if satisfied.")
87
 
88
  if __name__ == "__main__":
89
  demo.launch()
 
6
  from huggingface_hub import hf_hub_download
7
 
8
  REPO_ID = "A123123/AnimeAutoCensor"
 
9
  HF_TOKEN = os.getenv("HF_TOKEN")
10
 
11
# --- Model bootstrap (runs once at import time) --------------------------
# Defaults are declared first so the rest of the app can rely on them even
# when loading fails: process_image() checks `session is None` and becomes
# a passthrough in that case.
target_size = 640
session = None
input_name = None


def _load_model():
    """Download the ONNX weights (plus external data file) and open a
    CPU-only inference session.

    Returns:
        tuple: (local path to model.onnx, ort.InferenceSession)

    Raises:
        Exception: propagated from hf_hub_download / onnxruntime; handled
        by the caller below so a bad token or missing repo does not crash
        the Space at import.
    """
    print("Downloading model files...")
    weights_path = hf_hub_download(repo_id=REPO_ID, filename="model.onnx", token=HF_TOKEN)
    # The .onnx_data companion only needs to exist next to model.onnx;
    # onnxruntime picks it up by name, so the return value is unused.
    hf_hub_download(repo_id=REPO_ID, filename="model.onnx_data", token=HF_TOKEN)
    inference_session = ort.InferenceSession(weights_path, providers=['CPUExecutionProvider'])
    return weights_path, inference_session


try:
    model_path, session = _load_model()
    input_name = session.get_inputs()[0].name
    print("Model loaded successfully!")
except Exception as e:
    # Intentional best-effort: keep the UI up and report the failure.
    print(f"CRITICAL ERROR: {e}")
 
25
 
26
  def apply_mosaic_mask(image_rgb, mask, mosaic_level=16):
27
  h, w = image_rgb.shape[:2]
 
33
  return output_image
34
 
35
  def process_image(input_img):
36
+ if input_img is None or session is None:
37
+ return input_img
38
 
39
  h_orig, w_orig = input_img.shape[:2]
40
 
 
63
 
64
  with gr.Blocks(title="AI Anime Auto-Censor") as demo:
65
  gr.Markdown("# 🎨 AI Anime Auto-Censor (Trial Version)")
66
+ gr.Markdown("This tool uses AI to detect and censor content. The model is protected.")
67
 
68
  with gr.Row():
69
  with gr.Column():
 
73
  output_i = gr.Image(type="numpy", label="Censored Result")
74
 
75
  run_btn.click(fn=process_image, inputs=input_i, outputs=output_i)
 
 
 
76
 
77
  if __name__ == "__main__":
78
  demo.launch()