cfoli committed on
Commit
0718174
·
verified ·
1 Parent(s): a7b5a5d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -3
app.py CHANGED
@@ -37,8 +37,7 @@ MODEL_OPTIONS = {
37
  "CLIP-large": "openai/clip-vit-large-patch14",
38
  # "SigLIP-base": "google/siglip-base-patch16-224",
39
  # "SigLIP-large": "google/siglip-so400m-patch14-384"
40
- "ALIGN": "kakaobrain/align-base",
41
- "LLM2CLIP": "microsoft/LLM2CLIP-EVA02-L-14-336"
42
  }
43
 
44
  CANDIDATE_LABELS = ["a bat (baseball)", "a bat (mammal)",
@@ -112,7 +111,7 @@ example_list = [
112
 
113
  gradio_app = gradio.Interface(
114
  fn = run_classifer,
115
- inputs = [gradio.Dropdown(["CLIP-base", "CLIP-large", "ALIGN", "LLM2CLIP"], value="CLIP-large", label = "Select Classifier"),
116
  gradio.Image(type="pil", label="Load sample image here"),
117
  gradio.Slider(minimum = 0.1, maximum = 0.9, step = 0.05, value = 0.25, label = "Set Prediction Threshold")
118
  ],
 
37
  "CLIP-large": "openai/clip-vit-large-patch14",
38
  # "SigLIP-base": "google/siglip-base-patch16-224",
39
  # "SigLIP-large": "google/siglip-so400m-patch14-384"
40
+ "ALIGN": "kakaobrain/align-base"
 
41
  }
42
 
43
  CANDIDATE_LABELS = ["a bat (baseball)", "a bat (mammal)",
 
111
 
112
  gradio_app = gradio.Interface(
113
  fn = run_classifer,
114
+ inputs = [gradio.Dropdown(["CLIP-base", "CLIP-large", "ALIGN"], value="CLIP-base", label = "Select Classifier"),
115
  gradio.Image(type="pil", label="Load sample image here"),
116
  gradio.Slider(minimum = 0.1, maximum = 0.9, step = 0.05, value = 0.25, label = "Set Prediction Threshold")
117
  ],