cfoli committed on
Commit
a7b5a5d
·
verified ·
1 Parent(s): 826f6b8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -3
app.py CHANGED
@@ -35,8 +35,11 @@ MODEL_CACHE = {}
35
  MODEL_OPTIONS = {
36
  "CLIP-base": "openai/clip-vit-base-patch32",
37
  "CLIP-large": "openai/clip-vit-large-patch14",
38
- "SigLIP-base": "google/siglip-base-patch16-224",
39
- "SigLIP-large": "google/siglip-so400m-patch14-384"}
 
 
 
40
 
41
  CANDIDATE_LABELS = ["a bat (baseball)", "a bat (mammal)",
42
  "a flower (plant)", "flour (baking powder)",
@@ -109,7 +112,7 @@ example_list = [
109
 
110
  gradio_app = gradio.Interface(
111
  fn = run_classifer,
112
- inputs = [gradio.Dropdown(["CLIP-base", "CLIP-large", "SigLIP-base", "SigLIP-large"], value="CLIP-large", label = "Select Classifier"),
113
  gradio.Image(type="pil", label="Load sample image here"),
114
  gradio.Slider(minimum = 0.1, maximum = 0.9, step = 0.05, value = 0.25, label = "Set Prediction Threshold")
115
  ],
 
35
  MODEL_OPTIONS = {
36
  "CLIP-base": "openai/clip-vit-base-patch32",
37
  "CLIP-large": "openai/clip-vit-large-patch14",
38
+ # "SigLIP-base": "google/siglip-base-patch16-224",
39
+ # "SigLIP-large": "google/siglip-so400m-patch14-384"
40
+ "ALIGN": "kakaobrain/align-base",
41
+ "LLM2CLIP": "microsoft/LLM2CLIP-EVA02-L-14-336"
42
+ }
43
 
44
  CANDIDATE_LABELS = ["a bat (baseball)", "a bat (mammal)",
45
  "a flower (plant)", "flour (baking powder)",
 
112
 
113
  gradio_app = gradio.Interface(
114
  fn = run_classifer,
115
+ inputs = [gradio.Dropdown(["CLIP-base", "CLIP-large", "ALIGN", "LLM2CLIP"], value="CLIP-large", label = "Select Classifier"),
116
  gradio.Image(type="pil", label="Load sample image here"),
117
  gradio.Slider(minimum = 0.1, maximum = 0.9, step = 0.05, value = 0.25, label = "Set Prediction Threshold")
118
  ],