aswin-raghavan commited on
Commit
7d846e2
·
1 Parent(s): d31062e

new load fn to initialize HD LUT and exemplars

Browse files
Files changed (1) hide show
  1. app.py +36 -3
app.py CHANGED
@@ -8,12 +8,16 @@ import pandas as pd
8
  from glob import glob
9
  import random
10
  from datetime import datetime
 
 
 
11
 
12
  clip_model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32")
13
  clip_processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
14
 
15
- # pipe = pipeline("zero-shot-image-classification", model="openai/clip-vit-base-patch32")
16
- # images="dog.jpg"
 
17
 
18
  def extract_features(image):
19
  PIL_image = Image.fromarray(np.uint8(image)).convert('RGB')
@@ -45,6 +49,29 @@ def update_table_up(img, img_name, df, state):
45
  def update_table_down(img, img_name, df, state):
46
  return update_table(img, img_name, df, state, 0)
47
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
48
 
49
  with gr.Blocks(title="End-User Personalization") as demo:
50
  img_list = glob('images/**/*.jpg')
@@ -63,5 +90,11 @@ with gr.Blocks(title="End-User Personalization") as demo:
63
  upvote.click(update_table_up, inputs=[image_display, image_fname, annotated_samples, images], outputs=[image_display, image_fname, annotated_samples, images])
64
  downvote.click(update_table_down, inputs=[image_display, image_fname, annotated_samples, images], outputs=[image_display, image_fname, annotated_samples, images])
65
 
66
- demo.load(lambda: (images.value[-1], images.value[-1]), inputs=[], outputs=[image_display, image_fname])
 
 
 
 
 
 
67
  demo.launch(show_error=True, debug=True)
 
8
  from glob import glob
9
  import random
10
  from datetime import datetime
11
+ import numpy as np
12
+ from numpy.random import MT19937
13
+ from numpy.random import RandomState, SeedSequence
14
 
15
  clip_model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32")
16
  clip_processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
17
 
18
+ HYPERDIMS = 1024
19
+ VALUE_BITS = 8
20
+ POS_BITS = 9 # CLIP features are 512 dims
21
 
22
  def extract_features(image):
23
  PIL_image = Image.fromarray(np.uint8(image)).convert('RGB')
 
49
  def update_table_down(img, img_name, df, state):
50
  return update_table(img, img_name, df, state, 0)
51
 
52
def make_LUT(nvalues, dims, rs):
    """Build a level-hypervector lookup table for hyperdimensional encoding.

    Row 0 is a random binary hypervector; each subsequent row is derived
    from the previous one by flipping ``dims // nvalues`` randomly chosen
    bits, so nearby levels map to similar (correlated) hypervectors.

    Args:
        nvalues: number of flip steps; the returned table has
            ``nvalues + 1`` rows (levels 0..nvalues inclusive).
        dims: hypervector dimensionality (number of columns).
        rs: ``numpy.random.RandomState`` supplying all randomness.

    Returns:
        A ``(nvalues + 1, dims)`` float array with 0/1 entries.

    Raises:
        AssertionError: if a step flips the wrong number of bits, or if
            two rows collide (vanishingly unlikely at the sizes used here).
    """
    flips = dims // nvalues  # bits flipped per level step
    lut = np.zeros(shape=(nvalues + 1, dims))
    lut[0, :] = rs.binomial(n=1, p=0.5, size=dims)
    # BUGFIX: iterate to nvalues + 1 so the last allocated row is part of
    # the flip chain too; previously it was left all-zero and uninitialized.
    for row in range(1, nvalues + 1):
        lut[row, :] = lut[row - 1, :]
        # Flip a few bits chosen without replacement, so consecutive rows
        # differ in exactly `flips` positions.
        rand_idx = rs.choice(dims, size=flips, replace=False)
        lut[row, rand_idx] = 1 - lut[row, rand_idx]
        assert np.abs(lut[row, :] - lut[row - 1, :]).sum() == flips
    # Sanity check: every level received a distinct hypervector.
    assert len(np.unique(lut, axis=0)) == len(lut)
    return lut
64
+
65
def load_fn(images, rng_state, exemplars_state, lut_state, seed=123456789):
    """Initialize HD-computing state when the gradio demo first loads.

    Creates a reproducible ``RandomState``, draws two random binary
    exemplar hypervectors (presumably one per feedback class, up/down —
    confirm against the vote handlers), and builds the value/position
    lookup tables used to encode CLIP features.

    Args:
        images: list of image paths; the last entry becomes the first
            image shown.
        rng_state: single-element list; slot 0 receives the RandomState.
        exemplars_state: two-element list; slots 0 and 1 receive the
            exemplar hypervectors.
        lut_state: two-element list; slot 0 gets the value LUT
            (2**VALUE_BITS levels), slot 1 the position LUT
            (2**POS_BITS positions — POS_BITS = 9 covers CLIP's 512 dims).
        seed: RNG seed. Kept as a trailing default so the existing
            ``demo.load`` wiring, which passes four positional inputs,
            is unchanged.

    Returns:
        Tuple ``(image, image_filename, rng_state, exemplars_state,
        lut_state)`` matching the ``demo.load`` outputs; the three state
        lists are mutated in place and returned.
    """
    rs = RandomState(MT19937(SeedSequence(seed)))
    rng_state[0] = rs
    exemplars_state[0] = rs.binomial(n=1, p=0.5, size=HYPERDIMS)
    exemplars_state[1] = rs.binomial(n=1, p=0.5, size=HYPERDIMS)
    lut_state[0] = make_LUT(2**VALUE_BITS, HYPERDIMS, rs)
    lut_state[1] = make_LUT(2**POS_BITS, HYPERDIMS, rs)
    return images[-1], images[-1], rng_state, exemplars_state, lut_state
73
+
74
+
75
 
76
  with gr.Blocks(title="End-User Personalization") as demo:
77
  img_list = glob('images/**/*.jpg')
 
90
  upvote.click(update_table_up, inputs=[image_display, image_fname, annotated_samples, images], outputs=[image_display, image_fname, annotated_samples, images])
91
  downvote.click(update_table_down, inputs=[image_display, image_fname, annotated_samples, images], outputs=[image_display, image_fname, annotated_samples, images])
92
 
93
+ rng = gr.State([None])
94
+ exemplars_state = gr.State([None, None])
95
+ lut_state = gr.State([None, None])
96
+
97
+ demo.load(load_fn, inputs=[images, rng, exemplars_state, lut_state], outputs=[image_display, image_fname, rng, exemplars_state, lut_state])
98
+
99
+ personalize = gr.Button("Personalize")
100
  demo.launch(show_error=True, debug=True)