GeorgeSherif committed on
Commit
6e3b3d1
·
1 Parent(s): ff00ea3

adding captions

Browse files
Files changed (2) hide show
  1. app.py +28 -5
  2. nearest_neighbors_with_captions.json +212 -0
app.py CHANGED
@@ -1,10 +1,10 @@
1
- #Latest working version
2
  import gradio as gr
3
  import os
4
  import threading
5
  import random
6
  from datasets import load_dataset, Dataset, Features, Value, concatenate_datasets
7
  from huggingface_hub import login
 
8
 
9
  # Authenticate with Hugging Face
10
  token = os.getenv("HUGGINGFACE_TOKEN")
@@ -31,6 +31,26 @@ image_folder = "images"
31
  image_files = [f for f in os.listdir(image_folder) if f.endswith(('.png', '.jpg', '.jpeg'))]
32
  lock = threading.Lock()
33
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
34
  # Function to get a random image that hasn’t been annotated or skipped
35
  def get_next_image(session_data):
36
  with lock:
@@ -69,7 +89,8 @@ def save_annotation(caption, session_data):
69
  # Fetch the next image
70
  next_image = get_next_image(session_data)
71
  if next_image:
72
- return gr.update(value=next_image), gr.update(value="")
 
73
  else:
74
  return gr.update(visible=False), gr.update(value="All images have been annotated!")
75
 
@@ -81,7 +102,8 @@ def skip_image(session_data):
81
  def initialize_interface(session_data):
82
  next_image = get_next_image(session_data)
83
  if next_image:
84
- return gr.update(value=next_image), gr.update(value="")
 
85
  else:
86
  return gr.update(visible=False), gr.update(value="All images have been annotated!")
87
 
@@ -95,6 +117,7 @@ with gr.Blocks() as demo:
95
  with gr.Row():
96
  image = gr.Image()
97
  caption = gr.Textbox(placeholder="Enter caption here...")
 
98
  submit = gr.Button("Submit")
99
  skip = gr.Button("Skip") # Skip button
100
 
@@ -103,6 +126,6 @@ with gr.Blocks() as demo:
103
  skip.click(fn=skip_image, inputs=session_data, outputs=[image, caption])
104
 
105
  # Load initial image
106
- demo.load(fn=initialize_interface, inputs=session_data, outputs=[image, caption])
107
 
108
- demo.launch(share=True)
 
 
1
  import gradio as gr
2
  import os
3
  import threading
4
  import random
5
  from datasets import load_dataset, Dataset, Features, Value, concatenate_datasets
6
  from huggingface_hub import login
7
+ import json
8
 
9
  # Authenticate with Hugging Face
10
  token = os.getenv("HUGGINGFACE_TOKEN")
 
31
  image_files = [f for f in os.listdir(image_folder) if f.endswith(('.png', '.jpg', '.jpeg'))]
32
  lock = threading.Lock()
33
 
34
# Load the precomputed nearest-neighbor captions produced offline.
# Explicit UTF-8 so the JSON parses identically regardless of the
# platform's default locale encoding.
with open('nearest_neighbors_with_captions.json', 'r', encoding='utf-8') as f:
    results = json.load(f)
36
+
37
def get_caption_for_image_id(image_id, data=None):
    """
    Retrieve the caption for a given image_id.

    The id is looked up first among the test images (the top-level keys of
    the nearest-neighbors mapping) and, failing that, among every test
    image's listed nearest neighbors.

    Args:
        image_id: String id to look up (matches the JSON keys /
            ``image_id`` fields).
        data: Optional mapping to search. Defaults to the module-level
            ``results`` loaded from nearest_neighbors_with_captions.json,
            so existing single-argument callers are unaffected.

    Returns:
        The caption string, or None if the image_id is not found anywhere.
    """
    if data is None:
        data = results  # module-level JSON loaded at import time

    # Direct hit: image_id is itself a test image (single lookup via .get
    # instead of a membership test followed by indexing).
    entry = data.get(image_id)
    if entry is not None:
        return entry["caption"]

    # Otherwise scan each test image's nearest neighbors.
    for test_image_data in data.values():
        for neighbor in test_image_data["nearest_neighbors"]:
            if neighbor["image_id"] == image_id:
                return neighbor["caption"]

    # Not found as a test image or as any neighbor.
    return None
53
+
54
  # Function to get a random image that hasn’t been annotated or skipped
55
  def get_next_image(session_data):
56
  with lock:
 
89
  # Fetch the next image
90
  next_image = get_next_image(session_data)
91
  if next_image:
92
+ next_caption = get_caption_for_image_id(image_id) # Retrieve the caption for the new image
93
+ return gr.update(value=next_image), gr.update(value=next_caption or "")
94
  else:
95
  return gr.update(visible=False), gr.update(value="All images have been annotated!")
96
 
 
102
  def initialize_interface(session_data):
103
  next_image = get_next_image(session_data)
104
  if next_image:
105
+ next_caption = get_caption_for_image_id(session_data["current_image"]) # Retrieve caption for initial image
106
+ return gr.update(value=next_image), gr.update(value=next_caption or "")
107
  else:
108
  return gr.update(visible=False), gr.update(value="All images have been annotated!")
109
 
 
117
  with gr.Row():
118
  image = gr.Image()
119
  caption = gr.Textbox(placeholder="Enter caption here...")
120
+ existing_caption = gr.Textbox(label="Existing Caption", interactive=False) # Display existing caption
121
  submit = gr.Button("Submit")
122
  skip = gr.Button("Skip") # Skip button
123
 
 
126
  skip.click(fn=skip_image, inputs=session_data, outputs=[image, caption])
127
 
128
  # Load initial image
129
+ demo.load(fn=initialize_interface, inputs=session_data, outputs=[image, existing_caption])
130
 
131
+ demo.launch(share=True)
nearest_neighbors_with_captions.json ADDED
@@ -0,0 +1,212 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "100001": {
3
+ "caption": "in this scene we see a person flying a kite with a flag attached .",
4
+ "nearest_neighbors": [
5
+ {
6
+ "image_id": "129440",
7
+ "caption": "a woman standing on a beach next to the ocean ."
8
+ },
9
+ {
10
+ "image_id": "74990",
11
+ "caption": "a kite flying over the ocean on a sandy beach ."
12
+ },
13
+ {
14
+ "image_id": "283279",
15
+ "caption": "someone flying a kite on the beach ."
16
+ },
17
+ {
18
+ "image_id": "328179",
19
+ "caption": "a young person flying a kite at the beach ."
20
+ }
21
+ ]
22
+ },
23
+ "100087": {
24
+ "caption": "a traffic light by a lutz road sign .",
25
+ "nearest_neighbors": [
26
+ {
27
+ "image_id": "571943",
28
+ "caption": "a pole with a street light and signs next to a bare tree"
29
+ },
30
+ {
31
+ "image_id": "486123",
32
+ "caption": "a white toyota drives under a stoplight with a google sign ."
33
+ },
34
+ {
35
+ "image_id": "521634",
36
+ "caption": "a street light with many signs on it stands to the foreground of a large building standing beside a naked tree ."
37
+ },
38
+ {
39
+ "image_id": "261746",
40
+ "caption": "a bunch of traffic lights on a street corner ."
41
+ }
42
+ ]
43
+ },
44
+ "100098": {
45
+ "caption": "a woman on a brown and white horse rides near trees .",
46
+ "nearest_neighbors": [
47
+ {
48
+ "image_id": "290881",
49
+ "caption": "a girl riding a brown horse and green grass with trees"
50
+ },
51
+ {
52
+ "image_id": "181953",
53
+ "caption": "a woman riding on the back of a brown horse ."
54
+ },
55
+ {
56
+ "image_id": "545950",
57
+ "caption": "a young woman riding a white horse on a gravel road ."
58
+ },
59
+ {
60
+ "image_id": "275392",
61
+ "caption": "a person riding a brown horse with blonde hair in a green field ."
62
+ }
63
+ ]
64
+ },
65
+ "100138": {
66
+ "caption": "a black motorcycle parked in front of trees .",
67
+ "nearest_neighbors": [
68
+ {
69
+ "image_id": "169174",
70
+ "caption": "a black and white photo of a parked motorcycle"
71
+ },
72
+ {
73
+ "image_id": "516581",
74
+ "caption": "the modern motorcycle is parked on a sunny day ."
75
+ },
76
+ {
77
+ "image_id": "474067",
78
+ "caption": "a man riding a motorcycle down a road ."
79
+ },
80
+ {
81
+ "image_id": "227969",
82
+ "caption": "a man sits on a yellow motorcycle ."
83
+ }
84
+ ]
85
+ },
86
+ "10014": {
87
+ "caption": "a city bus parked next to a crowd of people .",
88
+ "nearest_neighbors": [
89
+ {
90
+ "image_id": "523494",
91
+ "caption": "several people are waiting to get on a city bus ."
92
+ },
93
+ {
94
+ "image_id": "310695",
95
+ "caption": "the two buses are parked along the curb ."
96
+ },
97
+ {
98
+ "image_id": "4331",
99
+ "caption": "a person standing in front of a parked bus with a bicycle on it ."
100
+ },
101
+ {
102
+ "image_id": "166975",
103
+ "caption": "a man standing in front of a bus that has crashed into a car ."
104
+ }
105
+ ]
106
+ },
107
+ "100187": {
108
+ "caption": "a well lit living room with sofas and coffee table .",
109
+ "nearest_neighbors": [
110
+ {
111
+ "image_id": "555797",
112
+ "caption": "a modern living room with a television , seating , tables , and lights"
113
+ },
114
+ {
115
+ "image_id": "398037",
116
+ "caption": "a bedroom with a bed under two framed paintings ."
117
+ },
118
+ {
119
+ "image_id": "570022",
120
+ "caption": "a living room filled with blue furniture in front of a tv ."
121
+ },
122
+ {
123
+ "image_id": "350165",
124
+ "caption": "a room with a couch several chairs and a sink and vanity in the corner ."
125
+ }
126
+ ]
127
+ },
128
+ "100306": {
129
+ "caption": "a man standing on top of a snow covered ski slope .",
130
+ "nearest_neighbors": [
131
+ {
132
+ "image_id": "368049",
133
+ "caption": "a snow boarder riding down a snow covered summit ."
134
+ },
135
+ {
136
+ "image_id": "49384",
137
+ "caption": "a person hiking through a snow covered mountain on top of skis ."
138
+ },
139
+ {
140
+ "image_id": "266409",
141
+ "caption": "a man in a blue coat skiing through a snowy field ."
142
+ },
143
+ {
144
+ "image_id": "205631",
145
+ "caption": "a group of people riding down a snow covered sky slope ."
146
+ }
147
+ ]
148
+ },
149
+ "100594": {
150
+ "caption": "a dark and gloomy winter day in the city",
151
+ "nearest_neighbors": [
152
+ {
153
+ "image_id": "497158",
154
+ "caption": "street with light traffic at intersection in large city ."
155
+ },
156
+ {
157
+ "image_id": "488567",
158
+ "caption": "the middle of a highway during a cloudy day ."
159
+ },
160
+ {
161
+ "image_id": "10540",
162
+ "caption": "a couple of cars driving down a street under traffic lights ."
163
+ },
164
+ {
165
+ "image_id": "260275",
166
+ "caption": "a traffic light with car passing underneath on the road ."
167
+ }
168
+ ]
169
+ },
170
+ "100661": {
171
+ "caption": "some zebras are seen grazing in the field .",
172
+ "nearest_neighbors": [
173
+ {
174
+ "image_id": "154",
175
+ "caption": "three zeebras standing in a grassy field walking"
176
+ },
177
+ {
178
+ "image_id": "300157",
179
+ "caption": "pair of zebras standing in grassy field outdoors ."
180
+ },
181
+ {
182
+ "image_id": "48491",
183
+ "caption": "two zebras standing together while eating some grass"
184
+ },
185
+ {
186
+ "image_id": "392260",
187
+ "caption": "two zebras eating some grass together in an open area ."
188
+ }
189
+ ]
190
+ },
191
+ "100848": {
192
+ "caption": "a living room with a brown couch by a big window",
193
+ "nearest_neighbors": [
194
+ {
195
+ "image_id": "281246",
196
+ "caption": "a living room with lots of furniture under large windows ."
197
+ },
198
+ {
199
+ "image_id": "245823",
200
+ "caption": "home sweet home with boxes packed over in the corner ."
201
+ },
202
+ {
203
+ "image_id": "263261",
204
+ "caption": "a television and furniture in a small room ."
205
+ },
206
+ {
207
+ "image_id": "140108",
208
+ "caption": "a living room has two brown sofas and a cat tree ."
209
+ }
210
+ ]
211
+ }
212
+ }