fpessanha committed on
Commit
05589a7
·
1 Parent(s): 32942a8

Feat: Allow random IDs

Browse files
__pycache__/load_and_save.cpython-310.pyc CHANGED
Binary files a/__pycache__/load_and_save.cpython-310.pyc and b/__pycache__/load_and_save.cpython-310.pyc differ
 
__pycache__/text_explanations.cpython-310.pyc CHANGED
Binary files a/__pycache__/text_explanations.cpython-310.pyc and b/__pycache__/text_explanations.cpython-310.pyc differ
 
__pycache__/utils.cpython-310.pyc CHANGED
Binary files a/__pycache__/utils.cpython-310.pyc and b/__pycache__/utils.cpython-310.pyc differ
 
app.py CHANGED
@@ -34,7 +34,9 @@ with (gr.Blocks(theme=gr.themes.Soft(), css = css) as demo):
34
 
35
  # Instructions for emotion annotation
36
  with gr.Sidebar(open = True) as sidebar:
37
- participant_id = gr.Textbox(label='What is your participant ID?', interactive = True)
 
 
38
  lets_go = gr.Button("Let's go!")
39
  cheat_sheet = gr.HTML(side_bar_html, padding = False)
40
 
 
34
 
35
  # Instructions for emotion annotation
36
  with gr.Sidebar(open = True) as sidebar:
37
+
38
+ explanation = gr.Textbox(label="What is your participant ID?", interactive=False, value = "Please provide your Participant ID below. If you don't have one, feel free to define your own. Note that it's important to remember your ID so you can return to your annotations.")
39
+ participant_id = gr.Textbox(label="ID", interactive = True)
40
  lets_go = gr.Button("Let's go!")
41
  cheat_sheet = gr.HTML(side_bar_html, padding = False)
42
 
load_and_save.py CHANGED
@@ -11,19 +11,17 @@ from text_explanations import *
11
  from utils import *
12
  from datetime import datetime
13
 
14
- possible_ids = {'Tiger-001': 0, 'Falcon-002': 0,
15
- 'Elephant-003': 1, 'Panther-004': 1,
16
- 'Zebra-005': 2, 'Wolf-006': 2,
17
- 'Koala-007': 3, 'Otter-008': 3,
18
- 'Leopard-009': 4, 'Panda-010': 4,
19
- 'Cheetah-011': 5, 'Gorilla-012': 5,
20
- 'Dolphin-013' : 6, 'Lynx-014': 6,
21
- 'Moose-015': 7, 'Raccoon-016': 7,
22
- 'Rabbit-017': 0, 'Eagle-018': 8, 'Jaguar-019': 8}
23
 
24
  persistent_storage = Path('/data')
25
  password_files = os.getenv("password_files")
26
 
 
 
 
 
 
 
27
  def load_first_example(annotations_df, file_list_df, id, completed, index):
28
  """ Loads and first example and updates index
29
 
@@ -78,7 +76,7 @@ def load_example(annotations_df, file_list_df, index):
78
  """
79
  if index < len(file_list_df):
80
  row = file_list_df.iloc[index]
81
- audio_path = os.path.join(persistent_storage, 'files_to_annotate_2round', row["sample_id"].split('-')[0], row["sample_id"] + '.wav')
82
  sentence = row["sentence"]
83
 
84
  # If the user already made an annotation for this example, gradio will return said annotation
@@ -94,7 +92,7 @@ def load_example(annotations_df, file_list_df, index):
94
  else:
95
  index -= 1
96
  row = file_list_df.iloc[index]
97
- audio_path = os.path.join(persistent_storage, 'files_to_annotate_2round', row["sample_id"].split('-')[0], row["sample_id"] + '.wav')
98
  sentence = row["sentence"]
99
 
100
  # If the user already made an annotation for this example, gradio will return said annotation
@@ -236,25 +234,26 @@ def previous_example(annotations_df, file_list_df, emotion, confidence, comments
236
  def deactivate_participant_id(annotations_df, file_list_df, total, participant_id, lets_go, previous_button, next_button, sentence_text, audio_player, emotions, confidence, comments, n_clicks, ann_completed, current_index):
237
 
238
 
239
- if participant_id in possible_ids.keys():
240
- file_list_df = pd.read_csv(os.path.join(persistent_storage, 'files_to_annotate_2round', f'group_{possible_ids[participant_id]}_v2.csv'), keep_default_na=False)
 
 
241
 
242
- total = len(file_list_df)
243
-
244
 
245
- annotations_df, sentence, audio_player, emotions, confidence, comments, n_clicks, start, end, duration, ann_completed, current_index = load_first_example(annotations_df, file_list_df, participant_id, ann_completed, current_index)
246
-
247
- participant_id = gr.Textbox(label='What is your participant ID?', value = participant_id, interactive = False)
248
- lets_go = gr.Button("Participant selected!", interactive = False)
249
-
250
- sentence_text = gr.Textbox(label="Transcription", interactive=False, value = sentence)
251
- emotions = gr.Radio(["Blank", "Happy", "Sad", "Angry", "Neutral"], label="Predominant Emotion (Check the sidebar for major subclasses)", value = emotions, visible = True)
252
- confidence = gr.Radio(["Blank","Very Uncertain", "Somewhat Uncertain", "Neutral", "Somewhat confident", "Very confident"], label="How confident are you that the annotated emotion is present in the recording?", visible = True, value = confidence)
253
- comments = gr.Textbox(label="Comments", visible =True, value = comments)
254
- previous_button = gr.Button("Previous Example", visible = True)
255
- next_button = gr.Button("Next Example",visible = True)
256
-
257
- return annotations_df, file_list_df, participant_id, participant_id, lets_go, total, previous_button, next_button, sentence_text, audio_player, emotions, confidence, comments, n_clicks, start, end, duration, ann_completed, current_index
258
 
259
- else:
260
- raise gr.Error("Please insert a valid participant ID")
 
 
 
 
 
 
 
 
 
 
 
 
 
11
  from utils import *
12
  from datetime import datetime
13
 
import pickle

persistent_storage = Path('/data')
password_files = os.getenv("password_files")

# Restore the participant-ID -> annotation-group mapping that is persisted
# across app restarts; start with an empty mapping when no snapshot exists.
# BUG FIX: the snapshot must be opened for *reading* ('rb'). The previous
# 'wb' mode truncated the pickle to zero bytes before pickle.load could run,
# destroying all saved participant IDs and raising EOFError.
# NOTE(review): pickle.load executes arbitrary code if the file is tampered
# with — acceptable only because /data is app-private storage; confirm.
if os.path.exists(f'{persistent_storage}/possible_ids.pkl'):
    with open(f'{persistent_storage}/possible_ids.pkl', 'rb') as f:
        possible_ids = pickle.load(f)
else:
    possible_ids = {}
25
  def load_first_example(annotations_df, file_list_df, id, completed, index):
26
  """ Loads and first example and updates index
27
 
 
76
  """
77
  if index < len(file_list_df):
78
  row = file_list_df.iloc[index]
79
+ audio_path = os.path.join(persistent_storage, 'files_to_annotate', row["sample_id"].split('-')[0], row["sample_id"] + '.wav')
80
  sentence = row["sentence"]
81
 
82
  # If the user already made an annotation for this example, gradio will return said annotation
 
92
  else:
93
  index -= 1
94
  row = file_list_df.iloc[index]
95
+ audio_path = os.path.join(persistent_storage, 'files_to_annotate', row["sample_id"].split('-')[0], row["sample_id"] + '.wav')
96
  sentence = row["sentence"]
97
 
98
  # If the user already made an annotation for this example, gradio will return said annotation
 
def deactivate_participant_id(annotations_df, file_list_df, total, participant_id, lets_go, previous_button, next_button, sentence_text, audio_player, emotions, confidence, comments, n_clicks, ann_completed, current_index):
    """Lock in the participant ID and set up the annotation session.

    Any ID is accepted: unknown IDs are registered on the fly (assigned to
    group 0) and the updated mapping is persisted to disk. The participant's
    file list is loaded, the first example is displayed, and the ID textbox
    plus start button are disabled so the ID cannot change mid-session.

    Returns the full tuple of updated gradio components and session state
    expected by the click handler (participant_id appears twice on purpose —
    it feeds two separate outputs).
    """
    # First time we see this ID: register it and persist the mapping so the
    # participant can resume later.
    # NOTE(review): every new ID lands in group 0, and concurrent users could
    # race on this read-modify-write of the pickle file — confirm acceptable.
    if participant_id not in possible_ids:
        possible_ids[participant_id] = 0
        with open(f'{persistent_storage}/possible_ids.pkl', 'wb') as f:
            pickle.dump(possible_ids, f)

    # The participant's group determines which CSV of samples they annotate.
    group_csv = os.path.join(persistent_storage, 'files_to_annotate', f'group_{possible_ids[participant_id]}.csv')
    file_list_df = pd.read_csv(group_csv, keep_default_na=False)
    total = len(file_list_df)

    # Show the first (or first unannotated) example for this participant.
    annotations_df, sentence, audio_player, emotions, confidence, comments, n_clicks, start, end, duration, ann_completed, current_index = load_first_example(annotations_df, file_list_df, participant_id, ann_completed, current_index)

    # Freeze the ID entry so the session stays bound to one participant.
    participant_id = gr.Textbox(label='What is your participant ID?', value = participant_id, interactive = False)
    lets_go = gr.Button("Participant selected!", interactive = False)

    # Reveal the annotation widgets now that a session is active.
    sentence_text = gr.Textbox(label="Transcription", interactive=False, value = sentence)
    emotions = gr.Radio(["Blank", "Happy", "Sad", "Angry", "Neutral"], label="Predominant Emotion (Check the sidebar for major subclasses)", value = emotions, visible = True)
    confidence = gr.Radio(["Blank","Very Uncertain", "Somewhat Uncertain", "Neutral", "Somewhat confident", "Very confident"], label="How confident are you that the annotated emotion is present in the recording?", visible = True, value = confidence)
    comments = gr.Textbox(label="Comments", visible =True, value = comments)
    previous_button = gr.Button("Previous Example", visible = True)
    next_button = gr.Button("Next Example",visible = True)

    return annotations_df, file_list_df, participant_id, participant_id, lets_go, total, previous_button, next_button, sentence_text, audio_player, emotions, confidence, comments, n_clicks, start, end, duration, ann_completed, current_index
saved_dictionary.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a2b985bf5b3423dfb3cd235f1ed7f4b44c3bbb606f10beb402d83e220ede8989
3
+ size 23