fpessanha committed on
Commit
8ae1600
·
verified ·
1 Parent(s): ea9b83c

Update annotation_interface.py

Browse files
Files changed (1) hide show
  1. annotation_interface.py +126 -0
annotation_interface.py CHANGED
@@ -0,0 +1,126 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import pandas as pd
3
+ from src.config import *
4
+ import os
5
+ from gryannote_audio import AudioLabeling
6
+ import gradio as gr
7
+ from pyannote.core import Annotation, Segment
8
+ from pyannote.audio import Pipeline
9
+
10
# List of all audio files to annotate.
# FILES_ANNOTATED is presumably a directory path from src.config — TODO confirm.
file_list = pd.read_excel(os.path.join(FILES_ANNOTATED, 'combined_annotations.xlsx'))

# Initialize an empty DataFrame to store annotations; rows are appended or
# updated by save_annotation() and persisted to annotations.csv.
annotations = pd.DataFrame(columns=['sample_id', 'sentence', 'emotion', 'comments'])
# Position of the example currently shown in the UI.
current_index = {"index": 0}  # Dictionary to allow modifying inside functions
16
+
17
def load_example(index):
    """Load the example (audio + text) by index.

    Args:
        index: Positional index into ``file_list``.

    Returns:
        A 4-tuple ``(sentence, audio_path, emotion, comments)`` matching the
        Gradio output components. If the sample was previously annotated,
        its saved emotion/comments are restored; otherwise empty strings.
    """
    row = file_list.iloc[index]
    sample_id = row["SAMPLE ID"]
    # Audio lives at FILES_ANNOTATED/<prefix>/<sample_id>.wav, where <prefix>
    # is the part of the sample id before the first '-'.
    audio_path = os.path.join(FILES_ANNOTATED, sample_id.split('-')[0], sample_id + '.wav')
    print(f"Audio path: {audio_path}, Exists: {os.path.exists(audio_path)}")
    sentence = row["SENTENCE"]

    # If the user already made an annotation for this example, return it.
    # Look up by sample_id rather than by position: save_annotation() appends
    # rows in visit order, so after out-of-order navigation a positional
    # `annotations.iloc[index]` lookup could return the wrong sample's data.
    match = annotations[annotations["sample_id"] == sample_id]
    if match.empty:
        previous_annotation = {"sample_id": sample_id, "emotion": '', "comments": ''}
    else:
        previous_annotation = match.iloc[0].to_dict()
    return (sentence, audio_path, previous_annotation['emotion'], previous_annotation["comments"])
30
+
31
+
32
def save_annotation(emotions, comments):
    """Save the annotation for the currently displayed example.

    Updates the in-memory ``annotations`` DataFrame (overwriting any prior
    entry for the same sample, otherwise appending a new row) and persists
    the whole table to ``annotations.csv``.
    """
    idx = current_index["index"]
    current_row = file_list.iloc[idx]
    sample_id = current_row["SAMPLE ID"]
    sentence = current_row["SENTENCE"]

    # Overwrite the existing annotation for this sample, or append a new row.
    existing = annotations["sample_id"] == sample_id
    if existing.any():
        annotations.loc[existing, ["emotion", "comments"]] = [emotions, comments]
    else:
        annotations.loc[len(annotations)] = [sample_id, sentence, emotions, comments]

    annotations.to_csv("annotations.csv", index=False)  # Save to a CSV file
48
def next_example(emotions, comments):
    """Save the current annotation and move to the next example.

    Returns:
        The 4-tuple ``(sentence, audio_path, emotion, comments)`` expected
        by the click handler's ``outputs`` list.
    """
    save_annotation(emotions, comments)

    if current_index["index"] < len(file_list) - 1:
        current_index["index"] += 1
        return load_example(current_index["index"])
    # End of the list: the original returned 7 values here while the Gradio
    # outputs (and load_example) expect exactly 4 — return a matching 4-tuple
    # so the UI does not error on the last example.
    return "End of examples", None, '', ''
56
+
57
+
58
def previous_example(emotion, comments):
    """Save the current annotation and move to the previous example.

    At the first example the index stays at 0 and that example is reloaded.

    Returns:
        The 4-tuple ``(sentence, audio_path, emotion, comments)`` expected
        by the click handler's ``outputs`` list.
    """
    save_annotation(emotion, comments)
    if current_index["index"] > 0:
        current_index["index"] -= 1
    # Both branches of the original returned load_example(current index);
    # the duplicated return is collapsed into a single statement.
    return load_example(current_index["index"])
65
+
66
+
67
# Gradio Interface
# NOTE(review): hard-coded placeholder media shown before the first example
# loads — confirm 'test.mp4' exists at runtime, and that an .mp4 is intended
# for an audio player.
audio_path = 'test.mp4'


with (gr.Blocks() as demo):

    # Read-only playback of the current sample's audio.
    with gr.Row():
        audio_player = gr.Audio(value=audio_path, label="Audio", type="filepath", interactive=False)

    # The transcript is hidden behind an accordion so annotators listen first.
    with gr.Row():
        with gr.Accordion(label="Click to see the sentence", open=False):
            sentence_text = gr.Textbox(label="Sentence", interactive=False)

    # NOTE(review): this slider is displayed but never passed to the save/next/
    # previous handlers below, so its value is not persisted — confirm intent.
    with gr.Row():
        slider = gr.Slider(
            minimum=-100,
            maximum=100,
            step=1,
            label="Sentiment Slider",
            info="Slide to the left for negative sentiment, to the right for positive sentiment",
            show_label=True,
            elem_classes=["sentiment-slider"]
        )

    emotions = gr.Radio(["Joy", "Sad", "Angry", "Neutral"], label="Predominant Emotion")

    # NOTE(review): like the sentiment slider, confidence is not wired into any
    # event handler and is never saved — confirm intent.
    confidence = gr.Slider(label="Confidence (%)", minimum=0, maximum=100, step=10)

    # Instructions for emotion annotation
    # NOTE(review): placeholder sidebar widgets with no labels/handlers.
    with gr.Sidebar():
        gr.Textbox()
        gr.Button()

    with gr.Row():
        save_button = gr.Button("Save Annotation")
        next_button = gr.Button("Next Example")
        previous_button = gr.Button("Previous Example")

    comments = gr.Textbox(label="Comments", interactive=True)
    # Initial load
    # NOTE(review): assigning component .value after construction may not
    # update the rendered defaults in all Gradio versions — confirm, or pass
    # the values to the constructors / a demo.load() handler instead.
    sentence_text.value, audio_player.value, emotions.value, comments.value = load_example(
        current_index["index"]
    )

    # Save only; no outputs, so the UI stays on the current example.
    save_button.click(
        save_annotation,
        inputs=[emotions, comments]
    )
    # Next/previous both save the current annotation, then repopulate the
    # four display components from the newly selected example.
    next_button.click(
        next_example,
        inputs=[emotions, comments],
        outputs=[sentence_text, audio_player, emotions, comments],
    )
    previous_button.click(
        previous_example,
        inputs=[emotions, comments],
        outputs=[sentence_text, audio_player, emotions, comments],
    )

demo.launch()