FangSen9000 commited on
Commit
10691cc
·
1 Parent(s): 9f9e779

Add batch reasoning function

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. SignX/inference.sh +57 -0
  2. SignX/inference_output/detailed_prediction_20260102_202142/171921/171921.mp4 +3 -0
  3. SignX/inference_output/detailed_prediction_20260102_202142/171921/analysis_report.txt +41 -0
  4. SignX/inference_output/detailed_prediction_20260102_202142/171921/attention_heatmap.pdf +0 -0
  5. SignX/inference_output/detailed_prediction_20260102_202142/171921/attention_heatmap.png +3 -0
  6. SignX/inference_output/detailed_prediction_20260102_202142/171921/attention_keyframes/keyframes_index.txt +30 -0
  7. SignX/inference_output/detailed_prediction_20260102_202142/171921/attention_weights.npy +3 -0
  8. SignX/inference_output/detailed_prediction_20260102_202142/171921/debug_video_path.txt +4 -0
  9. SignX/inference_output/detailed_prediction_20260102_202142/171921/feature_frame_mapping.json +158 -0
  10. SignX/inference_output/detailed_prediction_20260102_202142/171921/frame_alignment.json +68 -0
  11. SignX/inference_output/detailed_prediction_20260102_202142/171921/frame_alignment.pdf +0 -0
  12. SignX/inference_output/detailed_prediction_20260102_202142/171921/frame_alignment.png +3 -0
  13. SignX/inference_output/detailed_prediction_20260102_202142/171921/frame_alignment_short.pdf +0 -0
  14. SignX/inference_output/detailed_prediction_20260102_202142/171921/frame_alignment_short.png +3 -0
  15. SignX/inference_output/detailed_prediction_20260102_202142/171921/gloss_to_frames.png +3 -0
  16. SignX/inference_output/detailed_prediction_20260102_202142/171921/interactive_alignment.html +579 -0
  17. SignX/inference_output/detailed_prediction_20260102_202142/171921/translation.txt +3 -0
  18. SignX/inference_output/detailed_prediction_20260102_202302/173238/173238.mp4 +3 -0
  19. SignX/inference_output/detailed_prediction_20260102_202302/173238/analysis_report.txt +40 -0
  20. SignX/inference_output/detailed_prediction_20260102_202302/173238/attention_heatmap.pdf +0 -0
  21. SignX/inference_output/detailed_prediction_20260102_202302/173238/attention_heatmap.png +3 -0
  22. SignX/inference_output/detailed_prediction_20260102_202302/173238/attention_keyframes/keyframes_index.txt +33 -0
  23. SignX/inference_output/detailed_prediction_20260102_202302/173238/attention_weights.npy +3 -0
  24. SignX/inference_output/detailed_prediction_20260102_202302/173238/debug_video_path.txt +4 -0
  25. SignX/inference_output/detailed_prediction_20260102_202302/173238/feature_frame_mapping.json +158 -0
  26. SignX/inference_output/detailed_prediction_20260102_202302/173238/frame_alignment.json +59 -0
  27. SignX/inference_output/detailed_prediction_20260102_202302/173238/frame_alignment.pdf +0 -0
  28. SignX/inference_output/detailed_prediction_20260102_202302/173238/frame_alignment.png +3 -0
  29. SignX/inference_output/detailed_prediction_20260102_202302/173238/frame_alignment_short.pdf +0 -0
  30. SignX/inference_output/detailed_prediction_20260102_202302/173238/frame_alignment_short.png +3 -0
  31. SignX/inference_output/detailed_prediction_20260102_202302/173238/gloss_to_frames.png +3 -0
  32. SignX/inference_output/detailed_prediction_20260102_202302/173238/interactive_alignment.html +579 -0
  33. SignX/inference_output/detailed_prediction_20260102_202302/173238/translation.txt +3 -0
  34. SignX/inference_output/detailed_prediction_20260102_202418/173745/173745.mp4 +3 -0
  35. SignX/inference_output/detailed_prediction_20260102_202418/173745/analysis_report.txt +37 -0
  36. SignX/inference_output/detailed_prediction_20260102_202418/173745/attention_heatmap.pdf +0 -0
  37. SignX/inference_output/detailed_prediction_20260102_202418/173745/attention_heatmap.png +3 -0
  38. SignX/inference_output/detailed_prediction_20260102_202418/173745/attention_keyframes/keyframes_index.txt +32 -0
  39. SignX/inference_output/detailed_prediction_20260102_202418/173745/attention_weights.npy +3 -0
  40. SignX/inference_output/detailed_prediction_20260102_202418/173745/debug_video_path.txt +4 -0
  41. SignX/inference_output/{detailed_prediction_20260102_183038/97998032 → detailed_prediction_20260102_202418/173745}/feature_frame_mapping.json +0 -0
  42. SignX/inference_output/detailed_prediction_20260102_202418/173745/frame_alignment.json +32 -0
  43. SignX/inference_output/detailed_prediction_20260102_202418/173745/frame_alignment.pdf +0 -0
  44. SignX/inference_output/detailed_prediction_20260102_202418/173745/frame_alignment.png +3 -0
  45. SignX/inference_output/detailed_prediction_20260102_202418/173745/frame_alignment_short.pdf +0 -0
  46. SignX/inference_output/detailed_prediction_20260102_202418/173745/frame_alignment_short.png +3 -0
  47. SignX/inference_output/detailed_prediction_20260102_202418/173745/gloss_to_frames.png +3 -0
  48. SignX/inference_output/detailed_prediction_20260102_202418/173745/interactive_alignment.html +579 -0
  49. SignX/inference_output/detailed_prediction_20260102_202418/173745/translation.txt +3 -0
  50. SignX/inference_output/detailed_prediction_20260102_202534/23880856/23880856.mp4 +3 -0
SignX/inference.sh CHANGED
@@ -83,6 +83,54 @@ if [ "$#" -lt 1 ]; then
83
  fi
84
 
85
  VIDEO_PATH="$1"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
86
  if [ -z "$2" ]; then
87
  OUTPUT_PATH="$INFERENCE_ROOT/inference_output_$(date +%Y%m%d_%H%M%S)_$RANDOM.txt"
88
  else
@@ -404,6 +452,15 @@ if [ -f "$TEMP_DIR/prediction.txt" ]; then
404
  rm -f "$MOVED_CLEAN_FILE"
405
  fi
406
 
 
 
 
 
 
 
 
 
 
407
  OUTPUT_PATH="$TRANSLATION_FILE"
408
  OUTPUT_CLEAN_PATH="$TRANSLATION_FILE"
409
  fi
 
83
  fi
84
 
85
  VIDEO_PATH="$1"
86
+
87
+ # Batch mode: if a directory is provided, iterate over supported video files.
88
+ if [ -d "$VIDEO_PATH" ]; then
89
+ VIDEO_DIR=$(realpath "$VIDEO_PATH")
90
+ if [ -n "$2" ]; then
91
+ echo -e "${RED}Error: output path override is not supported in batch mode${NC}"
92
+ exit 1
93
+ fi
94
+
95
+ echo ""
96
+ echo "======================================================================"
97
+ echo " Batch Inference Mode"
98
+ echo "======================================================================"
99
+ echo " Directory: $VIDEO_DIR"
100
+ echo " Outputs: stored per-video using default locations"
101
+ echo "======================================================================"
102
+ echo ""
103
+
104
+ mapfile -d '' VIDEO_FILES < <(find "$VIDEO_DIR" -maxdepth 1 -type f \( -iname '*.mp4' -o -iname '*.mov' -o -iname '*.avi' -o -iname '*.mkv' \) -print0 | sort -z)
105
+ if [ ${#VIDEO_FILES[@]} -eq 0 ]; then
106
+ echo -e "${RED}Error: no video files (.mp4/.mov/.avi/.mkv) found under $VIDEO_DIR${NC}"
107
+ exit 1
108
+ fi
109
+
110
+ batch_status=0
111
+ total=${#VIDEO_FILES[@]}
112
+ index=1
113
+ for video_file in "${VIDEO_FILES[@]}"; do
114
+ echo ""
115
+ echo ">>> [Batch] Processing ($index/$total): $video_file"
116
+ if bash "$SCRIPT_DIR/$(basename "${BASH_SOURCE[0]}")" "$video_file"; then
117
+ echo ">>> [Batch] Completed: $video_file"
118
+ else
119
+ echo ">>> [Batch] Failed: $video_file"
120
+ batch_status=1
121
+ fi
122
+ index=$((index + 1))
123
+ done
124
+
125
+ echo ""
126
+ if [ $batch_status -eq 0 ]; then
127
+ echo -e "${GREEN}✓ Batch inference finished without errors${NC}"
128
+ else
129
+ echo -e "${YELLOW}⚠ Batch inference finished with some failures (see logs above)${NC}"
130
+ fi
131
+ exit $batch_status
132
+ fi
133
+
134
  if [ -z "$2" ]; then
135
  OUTPUT_PATH="$INFERENCE_ROOT/inference_output_$(date +%Y%m%d_%H%M%S)_$RANDOM.txt"
136
  else
 
452
  rm -f "$MOVED_CLEAN_FILE"
453
  fi
454
 
455
+ # Preserve a copy of the input video inside the sample directory for reference
456
+ if [ -f "$VIDEO_PATH" ]; then
457
+ VIDEO_BASENAME=$(basename "$VIDEO_PATH")
458
+ DEST_VIDEO_PATH="${PRIMARY_SAMPLE_DIR}/${VIDEO_BASENAME}"
459
+ if [ ! -f "$DEST_VIDEO_PATH" ]; then
460
+ cp "$VIDEO_PATH" "$DEST_VIDEO_PATH"
461
+ fi
462
+ fi
463
+
464
  OUTPUT_PATH="$TRANSLATION_FILE"
465
  OUTPUT_CLEAN_PATH="$TRANSLATION_FILE"
466
  fi
SignX/inference_output/detailed_prediction_20260102_202142/171921/171921.mp4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b55f3fae16c90ecd7e799d2515aeea9af00df4efad003d84e6b0aba1a3527822
3
+ size 102121
SignX/inference_output/detailed_prediction_20260102_202142/171921/analysis_report.txt ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ================================================================================
2
+ Sign Language Recognition - Attention Analysis Report
3
+ ================================================================================
4
+
5
+ Generated at: 2026-01-02 20:21:46
6
+
7
+ Translation:
8
+ --------------------------------------------------------------------------------
9
+ IX-1p NOT LIKE PINEAPPLE fs-CREAM+CHEESE IX-1p
10
+
11
+ Video info:
12
+ --------------------------------------------------------------------------------
13
+ Total feature frames: 25
14
+ Word count: 6
15
+
16
+ Attention tensor:
17
+ --------------------------------------------------------------------------------
18
+ Shape: (21, 25)
19
+ - Decoder steps: 21
20
+
21
+ Word-to-frame details:
22
+ ================================================================================
23
+ No. Word Frames Peak Attn Conf
24
+ --------------------------------------------------------------------------------
25
+ 1 IX-1p 3-3 3 0.390 medium
26
+ 2 NOT 5-5 5 0.532 high
27
+ 3 LIKE 7-7 7 0.626 high
28
+ 4 PINEAPPLE 9-10 10 0.237 medium
29
+ 5 fs-CREAM+CHEESE 12-13 12 0.113 low
30
+ 6 IX-1p 0-24 24 0.211 medium
31
+
32
+ ================================================================================
33
+
34
+ Summary:
35
+ --------------------------------------------------------------------------------
36
+ Average attention weight: 0.352
37
+ High-confidence words: 2 (33.3%)
38
+ Medium-confidence words: 3 (50.0%)
39
+ Low-confidence words: 1 (16.7%)
40
+
41
+ ================================================================================
SignX/inference_output/detailed_prediction_20260102_202142/171921/attention_heatmap.pdf ADDED
Binary file (34.2 kB). View file
 
SignX/inference_output/detailed_prediction_20260102_202142/171921/attention_heatmap.png ADDED

Git LFS Details

  • SHA256: 1b9e6a84dcf1f908a00739b6a9a12356657b4736e0c430dc099a542c0fad4294
  • Pointer size: 130 Bytes
  • Size of remote file: 80.1 kB
SignX/inference_output/detailed_prediction_20260102_202142/171921/attention_keyframes/keyframes_index.txt ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Attention Keyframe Index
2
+ ============================================================
3
+
4
+ Sample directory: /research/cbim/vast/sf895/code/Sign-X/output/huggingface_asllrp_repo/SignX/inference_output/detailed_prediction_20260102_202142/171921
5
+ Video path: /common/users/sf895/output/huggingface_asllrp_repo/SignX/eval/tiny_test_data/good_videos/171921.mp4
6
+ Total keyframes: 21
7
+
8
+ Keyframe list:
9
+ ------------------------------------------------------------
10
+ Gloss 0: keyframe_000_feat3_frame13_att0.390.jpg
11
+ Gloss 1: keyframe_001_feat5_frame21_att0.532.jpg
12
+ Gloss 2: keyframe_002_feat7_frame28_att0.626.jpg
13
+ Gloss 3: keyframe_003_feat10_frame40_att0.246.jpg
14
+ Gloss 4: keyframe_004_feat12_frame47_att0.118.jpg
15
+ Gloss 5: keyframe_005_feat24_frame94_att0.213.jpg
16
+ Gloss 6: keyframe_006_feat24_frame94_att0.249.jpg
17
+ Gloss 7: keyframe_007_feat24_frame94_att0.240.jpg
18
+ Gloss 8: keyframe_008_feat16_frame63_att0.108.jpg
19
+ Gloss 9: keyframe_009_feat22_frame86_att0.200.jpg
20
+ Gloss 10: keyframe_010_feat22_frame86_att0.177.jpg
21
+ Gloss 11: keyframe_011_feat22_frame86_att0.210.jpg
22
+ Gloss 12: keyframe_012_feat0_frame1_att0.317.jpg
23
+ Gloss 13: keyframe_013_feat24_frame94_att0.266.jpg
24
+ Gloss 14: keyframe_014_feat24_frame94_att0.358.jpg
25
+ Gloss 15: keyframe_015_feat22_frame86_att0.251.jpg
26
+ Gloss 16: keyframe_016_feat24_frame94_att0.249.jpg
27
+ Gloss 17: keyframe_017_feat24_frame94_att0.210.jpg
28
+ Gloss 18: keyframe_018_feat24_frame94_att0.321.jpg
29
+ Gloss 19: keyframe_019_feat24_frame94_att0.325.jpg
30
+ Gloss 20: keyframe_020_feat24_frame94_att0.214.jpg
SignX/inference_output/detailed_prediction_20260102_202142/171921/attention_weights.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e58f87c3e44e771f42094b907aa628b0da60088d0595a8e7ff5602ba250a7719
3
+ size 2228
SignX/inference_output/detailed_prediction_20260102_202142/171921/debug_video_path.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ video_path = '/common/users/sf895/output/huggingface_asllrp_repo/SignX/eval/tiny_test_data/good_videos/171921.mp4'
2
+ video_path type = <class 'str'>
3
+ video_path is None: False
4
+ bool(video_path): True
SignX/inference_output/detailed_prediction_20260102_202142/171921/feature_frame_mapping.json ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "original_frame_count": 96,
3
+ "feature_count": 25,
4
+ "downsampling_ratio": 3.84,
5
+ "fps": 24.0,
6
+ "mapping": [
7
+ {
8
+ "feature_index": 0,
9
+ "frame_start": 0,
10
+ "frame_end": 3,
11
+ "frame_count": 3
12
+ },
13
+ {
14
+ "feature_index": 1,
15
+ "frame_start": 3,
16
+ "frame_end": 7,
17
+ "frame_count": 4
18
+ },
19
+ {
20
+ "feature_index": 2,
21
+ "frame_start": 7,
22
+ "frame_end": 11,
23
+ "frame_count": 4
24
+ },
25
+ {
26
+ "feature_index": 3,
27
+ "frame_start": 11,
28
+ "frame_end": 15,
29
+ "frame_count": 4
30
+ },
31
+ {
32
+ "feature_index": 4,
33
+ "frame_start": 15,
34
+ "frame_end": 19,
35
+ "frame_count": 4
36
+ },
37
+ {
38
+ "feature_index": 5,
39
+ "frame_start": 19,
40
+ "frame_end": 23,
41
+ "frame_count": 4
42
+ },
43
+ {
44
+ "feature_index": 6,
45
+ "frame_start": 23,
46
+ "frame_end": 26,
47
+ "frame_count": 3
48
+ },
49
+ {
50
+ "feature_index": 7,
51
+ "frame_start": 26,
52
+ "frame_end": 30,
53
+ "frame_count": 4
54
+ },
55
+ {
56
+ "feature_index": 8,
57
+ "frame_start": 30,
58
+ "frame_end": 34,
59
+ "frame_count": 4
60
+ },
61
+ {
62
+ "feature_index": 9,
63
+ "frame_start": 34,
64
+ "frame_end": 38,
65
+ "frame_count": 4
66
+ },
67
+ {
68
+ "feature_index": 10,
69
+ "frame_start": 38,
70
+ "frame_end": 42,
71
+ "frame_count": 4
72
+ },
73
+ {
74
+ "feature_index": 11,
75
+ "frame_start": 42,
76
+ "frame_end": 46,
77
+ "frame_count": 4
78
+ },
79
+ {
80
+ "feature_index": 12,
81
+ "frame_start": 46,
82
+ "frame_end": 49,
83
+ "frame_count": 3
84
+ },
85
+ {
86
+ "feature_index": 13,
87
+ "frame_start": 49,
88
+ "frame_end": 53,
89
+ "frame_count": 4
90
+ },
91
+ {
92
+ "feature_index": 14,
93
+ "frame_start": 53,
94
+ "frame_end": 57,
95
+ "frame_count": 4
96
+ },
97
+ {
98
+ "feature_index": 15,
99
+ "frame_start": 57,
100
+ "frame_end": 61,
101
+ "frame_count": 4
102
+ },
103
+ {
104
+ "feature_index": 16,
105
+ "frame_start": 61,
106
+ "frame_end": 65,
107
+ "frame_count": 4
108
+ },
109
+ {
110
+ "feature_index": 17,
111
+ "frame_start": 65,
112
+ "frame_end": 69,
113
+ "frame_count": 4
114
+ },
115
+ {
116
+ "feature_index": 18,
117
+ "frame_start": 69,
118
+ "frame_end": 72,
119
+ "frame_count": 3
120
+ },
121
+ {
122
+ "feature_index": 19,
123
+ "frame_start": 72,
124
+ "frame_end": 76,
125
+ "frame_count": 4
126
+ },
127
+ {
128
+ "feature_index": 20,
129
+ "frame_start": 76,
130
+ "frame_end": 80,
131
+ "frame_count": 4
132
+ },
133
+ {
134
+ "feature_index": 21,
135
+ "frame_start": 80,
136
+ "frame_end": 84,
137
+ "frame_count": 4
138
+ },
139
+ {
140
+ "feature_index": 22,
141
+ "frame_start": 84,
142
+ "frame_end": 88,
143
+ "frame_count": 4
144
+ },
145
+ {
146
+ "feature_index": 23,
147
+ "frame_start": 88,
148
+ "frame_end": 92,
149
+ "frame_count": 4
150
+ },
151
+ {
152
+ "feature_index": 24,
153
+ "frame_start": 92,
154
+ "frame_end": 96,
155
+ "frame_count": 4
156
+ }
157
+ ]
158
+ }
SignX/inference_output/detailed_prediction_20260102_202142/171921/frame_alignment.json ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "translation": "IX-1p NOT LIKE PINEAPPLE fs-CREAM+CHEESE IX-1p",
3
+ "words": [
4
+ "IX-1p",
5
+ "NOT",
6
+ "LIKE",
7
+ "PINEAPPLE",
8
+ "fs-CREAM+CHEESE",
9
+ "IX-1p"
10
+ ],
11
+ "total_video_frames": 25,
12
+ "frame_ranges": [
13
+ {
14
+ "word": "IX-1p",
15
+ "start_frame": 3,
16
+ "end_frame": 3,
17
+ "peak_frame": 3,
18
+ "avg_attention": 0.38998693227767944,
19
+ "confidence": "medium"
20
+ },
21
+ {
22
+ "word": "NOT",
23
+ "start_frame": 5,
24
+ "end_frame": 5,
25
+ "peak_frame": 5,
26
+ "avg_attention": 0.5316620469093323,
27
+ "confidence": "high"
28
+ },
29
+ {
30
+ "word": "LIKE",
31
+ "start_frame": 7,
32
+ "end_frame": 7,
33
+ "peak_frame": 7,
34
+ "avg_attention": 0.626325249671936,
35
+ "confidence": "high"
36
+ },
37
+ {
38
+ "word": "PINEAPPLE",
39
+ "start_frame": 9,
40
+ "end_frame": 10,
41
+ "peak_frame": 10,
42
+ "avg_attention": 0.2374260425567627,
43
+ "confidence": "medium"
44
+ },
45
+ {
46
+ "word": "fs-CREAM+CHEESE",
47
+ "start_frame": 12,
48
+ "end_frame": 13,
49
+ "peak_frame": 12,
50
+ "avg_attention": 0.11307986080646515,
51
+ "confidence": "low"
52
+ },
53
+ {
54
+ "word": "IX-1p",
55
+ "start_frame": 0,
56
+ "end_frame": 24,
57
+ "peak_frame": 24,
58
+ "avg_attention": 0.21095514297485352,
59
+ "confidence": "medium"
60
+ }
61
+ ],
62
+ "statistics": {
63
+ "avg_confidence": 0.35157254586617154,
64
+ "high_confidence_words": 2,
65
+ "medium_confidence_words": 3,
66
+ "low_confidence_words": 1
67
+ }
68
+ }
SignX/inference_output/detailed_prediction_20260102_202142/171921/frame_alignment.pdf ADDED
Binary file (31.5 kB). View file
 
SignX/inference_output/detailed_prediction_20260102_202142/171921/frame_alignment.png ADDED

Git LFS Details

  • SHA256: d2a37ed27cd5acd6ed42ac666c0ee6ea77be2e51c16c99fd6e79e3fd40d84f19
  • Pointer size: 131 Bytes
  • Size of remote file: 139 kB
SignX/inference_output/detailed_prediction_20260102_202142/171921/frame_alignment_short.pdf ADDED
Binary file (31.5 kB). View file
 
SignX/inference_output/detailed_prediction_20260102_202142/171921/frame_alignment_short.png ADDED

Git LFS Details

  • SHA256: 0b6c52c216604f385c82a26f7137531d8b3ceeedee50bd18c44e1922e8482401
  • Pointer size: 131 Bytes
  • Size of remote file: 139 kB
SignX/inference_output/detailed_prediction_20260102_202142/171921/gloss_to_frames.png ADDED

Git LFS Details

  • SHA256: 57934ee098f8b07c9b3e07a535c16eae627117186cdb840c445afb86e6b6eaf3
  • Pointer size: 132 Bytes
  • Size of remote file: 2.87 MB
SignX/inference_output/detailed_prediction_20260102_202142/171921/interactive_alignment.html ADDED
@@ -0,0 +1,579 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8">
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
6
+ <title>Interactive Word-Frame Alignment</title>
7
+ <style>
8
+ body {
9
+ font-family: 'Arial', sans-serif;
10
+ margin: 20px;
11
+ background-color: #f5f5f5;
12
+ }
13
+ .container {
14
+ max-width: 1800px;
15
+ margin: 0 auto;
16
+ background-color: white;
17
+ padding: 30px;
18
+ border-radius: 8px;
19
+ box-shadow: 0 2px 10px rgba(0,0,0,0.1);
20
+ }
21
+ h1 {
22
+ color: #333;
23
+ border-bottom: 3px solid #4CAF50;
24
+ padding-bottom: 10px;
25
+ margin-bottom: 20px;
26
+ }
27
+ .stats {
28
+ background-color: #E3F2FD;
29
+ padding: 15px;
30
+ border-radius: 5px;
31
+ margin-bottom: 20px;
32
+ border-left: 4px solid #2196F3;
33
+ font-size: 14px;
34
+ }
35
+ .controls {
36
+ background-color: #f9f9f9;
37
+ padding: 20px;
38
+ border-radius: 5px;
39
+ margin-bottom: 30px;
40
+ border: 1px solid #ddd;
41
+ }
42
+ .control-group {
43
+ margin-bottom: 15px;
44
+ }
45
+ label {
46
+ font-weight: bold;
47
+ display: inline-block;
48
+ width: 250px;
49
+ color: #555;
50
+ }
51
+ input[type="range"] {
52
+ width: 400px;
53
+ vertical-align: middle;
54
+ }
55
+ .value-display {
56
+ display: inline-block;
57
+ width: 80px;
58
+ font-family: monospace;
59
+ font-size: 14px;
60
+ color: #2196F3;
61
+ font-weight: bold;
62
+ }
63
+ .reset-btn {
64
+ margin-top: 15px;
65
+ padding: 10px 25px;
66
+ background-color: #2196F3;
67
+ color: white;
68
+ border: none;
69
+ border-radius: 5px;
70
+ cursor: pointer;
71
+ font-size: 14px;
72
+ font-weight: bold;
73
+ }
74
+ .reset-btn:hover {
75
+ background-color: #1976D2;
76
+ }
77
+ canvas {
78
+ border: 1px solid #999;
79
+ display: block;
80
+ margin: 20px auto;
81
+ background: white;
82
+ }
83
+ .legend {
84
+ margin-top: 20px;
85
+ padding: 15px;
86
+ background-color: #fff;
87
+ border: 1px solid #ddd;
88
+ border-radius: 5px;
89
+ }
90
+ .legend-item {
91
+ display: inline-block;
92
+ margin-right: 25px;
93
+ font-size: 13px;
94
+ margin-bottom: 10px;
95
+ }
96
+ .color-box {
97
+ display: inline-block;
98
+ width: 30px;
99
+ height: 15px;
100
+ margin-right: 8px;
101
+ vertical-align: middle;
102
+ border: 1px solid #666;
103
+ }
104
+ .info-panel {
105
+ margin-top: 20px;
106
+ padding: 15px;
107
+ background-color: #f9f9f9;
108
+ border-radius: 5px;
109
+ border: 1px solid #ddd;
110
+ }
111
+ .confidence {
112
+ display: inline-block;
113
+ padding: 3px 10px;
114
+ border-radius: 10px;
115
+ font-weight: bold;
116
+ font-size: 11px;
117
+ text-transform: uppercase;
118
+ }
119
+ .confidence.high {
120
+ background-color: #4CAF50;
121
+ color: white;
122
+ }
123
+ .confidence.medium {
124
+ background-color: #FF9800;
125
+ color: white;
126
+ }
127
+ .confidence.low {
128
+ background-color: #f44336;
129
+ color: white;
130
+ }
131
+ </style>
132
+ </head>
133
+ <body>
134
+ <div class="container">
135
+ <h1>🎯 Interactive Word-to-Frame Alignment Visualizer</h1>
136
+
137
+ <div class="stats">
138
+ <strong>Translation:</strong> IX-1p NOT LIKE PINEAPPLE fs-CREAM+CHEESE IX-1p<br>
139
+ <strong>Total Words:</strong> 6 |
140
+ <strong>Total Features:</strong> 25
141
+ </div>
142
+
143
+ <div class="controls">
144
+ <h3>⚙️ Threshold Controls</h3>
145
+
146
+ <div class="control-group">
147
+ <label for="peak-threshold">Peak Threshold (% of max):</label>
148
+ <input type="range" id="peak-threshold" min="1" max="100" value="90" step="1">
149
+ <span class="value-display" id="peak-threshold-value">90%</span>
150
+ <br>
151
+ <small style="margin-left: 255px; color: #666;">
152
+ A frame is considered “significant” if its attention ≥ (peak × threshold%)
153
+ </small>
154
+ </div>
155
+
156
+ <div class="control-group">
157
+ <label for="confidence-high">High Confidence (avg attn >):</label>
158
+ <input type="range" id="confidence-high" min="0" max="100" value="50" step="1">
159
+ <span class="value-display" id="confidence-high-value">0.50</span>
160
+ </div>
161
+
162
+ <div class="control-group">
163
+ <label for="confidence-medium">Medium Confidence (avg attn >):</label>
164
+ <input type="range" id="confidence-medium" min="0" max="100" value="20" step="1">
165
+ <span class="value-display" id="confidence-medium-value">0.20</span>
166
+ </div>
167
+
168
+ <button class="reset-btn" onclick="resetDefaults()">
169
+ Reset to Defaults
170
+ </button>
171
+ </div>
172
+
173
+ <div>
174
+ <h3>Word-to-Frame Alignment</h3>
175
+ <p style="color: #666; font-size: 13px;">
176
+ Each word appears as a colored block. Width = frame span, ★ = peak frame, waveform = attention trace.
177
+ </p>
178
+ <canvas id="alignment-canvas" width="1600" height="600"></canvas>
179
+
180
+ <h3 style="margin-top: 30px;">Timeline Progress Bar</h3>
181
+ <canvas id="timeline-canvas" width="1600" height="100"></canvas>
182
+
183
+ <div class="legend">
184
+ <strong>Legend:</strong><br><br>
185
+ <div class="legend-item">
186
+ <span class="confidence high">High</span>
187
+ <span class="confidence medium">Medium</span>
188
+ <span class="confidence low">Low</span>
189
+ Confidence Levels (opacity reflects confidence)
190
+ </div>
191
+ <div class="legend-item">
192
+ <span style="color: red; font-size: 20px;">★</span>
193
+ Peak Frame (highest attention)
194
+ </div>
195
+ <div class="legend-item">
196
+ <span style="color: blue;">━</span>
197
+ Attention Waveform (within word region)
198
+ </div>
199
+ </div>
200
+ </div>
201
+
202
+ <div class="info-panel">
203
+ <h3>Alignment Details</h3>
204
+ <div id="alignment-details"></div>
205
+ </div>
206
+ </div>
207
+
208
+ <script>
209
+ // Attention data from Python
210
+ const attentionData = [{"word": "IX-1p", "word_idx": 0, "weights": [0.05238496512174606, 0.0976874902844429, 0.22361433506011963, 0.38998693227767944, 0.17873288691043854, 0.006679870188236237, 0.0036295060999691486, 0.0008571264916099608, 0.0010585205163806677, 0.0008011514437384903, 0.0013172859326004982, 0.001322466996498406, 0.0014414831530302763, 0.0014764397637918591, 0.0014997144462540746, 0.0015036852564662695, 0.0017976462841033936, 0.001069137710146606, 0.0007240657578222454, 0.0011783612426370382, 0.00439043901860714, 0.006896136794239283, 0.011946831829845905, 0.005145119037479162, 0.0028583481907844543]}, {"word": "NOT", "word_idx": 1, "weights": [0.05014055222272873, 0.03570036590099335, 0.024646738544106483, 0.0310512688010931, 0.05971718579530716, 0.5316620469093323, 0.23449379205703735, 0.00529087008908391, 0.0027947064954787493, 0.000875027384608984, 0.0007378943264484406, 0.0004647437308449298, 0.0004135824856348336, 0.000486223550979048, 0.0004921917570754886, 0.0003303340054117143, 0.00015648282715119421, 0.00021145731443539262, 0.00020821671932935715, 0.00010684868902899325, 0.0002003382396651432, 0.00031938249594531953, 0.0009686668636277318, 0.005626516416668892, 0.012904567644000053]}, {"word": "LIKE", "word_idx": 2, "weights": [0.00841811764985323, 0.004028731491416693, 0.0020009633153676987, 0.0016266442835330963, 0.003222851548343897, 0.005865377373993397, 0.007347611710429192, 0.626325249671936, 0.30912983417510986, 0.012665261514484882, 0.006091661285609007, 0.003784160129725933, 0.0020876466296613216, 0.001558956690132618, 0.0008166478946805, 0.0007060404750518501, 0.0006176729220896959, 0.000939459539949894, 0.0002989305939991027, 8.548794721718878e-05, 5.798463826067746e-05, 8.46595867187716e-05, 0.00022771398653276265, 0.000744526507332921, 0.0012678124476224184]}, {"word": "PINEAPPLE", "word_idx": 3, "weights": [0.018990369513630867, 0.005772765725851059, 0.0014627730706706643, 0.0005106691969558597, 0.0005122806178405881, 
0.0006774469511583447, 0.0012575318105518818, 0.006952387746423483, 0.02189759910106659, 0.22889001667499542, 0.24596205353736877, 0.1395777314901352, 0.0990370512008667, 0.06233914941549301, 0.03861977905035019, 0.03933922201395035, 0.03362319990992546, 0.02999911829829216, 0.015454845502972603, 0.0040846471674740314, 0.000792931008618325, 0.00038262357702478766, 0.000193710409803316, 0.0007411144906654954, 0.002928979927673936]}, {"word": "fs-CREAM+CHEESE", "word_idx": 4, "weights": [0.06216610223054886, 0.02511739730834961, 0.008011610247194767, 0.0022258462850004435, 0.0013015446020290256, 0.0005411158781498671, 0.0006731877219863236, 0.0020649773068726063, 0.005022048018872738, 0.04262514039874077, 0.05756526440382004, 0.07348567247390747, 0.11768292635679245, 0.10847678780555725, 0.09763970226049423, 0.0874117985367775, 0.06481091678142548, 0.035931553691625595, 0.033074185252189636, 0.033235128968954086, 0.015178116038441658, 0.008458501659333706, 0.004052826203405857, 0.023612597957253456, 0.08963505923748016]}, {"word": "IX-1p", "word_idx": 5, "weights": [0.2090553492307663, 0.07592105120420456, 0.02048420161008835, 0.005190532188862562, 0.003573325462639332, 0.0031765794847160578, 0.002643104176968336, 0.009272502735257149, 0.014098540879786015, 0.03167528286576271, 0.036420486867427826, 0.04336267337203026, 0.03958175703883171, 0.03629697114229202, 0.032348908483982086, 0.02785353548824787, 0.025757484138011932, 0.041921358555555344, 0.04051389917731285, 0.021882373839616776, 0.009477143175899982, 0.006930893752723932, 0.004549332894384861, 0.04515770822763443, 0.21285495162010193]}];
211
+ const numGlosses = 6;
212
+ const numFeatures = 25;
213
+
214
+ // Colors for different words (matching matplotlib tab20)
215
+ const colors = [
216
+ '#1f77b4', '#ff7f0e', '#2ca02c', '#d62728', '#9467bd',
217
+ '#8c564b', '#e377c2', '#7f7f7f', '#bcbd22', '#17becf',
218
+ '#aec7e8', '#ffbb78', '#98df8a', '#ff9896', '#c5b0d5',
219
+ '#c49c94', '#f7b6d2', '#c7c7c7', '#dbdb8d', '#9edae5'
220
+ ];
221
+
222
+ // Get controls
223
+ const peakThresholdSlider = document.getElementById('peak-threshold');
224
+ const peakThresholdValue = document.getElementById('peak-threshold-value');
225
+ const confidenceHighSlider = document.getElementById('confidence-high');
226
+ const confidenceHighValue = document.getElementById('confidence-high-value');
227
+ const confidenceMediumSlider = document.getElementById('confidence-medium');
228
+ const confidenceMediumValue = document.getElementById('confidence-medium-value');
229
+ const alignmentCanvas = document.getElementById('alignment-canvas');
230
+ const timelineCanvas = document.getElementById('timeline-canvas');
231
+ const alignmentCtx = alignmentCanvas.getContext('2d');
232
+ const timelineCtx = timelineCanvas.getContext('2d');
233
+
234
+ // Update displays when sliders change
235
+ peakThresholdSlider.oninput = function() {
236
+ peakThresholdValue.textContent = this.value + '%';
237
+ updateVisualization();
238
+ };
239
+
240
+ confidenceHighSlider.oninput = function() {
241
+ confidenceHighValue.textContent = (this.value / 100).toFixed(2);
242
+ updateVisualization();
243
+ };
244
+
245
+ confidenceMediumSlider.oninput = function() {
246
+ confidenceMediumValue.textContent = (this.value / 100).toFixed(2);
247
+ updateVisualization();
248
+ };
249
+
250
+ function resetDefaults() {
251
+ peakThresholdSlider.value = 90;
252
+ confidenceHighSlider.value = 50;
253
+ confidenceMediumSlider.value = 20;
254
+ peakThresholdValue.textContent = '90%';
255
+ confidenceHighValue.textContent = '0.50';
256
+ confidenceMediumValue.textContent = '0.20';
257
+ updateVisualization();
258
+ }
259
+
260
+ function calculateAlignment(weights, peakThreshold) {
261
+ // Find peak
262
+ let peakIdx = 0;
263
+ let peakWeight = weights[0];
264
+ for (let i = 1; i < weights.length; i++) {
265
+ if (weights[i] > peakWeight) {
266
+ peakWeight = weights[i];
267
+ peakIdx = i;
268
+ }
269
+ }
270
+
271
+ // Find significant frames
272
+ const threshold = peakWeight * (peakThreshold / 100);
273
+ let startIdx = peakIdx;
274
+ let endIdx = peakIdx;
275
+ let sumWeight = 0;
276
+ let count = 0;
277
+
278
+ for (let i = 0; i < weights.length; i++) {
279
+ if (weights[i] >= threshold) {
280
+ if (i < startIdx) startIdx = i;
281
+ if (i > endIdx) endIdx = i;
282
+ sumWeight += weights[i];
283
+ count++;
284
+ }
285
+ }
286
+
287
+ const avgWeight = count > 0 ? sumWeight / count : peakWeight;
288
+
289
+ return {
290
+ startIdx: startIdx,
291
+ endIdx: endIdx,
292
+ peakIdx: peakIdx,
293
+ peakWeight: peakWeight,
294
+ avgWeight: avgWeight,
295
+ threshold: threshold
296
+ };
297
+ }
298
+
299
+ function getConfidenceLevel(avgWeight, highThreshold, mediumThreshold) {
300
+ if (avgWeight > highThreshold) return 'high';
301
+ if (avgWeight > mediumThreshold) return 'medium';
302
+ return 'low';
303
+ }
304
+
305
+ function drawAlignmentChart() {
306
+ const peakThreshold = parseInt(peakThresholdSlider.value);
307
+ const highThreshold = parseInt(confidenceHighSlider.value) / 100;
308
+ const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;
309
+
310
+ // Canvas dimensions
311
+ const width = alignmentCanvas.width;
312
+ const height = alignmentCanvas.height;
313
+ const leftMargin = 180;
314
+ const rightMargin = 50;
315
+ const topMargin = 60;
316
+ const bottomMargin = 80;
317
+
318
+ const plotWidth = width - leftMargin - rightMargin;
319
+ const plotHeight = height - topMargin - bottomMargin;
320
+
321
+ const rowHeight = plotHeight / numGlosses;
322
+ const featureWidth = plotWidth / numFeatures;
323
+
324
+ // Clear canvas
325
+ alignmentCtx.clearRect(0, 0, width, height);
326
+
327
+ // Draw title
328
+ alignmentCtx.fillStyle = '#333';
329
+ alignmentCtx.font = 'bold 18px Arial';
330
+ alignmentCtx.textAlign = 'center';
331
+ alignmentCtx.fillText('Word-to-Frame Alignment', width / 2, 30);
332
+ alignmentCtx.font = '13px Arial';
333
+ alignmentCtx.fillText('(based on attention peaks, ★ = peak frame)', width / 2, 48);
334
+
335
+ // Calculate alignments
336
+ const alignments = [];
337
+ for (let wordIdx = 0; wordIdx < numGlosses; wordIdx++) {
338
+ const data = attentionData[wordIdx];
339
+ const alignment = calculateAlignment(data.weights, peakThreshold);
340
+ alignment.word = data.word;
341
+ alignment.wordIdx = wordIdx;
342
+ alignment.weights = data.weights;
343
+ alignments.push(alignment);
344
+ }
345
+
346
+ // Draw grid
347
+ alignmentCtx.strokeStyle = '#e0e0e0';
348
+ alignmentCtx.lineWidth = 0.5;
349
+ for (let i = 0; i <= numFeatures; i++) {
350
+ const x = leftMargin + i * featureWidth;
351
+ alignmentCtx.beginPath();
352
+ alignmentCtx.moveTo(x, topMargin);
353
+ alignmentCtx.lineTo(x, topMargin + plotHeight);
354
+ alignmentCtx.stroke();
355
+ }
356
+
357
+ // Draw word regions
358
+ for (let wordIdx = 0; wordIdx < numGlosses; wordIdx++) {
359
+ const alignment = alignments[wordIdx];
360
+ const confidence = getConfidenceLevel(alignment.avgWeight, highThreshold, mediumThreshold);
361
+ const y = topMargin + wordIdx * rowHeight;
362
+
363
+ // Alpha based on confidence
364
+ const alpha = confidence === 'high' ? 0.9 : confidence === 'medium' ? 0.7 : 0.5;
365
+
366
+ // Draw rectangle for word region
367
+ const startX = leftMargin + alignment.startIdx * featureWidth;
368
+ const rectWidth = (alignment.endIdx - alignment.startIdx + 1) * featureWidth;
369
+
370
+ alignmentCtx.fillStyle = colors[wordIdx % 20];
371
+ alignmentCtx.globalAlpha = alpha;
372
+ alignmentCtx.fillRect(startX, y, rectWidth, rowHeight * 0.8);
373
+ alignmentCtx.globalAlpha = 1.0;
374
+
375
+ // Draw border
376
+ alignmentCtx.strokeStyle = '#000';
377
+ alignmentCtx.lineWidth = 2;
378
+ alignmentCtx.strokeRect(startX, y, rectWidth, rowHeight * 0.8);
379
+
380
+ // Draw attention waveform inside rectangle
381
+ alignmentCtx.strokeStyle = 'rgba(0, 0, 255, 0.8)';
382
+ alignmentCtx.lineWidth = 1.5;
383
+ alignmentCtx.beginPath();
384
+ for (let i = alignment.startIdx; i <= alignment.endIdx; i++) {
385
+ const x = leftMargin + i * featureWidth + featureWidth / 2;
386
+ const weight = alignment.weights[i];
387
+ const maxWeight = alignment.peakWeight;
388
+ const normalizedWeight = weight / (maxWeight * 1.2); // Scale for visibility
389
+ const waveY = y + rowHeight * 0.8 - (normalizedWeight * rowHeight * 0.6);
390
+
391
+ if (i === alignment.startIdx) {
392
+ alignmentCtx.moveTo(x, waveY);
393
+ } else {
394
+ alignmentCtx.lineTo(x, waveY);
395
+ }
396
+ }
397
+ alignmentCtx.stroke();
398
+
399
+ // Draw word label
400
+ const labelX = startX + rectWidth / 2;
401
+ const labelY = y + rowHeight * 0.4;
402
+
403
+ alignmentCtx.fillStyle = 'rgba(0, 0, 0, 0.7)';
404
+ alignmentCtx.fillRect(labelX - 60, labelY - 12, 120, 24);
405
+ alignmentCtx.fillStyle = '#fff';
406
+ alignmentCtx.font = 'bold 13px Arial';
407
+ alignmentCtx.textAlign = 'center';
408
+ alignmentCtx.textBaseline = 'middle';
409
+ alignmentCtx.fillText(alignment.word, labelX, labelY);
410
+
411
+ // Mark peak frame with star
412
+ const peakX = leftMargin + alignment.peakIdx * featureWidth + featureWidth / 2;
413
+ const peakY = y + rowHeight * 0.4;
414
+
415
+ // Draw star
416
+ alignmentCtx.fillStyle = '#ff0000';
417
+ alignmentCtx.strokeStyle = '#ffff00';
418
+ alignmentCtx.lineWidth = 1.5;
419
+ alignmentCtx.font = '20px Arial';
420
+ alignmentCtx.textAlign = 'center';
421
+ alignmentCtx.strokeText('★', peakX, peakY);
422
+ alignmentCtx.fillText('★', peakX, peakY);
423
+
424
+ // Y-axis label (word names)
425
+ alignmentCtx.fillStyle = '#333';
426
+ alignmentCtx.font = '12px Arial';
427
+ alignmentCtx.textAlign = 'right';
428
+ alignmentCtx.textBaseline = 'middle';
429
+ alignmentCtx.fillText(alignment.word, leftMargin - 10, y + rowHeight * 0.4);
430
+ }
431
+
432
+ // Draw horizontal grid lines
433
+ alignmentCtx.strokeStyle = '#ccc';
434
+ alignmentCtx.lineWidth = 0.5;
435
+ for (let i = 0; i <= numGlosses; i++) {
436
+ const y = topMargin + i * rowHeight;
437
+ alignmentCtx.beginPath();
438
+ alignmentCtx.moveTo(leftMargin, y);
439
+ alignmentCtx.lineTo(leftMargin + plotWidth, y);
440
+ alignmentCtx.stroke();
441
+ }
442
+
443
+ // Draw axes
444
+ alignmentCtx.strokeStyle = '#000';
445
+ alignmentCtx.lineWidth = 2;
446
+ alignmentCtx.strokeRect(leftMargin, topMargin, plotWidth, plotHeight);
447
+
448
+ // X-axis labels (frame indices)
449
+ alignmentCtx.fillStyle = '#000';
450
+ alignmentCtx.font = '11px Arial';
451
+ alignmentCtx.textAlign = 'center';
452
+ alignmentCtx.textBaseline = 'top';
453
+ for (let i = 0; i < numFeatures; i++) {
454
+ const x = leftMargin + i * featureWidth + featureWidth / 2;
455
+ alignmentCtx.fillText(i.toString(), x, topMargin + plotHeight + 10);
456
+ }
457
+
458
+ // Axis titles
459
+ alignmentCtx.fillStyle = '#333';
460
+ alignmentCtx.font = 'bold 14px Arial';
461
+ alignmentCtx.textAlign = 'center';
462
+ alignmentCtx.fillText('Feature Frame Index', leftMargin + plotWidth / 2, height - 20);
463
+
464
+ alignmentCtx.save();
465
+ alignmentCtx.translate(30, topMargin + plotHeight / 2);
466
+ alignmentCtx.rotate(-Math.PI / 2);
467
+ alignmentCtx.fillText('Generated Word', 0, 0);
468
+ alignmentCtx.restore();
469
+
470
+ return alignments;
471
+ }
472
+
473
+ function drawTimeline(alignments) {
474
+ const highThreshold = parseInt(confidenceHighSlider.value) / 100;
475
+ const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;
476
+
477
+ const width = timelineCanvas.width;
478
+ const height = timelineCanvas.height;
479
+ const leftMargin = 180;
480
+ const rightMargin = 50;
481
+ const plotWidth = width - leftMargin - rightMargin;
482
+ const featureWidth = plotWidth / numFeatures;
483
+
484
+ // Clear canvas
485
+ timelineCtx.clearRect(0, 0, width, height);
486
+
487
+ // Background bar
488
+ timelineCtx.fillStyle = '#ddd';
489
+ timelineCtx.fillRect(leftMargin, 30, plotWidth, 40);
490
+ timelineCtx.strokeStyle = '#000';
491
+ timelineCtx.lineWidth = 2;
492
+ timelineCtx.strokeRect(leftMargin, 30, plotWidth, 40);
493
+
494
+ // Draw word regions on timeline
495
+ for (let wordIdx = 0; wordIdx < alignments.length; wordIdx++) {
496
+ const alignment = alignments[wordIdx];
497
+ const confidence = getConfidenceLevel(alignment.avgWeight, highThreshold, mediumThreshold);
498
+ const alpha = confidence === 'high' ? 0.9 : confidence === 'medium' ? 0.7 : 0.5;
499
+
500
+ const startX = leftMargin + alignment.startIdx * featureWidth;
501
+ const rectWidth = (alignment.endIdx - alignment.startIdx + 1) * featureWidth;
502
+
503
+ timelineCtx.fillStyle = colors[wordIdx % 20];
504
+ timelineCtx.globalAlpha = alpha;
505
+ timelineCtx.fillRect(startX, 30, rectWidth, 40);
506
+ timelineCtx.globalAlpha = 1.0;
507
+ timelineCtx.strokeStyle = '#000';
508
+ timelineCtx.lineWidth = 0.5;
509
+ timelineCtx.strokeRect(startX, 30, rectWidth, 40);
510
+ }
511
+
512
+ // Title
513
+ timelineCtx.fillStyle = '#333';
514
+ timelineCtx.font = 'bold 13px Arial';
515
+ timelineCtx.textAlign = 'left';
516
+ timelineCtx.fillText('Timeline Progress Bar', leftMargin, 20);
517
+ }
518
+
519
+ function updateDetailsPanel(alignments, highThreshold, mediumThreshold) {
520
+ const panel = document.getElementById('alignment-details');
521
+ let html = '<table style="width: 100%; border-collapse: collapse;">';
522
+ html += '<tr style="background: #f0f0f0; font-weight: bold;">';
523
+ html += '<th style="padding: 8px; border: 1px solid #ddd;">Word</th>';
524
+ html += '<th style="padding: 8px; border: 1px solid #ddd;">Feature Range</th>';
525
+ html += '<th style="padding: 8px; border: 1px solid #ddd;">Peak</th>';
526
+ html += '<th style="padding: 8px; border: 1px solid #ddd;">Span</th>';
527
+ html += '<th style="padding: 8px; border: 1px solid #ddd;">Avg Attention</th>';
528
+ html += '<th style="padding: 8px; border: 1px solid #ddd;">Confidence</th>';
529
+ html += '</tr>';
530
+
531
+ for (const align of alignments) {
532
+ const confidence = getConfidenceLevel(align.avgWeight, highThreshold, mediumThreshold);
533
+ const span = align.endIdx - align.startIdx + 1;
534
+
535
+ html += '<tr>';
536
+ html += `<td style="padding: 8px; border: 1px solid #ddd;"><strong>${align.word}</strong></td>`;
537
+ html += `<td style="padding: 8px; border: 1px solid #ddd;">${align.startIdx} → ${align.endIdx}</td>`;
538
+ html += `<td style="padding: 8px; border: 1px solid #ddd;">${align.peakIdx}</td>`;
539
+ html += `<td style="padding: 8px; border: 1px solid #ddd;">${span}</td>`;
540
+ html += `<td style="padding: 8px; border: 1px solid #ddd;">${align.avgWeight.toFixed(4)}</td>`;
541
+ html += `<td style="padding: 8px; border: 1px solid #ddd;"><span class="confidence ${confidence}">${confidence}</span></td>`;
542
+ html += '</tr>';
543
+ }
544
+
545
+ html += '</table>';
546
+ panel.innerHTML = html;
547
+ }
548
+
549
+ function updateVisualization() {
550
+ const alignments = drawAlignmentChart();
551
+ drawTimeline(alignments);
552
+ const highThreshold = parseInt(confidenceHighSlider.value) / 100;
553
+ const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;
554
+ updateDetailsPanel(alignments, highThreshold, mediumThreshold);
555
+ }
556
+
557
+ // Event listeners for sliders
558
+ peakSlider.addEventListener('input', function() {
559
+ peakValue.textContent = peakSlider.value + '%';
560
+ updateVisualization();
561
+ });
562
+
563
+ confidenceHighSlider.addEventListener('input', function() {
564
+ const val = parseInt(confidenceHighSlider.value) / 100;
565
+ confidenceHighValue.textContent = val.toFixed(2);
566
+ updateVisualization();
567
+ });
568
+
569
+ confidenceMediumSlider.addEventListener('input', function() {
570
+ const val = parseInt(confidenceMediumSlider.value) / 100;
571
+ confidenceMediumValue.textContent = val.toFixed(2);
572
+ updateVisualization();
573
+ });
574
+
575
+ // Initial visualization
576
+ updateVisualization();
577
+ </script>
578
+ </body>
579
+ </html>
SignX/inference_output/detailed_prediction_20260102_202142/171921/translation.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ With BPE: IX-1p NOT LIKE PINEAPPLE fs-@@ CREA@@ M@@ +@@ CHEESE IX-1p
2
+ Clean: IX-1p NOT LIKE PINEAPPLE fs-CREAM+CHEESE IX-1p
3
+ Ground Truth: IX-1p NOT LIKE PINEAPPLE fs-CREAM+CHEESE IX-1p
SignX/inference_output/detailed_prediction_20260102_202302/173238/173238.mp4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:48c605e2c0dbd04fe25871a7b2e270615d02a198567f3f903c3ae7da68bcf8ca
3
+ size 98921
SignX/inference_output/detailed_prediction_20260102_202302/173238/analysis_report.txt ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ================================================================================
2
+ Sign Language Recognition - Attention Analysis Report
3
+ ================================================================================
4
+
5
+ Generated at: 2026-01-02 20:23:07
6
+
7
+ Translation:
8
+ --------------------------------------------------------------------------------
9
+ #NO IX-1p USE CORRECT FOR
10
+
11
+ Video info:
12
+ --------------------------------------------------------------------------------
13
+ Total feature frames: 25
14
+ Word count: 5
15
+
16
+ Attention tensor:
17
+ --------------------------------------------------------------------------------
18
+ Shape: (24, 25)
19
+ - Decoder steps: 24
20
+
21
+ Word-to-frame details:
22
+ ================================================================================
23
+ No. Word Frames Peak Attn Conf
24
+ --------------------------------------------------------------------------------
25
+ 1 #NO 7-7 7 0.532 high
26
+ 2 IX-1p 0-7 7 0.116 low
27
+ 3 USE 0-24 24 0.153 low
28
+ 4 CORRECT 11-11 11 0.336 medium
29
+ 5 FOR 12-13 12 0.427 medium
30
+
31
+ ================================================================================
32
+
33
+ Summary:
34
+ --------------------------------------------------------------------------------
35
+ Average attention weight: 0.313
36
+ High-confidence words: 1 (20.0%)
37
+ Medium-confidence words: 2 (40.0%)
38
+ Low-confidence words: 2 (40.0%)
39
+
40
+ ================================================================================
SignX/inference_output/detailed_prediction_20260102_202302/173238/attention_heatmap.pdf ADDED
Binary file (32.5 kB). View file
 
SignX/inference_output/detailed_prediction_20260102_202302/173238/attention_heatmap.png ADDED

Git LFS Details

  • SHA256: 22cbf0b19e395095f7efff01f0e01b1d41fa348249ac4457a4e135a692369e04
  • Pointer size: 130 Bytes
  • Size of remote file: 72.2 kB
SignX/inference_output/detailed_prediction_20260102_202302/173238/attention_keyframes/keyframes_index.txt ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Attention Keyframe Index
2
+ ============================================================
3
+
4
+ Sample directory: /research/cbim/vast/sf895/code/Sign-X/output/huggingface_asllrp_repo/SignX/inference_output/detailed_prediction_20260102_202302/173238
5
+ Video path: /common/users/sf895/output/huggingface_asllrp_repo/SignX/eval/tiny_test_data/good_videos/173238.mp4
6
+ Total keyframes: 24
7
+
8
+ Keyframe list:
9
+ ------------------------------------------------------------
10
+ Gloss 0: keyframe_000_feat7_frame28_att0.532.jpg
11
+ Gloss 1: keyframe_001_feat7_frame28_att0.120.jpg
12
+ Gloss 2: keyframe_002_feat24_frame92_att0.156.jpg
13
+ Gloss 3: keyframe_003_feat11_frame43_att0.336.jpg
14
+ Gloss 4: keyframe_004_feat12_frame46_att0.448.jpg
15
+ Gloss 5: keyframe_005_feat15_frame58_att0.293.jpg
16
+ Gloss 6: keyframe_006_feat15_frame58_att0.212.jpg
17
+ Gloss 7: keyframe_007_feat16_frame61_att0.140.jpg
18
+ Gloss 8: keyframe_008_feat22_frame84_att0.093.jpg
19
+ Gloss 9: keyframe_009_feat16_frame61_att0.149.jpg
20
+ Gloss 10: keyframe_010_feat22_frame84_att0.095.jpg
21
+ Gloss 11: keyframe_011_feat20_frame76_att0.127.jpg
22
+ Gloss 12: keyframe_012_feat21_frame80_att0.131.jpg
23
+ Gloss 13: keyframe_013_feat1_frame5_att0.101.jpg
24
+ Gloss 14: keyframe_014_feat22_frame84_att0.113.jpg
25
+ Gloss 15: keyframe_015_feat22_frame84_att0.122.jpg
26
+ Gloss 16: keyframe_016_feat22_frame84_att0.122.jpg
27
+ Gloss 17: keyframe_017_feat22_frame84_att0.139.jpg
28
+ Gloss 18: keyframe_018_feat1_frame5_att0.119.jpg
29
+ Gloss 19: keyframe_019_feat22_frame84_att0.120.jpg
30
+ Gloss 20: keyframe_020_feat22_frame84_att0.112.jpg
31
+ Gloss 21: keyframe_021_feat1_frame5_att0.118.jpg
32
+ Gloss 22: keyframe_022_feat1_frame5_att0.121.jpg
33
+ Gloss 23: keyframe_023_feat22_frame84_att0.124.jpg
SignX/inference_output/detailed_prediction_20260102_202302/173238/attention_weights.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7433f9d84fa778550beaf67774d607f501c246976980d5c06143af061d2a5fbf
3
+ size 2528
SignX/inference_output/detailed_prediction_20260102_202302/173238/debug_video_path.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ video_path = '/common/users/sf895/output/huggingface_asllrp_repo/SignX/eval/tiny_test_data/good_videos/173238.mp4'
2
+ video_path type = <class 'str'>
3
+ video_path is None: False
4
+ bool(video_path): True
SignX/inference_output/detailed_prediction_20260102_202302/173238/feature_frame_mapping.json ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "original_frame_count": 94,
3
+ "feature_count": 25,
4
+ "downsampling_ratio": 3.76,
5
+ "fps": 24.0,
6
+ "mapping": [
7
+ {
8
+ "feature_index": 0,
9
+ "frame_start": 0,
10
+ "frame_end": 3,
11
+ "frame_count": 3
12
+ },
13
+ {
14
+ "feature_index": 1,
15
+ "frame_start": 3,
16
+ "frame_end": 7,
17
+ "frame_count": 4
18
+ },
19
+ {
20
+ "feature_index": 2,
21
+ "frame_start": 7,
22
+ "frame_end": 11,
23
+ "frame_count": 4
24
+ },
25
+ {
26
+ "feature_index": 3,
27
+ "frame_start": 11,
28
+ "frame_end": 15,
29
+ "frame_count": 4
30
+ },
31
+ {
32
+ "feature_index": 4,
33
+ "frame_start": 15,
34
+ "frame_end": 18,
35
+ "frame_count": 3
36
+ },
37
+ {
38
+ "feature_index": 5,
39
+ "frame_start": 18,
40
+ "frame_end": 22,
41
+ "frame_count": 4
42
+ },
43
+ {
44
+ "feature_index": 6,
45
+ "frame_start": 22,
46
+ "frame_end": 26,
47
+ "frame_count": 4
48
+ },
49
+ {
50
+ "feature_index": 7,
51
+ "frame_start": 26,
52
+ "frame_end": 30,
53
+ "frame_count": 4
54
+ },
55
+ {
56
+ "feature_index": 8,
57
+ "frame_start": 30,
58
+ "frame_end": 33,
59
+ "frame_count": 3
60
+ },
61
+ {
62
+ "feature_index": 9,
63
+ "frame_start": 33,
64
+ "frame_end": 37,
65
+ "frame_count": 4
66
+ },
67
+ {
68
+ "feature_index": 10,
69
+ "frame_start": 37,
70
+ "frame_end": 41,
71
+ "frame_count": 4
72
+ },
73
+ {
74
+ "feature_index": 11,
75
+ "frame_start": 41,
76
+ "frame_end": 45,
77
+ "frame_count": 4
78
+ },
79
+ {
80
+ "feature_index": 12,
81
+ "frame_start": 45,
82
+ "frame_end": 48,
83
+ "frame_count": 3
84
+ },
85
+ {
86
+ "feature_index": 13,
87
+ "frame_start": 48,
88
+ "frame_end": 52,
89
+ "frame_count": 4
90
+ },
91
+ {
92
+ "feature_index": 14,
93
+ "frame_start": 52,
94
+ "frame_end": 56,
95
+ "frame_count": 4
96
+ },
97
+ {
98
+ "feature_index": 15,
99
+ "frame_start": 56,
100
+ "frame_end": 60,
101
+ "frame_count": 4
102
+ },
103
+ {
104
+ "feature_index": 16,
105
+ "frame_start": 60,
106
+ "frame_end": 63,
107
+ "frame_count": 3
108
+ },
109
+ {
110
+ "feature_index": 17,
111
+ "frame_start": 63,
112
+ "frame_end": 67,
113
+ "frame_count": 4
114
+ },
115
+ {
116
+ "feature_index": 18,
117
+ "frame_start": 67,
118
+ "frame_end": 71,
119
+ "frame_count": 4
120
+ },
121
+ {
122
+ "feature_index": 19,
123
+ "frame_start": 71,
124
+ "frame_end": 75,
125
+ "frame_count": 4
126
+ },
127
+ {
128
+ "feature_index": 20,
129
+ "frame_start": 75,
130
+ "frame_end": 78,
131
+ "frame_count": 3
132
+ },
133
+ {
134
+ "feature_index": 21,
135
+ "frame_start": 78,
136
+ "frame_end": 82,
137
+ "frame_count": 4
138
+ },
139
+ {
140
+ "feature_index": 22,
141
+ "frame_start": 82,
142
+ "frame_end": 86,
143
+ "frame_count": 4
144
+ },
145
+ {
146
+ "feature_index": 23,
147
+ "frame_start": 86,
148
+ "frame_end": 90,
149
+ "frame_count": 4
150
+ },
151
+ {
152
+ "feature_index": 24,
153
+ "frame_start": 90,
154
+ "frame_end": 94,
155
+ "frame_count": 4
156
+ }
157
+ ]
158
+ }
SignX/inference_output/detailed_prediction_20260102_202302/173238/frame_alignment.json ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "translation": "#NO IX-1p USE CORRECT FOR",
3
+ "words": [
4
+ "#NO",
5
+ "IX-1p",
6
+ "USE",
7
+ "CORRECT",
8
+ "FOR"
9
+ ],
10
+ "total_video_frames": 25,
11
+ "frame_ranges": [
12
+ {
13
+ "word": "#NO",
14
+ "start_frame": 7,
15
+ "end_frame": 7,
16
+ "peak_frame": 7,
17
+ "avg_attention": 0.5316407680511475,
18
+ "confidence": "high"
19
+ },
20
+ {
21
+ "word": "IX-1p",
22
+ "start_frame": 0,
23
+ "end_frame": 7,
24
+ "peak_frame": 7,
25
+ "avg_attention": 0.1155116930603981,
26
+ "confidence": "low"
27
+ },
28
+ {
29
+ "word": "USE",
30
+ "start_frame": 0,
31
+ "end_frame": 24,
32
+ "peak_frame": 24,
33
+ "avg_attention": 0.15302085876464844,
34
+ "confidence": "low"
35
+ },
36
+ {
37
+ "word": "CORRECT",
38
+ "start_frame": 11,
39
+ "end_frame": 11,
40
+ "peak_frame": 11,
41
+ "avg_attention": 0.33560681343078613,
42
+ "confidence": "medium"
43
+ },
44
+ {
45
+ "word": "FOR",
46
+ "start_frame": 12,
47
+ "end_frame": 13,
48
+ "peak_frame": 12,
49
+ "avg_attention": 0.42703279852867126,
50
+ "confidence": "medium"
51
+ }
52
+ ],
53
+ "statistics": {
54
+ "avg_confidence": 0.3125625863671303,
55
+ "high_confidence_words": 1,
56
+ "medium_confidence_words": 2,
57
+ "low_confidence_words": 2
58
+ }
59
+ }
SignX/inference_output/detailed_prediction_20260102_202302/173238/frame_alignment.pdf ADDED
Binary file (29.3 kB). View file
 
SignX/inference_output/detailed_prediction_20260102_202302/173238/frame_alignment.png ADDED

Git LFS Details

  • SHA256: 385a309d656ec1a77284d4506538afacde5f69c4059501abcf5eb4207c39eb6f
  • Pointer size: 131 Bytes
  • Size of remote file: 133 kB
SignX/inference_output/detailed_prediction_20260102_202302/173238/frame_alignment_short.pdf ADDED
Binary file (29.2 kB). View file
 
SignX/inference_output/detailed_prediction_20260102_202302/173238/frame_alignment_short.png ADDED

Git LFS Details

  • SHA256: 9b722bf4d13e172a4cf3f069730a5cd2f8a150f2cb93b0415dc0e4418f7c4303
  • Pointer size: 131 Bytes
  • Size of remote file: 133 kB
SignX/inference_output/detailed_prediction_20260102_202302/173238/gloss_to_frames.png ADDED

Git LFS Details

  • SHA256: ca42cfa9ef08b02a6198c96457d7bbc3102bdefd4d96d08ad3d439e4a6560593
  • Pointer size: 132 Bytes
  • Size of remote file: 2.1 MB
SignX/inference_output/detailed_prediction_20260102_202302/173238/interactive_alignment.html ADDED
@@ -0,0 +1,579 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8">
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
6
+ <title>Interactive Word-Frame Alignment</title>
7
+ <style>
8
+ body {
9
+ font-family: 'Arial', sans-serif;
10
+ margin: 20px;
11
+ background-color: #f5f5f5;
12
+ }
13
+ .container {
14
+ max-width: 1800px;
15
+ margin: 0 auto;
16
+ background-color: white;
17
+ padding: 30px;
18
+ border-radius: 8px;
19
+ box-shadow: 0 2px 10px rgba(0,0,0,0.1);
20
+ }
21
+ h1 {
22
+ color: #333;
23
+ border-bottom: 3px solid #4CAF50;
24
+ padding-bottom: 10px;
25
+ margin-bottom: 20px;
26
+ }
27
+ .stats {
28
+ background-color: #E3F2FD;
29
+ padding: 15px;
30
+ border-radius: 5px;
31
+ margin-bottom: 20px;
32
+ border-left: 4px solid #2196F3;
33
+ font-size: 14px;
34
+ }
35
+ .controls {
36
+ background-color: #f9f9f9;
37
+ padding: 20px;
38
+ border-radius: 5px;
39
+ margin-bottom: 30px;
40
+ border: 1px solid #ddd;
41
+ }
42
+ .control-group {
43
+ margin-bottom: 15px;
44
+ }
45
+ label {
46
+ font-weight: bold;
47
+ display: inline-block;
48
+ width: 250px;
49
+ color: #555;
50
+ }
51
+ input[type="range"] {
52
+ width: 400px;
53
+ vertical-align: middle;
54
+ }
55
+ .value-display {
56
+ display: inline-block;
57
+ width: 80px;
58
+ font-family: monospace;
59
+ font-size: 14px;
60
+ color: #2196F3;
61
+ font-weight: bold;
62
+ }
63
+ .reset-btn {
64
+ margin-top: 15px;
65
+ padding: 10px 25px;
66
+ background-color: #2196F3;
67
+ color: white;
68
+ border: none;
69
+ border-radius: 5px;
70
+ cursor: pointer;
71
+ font-size: 14px;
72
+ font-weight: bold;
73
+ }
74
+ .reset-btn:hover {
75
+ background-color: #1976D2;
76
+ }
77
+ canvas {
78
+ border: 1px solid #999;
79
+ display: block;
80
+ margin: 20px auto;
81
+ background: white;
82
+ }
83
+ .legend {
84
+ margin-top: 20px;
85
+ padding: 15px;
86
+ background-color: #fff;
87
+ border: 1px solid #ddd;
88
+ border-radius: 5px;
89
+ }
90
+ .legend-item {
91
+ display: inline-block;
92
+ margin-right: 25px;
93
+ font-size: 13px;
94
+ margin-bottom: 10px;
95
+ }
96
+ .color-box {
97
+ display: inline-block;
98
+ width: 30px;
99
+ height: 15px;
100
+ margin-right: 8px;
101
+ vertical-align: middle;
102
+ border: 1px solid #666;
103
+ }
104
+ .info-panel {
105
+ margin-top: 20px;
106
+ padding: 15px;
107
+ background-color: #f9f9f9;
108
+ border-radius: 5px;
109
+ border: 1px solid #ddd;
110
+ }
111
+ .confidence {
112
+ display: inline-block;
113
+ padding: 3px 10px;
114
+ border-radius: 10px;
115
+ font-weight: bold;
116
+ font-size: 11px;
117
+ text-transform: uppercase;
118
+ }
119
+ .confidence.high {
120
+ background-color: #4CAF50;
121
+ color: white;
122
+ }
123
+ .confidence.medium {
124
+ background-color: #FF9800;
125
+ color: white;
126
+ }
127
+ .confidence.low {
128
+ background-color: #f44336;
129
+ color: white;
130
+ }
131
+ </style>
132
+ </head>
133
+ <body>
134
+ <div class="container">
135
+ <h1>🎯 Interactive Word-to-Frame Alignment Visualizer</h1>
136
+
137
+ <div class="stats">
138
+ <strong>Translation:</strong> #NO IX-1p USE CORRECT FOR<br>
139
+ <strong>Total Words:</strong> 5 |
140
+ <strong>Total Features:</strong> 25
141
+ </div>
142
+
143
+ <div class="controls">
144
+ <h3>⚙️ Threshold Controls</h3>
145
+
146
+ <div class="control-group">
147
+ <label for="peak-threshold">Peak Threshold (% of max):</label>
148
+ <input type="range" id="peak-threshold" min="1" max="100" value="90" step="1">
149
+ <span class="value-display" id="peak-threshold-value">90%</span>
150
+ <br>
151
+ <small style="margin-left: 255px; color: #666;">
152
+ A frame is considered “significant” if its attention ≥ (peak × threshold%)
153
+ </small>
154
+ </div>
155
+
156
+ <div class="control-group">
157
+ <label for="confidence-high">High Confidence (avg attn >):</label>
158
+ <input type="range" id="confidence-high" min="0" max="100" value="50" step="1">
159
+ <span class="value-display" id="confidence-high-value">0.50</span>
160
+ </div>
161
+
162
+ <div class="control-group">
163
+ <label for="confidence-medium">Medium Confidence (avg attn >):</label>
164
+ <input type="range" id="confidence-medium" min="0" max="100" value="20" step="1">
165
+ <span class="value-display" id="confidence-medium-value">0.20</span>
166
+ </div>
167
+
168
+ <button class="reset-btn" onclick="resetDefaults()">
169
+ Reset to Defaults
170
+ </button>
171
+ </div>
172
+
173
+ <div>
174
+ <h3>Word-to-Frame Alignment</h3>
175
+ <p style="color: #666; font-size: 13px;">
176
+ Each word appears as a colored block. Width = frame span, ★ = peak frame, waveform = attention trace.
177
+ </p>
178
+ <canvas id="alignment-canvas" width="1600" height="600"></canvas>
179
+
180
+ <h3 style="margin-top: 30px;">Timeline Progress Bar</h3>
181
+ <canvas id="timeline-canvas" width="1600" height="100"></canvas>
182
+
183
+ <div class="legend">
184
+ <strong>Legend:</strong><br><br>
185
+ <div class="legend-item">
186
+ <span class="confidence high">High</span>
187
+ <span class="confidence medium">Medium</span>
188
+ <span class="confidence low">Low</span>
189
+ Confidence Levels (opacity reflects confidence)
190
+ </div>
191
+ <div class="legend-item">
192
+ <span style="color: red; font-size: 20px;">★</span>
193
+ Peak Frame (highest attention)
194
+ </div>
195
+ <div class="legend-item">
196
+ <span style="color: blue;">━</span>
197
+ Attention Waveform (within word region)
198
+ </div>
199
+ </div>
200
+ </div>
201
+
202
+ <div class="info-panel">
203
+ <h3>Alignment Details</h3>
204
+ <div id="alignment-details"></div>
205
+ </div>
206
+ </div>
207
+
208
+ <script>
209
+ // Attention data from Python
210
+ const attentionData = [{"word": "#NO", "word_idx": 0, "weights": [0.012782563455402851, 0.011981594376266003, 0.01150900311768055, 0.010720917023718357, 0.009659628383815289, 0.015060730278491974, 0.22624146938323975, 0.5316407680511475, 0.13897353410720825, 0.009811749681830406, 0.005086212884634733, 0.006459908559918404, 0.0025683066342025995, 0.0020257264841347933, 0.0009967696387320757, 0.0005196572747081518, 0.0011819832725450397, 0.00012239675561431795, 3.519998426781967e-05, 9.520346793578938e-05, 0.00018389770411886275, 0.0002500070841051638, 0.00037205920671112835, 0.000725841848179698, 0.0009948475053533912]}, {"word": "IX-1p", "word_idx": 1, "weights": [0.1154966726899147, 0.11839839816093445, 0.10858778655529022, 0.09947863966226578, 0.09086362272500992, 0.09146849811077118, 0.07603903114795685, 0.11956391483545303, 0.07710319012403488, 0.019207479432225227, 0.00366093497723341, 0.0016571247251704335, 0.004489341285079718, 0.002439837669953704, 0.001201692852191627, 0.0005549773923121393, 0.0009384832228533924, 0.0005504037253558636, 0.0003465348854660988, 0.0008819004287943244, 0.0016493259463459253, 0.0022654831409454346, 0.00884854607284069, 0.023778975009918213, 0.030529193580150604]}, {"word": "USE", "word_idx": 2, "weights": [0.14988766610622406, 0.12994979321956635, 0.10789304971694946, 0.08856625854969025, 0.07732779532670975, 0.04263059422373772, 0.004822988994419575, 0.00905913207679987, 0.015443230979144573, 0.03147038072347641, 0.014399666339159012, 0.008924762718379498, 0.00825969036668539, 0.003495264332741499, 0.0015463761519640684, 0.0006412258371710777, 0.0011047126026824117, 0.0005361629882827401, 0.0005916748195886612, 0.0014564881566911936, 0.002115124138072133, 0.002776387147605419, 0.021915478631854057, 0.1190321147441864, 0.15615403652191162]}, {"word": "CORRECT", "word_idx": 3, "weights": [0.06937894970178604, 0.051489513367414474, 0.0445125512778759, 0.03950398042798042, 0.037877995520830154, 0.02452961727976799, 
0.007906158454716206, 0.016188515350222588, 0.02675747498869896, 0.07620300352573395, 0.19777104258537292, 0.33560681343078613, 0.005275052040815353, 0.005951147526502609, 0.007349252700805664, 0.004849528893828392, 0.007685650140047073, 0.001965776551514864, 0.0007187770679593086, 0.0009715624619275331, 0.0008626979542896152, 0.0013258870458230376, 0.005855972412973642, 0.013570844195783138, 0.01589221879839897]}, {"word": "FOR", "word_idx": 4, "weights": [0.00033176588476635516, 0.00020028719154652208, 0.0001781086903065443, 0.00017031899187713861, 0.00017076355288736522, 0.00019543150847312063, 0.0007411232218146324, 0.0010241027921438217, 0.0017877332866191864, 0.00046761787962168455, 0.0013574255863204598, 0.006706785876303911, 0.44760289788246155, 0.406462699174881, 0.06950433552265167, 0.02205367386341095, 0.03949587419629097, 0.0011918380623683333, 7.672667561564595e-05, 9.517547732684761e-05, 5.0555227062432095e-05, 1.5771540347486734e-05, 1.0371734788350295e-05, 4.547540083876811e-05, 6.314207712421194e-05]}];
211
+ const numGlosses = 5;
212
+ const numFeatures = 25;
213
+
214
+ // Colors for different words (matching matplotlib tab20)
215
+ const colors = [
216
+ '#1f77b4', '#ff7f0e', '#2ca02c', '#d62728', '#9467bd',
217
+ '#8c564b', '#e377c2', '#7f7f7f', '#bcbd22', '#17becf',
218
+ '#aec7e8', '#ffbb78', '#98df8a', '#ff9896', '#c5b0d5',
219
+ '#c49c94', '#f7b6d2', '#c7c7c7', '#dbdb8d', '#9edae5'
220
+ ];
221
+
222
+ // Get controls
223
+ const peakThresholdSlider = document.getElementById('peak-threshold');
224
+ const peakThresholdValue = document.getElementById('peak-threshold-value');
225
+ const confidenceHighSlider = document.getElementById('confidence-high');
226
+ const confidenceHighValue = document.getElementById('confidence-high-value');
227
+ const confidenceMediumSlider = document.getElementById('confidence-medium');
228
+ const confidenceMediumValue = document.getElementById('confidence-medium-value');
229
+ const alignmentCanvas = document.getElementById('alignment-canvas');
230
+ const timelineCanvas = document.getElementById('timeline-canvas');
231
+ const alignmentCtx = alignmentCanvas.getContext('2d');
232
+ const timelineCtx = timelineCanvas.getContext('2d');
233
+
234
+ // Update displays when sliders change
235
+ peakThresholdSlider.oninput = function() {
236
+ peakThresholdValue.textContent = this.value + '%';
237
+ updateVisualization();
238
+ };
239
+
240
+ confidenceHighSlider.oninput = function() {
241
+ confidenceHighValue.textContent = (this.value / 100).toFixed(2);
242
+ updateVisualization();
243
+ };
244
+
245
+ confidenceMediumSlider.oninput = function() {
246
+ confidenceMediumValue.textContent = (this.value / 100).toFixed(2);
247
+ updateVisualization();
248
+ };
249
+
250
+ function resetDefaults() {
251
+ peakThresholdSlider.value = 90;
252
+ confidenceHighSlider.value = 50;
253
+ confidenceMediumSlider.value = 20;
254
+ peakThresholdValue.textContent = '90%';
255
+ confidenceHighValue.textContent = '0.50';
256
+ confidenceMediumValue.textContent = '0.20';
257
+ updateVisualization();
258
+ }
259
+
260
+ function calculateAlignment(weights, peakThreshold) {
261
+ // Find peak
262
+ let peakIdx = 0;
263
+ let peakWeight = weights[0];
264
+ for (let i = 1; i < weights.length; i++) {
265
+ if (weights[i] > peakWeight) {
266
+ peakWeight = weights[i];
267
+ peakIdx = i;
268
+ }
269
+ }
270
+
271
+ // Find significant frames
272
+ const threshold = peakWeight * (peakThreshold / 100);
273
+ let startIdx = peakIdx;
274
+ let endIdx = peakIdx;
275
+ let sumWeight = 0;
276
+ let count = 0;
277
+
278
+ for (let i = 0; i < weights.length; i++) {
279
+ if (weights[i] >= threshold) {
280
+ if (i < startIdx) startIdx = i;
281
+ if (i > endIdx) endIdx = i;
282
+ sumWeight += weights[i];
283
+ count++;
284
+ }
285
+ }
286
+
287
+ const avgWeight = count > 0 ? sumWeight / count : peakWeight;
288
+
289
+ return {
290
+ startIdx: startIdx,
291
+ endIdx: endIdx,
292
+ peakIdx: peakIdx,
293
+ peakWeight: peakWeight,
294
+ avgWeight: avgWeight,
295
+ threshold: threshold
296
+ };
297
+ }
298
+
299
+ function getConfidenceLevel(avgWeight, highThreshold, mediumThreshold) {
300
+ if (avgWeight > highThreshold) return 'high';
301
+ if (avgWeight > mediumThreshold) return 'medium';
302
+ return 'low';
303
+ }
304
+
305
+ function drawAlignmentChart() {
306
+ const peakThreshold = parseInt(peakThresholdSlider.value);
307
+ const highThreshold = parseInt(confidenceHighSlider.value) / 100;
308
+ const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;
309
+
310
+ // Canvas dimensions
311
+ const width = alignmentCanvas.width;
312
+ const height = alignmentCanvas.height;
313
+ const leftMargin = 180;
314
+ const rightMargin = 50;
315
+ const topMargin = 60;
316
+ const bottomMargin = 80;
317
+
318
+ const plotWidth = width - leftMargin - rightMargin;
319
+ const plotHeight = height - topMargin - bottomMargin;
320
+
321
+ const rowHeight = plotHeight / numGlosses;
322
+ const featureWidth = plotWidth / numFeatures;
323
+
324
+ // Clear canvas
325
+ alignmentCtx.clearRect(0, 0, width, height);
326
+
327
+ // Draw title
328
+ alignmentCtx.fillStyle = '#333';
329
+ alignmentCtx.font = 'bold 18px Arial';
330
+ alignmentCtx.textAlign = 'center';
331
+ alignmentCtx.fillText('Word-to-Frame Alignment', width / 2, 30);
332
+ alignmentCtx.font = '13px Arial';
333
+ alignmentCtx.fillText('(based on attention peaks, ★ = peak frame)', width / 2, 48);
334
+
335
+ // Calculate alignments
336
+ const alignments = [];
337
+ for (let wordIdx = 0; wordIdx < numGlosses; wordIdx++) {
338
+ const data = attentionData[wordIdx];
339
+ const alignment = calculateAlignment(data.weights, peakThreshold);
340
+ alignment.word = data.word;
341
+ alignment.wordIdx = wordIdx;
342
+ alignment.weights = data.weights;
343
+ alignments.push(alignment);
344
+ }
345
+
346
+ // Draw grid
347
+ alignmentCtx.strokeStyle = '#e0e0e0';
348
+ alignmentCtx.lineWidth = 0.5;
349
+ for (let i = 0; i <= numFeatures; i++) {
350
+ const x = leftMargin + i * featureWidth;
351
+ alignmentCtx.beginPath();
352
+ alignmentCtx.moveTo(x, topMargin);
353
+ alignmentCtx.lineTo(x, topMargin + plotHeight);
354
+ alignmentCtx.stroke();
355
+ }
356
+
357
+ // Draw word regions
358
+ for (let wordIdx = 0; wordIdx < numGlosses; wordIdx++) {
359
+ const alignment = alignments[wordIdx];
360
+ const confidence = getConfidenceLevel(alignment.avgWeight, highThreshold, mediumThreshold);
361
+ const y = topMargin + wordIdx * rowHeight;
362
+
363
+ // Alpha based on confidence
364
+ const alpha = confidence === 'high' ? 0.9 : confidence === 'medium' ? 0.7 : 0.5;
365
+
366
+ // Draw rectangle for word region
367
+ const startX = leftMargin + alignment.startIdx * featureWidth;
368
+ const rectWidth = (alignment.endIdx - alignment.startIdx + 1) * featureWidth;
369
+
370
+ alignmentCtx.fillStyle = colors[wordIdx % 20];
371
+ alignmentCtx.globalAlpha = alpha;
372
+ alignmentCtx.fillRect(startX, y, rectWidth, rowHeight * 0.8);
373
+ alignmentCtx.globalAlpha = 1.0;
374
+
375
+ // Draw border
376
+ alignmentCtx.strokeStyle = '#000';
377
+ alignmentCtx.lineWidth = 2;
378
+ alignmentCtx.strokeRect(startX, y, rectWidth, rowHeight * 0.8);
379
+
380
+ // Draw attention waveform inside rectangle
381
+ alignmentCtx.strokeStyle = 'rgba(0, 0, 255, 0.8)';
382
+ alignmentCtx.lineWidth = 1.5;
383
+ alignmentCtx.beginPath();
384
+ for (let i = alignment.startIdx; i <= alignment.endIdx; i++) {
385
+ const x = leftMargin + i * featureWidth + featureWidth / 2;
386
+ const weight = alignment.weights[i];
387
+ const maxWeight = alignment.peakWeight;
388
+ const normalizedWeight = weight / (maxWeight * 1.2); // Scale for visibility
389
+ const waveY = y + rowHeight * 0.8 - (normalizedWeight * rowHeight * 0.6);
390
+
391
+ if (i === alignment.startIdx) {
392
+ alignmentCtx.moveTo(x, waveY);
393
+ } else {
394
+ alignmentCtx.lineTo(x, waveY);
395
+ }
396
+ }
397
+ alignmentCtx.stroke();
398
+
399
+ // Draw word label
400
+ const labelX = startX + rectWidth / 2;
401
+ const labelY = y + rowHeight * 0.4;
402
+
403
+ alignmentCtx.fillStyle = 'rgba(0, 0, 0, 0.7)';
404
+ alignmentCtx.fillRect(labelX - 60, labelY - 12, 120, 24);
405
+ alignmentCtx.fillStyle = '#fff';
406
+ alignmentCtx.font = 'bold 13px Arial';
407
+ alignmentCtx.textAlign = 'center';
408
+ alignmentCtx.textBaseline = 'middle';
409
+ alignmentCtx.fillText(alignment.word, labelX, labelY);
410
+
411
+ // Mark peak frame with star
412
+ const peakX = leftMargin + alignment.peakIdx * featureWidth + featureWidth / 2;
413
+ const peakY = y + rowHeight * 0.4;
414
+
415
+ // Draw star
416
+ alignmentCtx.fillStyle = '#ff0000';
417
+ alignmentCtx.strokeStyle = '#ffff00';
418
+ alignmentCtx.lineWidth = 1.5;
419
+ alignmentCtx.font = '20px Arial';
420
+ alignmentCtx.textAlign = 'center';
421
+ alignmentCtx.strokeText('★', peakX, peakY);
422
+ alignmentCtx.fillText('★', peakX, peakY);
423
+
424
+ // Y-axis label (word names)
425
+ alignmentCtx.fillStyle = '#333';
426
+ alignmentCtx.font = '12px Arial';
427
+ alignmentCtx.textAlign = 'right';
428
+ alignmentCtx.textBaseline = 'middle';
429
+ alignmentCtx.fillText(alignment.word, leftMargin - 10, y + rowHeight * 0.4);
430
+ }
431
+
432
+ // Draw horizontal grid lines
433
+ alignmentCtx.strokeStyle = '#ccc';
434
+ alignmentCtx.lineWidth = 0.5;
435
+ for (let i = 0; i <= numGlosses; i++) {
436
+ const y = topMargin + i * rowHeight;
437
+ alignmentCtx.beginPath();
438
+ alignmentCtx.moveTo(leftMargin, y);
439
+ alignmentCtx.lineTo(leftMargin + plotWidth, y);
440
+ alignmentCtx.stroke();
441
+ }
442
+
443
+ // Draw axes
444
+ alignmentCtx.strokeStyle = '#000';
445
+ alignmentCtx.lineWidth = 2;
446
+ alignmentCtx.strokeRect(leftMargin, topMargin, plotWidth, plotHeight);
447
+
448
+ // X-axis labels (frame indices)
449
+ alignmentCtx.fillStyle = '#000';
450
+ alignmentCtx.font = '11px Arial';
451
+ alignmentCtx.textAlign = 'center';
452
+ alignmentCtx.textBaseline = 'top';
453
+ for (let i = 0; i < numFeatures; i++) {
454
+ const x = leftMargin + i * featureWidth + featureWidth / 2;
455
+ alignmentCtx.fillText(i.toString(), x, topMargin + plotHeight + 10);
456
+ }
457
+
458
+ // Axis titles
459
+ alignmentCtx.fillStyle = '#333';
460
+ alignmentCtx.font = 'bold 14px Arial';
461
+ alignmentCtx.textAlign = 'center';
462
+ alignmentCtx.fillText('Feature Frame Index', leftMargin + plotWidth / 2, height - 20);
463
+
464
+ alignmentCtx.save();
465
+ alignmentCtx.translate(30, topMargin + plotHeight / 2);
466
+ alignmentCtx.rotate(-Math.PI / 2);
467
+ alignmentCtx.fillText('Generated Word', 0, 0);
468
+ alignmentCtx.restore();
469
+
470
+ return alignments;
471
+ }
472
+
473
+ function drawTimeline(alignments) {
474
+ const highThreshold = parseInt(confidenceHighSlider.value) / 100;
475
+ const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;
476
+
477
+ const width = timelineCanvas.width;
478
+ const height = timelineCanvas.height;
479
+ const leftMargin = 180;
480
+ const rightMargin = 50;
481
+ const plotWidth = width - leftMargin - rightMargin;
482
+ const featureWidth = plotWidth / numFeatures;
483
+
484
+ // Clear canvas
485
+ timelineCtx.clearRect(0, 0, width, height);
486
+
487
+ // Background bar
488
+ timelineCtx.fillStyle = '#ddd';
489
+ timelineCtx.fillRect(leftMargin, 30, plotWidth, 40);
490
+ timelineCtx.strokeStyle = '#000';
491
+ timelineCtx.lineWidth = 2;
492
+ timelineCtx.strokeRect(leftMargin, 30, plotWidth, 40);
493
+
494
+ // Draw word regions on timeline
495
+ for (let wordIdx = 0; wordIdx < alignments.length; wordIdx++) {
496
+ const alignment = alignments[wordIdx];
497
+ const confidence = getConfidenceLevel(alignment.avgWeight, highThreshold, mediumThreshold);
498
+ const alpha = confidence === 'high' ? 0.9 : confidence === 'medium' ? 0.7 : 0.5;
499
+
500
+ const startX = leftMargin + alignment.startIdx * featureWidth;
501
+ const rectWidth = (alignment.endIdx - alignment.startIdx + 1) * featureWidth;
502
+
503
+ timelineCtx.fillStyle = colors[wordIdx % 20];
504
+ timelineCtx.globalAlpha = alpha;
505
+ timelineCtx.fillRect(startX, 30, rectWidth, 40);
506
+ timelineCtx.globalAlpha = 1.0;
507
+ timelineCtx.strokeStyle = '#000';
508
+ timelineCtx.lineWidth = 0.5;
509
+ timelineCtx.strokeRect(startX, 30, rectWidth, 40);
510
+ }
511
+
512
+ // Title
513
+ timelineCtx.fillStyle = '#333';
514
+ timelineCtx.font = 'bold 13px Arial';
515
+ timelineCtx.textAlign = 'left';
516
+ timelineCtx.fillText('Timeline Progress Bar', leftMargin, 20);
517
+ }
518
+
519
+ function updateDetailsPanel(alignments, highThreshold, mediumThreshold) {
520
+ const panel = document.getElementById('alignment-details');
521
+ let html = '<table style="width: 100%; border-collapse: collapse;">';
522
+ html += '<tr style="background: #f0f0f0; font-weight: bold;">';
523
+ html += '<th style="padding: 8px; border: 1px solid #ddd;">Word</th>';
524
+ html += '<th style="padding: 8px; border: 1px solid #ddd;">Feature Range</th>';
525
+ html += '<th style="padding: 8px; border: 1px solid #ddd;">Peak</th>';
526
+ html += '<th style="padding: 8px; border: 1px solid #ddd;">Span</th>';
527
+ html += '<th style="padding: 8px; border: 1px solid #ddd;">Avg Attention</th>';
528
+ html += '<th style="padding: 8px; border: 1px solid #ddd;">Confidence</th>';
529
+ html += '</tr>';
530
+
531
+ for (const align of alignments) {
532
+ const confidence = getConfidenceLevel(align.avgWeight, highThreshold, mediumThreshold);
533
+ const span = align.endIdx - align.startIdx + 1;
534
+
535
+ html += '<tr>';
536
+ html += `<td style="padding: 8px; border: 1px solid #ddd;"><strong>${align.word}</strong></td>`;
537
+ html += `<td style="padding: 8px; border: 1px solid #ddd;">${align.startIdx} → ${align.endIdx}</td>`;
538
+ html += `<td style="padding: 8px; border: 1px solid #ddd;">${align.peakIdx}</td>`;
539
+ html += `<td style="padding: 8px; border: 1px solid #ddd;">${span}</td>`;
540
+ html += `<td style="padding: 8px; border: 1px solid #ddd;">${align.avgWeight.toFixed(4)}</td>`;
541
+ html += `<td style="padding: 8px; border: 1px solid #ddd;"><span class="confidence ${confidence}">${confidence}</span></td>`;
542
+ html += '</tr>';
543
+ }
544
+
545
+ html += '</table>';
546
+ panel.innerHTML = html;
547
+ }
548
+
549
+ function updateVisualization() {
550
+ const alignments = drawAlignmentChart();
551
+ drawTimeline(alignments);
552
+ const highThreshold = parseInt(confidenceHighSlider.value) / 100;
553
+ const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;
554
+ updateDetailsPanel(alignments, highThreshold, mediumThreshold);
555
+ }
556
+
557
+ // Event listeners for sliders
558
+ peakSlider.addEventListener('input', function() {
559
+ peakValue.textContent = peakSlider.value + '%';
560
+ updateVisualization();
561
+ });
562
+
563
+ confidenceHighSlider.addEventListener('input', function() {
564
+ const val = parseInt(confidenceHighSlider.value) / 100;
565
+ confidenceHighValue.textContent = val.toFixed(2);
566
+ updateVisualization();
567
+ });
568
+
569
+ confidenceMediumSlider.addEventListener('input', function() {
570
+ const val = parseInt(confidenceMediumSlider.value) / 100;
571
+ confidenceMediumValue.textContent = val.toFixed(2);
572
+ updateVisualization();
573
+ });
574
+
575
+ // Initial visualization
576
+ updateVisualization();
577
+ </script>
578
+ </body>
579
+ </html>
SignX/inference_output/detailed_prediction_20260102_202302/173238/translation.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ With BPE: #@@ N@@ O IX-1p USE CORRECT F@@ O@@ R
2
+ Clean: #NO IX-1p USE CORRECT FOR
3
+ Ground Truth: #NO IX-1p USE CORRECT KEY
SignX/inference_output/detailed_prediction_20260102_202418/173745/173745.mp4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:289b2fa3e751823c99b78a40e38ff799dfb7e28cdc53053b045c5e9dbb15d7fb
3
+ size 494802
SignX/inference_output/detailed_prediction_20260102_202418/173745/analysis_report.txt ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ================================================================================
2
+ Sign Language Recognition - Attention Analysis Report
3
+ ================================================================================
4
+
5
+ Generated at: 2026-01-02 20:24:24
6
+
7
+ Translation:
8
+ --------------------------------------------------------------------------------
9
+ CAR BREAK-DOWN
10
+
11
+ Video info:
12
+ --------------------------------------------------------------------------------
13
+ Total feature frames: 19
14
+ Word count: 2
15
+
16
+ Attention tensor:
17
+ --------------------------------------------------------------------------------
18
+ Shape: (23, 19)
19
+ - Decoder steps: 23
20
+
21
+ Word-to-frame details:
22
+ ================================================================================
23
+ No. Word Frames Peak Attn Conf
24
+ --------------------------------------------------------------------------------
25
+ 1 CAR 4-4 4 0.233 medium
26
+ 2 BREAK-DOWN 0-1 0 0.144 low
27
+
28
+ ================================================================================
29
+
30
+ Summary:
31
+ --------------------------------------------------------------------------------
32
+ Average attention weight: 0.189
33
+ High-confidence words: 0 (0.0%)
34
+ Medium-confidence words: 1 (50.0%)
35
+ Low-confidence words: 1 (50.0%)
36
+
37
+ ================================================================================
SignX/inference_output/detailed_prediction_20260102_202418/173745/attention_heatmap.pdf ADDED
Binary file (30.1 kB). View file
 
SignX/inference_output/detailed_prediction_20260102_202418/173745/attention_heatmap.png ADDED

Git LFS Details

  • SHA256: 66c4825a177d2e98d67656846fbb2fa60a9a581c38d1faa9a517d2d9b61a9213
  • Pointer size: 130 Bytes
  • Size of remote file: 74.9 kB
SignX/inference_output/detailed_prediction_20260102_202418/173745/attention_keyframes/keyframes_index.txt ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Attention Keyframe Index
2
+ ============================================================
3
+
4
+ Sample directory: /research/cbim/vast/sf895/code/Sign-X/output/huggingface_asllrp_repo/SignX/inference_output/detailed_prediction_20260102_202418/173745
5
+ Video path: /common/users/sf895/output/huggingface_asllrp_repo/SignX/eval/tiny_test_data/good_videos/173745.mp4
6
+ Total keyframes: 23
7
+
8
+ Keyframe list:
9
+ ------------------------------------------------------------
10
+ Gloss 0: keyframe_000_feat4_frame16_att0.233.jpg
11
+ Gloss 1: keyframe_001_feat0_frame1_att0.149.jpg
12
+ Gloss 2: keyframe_002_feat18_frame70_att0.165.jpg
13
+ Gloss 3: keyframe_003_feat9_frame35_att0.221.jpg
14
+ Gloss 4: keyframe_004_feat0_frame1_att0.123.jpg
15
+ Gloss 5: keyframe_005_feat18_frame70_att0.145.jpg
16
+ Gloss 6: keyframe_006_feat18_frame70_att0.115.jpg
17
+ Gloss 7: keyframe_007_feat0_frame1_att0.131.jpg
18
+ Gloss 8: keyframe_008_feat0_frame1_att0.154.jpg
19
+ Gloss 9: keyframe_009_feat0_frame1_att0.214.jpg
20
+ Gloss 10: keyframe_010_feat17_frame66_att0.090.jpg
21
+ Gloss 11: keyframe_011_feat0_frame1_att0.191.jpg
22
+ Gloss 12: keyframe_012_feat0_frame1_att0.152.jpg
23
+ Gloss 13: keyframe_013_feat0_frame1_att0.198.jpg
24
+ Gloss 14: keyframe_014_feat0_frame1_att0.255.jpg
25
+ Gloss 15: keyframe_015_feat0_frame1_att0.268.jpg
26
+ Gloss 16: keyframe_016_feat0_frame1_att0.197.jpg
27
+ Gloss 17: keyframe_017_feat0_frame1_att0.193.jpg
28
+ Gloss 18: keyframe_018_feat0_frame1_att0.186.jpg
29
+ Gloss 19: keyframe_019_feat0_frame1_att0.222.jpg
30
+ Gloss 20: keyframe_020_feat0_frame1_att0.263.jpg
31
+ Gloss 21: keyframe_021_feat0_frame1_att0.254.jpg
32
+ Gloss 22: keyframe_022_feat0_frame1_att0.204.jpg
SignX/inference_output/detailed_prediction_20260102_202418/173745/attention_weights.npy ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ebd77af65d3a9c36f3bdaa8b3f52b230fc8990fde9bdcf42c033e12e331a9c82
3
+ size 1876
SignX/inference_output/detailed_prediction_20260102_202418/173745/debug_video_path.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ video_path = '/common/users/sf895/output/huggingface_asllrp_repo/SignX/eval/tiny_test_data/good_videos/173745.mp4'
2
+ video_path type = <class 'str'>
3
+ video_path is None: False
4
+ bool(video_path): True
SignX/inference_output/{detailed_prediction_20260102_183038/97998032 → detailed_prediction_20260102_202418/173745}/feature_frame_mapping.json RENAMED
File without changes
SignX/inference_output/detailed_prediction_20260102_202418/173745/frame_alignment.json ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "translation": "CAR BREAK-DOWN",
3
+ "words": [
4
+ "CAR",
5
+ "BREAK-DOWN"
6
+ ],
7
+ "total_video_frames": 19,
8
+ "frame_ranges": [
9
+ {
10
+ "word": "CAR",
11
+ "start_frame": 4,
12
+ "end_frame": 4,
13
+ "peak_frame": 4,
14
+ "avg_attention": 0.2333953082561493,
15
+ "confidence": "medium"
16
+ },
17
+ {
18
+ "word": "BREAK-DOWN",
19
+ "start_frame": 0,
20
+ "end_frame": 1,
21
+ "peak_frame": 0,
22
+ "avg_attention": 0.14435341954231262,
23
+ "confidence": "low"
24
+ }
25
+ ],
26
+ "statistics": {
27
+ "avg_confidence": 0.18887436389923096,
28
+ "high_confidence_words": 0,
29
+ "medium_confidence_words": 1,
30
+ "low_confidence_words": 1
31
+ }
32
+ }
SignX/inference_output/detailed_prediction_20260102_202418/173745/frame_alignment.pdf ADDED
Binary file (26.8 kB). View file
 
SignX/inference_output/detailed_prediction_20260102_202418/173745/frame_alignment.png ADDED

Git LFS Details

  • SHA256: 4467a3091d4a5340d2f5ee393a95797a714c79df3f0fbd76d017266376b89d07
  • Pointer size: 131 Bytes
  • Size of remote file: 116 kB
SignX/inference_output/detailed_prediction_20260102_202418/173745/frame_alignment_short.pdf ADDED
Binary file (26.8 kB). View file
 
SignX/inference_output/detailed_prediction_20260102_202418/173745/frame_alignment_short.png ADDED

Git LFS Details

  • SHA256: 9efe22bb13aab734ac181cc120dc5587e2048756f27b9af7951b049bd1411a2e
  • Pointer size: 131 Bytes
  • Size of remote file: 124 kB
SignX/inference_output/detailed_prediction_20260102_202418/173745/gloss_to_frames.png ADDED

Git LFS Details

  • SHA256: 53ee45393815a51ec1f8d226f1210dae503de3f9930116a8356047dc8208234a
  • Pointer size: 132 Bytes
  • Size of remote file: 1.14 MB
SignX/inference_output/detailed_prediction_20260102_202418/173745/interactive_alignment.html ADDED
@@ -0,0 +1,579 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8">
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
6
+ <title>Interactive Word-Frame Alignment</title>
7
+ <style>
8
+ body {
9
+ font-family: 'Arial', sans-serif;
10
+ margin: 20px;
11
+ background-color: #f5f5f5;
12
+ }
13
+ .container {
14
+ max-width: 1800px;
15
+ margin: 0 auto;
16
+ background-color: white;
17
+ padding: 30px;
18
+ border-radius: 8px;
19
+ box-shadow: 0 2px 10px rgba(0,0,0,0.1);
20
+ }
21
+ h1 {
22
+ color: #333;
23
+ border-bottom: 3px solid #4CAF50;
24
+ padding-bottom: 10px;
25
+ margin-bottom: 20px;
26
+ }
27
+ .stats {
28
+ background-color: #E3F2FD;
29
+ padding: 15px;
30
+ border-radius: 5px;
31
+ margin-bottom: 20px;
32
+ border-left: 4px solid #2196F3;
33
+ font-size: 14px;
34
+ }
35
+ .controls {
36
+ background-color: #f9f9f9;
37
+ padding: 20px;
38
+ border-radius: 5px;
39
+ margin-bottom: 30px;
40
+ border: 1px solid #ddd;
41
+ }
42
+ .control-group {
43
+ margin-bottom: 15px;
44
+ }
45
+ label {
46
+ font-weight: bold;
47
+ display: inline-block;
48
+ width: 250px;
49
+ color: #555;
50
+ }
51
+ input[type="range"] {
52
+ width: 400px;
53
+ vertical-align: middle;
54
+ }
55
+ .value-display {
56
+ display: inline-block;
57
+ width: 80px;
58
+ font-family: monospace;
59
+ font-size: 14px;
60
+ color: #2196F3;
61
+ font-weight: bold;
62
+ }
63
+ .reset-btn {
64
+ margin-top: 15px;
65
+ padding: 10px 25px;
66
+ background-color: #2196F3;
67
+ color: white;
68
+ border: none;
69
+ border-radius: 5px;
70
+ cursor: pointer;
71
+ font-size: 14px;
72
+ font-weight: bold;
73
+ }
74
+ .reset-btn:hover {
75
+ background-color: #1976D2;
76
+ }
77
+ canvas {
78
+ border: 1px solid #999;
79
+ display: block;
80
+ margin: 20px auto;
81
+ background: white;
82
+ }
83
+ .legend {
84
+ margin-top: 20px;
85
+ padding: 15px;
86
+ background-color: #fff;
87
+ border: 1px solid #ddd;
88
+ border-radius: 5px;
89
+ }
90
+ .legend-item {
91
+ display: inline-block;
92
+ margin-right: 25px;
93
+ font-size: 13px;
94
+ margin-bottom: 10px;
95
+ }
96
+ .color-box {
97
+ display: inline-block;
98
+ width: 30px;
99
+ height: 15px;
100
+ margin-right: 8px;
101
+ vertical-align: middle;
102
+ border: 1px solid #666;
103
+ }
104
+ .info-panel {
105
+ margin-top: 20px;
106
+ padding: 15px;
107
+ background-color: #f9f9f9;
108
+ border-radius: 5px;
109
+ border: 1px solid #ddd;
110
+ }
111
+ .confidence {
112
+ display: inline-block;
113
+ padding: 3px 10px;
114
+ border-radius: 10px;
115
+ font-weight: bold;
116
+ font-size: 11px;
117
+ text-transform: uppercase;
118
+ }
119
+ .confidence.high {
120
+ background-color: #4CAF50;
121
+ color: white;
122
+ }
123
+ .confidence.medium {
124
+ background-color: #FF9800;
125
+ color: white;
126
+ }
127
+ .confidence.low {
128
+ background-color: #f44336;
129
+ color: white;
130
+ }
131
+ </style>
132
+ </head>
133
+ <body>
134
+ <div class="container">
135
+ <h1>🎯 Interactive Word-to-Frame Alignment Visualizer</h1>
136
+
137
+ <div class="stats">
138
+ <strong>Translation:</strong> CAR BREAK-DOWN<br>
139
+ <strong>Total Words:</strong> 2 |
140
+ <strong>Total Features:</strong> 19
141
+ </div>
142
+
143
+ <div class="controls">
144
+ <h3>⚙️ Threshold Controls</h3>
145
+
146
+ <div class="control-group">
147
+ <label for="peak-threshold">Peak Threshold (% of max):</label>
148
+ <input type="range" id="peak-threshold" min="1" max="100" value="90" step="1">
149
+ <span class="value-display" id="peak-threshold-value">90%</span>
150
+ <br>
151
+ <small style="margin-left: 255px; color: #666;">
152
+ A frame is considered “significant” if its attention ≥ (peak × threshold%)
153
+ </small>
154
+ </div>
155
+
156
+ <div class="control-group">
157
+ <label for="confidence-high">High Confidence (avg attn >):</label>
158
+ <input type="range" id="confidence-high" min="0" max="100" value="50" step="1">
159
+ <span class="value-display" id="confidence-high-value">0.50</span>
160
+ </div>
161
+
162
+ <div class="control-group">
163
+ <label for="confidence-medium">Medium Confidence (avg attn >):</label>
164
+ <input type="range" id="confidence-medium" min="0" max="100" value="20" step="1">
165
+ <span class="value-display" id="confidence-medium-value">0.20</span>
166
+ </div>
167
+
168
+ <button class="reset-btn" onclick="resetDefaults()">
169
+ Reset to Defaults
170
+ </button>
171
+ </div>
172
+
173
+ <div>
174
+ <h3>Word-to-Frame Alignment</h3>
175
+ <p style="color: #666; font-size: 13px;">
176
+ Each word appears as a colored block. Width = frame span, ★ = peak frame, waveform = attention trace.
177
+ </p>
178
+ <canvas id="alignment-canvas" width="1600" height="600"></canvas>
179
+
180
+ <h3 style="margin-top: 30px;">Timeline Progress Bar</h3>
181
+ <canvas id="timeline-canvas" width="1600" height="100"></canvas>
182
+
183
+ <div class="legend">
184
+ <strong>Legend:</strong><br><br>
185
+ <div class="legend-item">
186
+ <span class="confidence high">High</span>
187
+ <span class="confidence medium">Medium</span>
188
+ <span class="confidence low">Low</span>
189
+ Confidence Levels (opacity reflects confidence)
190
+ </div>
191
+ <div class="legend-item">
192
+ <span style="color: red; font-size: 20px;">★</span>
193
+ Peak Frame (highest attention)
194
+ </div>
195
+ <div class="legend-item">
196
+ <span style="color: blue;">━</span>
197
+ Attention Waveform (within word region)
198
+ </div>
199
+ </div>
200
+ </div>
201
+
202
+ <div class="info-panel">
203
+ <h3>Alignment Details</h3>
204
+ <div id="alignment-details"></div>
205
+ </div>
206
+ </div>
207
+
208
+ <script>
209
+ // Attention data from Python
210
+ const attentionData = [{"word": "CAR", "word_idx": 0, "weights": [0.019201714545488358, 0.016010498628020287, 0.04009818658232689, 0.2021431028842926, 0.2333953082561493, 0.19581404328346252, 0.13894161581993103, 0.08961869776248932, 0.017304163426160812, 0.009614335373044014, 0.0044919578358531, 0.003000154159963131, 0.0018093630205839872, 0.002432898385450244, 0.0032934064511209726, 0.00507147703319788, 0.005474661942571402, 0.006064476445317268, 0.006219940260052681]}, {"word": "BREAK-DOWN", "word_idx": 1, "weights": [0.14899039268493652, 0.13971643149852753, 0.06643137335777283, 0.0284759271889925, 0.025709832087159157, 0.024263687431812286, 0.020394574850797653, 0.01726449280977249, 0.010557206347584724, 0.0072791315615177155, 0.00612180819734931, 0.0075492458418011665, 0.01024742890149355, 0.013071726076304913, 0.03896228224039078, 0.0870385617017746, 0.1054471880197525, 0.1188133955001831, 0.12366533279418945]}];
211
+ const numGlosses = 2;
212
+ const numFeatures = 19;
213
+
214
+ // Colors for different words (matching matplotlib tab20)
215
+ const colors = [
216
+ '#1f77b4', '#ff7f0e', '#2ca02c', '#d62728', '#9467bd',
217
+ '#8c564b', '#e377c2', '#7f7f7f', '#bcbd22', '#17becf',
218
+ '#aec7e8', '#ffbb78', '#98df8a', '#ff9896', '#c5b0d5',
219
+ '#c49c94', '#f7b6d2', '#c7c7c7', '#dbdb8d', '#9edae5'
220
+ ];
221
+
222
+ // Get controls
223
+ const peakThresholdSlider = document.getElementById('peak-threshold');
224
+ const peakThresholdValue = document.getElementById('peak-threshold-value');
225
+ const confidenceHighSlider = document.getElementById('confidence-high');
226
+ const confidenceHighValue = document.getElementById('confidence-high-value');
227
+ const confidenceMediumSlider = document.getElementById('confidence-medium');
228
+ const confidenceMediumValue = document.getElementById('confidence-medium-value');
229
+ const alignmentCanvas = document.getElementById('alignment-canvas');
230
+ const timelineCanvas = document.getElementById('timeline-canvas');
231
+ const alignmentCtx = alignmentCanvas.getContext('2d');
232
+ const timelineCtx = timelineCanvas.getContext('2d');
233
+
234
// Keep each slider's numeric read-out in sync with its value and re-render
// the whole visualization on every change.
peakThresholdSlider.oninput = function () {
    peakThresholdValue.textContent = `${this.value}%`;
    updateVisualization();
};

confidenceHighSlider.oninput = function () {
    confidenceHighValue.textContent = (this.value / 100).toFixed(2);
    updateVisualization();
};

confidenceMediumSlider.oninput = function () {
    confidenceMediumValue.textContent = (this.value / 100).toFixed(2);
    updateVisualization();
};

// Restore all sliders and their labels to the default settings, then redraw.
// NOTE(review): not called from this script — presumably wired to a reset
// button in the surrounding HTML; confirm against the markup.
function resetDefaults() {
    const defaults = [
        [peakThresholdSlider, 90, peakThresholdValue, '90%'],
        [confidenceHighSlider, 50, confidenceHighValue, '0.50'],
        [confidenceMediumSlider, 20, confidenceMediumValue, '0.20']
    ];
    for (const [slider, value, label, text] of defaults) {
        slider.value = value;
        label.textContent = text;
    }
    updateVisualization();
}
259
+
260
/**
 * Derive a frame alignment for one word from its attention weights.
 *
 * The peak frame is the argmax of `weights`; every frame whose weight is at
 * least `peakThreshold`% of the peak counts as significant, and the returned
 * range [startIdx, endIdx] spans the outermost significant frames.
 *
 * @param {number[]} weights        attention weight per feature frame
 * @param {number}   peakThreshold  percentage of the peak (e.g. 90)
 * @returns {{startIdx:number, endIdx:number, peakIdx:number,
 *            peakWeight:number, avgWeight:number, threshold:number}}
 */
function calculateAlignment(weights, peakThreshold) {
    // Locate the attention peak (argmax).
    let peakIdx = 0;
    for (let i = 1; i < weights.length; i++) {
        if (weights[i] > weights[peakIdx]) {
            peakIdx = i;
        }
    }
    const peakWeight = weights[peakIdx];

    // Collect every frame at or above the relative threshold.
    const threshold = peakWeight * (peakThreshold / 100);
    let startIdx = peakIdx;
    let endIdx = peakIdx;
    let sumWeight = 0;
    let count = 0;
    weights.forEach(function (w, i) {
        if (w >= threshold) {
            startIdx = Math.min(startIdx, i);
            endIdx = Math.max(endIdx, i);
            sumWeight += w;
            count += 1;
        }
    });

    return {
        startIdx: startIdx,
        endIdx: endIdx,
        peakIdx: peakIdx,
        peakWeight: peakWeight,
        // Mean weight over significant frames; falls back to the peak weight
        // in the degenerate case of no qualifying frames.
        avgWeight: count > 0 ? sumWeight / count : peakWeight,
        threshold: threshold
    };
}
298
+
299
/**
 * Bucket an average attention weight into a confidence label.
 * Strictly greater than `highThreshold` → 'high'; strictly greater than
 * `mediumThreshold` → 'medium'; otherwise 'low'.
 */
function getConfidenceLevel(avgWeight, highThreshold, mediumThreshold) {
    return avgWeight > highThreshold ? 'high'
         : avgWeight > mediumThreshold ? 'medium'
         : 'low';
}
304
+
305
/**
 * Draw the word-to-frame alignment chart on the main canvas.
 *
 * Reads the three sliders for current thresholds, computes one alignment per
 * gloss word via calculateAlignment(), then renders for each word: a colored
 * region over its frame span (opacity encodes confidence), the attention
 * waveform inside the region, a centered label, a red star at the peak frame,
 * and the word name on the y-axis. Finishes with grid, axes, and axis titles.
 *
 * @returns the computed alignments array so callers (updateVisualization)
 *          can reuse it for the timeline and the details table.
 */
function drawAlignmentChart() {
    // Current slider settings (peak threshold in %, confidence cutoffs as 0-1).
    const peakThreshold = parseInt(peakThresholdSlider.value);
    const highThreshold = parseInt(confidenceHighSlider.value) / 100;
    const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;

    // Canvas dimensions
    const width = alignmentCanvas.width;
    const height = alignmentCanvas.height;
    const leftMargin = 180;   // room for word labels on the y-axis
    const rightMargin = 50;
    const topMargin = 60;     // room for the two title lines
    const bottomMargin = 80;  // room for x-axis tick labels and title

    const plotWidth = width - leftMargin - rightMargin;
    const plotHeight = height - topMargin - bottomMargin;

    // One row per gloss word, one column per feature frame.
    const rowHeight = plotHeight / numGlosses;
    const featureWidth = plotWidth / numFeatures;

    // Clear canvas
    alignmentCtx.clearRect(0, 0, width, height);

    // Draw title
    alignmentCtx.fillStyle = '#333';
    alignmentCtx.font = 'bold 18px Arial';
    alignmentCtx.textAlign = 'center';
    alignmentCtx.fillText('Word-to-Frame Alignment', width / 2, 30);
    alignmentCtx.font = '13px Arial';
    alignmentCtx.fillText('(based on attention peaks, ★ = peak frame)', width / 2, 48);

    // Calculate alignments for every word, attaching word metadata so the
    // drawing passes below (and drawTimeline / the details table) only need
    // the alignments array.
    const alignments = [];
    for (let wordIdx = 0; wordIdx < numGlosses; wordIdx++) {
        const data = attentionData[wordIdx];
        const alignment = calculateAlignment(data.weights, peakThreshold);
        alignment.word = data.word;
        alignment.wordIdx = wordIdx;
        alignment.weights = data.weights;
        alignments.push(alignment);
    }

    // Draw grid: one light vertical line per feature-frame boundary.
    alignmentCtx.strokeStyle = '#e0e0e0';
    alignmentCtx.lineWidth = 0.5;
    for (let i = 0; i <= numFeatures; i++) {
        const x = leftMargin + i * featureWidth;
        alignmentCtx.beginPath();
        alignmentCtx.moveTo(x, topMargin);
        alignmentCtx.lineTo(x, topMargin + plotHeight);
        alignmentCtx.stroke();
    }

    // Draw word regions
    for (let wordIdx = 0; wordIdx < numGlosses; wordIdx++) {
        const alignment = alignments[wordIdx];
        const confidence = getConfidenceLevel(alignment.avgWeight, highThreshold, mediumThreshold);
        const y = topMargin + wordIdx * rowHeight;

        // Alpha based on confidence (high = most opaque).
        const alpha = confidence === 'high' ? 0.9 : confidence === 'medium' ? 0.7 : 0.5;

        // Draw rectangle for word region (spans startIdx..endIdx inclusive).
        const startX = leftMargin + alignment.startIdx * featureWidth;
        const rectWidth = (alignment.endIdx - alignment.startIdx + 1) * featureWidth;

        alignmentCtx.fillStyle = colors[wordIdx % 20];
        alignmentCtx.globalAlpha = alpha;
        alignmentCtx.fillRect(startX, y, rectWidth, rowHeight * 0.8);
        alignmentCtx.globalAlpha = 1.0;

        // Draw border
        alignmentCtx.strokeStyle = '#000';
        alignmentCtx.lineWidth = 2;
        alignmentCtx.strokeRect(startX, y, rectWidth, rowHeight * 0.8);

        // Draw attention waveform inside rectangle
        alignmentCtx.strokeStyle = 'rgba(0, 0, 255, 0.8)';
        alignmentCtx.lineWidth = 1.5;
        alignmentCtx.beginPath();
        for (let i = alignment.startIdx; i <= alignment.endIdx; i++) {
            const x = leftMargin + i * featureWidth + featureWidth / 2;
            const weight = alignment.weights[i];
            const maxWeight = alignment.peakWeight;
            const normalizedWeight = weight / (maxWeight * 1.2); // Scale for visibility
            const waveY = y + rowHeight * 0.8 - (normalizedWeight * rowHeight * 0.6);

            if (i === alignment.startIdx) {
                alignmentCtx.moveTo(x, waveY);
            } else {
                alignmentCtx.lineTo(x, waveY);
            }
        }
        alignmentCtx.stroke();

        // Draw word label: white text on a dark pill centered in the region.
        const labelX = startX + rectWidth / 2;
        const labelY = y + rowHeight * 0.4;

        alignmentCtx.fillStyle = 'rgba(0, 0, 0, 0.7)';
        alignmentCtx.fillRect(labelX - 60, labelY - 12, 120, 24);
        alignmentCtx.fillStyle = '#fff';
        alignmentCtx.font = 'bold 13px Arial';
        alignmentCtx.textAlign = 'center';
        alignmentCtx.textBaseline = 'middle';
        alignmentCtx.fillText(alignment.word, labelX, labelY);

        // Mark peak frame with star
        const peakX = leftMargin + alignment.peakIdx * featureWidth + featureWidth / 2;
        const peakY = y + rowHeight * 0.4;

        // Draw star (red with yellow outline for visibility over any color).
        alignmentCtx.fillStyle = '#ff0000';
        alignmentCtx.strokeStyle = '#ffff00';
        alignmentCtx.lineWidth = 1.5;
        alignmentCtx.font = '20px Arial';
        alignmentCtx.textAlign = 'center';
        alignmentCtx.strokeText('★', peakX, peakY);
        alignmentCtx.fillText('★', peakX, peakY);

        // Y-axis label (word names)
        alignmentCtx.fillStyle = '#333';
        alignmentCtx.font = '12px Arial';
        alignmentCtx.textAlign = 'right';
        alignmentCtx.textBaseline = 'middle';
        alignmentCtx.fillText(alignment.word, leftMargin - 10, y + rowHeight * 0.4);
    }

    // Draw horizontal grid lines separating the word rows.
    alignmentCtx.strokeStyle = '#ccc';
    alignmentCtx.lineWidth = 0.5;
    for (let i = 0; i <= numGlosses; i++) {
        const y = topMargin + i * rowHeight;
        alignmentCtx.beginPath();
        alignmentCtx.moveTo(leftMargin, y);
        alignmentCtx.lineTo(leftMargin + plotWidth, y);
        alignmentCtx.stroke();
    }

    // Draw axes (plot border).
    alignmentCtx.strokeStyle = '#000';
    alignmentCtx.lineWidth = 2;
    alignmentCtx.strokeRect(leftMargin, topMargin, plotWidth, plotHeight);

    // X-axis labels (frame indices)
    alignmentCtx.fillStyle = '#000';
    alignmentCtx.font = '11px Arial';
    alignmentCtx.textAlign = 'center';
    alignmentCtx.textBaseline = 'top';
    for (let i = 0; i < numFeatures; i++) {
        const x = leftMargin + i * featureWidth + featureWidth / 2;
        alignmentCtx.fillText(i.toString(), x, topMargin + plotHeight + 10);
    }

    // Axis titles
    alignmentCtx.fillStyle = '#333';
    alignmentCtx.font = 'bold 14px Arial';
    alignmentCtx.textAlign = 'center';
    alignmentCtx.fillText('Feature Frame Index', leftMargin + plotWidth / 2, height - 20);

    // Rotated y-axis title drawn inside a save/restore pair so the transform
    // does not leak into later drawing.
    alignmentCtx.save();
    alignmentCtx.translate(30, topMargin + plotHeight / 2);
    alignmentCtx.rotate(-Math.PI / 2);
    alignmentCtx.fillText('Generated Word', 0, 0);
    alignmentCtx.restore();

    return alignments;
}
472
+
473
/**
 * Draw the compact timeline bar: a grey background strip with one colored
 * span per word's frame range; opacity encodes the confidence bucket.
 * Uses the same left/right margins as drawAlignmentChart so columns line up.
 *
 * @param alignments - the array returned by drawAlignmentChart().
 */
function drawTimeline(alignments) {
    const highThreshold = parseInt(confidenceHighSlider.value) / 100;
    const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;

    const width = timelineCanvas.width;
    const height = timelineCanvas.height;
    const leftMargin = 180;
    const rightMargin = 50;
    const plotWidth = width - leftMargin - rightMargin;
    const featureWidth = plotWidth / numFeatures;

    // Clear canvas
    timelineCtx.clearRect(0, 0, width, height);

    // Background bar
    timelineCtx.fillStyle = '#ddd';
    timelineCtx.fillRect(leftMargin, 30, plotWidth, 40);
    timelineCtx.strokeStyle = '#000';
    timelineCtx.lineWidth = 2;
    timelineCtx.strokeRect(leftMargin, 30, plotWidth, 40);

    // Draw word regions on timeline (later words paint over earlier overlaps).
    for (let wordIdx = 0; wordIdx < alignments.length; wordIdx++) {
        const alignment = alignments[wordIdx];
        const confidence = getConfidenceLevel(alignment.avgWeight, highThreshold, mediumThreshold);
        const alpha = confidence === 'high' ? 0.9 : confidence === 'medium' ? 0.7 : 0.5;

        const startX = leftMargin + alignment.startIdx * featureWidth;
        const rectWidth = (alignment.endIdx - alignment.startIdx + 1) * featureWidth;

        timelineCtx.fillStyle = colors[wordIdx % 20];
        timelineCtx.globalAlpha = alpha;
        timelineCtx.fillRect(startX, 30, rectWidth, 40);
        timelineCtx.globalAlpha = 1.0;
        timelineCtx.strokeStyle = '#000';
        timelineCtx.lineWidth = 0.5;
        timelineCtx.strokeRect(startX, 30, rectWidth, 40);
    }

    // Title
    timelineCtx.fillStyle = '#333';
    timelineCtx.font = 'bold 13px Arial';
    timelineCtx.textAlign = 'left';
    timelineCtx.fillText('Timeline Progress Bar', leftMargin, 20);
}
518
+
519
/**
 * Rebuild the alignment details table: one row per word with its frame range,
 * peak frame, span length, average attention, and confidence bucket.
 */
function updateDetailsPanel(alignments, highThreshold, mediumThreshold) {
    const panel = document.getElementById('alignment-details');
    const cellStyle = 'padding: 8px; border: 1px solid #ddd;';
    const pieces = [
        '<table style="width: 100%; border-collapse: collapse;">',
        '<tr style="background: #f0f0f0; font-weight: bold;">'
    ];

    // Header row.
    for (const heading of ['Word', 'Feature Range', 'Peak', 'Span', 'Avg Attention', 'Confidence']) {
        pieces.push(`<th style="${cellStyle}">${heading}</th>`);
    }
    pieces.push('</tr>');

    // One data row per aligned word.
    for (const align of alignments) {
        const confidence = getConfidenceLevel(align.avgWeight, highThreshold, mediumThreshold);
        const span = align.endIdx - align.startIdx + 1;
        pieces.push(
            '<tr>',
            `<td style="${cellStyle}"><strong>${align.word}</strong></td>`,
            `<td style="${cellStyle}">${align.startIdx} → ${align.endIdx}</td>`,
            `<td style="${cellStyle}">${align.peakIdx}</td>`,
            `<td style="${cellStyle}">${span}</td>`,
            `<td style="${cellStyle}">${align.avgWeight.toFixed(4)}</td>`,
            `<td style="${cellStyle}"><span class="confidence ${confidence}">${confidence}</span></td>`,
            '</tr>'
        );
    }

    pieces.push('</table>');
    panel.innerHTML = pieces.join('');
}
548
+
549
/**
 * Redraw everything: the alignment chart, the timeline bar, and the details
 * table, all from one freshly computed alignments array.
 */
function updateVisualization() {
    const alignments = drawAlignmentChart();
    drawTimeline(alignments);
    updateDetailsPanel(
        alignments,
        parseInt(confidenceHighSlider.value) / 100,
        parseInt(confidenceMediumSlider.value) / 100
    );
}
556
+
557
// Event listeners for sliders.
// BUG FIX: the original referenced undefined `peakSlider` / `peakValue`
// (the declared controls are `peakThresholdSlider` / `peakThresholdValue`),
// so the first addEventListener call threw a ReferenceError, aborted the
// rest of the script, and skipped the initial render below.
// NOTE(review): these listeners duplicate the `.oninput` handlers assigned
// earlier in this script, so each slider change triggers two (idempotent)
// redraws — consider removing one set.
peakThresholdSlider.addEventListener('input', function() {
    peakThresholdValue.textContent = peakThresholdSlider.value + '%';
    updateVisualization();
});

confidenceHighSlider.addEventListener('input', function() {
    const val = parseInt(confidenceHighSlider.value) / 100;
    confidenceHighValue.textContent = val.toFixed(2);
    updateVisualization();
});

confidenceMediumSlider.addEventListener('input', function() {
    const val = parseInt(confidenceMediumSlider.value) / 100;
    confidenceMediumValue.textContent = val.toFixed(2);
    updateVisualization();
});

// Initial visualization
updateVisualization();
577
+ </script>
578
+ </body>
579
+ </html>
SignX/inference_output/detailed_prediction_20260102_202418/173745/translation.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ With BPE: C@@ A@@ R BREAK-DOWN
2
+ Clean: CAR BREAK-DOWN
3
+ Ground Truth: CAR BREAK-DOWN
SignX/inference_output/detailed_prediction_20260102_202534/23880856/23880856.mp4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:555d737baa311faa5ff33afa7d8a6ca4c3090c41e8c47eaa39569e493dce7282
3
+ size 87354