FangSen9000 committed on
Commit ·
741864d
Parent(s): cfee709
Increase the threshold corresponding to the feature-frame; add the corresponding original image of smkd
Browse files
- SignX/detailed_prediction_20251225_192957/sample_000/frame_alignment.json +0 -86
- SignX/detailed_prediction_20251225_192957/sample_000/translation.txt +0 -2
- SignX/detailed_prediction_20251225_193758/sample_000/frame_alignment.json +0 -86
- SignX/detailed_prediction_20251225_193758/sample_000/translation.txt +0 -2
- SignX/{detailed_prediction_20251225_192957 → detailed_prediction_20251226_155113}/sample_000/analysis_report.txt +20 -18
- SignX/{detailed_prediction_20251225_192957 → detailed_prediction_20251226_155113}/sample_000/attention_heatmap.png +2 -2
- SignX/{detailed_prediction_20251225_193758 → detailed_prediction_20251226_155113}/sample_000/attention_weights.npy +2 -2
- SignX/{detailed_prediction_20251225_193758 → detailed_prediction_20251226_155113}/sample_000/debug_video_path.txt +1 -1
- SignX/detailed_prediction_20251226_155113/sample_000/feature_frame_mapping.json +176 -0
- SignX/detailed_prediction_20251226_155113/sample_000/frame_alignment.json +104 -0
- SignX/{detailed_prediction_20251225_192957 → detailed_prediction_20251226_155113}/sample_000/frame_alignment.png +2 -2
- SignX/detailed_prediction_20251226_155113/sample_000/frame_alignment_NEW.png +3 -0
- SignX/{detailed_prediction_20251225_192957 → detailed_prediction_20251226_155113}/sample_000/gloss_to_frames.png +2 -2
- SignX/detailed_prediction_20251226_155113/sample_000/gloss_to_frames_NEW.png +3 -0
- SignX/detailed_prediction_20251226_155113/sample_000/interactive_alignment.html +579 -0
- SignX/detailed_prediction_20251226_155113/sample_000/translation.txt +2 -0
- SignX/{detailed_prediction_20251225_193758 → detailed_prediction_20251226_161814}/sample_000/analysis_report.txt +20 -18
- SignX/{detailed_prediction_20251225_193758 → detailed_prediction_20251226_161814}/sample_000/attention_heatmap.png +2 -2
- SignX/{detailed_prediction_20251225_192957 → detailed_prediction_20251226_161814}/sample_000/attention_weights.npy +2 -2
- SignX/{detailed_prediction_20251225_192957 → detailed_prediction_20251226_161814}/sample_000/debug_video_path.txt +1 -1
- SignX/detailed_prediction_20251226_161814/sample_000/feature_frame_mapping.json +176 -0
- SignX/detailed_prediction_20251226_161814/sample_000/frame_alignment.json +104 -0
- SignX/{detailed_prediction_20251225_193758 → detailed_prediction_20251226_161814}/sample_000/frame_alignment.png +2 -2
- SignX/detailed_prediction_20251226_161814/sample_000/gloss_to_frames.png +3 -0
- SignX/detailed_prediction_20251226_161814/sample_000/interactive_alignment.html +579 -0
- SignX/detailed_prediction_20251226_161814/sample_000/translation.txt +2 -0
- SignX/eval/attention_analysis.py +94 -24
- SignX/eval/generate_feature_mapping.py +119 -0
- SignX/eval/generate_interactive_alignment.py +4 -4
- SignX/eval/regenerate_visualizations.py +123 -0
- SignX/inference.sh +38 -21
- SignX/inference_output.txt +1 -1
- SignX/inference_output.txt.clean +1 -1
- SignX/models/evalu.py +42 -0
SignX/detailed_prediction_20251225_192957/sample_000/frame_alignment.json
DELETED
@@ -1,86 +0,0 @@
-{
-  "translation": "<unk> NOW-WEEK STUDENT IX HAVE NONE/NOTHING GO NONE/NOTHING",
-  "words": [
-    "<unk>",
-    "NOW-WEEK",
-    "STUDENT",
-    "IX",
-    "HAVE",
-    "NONE/NOTHING",
-    "GO",
-    "NONE/NOTHING"
-  ],
-  "total_video_frames": 24,
-  "frame_ranges": [
-    {
-      "word": "<unk>",
-      "start_frame": 0,
-      "end_frame": 23,
-      "peak_frame": 0,
-      "avg_attention": 0.06790952384471893,
-      "confidence": "low"
-    },
-    {
-      "word": "NOW-WEEK",
-      "start_frame": 2,
-      "end_frame": 3,
-      "peak_frame": 2,
-      "avg_attention": 0.4792596399784088,
-      "confidence": "medium"
-    },
-    {
-      "word": "STUDENT",
-      "start_frame": 1,
-      "end_frame": 23,
-      "peak_frame": 21,
-      "avg_attention": 0.13404551148414612,
-      "confidence": "low"
-    },
-    {
-      "word": "IX",
-      "start_frame": 1,
-      "end_frame": 23,
-      "peak_frame": 3,
-      "avg_attention": 0.09226731956005096,
-      "confidence": "low"
-    },
-    {
-      "word": "HAVE",
-      "start_frame": 4,
-      "end_frame": 6,
-      "peak_frame": 5,
-      "avg_attention": 0.27426692843437195,
-      "confidence": "medium"
-    },
-    {
-      "word": "NONE/NOTHING",
-      "start_frame": 7,
-      "end_frame": 8,
-      "peak_frame": 7,
-      "avg_attention": 0.3239603638648987,
-      "confidence": "medium"
-    },
-    {
-      "word": "GO",
-      "start_frame": 7,
-      "end_frame": 23,
-      "peak_frame": 7,
-      "avg_attention": 0.1878073364496231,
-      "confidence": "low"
-    },
-    {
-      "word": "NONE/NOTHING",
-      "start_frame": 8,
-      "end_frame": 8,
-      "peak_frame": 8,
-      "avg_attention": 0.7333312630653381,
-      "confidence": "high"
-    }
-  ],
-  "statistics": {
-    "avg_confidence": 0.2866059858351946,
-    "high_confidence_words": 1,
-    "medium_confidence_words": 3,
-    "low_confidence_words": 4
-  }
-}

SignX/detailed_prediction_20251225_192957/sample_000/translation.txt
DELETED
@@ -1,2 +0,0 @@
-With BPE: <unk> NOW@@ -@@ WEEK STUDENT I@@ X HAVE NONE/NOTHING GO NONE/NOTHING
-Clean: <unk> NOW-WEEK STUDENT IX HAVE NONE/NOTHING GO NONE/NOTHING

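Note on the BPE lines: the "With BPE" output carries subword pieces joined by trailing "@@ " markers (subword-nmt style), and the "Clean" line is what remains after merging them. A minimal sketch of that merge, assuming the standard marker convention (the repo's own detokenization helper is not shown in this diff):

def remove_bpe(line: str) -> str:
    """Join subword pieces marked with a trailing '@@ ' back into words."""
    return line.replace("@@ ", "").replace("@@", "")  # second call catches a line-final '@@'

bpe = "<unk> NOW@@ -@@ WEEK STUDENT I@@ X HAVE NONE/NOTHING GO NONE/NOTHING"
print(remove_bpe(bpe))  # <unk> NOW-WEEK STUDENT IX HAVE NONE/NOTHING GO NONE/NOTHING
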
SignX/detailed_prediction_20251225_193758/sample_000/frame_alignment.json
DELETED
@@ -1,86 +0,0 @@
-{
-  "translation": "<unk> NOW-WEEK STUDENT IX HAVE NONE/NOTHING GO NONE/NOTHING",
-  "words": [
-    "<unk>",
-    "NOW-WEEK",
-    "STUDENT",
-    "IX",
-    "HAVE",
-    "NONE/NOTHING",
-    "GO",
-    "NONE/NOTHING"
-  ],
-  "total_video_frames": 24,
-  "frame_ranges": [
-    {
-      "word": "<unk>",
-      "start_frame": 0,
-      "end_frame": 23,
-      "peak_frame": 0,
-      "avg_attention": 0.06790952384471893,
-      "confidence": "low"
-    },
-    {
-      "word": "NOW-WEEK",
-      "start_frame": 2,
-      "end_frame": 3,
-      "peak_frame": 2,
-      "avg_attention": 0.4792596399784088,
-      "confidence": "medium"
-    },
-    {
-      "word": "STUDENT",
-      "start_frame": 1,
-      "end_frame": 23,
-      "peak_frame": 21,
-      "avg_attention": 0.13404551148414612,
-      "confidence": "low"
-    },
-    {
-      "word": "IX",
-      "start_frame": 1,
-      "end_frame": 23,
-      "peak_frame": 3,
-      "avg_attention": 0.09226731956005096,
-      "confidence": "low"
-    },
-    {
-      "word": "HAVE",
-      "start_frame": 4,
-      "end_frame": 6,
-      "peak_frame": 5,
-      "avg_attention": 0.27426692843437195,
-      "confidence": "medium"
-    },
-    {
-      "word": "NONE/NOTHING",
-      "start_frame": 7,
-      "end_frame": 8,
-      "peak_frame": 7,
-      "avg_attention": 0.3239603638648987,
-      "confidence": "medium"
-    },
-    {
-      "word": "GO",
-      "start_frame": 7,
-      "end_frame": 23,
-      "peak_frame": 7,
-      "avg_attention": 0.1878073364496231,
-      "confidence": "low"
-    },
-    {
-      "word": "NONE/NOTHING",
-      "start_frame": 8,
-      "end_frame": 8,
-      "peak_frame": 8,
-      "avg_attention": 0.7333312630653381,
-      "confidence": "high"
-    }
-  ],
-  "statistics": {
-    "avg_confidence": 0.2866059858351946,
-    "high_confidence_words": 1,
-    "medium_confidence_words": 3,
-    "low_confidence_words": 4
-  }
-}

SignX/{detailed_prediction_20251225_192957 → detailed_prediction_20251226_155113}/sample_000/analysis_report.txt
RENAMED
@@ -2,43 +2,45 @@
 Sign Language Recognition - Attention Analysis Report
 ================================================================================

-Generated: 2025-12-
+Generated: 2025-12-26 15:51:17

 Translation result:
 --------------------------------------------------------------------------------
-<unk>
+<unk> #IF FRIEND GROUP/TOGETHER DEPART PARTY IX-1p FINISH JOIN IX-1p

 Video info:
 --------------------------------------------------------------------------------
-Total frames:
-Word count:
+Total frames: 28
+Word count: 10

 Attention weight info:
 --------------------------------------------------------------------------------
-Shape: (
-- Decoding steps:
+Shape: (28, 8, 28)
+- Decoding steps: 28
 - Batch size: 8

 Word-to-frame details:
 ================================================================================
 No.  Word            Frames  Peak  Attn   Conf
 --------------------------------------------------------------------------------
-1    <unk>           0-
-2
-3
-4
-5
-6
-7
-8
+1    <unk>           0-0     0     0.133  low
+2    #IF             2-3     2     0.359  medium
+3    FRIEND          5-5     5     0.449  medium
+4    GROUP/TOGETHER  8-8     8     0.371  medium
+5    DEPART          27-27   27    0.305  medium
+6    PARTY           27-27   27    0.296  medium
+7    IX-1p           27-27   27    0.326  medium
+8    FINISH          11-12   12    0.467  medium
+9    JOIN            13-14   14    0.317  medium
+10   IX-1p           17-17   17    0.358  medium

 ================================================================================

 Summary statistics:
 --------------------------------------------------------------------------------
-Average attention weight: 0.
-High-confidence words:
-Medium-confidence words:
-Low-confidence words:
+Average attention weight: 0.338
+High-confidence words: 0 (0.0%)
+Medium-confidence words: 9 (90.0%)
+Low-confidence words: 1 (10.0%)

 ================================================================================

SignX/{detailed_prediction_20251225_192957 → detailed_prediction_20251226_155113}/sample_000/attention_heatmap.png
RENAMED
File without changes

SignX/{detailed_prediction_20251225_193758 → detailed_prediction_20251226_155113}/sample_000/attention_weights.npy
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:7414e5ab870540255a4bc963aa612d837eca27b95da7b4603c4c8e39f82b8c01
+size 25216

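The pointer's size is consistent with the report's stated attention shape: assuming float32 storage (the dtype is not recorded in the diff), a (28, 8, 28) array holds 28*8*28*4 = 25088 bytes, and the 128-byte .npy v1 header brings the file to exactly 25216 bytes. A quick sanity check:

import os
import numpy as np

# decode steps x batch x encoder frames, per the report's labels
attn = np.zeros((28, 8, 28), dtype=np.float32)
np.save("attention_weights.npy", attn)
print(os.path.getsize("attention_weights.npy"))  # 25216 = 128-byte header + 25088 bytes of data
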
SignX/{detailed_prediction_20251225_193758 → detailed_prediction_20251226_155113}/sample_000/debug_video_path.txt
RENAMED
@@ -1,4 +1,4 @@
-video_path = '/common/users/sf895/output/huggingface_asllrp_repo/SignX/eval/tiny_test_data/videos/
+video_path = '/common/users/sf895/output/huggingface_asllrp_repo/SignX/eval/tiny_test_data/videos/632051.mp4'
 video_path type = <class 'str'>
 video_path is None: False
 bool(video_path): True

SignX/detailed_prediction_20251226_155113/sample_000/feature_frame_mapping.json
ADDED
@@ -0,0 +1,176 @@
+{
+  "original_frame_count": 106,
+  "feature_count": 28,
+  "downsampling_ratio": 3.7857142857142856,
+  "fps": 24.0,
+  "mapping": [
+    {
+      "feature_index": 0,
+      "frame_start": 0,
+      "frame_end": 3,
+      "frame_count": 3
+    },
+    {
+      "feature_index": 1,
+      "frame_start": 3,
+      "frame_end": 7,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 2,
+      "frame_start": 7,
+      "frame_end": 11,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 3,
+      "frame_start": 11,
+      "frame_end": 15,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 4,
+      "frame_start": 15,
+      "frame_end": 18,
+      "frame_count": 3
+    },
+    {
+      "feature_index": 5,
+      "frame_start": 18,
+      "frame_end": 22,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 6,
+      "frame_start": 22,
+      "frame_end": 26,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 7,
+      "frame_start": 26,
+      "frame_end": 30,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 8,
+      "frame_start": 30,
+      "frame_end": 34,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 9,
+      "frame_start": 34,
+      "frame_end": 37,
+      "frame_count": 3
+    },
+    {
+      "feature_index": 10,
+      "frame_start": 37,
+      "frame_end": 41,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 11,
+      "frame_start": 41,
+      "frame_end": 45,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 12,
+      "frame_start": 45,
+      "frame_end": 49,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 13,
+      "frame_start": 49,
+      "frame_end": 53,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 14,
+      "frame_start": 53,
+      "frame_end": 56,
+      "frame_count": 3
+    },
+    {
+      "feature_index": 15,
+      "frame_start": 56,
+      "frame_end": 60,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 16,
+      "frame_start": 60,
+      "frame_end": 64,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 17,
+      "frame_start": 64,
+      "frame_end": 68,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 18,
+      "frame_start": 68,
+      "frame_end": 71,
+      "frame_count": 3
+    },
+    {
+      "feature_index": 19,
+      "frame_start": 71,
+      "frame_end": 75,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 20,
+      "frame_start": 75,
+      "frame_end": 79,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 21,
+      "frame_start": 79,
+      "frame_end": 83,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 22,
+      "frame_start": 83,
+      "frame_end": 87,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 23,
+      "frame_start": 87,
+      "frame_end": 90,
+      "frame_count": 3
+    },
+    {
+      "feature_index": 24,
+      "frame_start": 90,
+      "frame_end": 94,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 25,
+      "frame_start": 94,
+      "frame_end": 98,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 26,
+      "frame_start": 98,
+      "frame_end": 102,
+      "frame_count": 4
+    },
+    {
+      "feature_index": 27,
+      "frame_start": 102,
+      "frame_end": 106,
+      "frame_count": 4
+    }
+  ]
+}

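The mapping partitions the 106 original video frames into 28 end-exclusive bins, so frame_count = frame_end - frame_start and the bins tile frames 0..106 without gaps. A minimal sketch that reproduces the committed table using floor-based boundaries; the actual logic lives in SignX/eval/generate_feature_mapping.py (added in this commit, not shown here) and may differ in detail:

import math

def build_mapping(original_frame_count: int, feature_count: int):
    """Evenly partition original video frames into end-exclusive feature bins."""
    ratio = original_frame_count / feature_count  # 106 / 28 = 3.7857...
    mapping = []
    for i in range(feature_count):
        start = math.floor(i * ratio)
        end = math.floor((i + 1) * ratio)
        mapping.append({"feature_index": i, "frame_start": start,
                        "frame_end": end, "frame_count": end - start})
    return mapping

m = build_mapping(106, 28)
assert m[0] == {"feature_index": 0, "frame_start": 0, "frame_end": 3, "frame_count": 3}
assert m[-1]["frame_end"] == 106  # matches the table above
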
SignX/detailed_prediction_20251226_155113/sample_000/frame_alignment.json
ADDED
@@ -0,0 +1,104 @@
+{
+  "translation": "<unk> #IF FRIEND GROUP/TOGETHER DEPART PARTY IX-1p FINISH JOIN IX-1p",
+  "words": [
+    "<unk>",
+    "#IF",
+    "FRIEND",
+    "GROUP/TOGETHER",
+    "DEPART",
+    "PARTY",
+    "IX-1p",
+    "FINISH",
+    "JOIN",
+    "IX-1p"
+  ],
+  "total_video_frames": 28,
+  "frame_ranges": [
+    {
+      "word": "<unk>",
+      "start_frame": 0,
+      "end_frame": 0,
+      "peak_frame": 0,
+      "avg_attention": 0.13272422552108765,
+      "confidence": "low"
+    },
+    {
+      "word": "#IF",
+      "start_frame": 2,
+      "end_frame": 3,
+      "peak_frame": 2,
+      "avg_attention": 0.35901427268981934,
+      "confidence": "medium"
+    },
+    {
+      "word": "FRIEND",
+      "start_frame": 5,
+      "end_frame": 5,
+      "peak_frame": 5,
+      "avg_attention": 0.4494199752807617,
+      "confidence": "medium"
+    },
+    {
+      "word": "GROUP/TOGETHER",
+      "start_frame": 8,
+      "end_frame": 8,
+      "peak_frame": 8,
+      "avg_attention": 0.3710141181945801,
+      "confidence": "medium"
+    },
+    {
+      "word": "DEPART",
+      "start_frame": 27,
+      "end_frame": 27,
+      "peak_frame": 27,
+      "avg_attention": 0.30533191561698914,
+      "confidence": "medium"
+    },
+    {
+      "word": "PARTY",
+      "start_frame": 27,
+      "end_frame": 27,
+      "peak_frame": 27,
+      "avg_attention": 0.2963099479675293,
+      "confidence": "medium"
+    },
+    {
+      "word": "IX-1p",
+      "start_frame": 27,
+      "end_frame": 27,
+      "peak_frame": 27,
+      "avg_attention": 0.3264133930206299,
+      "confidence": "medium"
+    },
+    {
+      "word": "FINISH",
+      "start_frame": 11,
+      "end_frame": 12,
+      "peak_frame": 12,
+      "avg_attention": 0.46679070591926575,
+      "confidence": "medium"
+    },
+    {
+      "word": "JOIN",
+      "start_frame": 13,
+      "end_frame": 14,
+      "peak_frame": 14,
+      "avg_attention": 0.3172740340232849,
+      "confidence": "medium"
+    },
+    {
+      "word": "IX-1p",
+      "start_frame": 17,
+      "end_frame": 17,
+      "peak_frame": 17,
+      "avg_attention": 0.3579559326171875,
+      "confidence": "medium"
+    }
+  ],
+  "statistics": {
+    "avg_confidence": 0.33822485208511355,
+    "high_confidence_words": 0,
+    "medium_confidence_words": 9,
+    "low_confidence_words": 1
+  }
+}

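Each word's range here is derived from its attention weights: frames scoring at least the peak-threshold fraction of the word's peak weight (90% by default in interactive_alignment.html below) define start_frame/end_frame, and the average weight over those frames is bucketed against the 0.50/0.20 confidence cutoffs. A minimal Python sketch mirroring calculateAlignment() from that HTML; the committed SignX/eval/attention_analysis.py may differ in detail:

import numpy as np

def align_word(weights, peak_ratio=0.90, high=0.50, medium=0.20):
    """Keep frames with weight >= peak_ratio * peak; bucket their mean into a confidence level."""
    w = np.asarray(weights, dtype=float)
    peak = int(w.argmax())
    keep = np.flatnonzero(w >= peak_ratio * w[peak])
    avg = float(w[keep].mean())
    conf = "high" if avg > high else "medium" if avg > medium else "low"
    return {"start_frame": int(keep[0]), "end_frame": int(keep[-1]),
            "peak_frame": peak, "avg_attention": avg, "confidence": conf}

# Final IX-1p weights (rounded) from the attentionData array in interactive_alignment.html,
# which aggregates to 8 features rather than the 28 frames used in this JSON:
ix1p = [0.0366, 0.0340, 0.2723, 0.3505, 0.0366, 0.2742, 0.0366, 0.0924]
print(align_word(ix1p))  # start/end/peak 3, avg ~0.35, confidence 'medium'
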
SignX/{detailed_prediction_20251225_192957 → detailed_prediction_20251226_155113}/sample_000/frame_alignment.png
RENAMED
File without changes

SignX/detailed_prediction_20251226_155113/sample_000/frame_alignment_NEW.png
ADDED
Git LFS Details

SignX/{detailed_prediction_20251225_192957 → detailed_prediction_20251226_155113}/sample_000/gloss_to_frames.png
RENAMED
File without changes

SignX/detailed_prediction_20251226_155113/sample_000/gloss_to_frames_NEW.png
ADDED
Git LFS Details

SignX/detailed_prediction_20251226_155113/sample_000/interactive_alignment.html
ADDED
@@ -0,0 +1,579 @@
+<!DOCTYPE html>
+<html lang="zh-CN">
+<head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>Interactive Word-Frame Alignment</title>
+    <style>
+        body {
+            font-family: 'Arial', sans-serif;
+            margin: 20px;
+            background-color: #f5f5f5;
+        }
+        .container {
+            max-width: 1800px;
+            margin: 0 auto;
+            background-color: white;
+            padding: 30px;
+            border-radius: 8px;
+            box-shadow: 0 2px 10px rgba(0,0,0,0.1);
+        }
+        h1 {
+            color: #333;
+            border-bottom: 3px solid #4CAF50;
+            padding-bottom: 10px;
+            margin-bottom: 20px;
+        }
+        .stats {
+            background-color: #E3F2FD;
+            padding: 15px;
+            border-radius: 5px;
+            margin-bottom: 20px;
+            border-left: 4px solid #2196F3;
+            font-size: 14px;
+        }
+        .controls {
+            background-color: #f9f9f9;
+            padding: 20px;
+            border-radius: 5px;
+            margin-bottom: 30px;
+            border: 1px solid #ddd;
+        }
+        .control-group {
+            margin-bottom: 15px;
+        }
+        label {
+            font-weight: bold;
+            display: inline-block;
+            width: 250px;
+            color: #555;
+        }
+        input[type="range"] {
+            width: 400px;
+            vertical-align: middle;
+        }
+        .value-display {
+            display: inline-block;
+            width: 80px;
+            font-family: monospace;
+            font-size: 14px;
+            color: #2196F3;
+            font-weight: bold;
+        }
+        .reset-btn {
+            margin-top: 15px;
+            padding: 10px 25px;
+            background-color: #2196F3;
+            color: white;
+            border: none;
+            border-radius: 5px;
+            cursor: pointer;
+            font-size: 14px;
+            font-weight: bold;
+        }
+        .reset-btn:hover {
+            background-color: #1976D2;
+        }
+        canvas {
+            border: 1px solid #999;
+            display: block;
+            margin: 20px auto;
+            background: white;
+        }
+        .legend {
+            margin-top: 20px;
+            padding: 15px;
+            background-color: #fff;
+            border: 1px solid #ddd;
+            border-radius: 5px;
+        }
+        .legend-item {
+            display: inline-block;
+            margin-right: 25px;
+            font-size: 13px;
+            margin-bottom: 10px;
+        }
+        .color-box {
+            display: inline-block;
+            width: 30px;
+            height: 15px;
+            margin-right: 8px;
+            vertical-align: middle;
+            border: 1px solid #666;
+        }
+        .info-panel {
+            margin-top: 20px;
+            padding: 15px;
+            background-color: #f9f9f9;
+            border-radius: 5px;
+            border: 1px solid #ddd;
+        }
+        .confidence {
+            display: inline-block;
+            padding: 3px 10px;
+            border-radius: 10px;
+            font-weight: bold;
+            font-size: 11px;
+            text-transform: uppercase;
+        }
+        .confidence.high {
+            background-color: #4CAF50;
+            color: white;
+        }
+        .confidence.medium {
+            background-color: #FF9800;
+            color: white;
+        }
+        .confidence.low {
+            background-color: #f44336;
+            color: white;
+        }
+    </style>
+</head>
+<body>
+    <div class="container">
+        <h1>🎯 Interactive Word-to-Frame Alignment Visualizer</h1>
+
+        <div class="stats">
+            <strong>Translation:</strong> <unk> #IF FRIEND GROUP/TOGETHER DEPART PARTY IX-1p FINISH JOIN IX-1p<br>
+            <strong>Total Words:</strong> 10 |
+            <strong>Total Features:</strong> 8
+        </div>
+
+        <div class="controls">
+            <h3>⚙️ Threshold Controls</h3>
+
+            <div class="control-group">
+                <label for="peak-threshold">Peak Threshold (% of max):</label>
+                <input type="range" id="peak-threshold" min="1" max="100" value="90" step="1">
+                <span class="value-display" id="peak-threshold-value">90%</span>
+                <br>
+                <small style="margin-left: 255px; color: #666;">
+                    A frame counts as "significant" when its attention weight ≥ (peak weight × threshold%)
+                </small>
+            </div>
+
+            <div class="control-group">
+                <label for="confidence-high">High Confidence (avg attn >):</label>
+                <input type="range" id="confidence-high" min="0" max="100" value="50" step="1">
+                <span class="value-display" id="confidence-high-value">0.50</span>
+            </div>
+
+            <div class="control-group">
+                <label for="confidence-medium">Medium Confidence (avg attn >):</label>
+                <input type="range" id="confidence-medium" min="0" max="100" value="20" step="1">
+                <span class="value-display" id="confidence-medium-value">0.20</span>
+            </div>
+
+            <button class="reset-btn" onclick="resetDefaults()">
+                Reset to Defaults
+            </button>
+        </div>
+
+        <div>
+            <h3>Word-to-Frame Alignment</h3>
+            <p style="color: #666; font-size: 13px;">
+                Each word is drawn as a colored rectangle whose width spans that word's feature-frame range. ★ = peak frame. The attention-weight waveform is drawn inside the rectangle.
+            </p>
+            <canvas id="alignment-canvas" width="1600" height="600"></canvas>
+
+            <h3 style="margin-top: 30px;">Timeline Progress Bar</h3>
+            <canvas id="timeline-canvas" width="1600" height="100"></canvas>
+
+            <div class="legend">
+                <strong>Legend:</strong><br><br>
+                <div class="legend-item">
+                    <span class="confidence high">High</span>
+                    <span class="confidence medium">Medium</span>
+                    <span class="confidence low">Low</span>
+                    Confidence Levels (opacity reflects confidence)
+                </div>
+                <div class="legend-item">
+                    <span style="color: red; font-size: 20px;">★</span>
+                    Peak Frame (highest attention)
+                </div>
+                <div class="legend-item">
+                    <span style="color: blue;">━</span>
+                    Attention Waveform (within word region)
+                </div>
+            </div>
+        </div>
+
+        <div class="info-panel">
+            <h3>Alignment Details</h3>
+            <div id="alignment-details"></div>
+        </div>
+    </div>
+
+    <script>
+        // Attention data from Python
+        const attentionData = [{"word": "<unk>", "word_idx": 0, "weights": [0.13272422552108765, 0.13272422552108765, 0.13272422552108765, 0.13272422552108765, 0.13272422552108765, 0.13272422552108765, 0.13272422552108765, 0.13272422552108765]}, {"word": "#IF", "word_idx": 1, "weights": [0.035573869943618774, 0.03952416777610779, 0.03952416777610779, 0.03952416777610779, 0.03952416777610779, 0.03952416777610779, 0.03952416777610779, 0.04215671867132187]}, {"word": "FRIEND", "word_idx": 2, "weights": [0.0035305300261825323, 0.06121520698070526, 0.038338951766490936, 0.03721731901168823, 0.0396437793970108, 0.06121520698070526, 0.03798501193523407, 0.06121520698070526]}, {"word": "GROUP/TOGETHER", "word_idx": 3, "weights": [0.0032519344240427017, 0.06749927252531052, 0.06769093871116638, 0.06797394156455994, 0.05660902336239815, 0.0032519344240427017, 0.06791087239980698, 0.06749927252531052]}, {"word": "DEPART", "word_idx": 4, "weights": [0.1269645392894745, 0.011337662115693092, 0.011169591918587685, 0.011129447259008884, 0.011337662115693092, 0.14123021066188812, 0.007393963634967804, 0.011116056703031063]}, {"word": "PARTY", "word_idx": 5, "weights": [0.08145920932292938, 0.003315121866762638, 0.24030542373657227, 0.0034980459604412317, 0.0034834302496165037, 0.16299134492874146, 0.003404060145840049, 0.0034867869690060616]}, {"word": "IX-1p", "word_idx": 6, "weights": [0.08820953965187073, 0.14399001002311707, 0.00422912510111928, 0.09592650085687637, 0.004478602670133114, 0.004475735127925873, 0.00422912510111928, 0.00439990172162652]}, {"word": "FINISH", "word_idx": 7, "weights": [0.0013184626586735249, 0.0013184626586735249, 0.0013184626586735249, 0.1313352882862091, 0.0013184626586735249, 0.14886727929115295, 0.14913317561149597, 0.004780622664839029]}, {"word": "JOIN", "word_idx": 8, "weights": [0.00555413169786334, 0.006608393043279648, 0.00555413169786334, 0.1470479518175125, 0.13233929872512817, 0.09560006111860275, 0.00555413169786334, 0.08190114796161652]}, {"word": "IX-1p", "word_idx": 9, "weights": [0.03655996546149254, 0.03403225541114807, 0.27225977182388306, 0.35054582357406616, 0.03655996546149254, 0.27422454953193665, 0.03655996546149254, 0.09239426255226135]}];
+        const numGlosses = 10;
+        const numFeatures = 8;
+
+        // Colors for different words (matching matplotlib tab20)
+        const colors = [
+            '#1f77b4', '#ff7f0e', '#2ca02c', '#d62728', '#9467bd',
+            '#8c564b', '#e377c2', '#7f7f7f', '#bcbd22', '#17becf',
+            '#aec7e8', '#ffbb78', '#98df8a', '#ff9896', '#c5b0d5',
+            '#c49c94', '#f7b6d2', '#c7c7c7', '#dbdb8d', '#9edae5'
+        ];
+
+        // Get controls
+        const peakThresholdSlider = document.getElementById('peak-threshold');
+        const peakThresholdValue = document.getElementById('peak-threshold-value');
+        const confidenceHighSlider = document.getElementById('confidence-high');
+        const confidenceHighValue = document.getElementById('confidence-high-value');
+        const confidenceMediumSlider = document.getElementById('confidence-medium');
+        const confidenceMediumValue = document.getElementById('confidence-medium-value');
+        const alignmentCanvas = document.getElementById('alignment-canvas');
+        const timelineCanvas = document.getElementById('timeline-canvas');
+        const alignmentCtx = alignmentCanvas.getContext('2d');
+        const timelineCtx = timelineCanvas.getContext('2d');
+
+        // Update displays when sliders change
+        peakThresholdSlider.oninput = function() {
+            peakThresholdValue.textContent = this.value + '%';
+            updateVisualization();
+        };
+
+        confidenceHighSlider.oninput = function() {
+            confidenceHighValue.textContent = (this.value / 100).toFixed(2);
+            updateVisualization();
+        };
+
+        confidenceMediumSlider.oninput = function() {
+            confidenceMediumValue.textContent = (this.value / 100).toFixed(2);
+            updateVisualization();
+        };
+
+        function resetDefaults() {
+            peakThresholdSlider.value = 90;
+            confidenceHighSlider.value = 50;
+            confidenceMediumSlider.value = 20;
+            peakThresholdValue.textContent = '90%';
+            confidenceHighValue.textContent = '0.50';
+            confidenceMediumValue.textContent = '0.20';
+            updateVisualization();
+        }
+
+        function calculateAlignment(weights, peakThreshold) {
+            // Find peak
+            let peakIdx = 0;
+            let peakWeight = weights[0];
+            for (let i = 1; i < weights.length; i++) {
+                if (weights[i] > peakWeight) {
+                    peakWeight = weights[i];
+                    peakIdx = i;
+                }
+            }
+
+            // Find significant frames
+            const threshold = peakWeight * (peakThreshold / 100);
+            let startIdx = peakIdx;
+            let endIdx = peakIdx;
+            let sumWeight = 0;
+            let count = 0;
+
+            for (let i = 0; i < weights.length; i++) {
+                if (weights[i] >= threshold) {
+                    if (i < startIdx) startIdx = i;
+                    if (i > endIdx) endIdx = i;
+                    sumWeight += weights[i];
+                    count++;
+                }
+            }
+
+            const avgWeight = count > 0 ? sumWeight / count : peakWeight;
+
+            return {
+                startIdx: startIdx,
+                endIdx: endIdx,
+                peakIdx: peakIdx,
+                peakWeight: peakWeight,
+                avgWeight: avgWeight,
+                threshold: threshold
+            };
+        }
+
+        function getConfidenceLevel(avgWeight, highThreshold, mediumThreshold) {
+            if (avgWeight > highThreshold) return 'high';
+            if (avgWeight > mediumThreshold) return 'medium';
+            return 'low';
+        }
+
+        function drawAlignmentChart() {
+            const peakThreshold = parseInt(peakThresholdSlider.value);
+            const highThreshold = parseInt(confidenceHighSlider.value) / 100;
+            const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;
+
+            // Canvas dimensions
+            const width = alignmentCanvas.width;
+            const height = alignmentCanvas.height;
+            const leftMargin = 180;
+            const rightMargin = 50;
+            const topMargin = 60;
+            const bottomMargin = 80;
+
+            const plotWidth = width - leftMargin - rightMargin;
+            const plotHeight = height - topMargin - bottomMargin;
+
+            const rowHeight = plotHeight / numGlosses;
+            const featureWidth = plotWidth / numFeatures;
+
+            // Clear canvas
+            alignmentCtx.clearRect(0, 0, width, height);
+
+            // Draw title
+            alignmentCtx.fillStyle = '#333';
+            alignmentCtx.font = 'bold 18px Arial';
+            alignmentCtx.textAlign = 'center';
+            alignmentCtx.fillText('Word-to-Frame Alignment', width / 2, 30);
+            alignmentCtx.font = '13px Arial';
+            alignmentCtx.fillText('(based on attention peaks, ★ = peak frame)', width / 2, 48);
+
+            // Calculate alignments
+            const alignments = [];
+            for (let wordIdx = 0; wordIdx < numGlosses; wordIdx++) {
+                const data = attentionData[wordIdx];
+                const alignment = calculateAlignment(data.weights, peakThreshold);
+                alignment.word = data.word;
+                alignment.wordIdx = wordIdx;
+                alignment.weights = data.weights;
+                alignments.push(alignment);
+            }
+
+            // Draw grid
+            alignmentCtx.strokeStyle = '#e0e0e0';
+            alignmentCtx.lineWidth = 0.5;
+            for (let i = 0; i <= numFeatures; i++) {
+                const x = leftMargin + i * featureWidth;
+                alignmentCtx.beginPath();
+                alignmentCtx.moveTo(x, topMargin);
+                alignmentCtx.lineTo(x, topMargin + plotHeight);
+                alignmentCtx.stroke();
+            }
+
+            // Draw word regions
+            for (let wordIdx = 0; wordIdx < numGlosses; wordIdx++) {
+                const alignment = alignments[wordIdx];
+                const confidence = getConfidenceLevel(alignment.avgWeight, highThreshold, mediumThreshold);
+                const y = topMargin + wordIdx * rowHeight;
+
+                // Alpha based on confidence
+                const alpha = confidence === 'high' ? 0.9 : confidence === 'medium' ? 0.7 : 0.5;
+
+                // Draw rectangle for word region
+                const startX = leftMargin + alignment.startIdx * featureWidth;
+                const rectWidth = (alignment.endIdx - alignment.startIdx + 1) * featureWidth;
+
+                alignmentCtx.fillStyle = colors[wordIdx % 20];
+                alignmentCtx.globalAlpha = alpha;
+                alignmentCtx.fillRect(startX, y, rectWidth, rowHeight * 0.8);
+                alignmentCtx.globalAlpha = 1.0;
+
+                // Draw border
+                alignmentCtx.strokeStyle = '#000';
+                alignmentCtx.lineWidth = 2;
+                alignmentCtx.strokeRect(startX, y, rectWidth, rowHeight * 0.8);
+
+                // Draw attention waveform inside rectangle
+                alignmentCtx.strokeStyle = 'rgba(0, 0, 255, 0.8)';
+                alignmentCtx.lineWidth = 1.5;
+                alignmentCtx.beginPath();
+                for (let i = alignment.startIdx; i <= alignment.endIdx; i++) {
+                    const x = leftMargin + i * featureWidth + featureWidth / 2;
+                    const weight = alignment.weights[i];
+                    const maxWeight = alignment.peakWeight;
+                    const normalizedWeight = weight / (maxWeight * 1.2); // Scale for visibility
+                    const waveY = y + rowHeight * 0.8 - (normalizedWeight * rowHeight * 0.6);
+
+                    if (i === alignment.startIdx) {
+                        alignmentCtx.moveTo(x, waveY);
+                    } else {
+                        alignmentCtx.lineTo(x, waveY);
+                    }
+                }
+                alignmentCtx.stroke();
+
+                // Draw word label
+                const labelX = startX + rectWidth / 2;
+                const labelY = y + rowHeight * 0.4;
+
+                alignmentCtx.fillStyle = 'rgba(0, 0, 0, 0.7)';
+                alignmentCtx.fillRect(labelX - 60, labelY - 12, 120, 24);
+                alignmentCtx.fillStyle = '#fff';
+                alignmentCtx.font = 'bold 13px Arial';
+                alignmentCtx.textAlign = 'center';
+                alignmentCtx.textBaseline = 'middle';
+                alignmentCtx.fillText(alignment.word, labelX, labelY);
+
+                // Mark peak frame with star
+                const peakX = leftMargin + alignment.peakIdx * featureWidth + featureWidth / 2;
+                const peakY = y + rowHeight * 0.4;
+
+                // Draw star
+                alignmentCtx.fillStyle = '#ff0000';
+                alignmentCtx.strokeStyle = '#ffff00';
+                alignmentCtx.lineWidth = 1.5;
+                alignmentCtx.font = '20px Arial';
+                alignmentCtx.textAlign = 'center';
+                alignmentCtx.strokeText('★', peakX, peakY);
+                alignmentCtx.fillText('★', peakX, peakY);
+
+                // Y-axis label (word names)
+                alignmentCtx.fillStyle = '#333';
+                alignmentCtx.font = '12px Arial';
+                alignmentCtx.textAlign = 'right';
+                alignmentCtx.textBaseline = 'middle';
+                alignmentCtx.fillText(alignment.word, leftMargin - 10, y + rowHeight * 0.4);
+            }
+
+            // Draw horizontal grid lines
+            alignmentCtx.strokeStyle = '#ccc';
+            alignmentCtx.lineWidth = 0.5;
+            for (let i = 0; i <= numGlosses; i++) {
+                const y = topMargin + i * rowHeight;
+                alignmentCtx.beginPath();
+                alignmentCtx.moveTo(leftMargin, y);
+                alignmentCtx.lineTo(leftMargin + plotWidth, y);
+                alignmentCtx.stroke();
+            }
+
+            // Draw axes
+            alignmentCtx.strokeStyle = '#000';
+            alignmentCtx.lineWidth = 2;
+            alignmentCtx.strokeRect(leftMargin, topMargin, plotWidth, plotHeight);
+
+            // X-axis labels (frame indices)
+            alignmentCtx.fillStyle = '#000';
+            alignmentCtx.font = '11px Arial';
+            alignmentCtx.textAlign = 'center';
+            alignmentCtx.textBaseline = 'top';
+            for (let i = 0; i < numFeatures; i++) {
+                const x = leftMargin + i * featureWidth + featureWidth / 2;
+                alignmentCtx.fillText(i.toString(), x, topMargin + plotHeight + 10);
+            }
+
+            // Axis titles
+            alignmentCtx.fillStyle = '#333';
+            alignmentCtx.font = 'bold 14px Arial';
+            alignmentCtx.textAlign = 'center';
+            alignmentCtx.fillText('Feature Frame Index', leftMargin + plotWidth / 2, height - 20);
+
+            alignmentCtx.save();
+            alignmentCtx.translate(30, topMargin + plotHeight / 2);
+            alignmentCtx.rotate(-Math.PI / 2);
+            alignmentCtx.fillText('Generated Word', 0, 0);
+            alignmentCtx.restore();
+
+            return alignments;
+        }
+
+        function drawTimeline(alignments) {
+            const highThreshold = parseInt(confidenceHighSlider.value) / 100;
+            const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;
+
+            const width = timelineCanvas.width;
+            const height = timelineCanvas.height;
+            const leftMargin = 180;
+            const rightMargin = 50;
+            const plotWidth = width - leftMargin - rightMargin;
+            const featureWidth = plotWidth / numFeatures;
+
+            // Clear canvas
+            timelineCtx.clearRect(0, 0, width, height);
+
+            // Background bar
+            timelineCtx.fillStyle = '#ddd';
+            timelineCtx.fillRect(leftMargin, 30, plotWidth, 40);
+            timelineCtx.strokeStyle = '#000';
+            timelineCtx.lineWidth = 2;
+            timelineCtx.strokeRect(leftMargin, 30, plotWidth, 40);
+
+            // Draw word regions on timeline
+            for (let wordIdx = 0; wordIdx < alignments.length; wordIdx++) {
+                const alignment = alignments[wordIdx];
+                const confidence = getConfidenceLevel(alignment.avgWeight, highThreshold, mediumThreshold);
+                const alpha = confidence === 'high' ? 0.9 : confidence === 'medium' ? 0.7 : 0.5;
+
+                const startX = leftMargin + alignment.startIdx * featureWidth;
+                const rectWidth = (alignment.endIdx - alignment.startIdx + 1) * featureWidth;
+
+                timelineCtx.fillStyle = colors[wordIdx % 20];
+                timelineCtx.globalAlpha = alpha;
+                timelineCtx.fillRect(startX, 30, rectWidth, 40);
+                timelineCtx.globalAlpha = 1.0;
+                timelineCtx.strokeStyle = '#000';
+                timelineCtx.lineWidth = 0.5;
+                timelineCtx.strokeRect(startX, 30, rectWidth, 40);
+            }
+
+            // Title
+            timelineCtx.fillStyle = '#333';
+            timelineCtx.font = 'bold 13px Arial';
+            timelineCtx.textAlign = 'left';
+            timelineCtx.fillText('Timeline Progress Bar', leftMargin, 20);
+        }
+
+        function updateDetailsPanel(alignments, highThreshold, mediumThreshold) {
+            const panel = document.getElementById('alignment-details');
+            let html = '<table style="width: 100%; border-collapse: collapse;">';
+            html += '<tr style="background: #f0f0f0; font-weight: bold;">';
+            html += '<th style="padding: 8px; border: 1px solid #ddd;">Word</th>';
+            html += '<th style="padding: 8px; border: 1px solid #ddd;">Feature Range</th>';
+            html += '<th style="padding: 8px; border: 1px solid #ddd;">Peak</th>';
+            html += '<th style="padding: 8px; border: 1px solid #ddd;">Span</th>';
+            html += '<th style="padding: 8px; border: 1px solid #ddd;">Avg Attention</th>';
+            html += '<th style="padding: 8px; border: 1px solid #ddd;">Confidence</th>';
+            html += '</tr>';
+
+            for (const align of alignments) {
+                const confidence = getConfidenceLevel(align.avgWeight, highThreshold, mediumThreshold);
+                const span = align.endIdx - align.startIdx + 1;
+
+                html += '<tr>';
+                html += `<td style="padding: 8px; border: 1px solid #ddd;"><strong>${align.word}</strong></td>`;
+                html += `<td style="padding: 8px; border: 1px solid #ddd;">${align.startIdx} → ${align.endIdx}</td>`;
+                html += `<td style="padding: 8px; border: 1px solid #ddd;">${align.peakIdx}</td>`;
+                html += `<td style="padding: 8px; border: 1px solid #ddd;">${span}</td>`;
+                html += `<td style="padding: 8px; border: 1px solid #ddd;">${align.avgWeight.toFixed(4)}</td>`;
+                html += `<td style="padding: 8px; border: 1px solid #ddd;"><span class="confidence ${confidence}">${confidence}</span></td>`;
+                html += '</tr>';
+            }
+
+            html += '</table>';
+            panel.innerHTML = html;
+        }
+
+        function updateVisualization() {
+            const alignments = drawAlignmentChart();
+            drawTimeline(alignments);
+            const highThreshold = parseInt(confidenceHighSlider.value) / 100;
+            const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;
+            updateDetailsPanel(alignments, highThreshold, mediumThreshold);
+        }
+
+        // Event listeners for sliders
+        peakThresholdSlider.addEventListener('input', function() {
+            peakThresholdValue.textContent = peakThresholdSlider.value + '%';
+            updateVisualization();
+        });
+
+        confidenceHighSlider.addEventListener('input', function() {
+            const val = parseInt(confidenceHighSlider.value) / 100;
+            confidenceHighValue.textContent = val.toFixed(2);
+            updateVisualization();
+        });
+
+        confidenceMediumSlider.addEventListener('input', function() {
+            const val = parseInt(confidenceMediumSlider.value) / 100;
+            confidenceMediumValue.textContent = val.toFixed(2);
+            updateVisualization();
+        });
+
+        // Initial visualization
+        updateVisualization();
+    </script>
+</body>
+</html>

SignX/detailed_prediction_20251226_155113/sample_000/translation.txt
ADDED
@@ -0,0 +1,2 @@
+With BPE: <unk> #IF FRIEND GROUP/TOGE@@ TH@@ E@@ R DEPART PARTY IX-1p FINISH JO@@ I@@ N IX-1p
+Clean: <unk> #IF FRIEND GROUP/TOGETHER DEPART PARTY IX-1p FINISH JOIN IX-1p

SignX/{detailed_prediction_20251225_193758 → detailed_prediction_20251226_161814}/sample_000/analysis_report.txt
RENAMED
@@ -2,43 +2,45 @@
 Sign Language Recognition - Attention Analysis Report
 ================================================================================

-Generated: 2025-12-
+Generated: 2025-12-26 16:18:17

 Translation result:
 --------------------------------------------------------------------------------
-<unk>
+<unk> #IF FRIEND GROUP/TOGETHER DEPART PARTY IX-1p FINISH JOIN IX-1p

 Video info:
 --------------------------------------------------------------------------------
-Total frames:
-Word count:
+Total frames: 28
+Word count: 10

 Attention weight info:
 --------------------------------------------------------------------------------
-Shape: (
-- Decoding steps:
+Shape: (28, 8, 28)
+- Decoding steps: 28
 - Batch size: 8

 Word-to-frame details:
 ================================================================================
 No.  Word            Frames  Peak  Attn   Conf
 --------------------------------------------------------------------------------
-1    <unk>           0-
-2
-3
-4
-5
-6
-7
-8
+1    <unk>           0-0     0     0.133  low
+2    #IF             2-3     2     0.359  medium
+3    FRIEND          5-5     5     0.449  medium
+4    GROUP/TOGETHER  8-8     8     0.371  medium
+5    DEPART          27-27   27    0.305  medium
+6    PARTY           27-27   27    0.296  medium
+7    IX-1p           27-27   27    0.326  medium
+8    FINISH          11-12   12    0.467  medium
+9    JOIN            13-14   14    0.317  medium
+10   IX-1p           17-17   17    0.358  medium

 ================================================================================

 Summary statistics:
 --------------------------------------------------------------------------------
-Average attention weight: 0.
-High-confidence words:
-Medium-confidence words:
-Low-confidence words:
+Average attention weight: 0.338
+High-confidence words: 0 (0.0%)
+Medium-confidence words: 9 (90.0%)
+Low-confidence words: 1 (10.0%)

 ================================================================================

SignX/{detailed_prediction_20251225_193758 → detailed_prediction_20251226_161814}/sample_000/attention_heatmap.png
RENAMED
File without changes

SignX/{detailed_prediction_20251225_192957 → detailed_prediction_20251226_161814}/sample_000/attention_weights.npy
RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:7414e5ab870540255a4bc963aa612d837eca27b95da7b4603c4c8e39f82b8c01
+size 25216
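Note: once the LFS object has been pulled locally, the saved tensor can be inspected directly. Per the analysis report its shape is (decoding steps, batch, features), i.e. (28, 8, 28) here; averaging over the batch axis is one convenient way to get a single (steps, features) matrix. A minimal sketch, assuming NumPy:

    import numpy as np

    # Cross-attention weights saved during inference for this sample.
    attn = np.load("sample_000/attention_weights.npy")
    print(attn.shape)       # (28, 8, 28): (decoding_steps, batch, features)

    # Collapse the batch axis to one (steps, features) matrix.
    attn_mean = attn.mean(axis=1)
    print(attn_mean.shape)  # (28, 28)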
SignX/{detailed_prediction_20251225_192957 → detailed_prediction_20251226_161814}/sample_000/debug_video_path.txt
RENAMED
@@ -1,4 +1,4 @@
-video_path = '/common/users/sf895/output/huggingface_asllrp_repo/SignX/eval/tiny_test_data/videos/
+video_path = '/common/users/sf895/output/huggingface_asllrp_repo/SignX/eval/tiny_test_data/videos/632051.mp4'
 video_path type = <class 'str'>
 video_path is None: False
 bool(video_path): True
SignX/detailed_prediction_20251226_161814/sample_000/feature_frame_mapping.json
ADDED
@@ -0,0 +1,176 @@
+{
+  "original_frame_count": 106,
+  "feature_count": 28,
+  "downsampling_ratio": 3.7857142857142856,
+  "fps": 24.0,
+  "mapping": [
+    {"feature_index": 0,  "frame_start": 0,   "frame_end": 3,   "frame_count": 3},
+    {"feature_index": 1,  "frame_start": 3,   "frame_end": 7,   "frame_count": 4},
+    {"feature_index": 2,  "frame_start": 7,   "frame_end": 11,  "frame_count": 4},
+    {"feature_index": 3,  "frame_start": 11,  "frame_end": 15,  "frame_count": 4},
+    {"feature_index": 4,  "frame_start": 15,  "frame_end": 18,  "frame_count": 3},
+    {"feature_index": 5,  "frame_start": 18,  "frame_end": 22,  "frame_count": 4},
+    {"feature_index": 6,  "frame_start": 22,  "frame_end": 26,  "frame_count": 4},
+    {"feature_index": 7,  "frame_start": 26,  "frame_end": 30,  "frame_count": 4},
+    {"feature_index": 8,  "frame_start": 30,  "frame_end": 34,  "frame_count": 4},
+    {"feature_index": 9,  "frame_start": 34,  "frame_end": 37,  "frame_count": 3},
+    {"feature_index": 10, "frame_start": 37,  "frame_end": 41,  "frame_count": 4},
+    {"feature_index": 11, "frame_start": 41,  "frame_end": 45,  "frame_count": 4},
+    {"feature_index": 12, "frame_start": 45,  "frame_end": 49,  "frame_count": 4},
+    {"feature_index": 13, "frame_start": 49,  "frame_end": 53,  "frame_count": 4},
+    {"feature_index": 14, "frame_start": 53,  "frame_end": 56,  "frame_count": 3},
+    {"feature_index": 15, "frame_start": 56,  "frame_end": 60,  "frame_count": 4},
+    {"feature_index": 16, "frame_start": 60,  "frame_end": 64,  "frame_count": 4},
+    {"feature_index": 17, "frame_start": 64,  "frame_end": 68,  "frame_count": 4},
+    {"feature_index": 18, "frame_start": 68,  "frame_end": 71,  "frame_count": 3},
+    {"feature_index": 19, "frame_start": 71,  "frame_end": 75,  "frame_count": 4},
+    {"feature_index": 20, "frame_start": 75,  "frame_end": 79,  "frame_count": 4},
+    {"feature_index": 21, "frame_start": 79,  "frame_end": 83,  "frame_count": 4},
+    {"feature_index": 22, "frame_start": 83,  "frame_end": 87,  "frame_count": 4},
+    {"feature_index": 23, "frame_start": 87,  "frame_end": 90,  "frame_count": 3},
+    {"feature_index": 24, "frame_start": 90,  "frame_end": 94,  "frame_count": 4},
+    {"feature_index": 25, "frame_start": 94,  "frame_end": 98,  "frame_count": 4},
+    {"feature_index": 26, "frame_start": 98,  "frame_end": 102, "frame_count": 4},
+    {"feature_index": 27, "frame_start": 102, "frame_end": 106, "frame_count": 4}
+  ]
+}
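Note: the mapping is uniform — feature i covers original frames [int(i·N/F), int((i+1)·N/F)] — so looking up the original-video span for any SMKD feature index is a direct list access. A minimal sketch against the JSON above (the relative path is illustrative):

    import json

    with open("sample_000/feature_frame_mapping.json") as f:
        fmap = json.load(f)

    def original_range(feature_index: int):
        """(frame_start, frame_end) of the original video frames pooled into one feature."""
        entry = fmap["mapping"][feature_index]
        return entry["frame_start"], entry["frame_end"]

    print(original_range(5))           # (18, 22)
    print(fmap["downsampling_ratio"])  # ~3.79 original frames per feature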
SignX/detailed_prediction_20251226_161814/sample_000/frame_alignment.json
ADDED
@@ -0,0 +1,104 @@
+{
+  "translation": "<unk> #IF FRIEND GROUP/TOGETHER DEPART PARTY IX-1p FINISH JOIN IX-1p",
+  "words": ["<unk>", "#IF", "FRIEND", "GROUP/TOGETHER", "DEPART", "PARTY", "IX-1p", "FINISH", "JOIN", "IX-1p"],
+  "total_video_frames": 28,
+  "frame_ranges": [
+    {"word": "<unk>",          "start_frame": 0,  "end_frame": 0,  "peak_frame": 0,  "avg_attention": 0.13272422552108765, "confidence": "low"},
+    {"word": "#IF",            "start_frame": 2,  "end_frame": 3,  "peak_frame": 2,  "avg_attention": 0.35901427268981934, "confidence": "medium"},
+    {"word": "FRIEND",         "start_frame": 5,  "end_frame": 5,  "peak_frame": 5,  "avg_attention": 0.4494199752807617,  "confidence": "medium"},
+    {"word": "GROUP/TOGETHER", "start_frame": 8,  "end_frame": 8,  "peak_frame": 8,  "avg_attention": 0.3710141181945801,  "confidence": "medium"},
+    {"word": "DEPART",         "start_frame": 27, "end_frame": 27, "peak_frame": 27, "avg_attention": 0.30533191561698914, "confidence": "medium"},
+    {"word": "PARTY",          "start_frame": 27, "end_frame": 27, "peak_frame": 27, "avg_attention": 0.2963099479675293,  "confidence": "medium"},
+    {"word": "IX-1p",          "start_frame": 27, "end_frame": 27, "peak_frame": 27, "avg_attention": 0.3264133930206299,  "confidence": "medium"},
+    {"word": "FINISH",         "start_frame": 11, "end_frame": 12, "peak_frame": 12, "avg_attention": 0.46679070591926575, "confidence": "medium"},
+    {"word": "JOIN",           "start_frame": 13, "end_frame": 14, "peak_frame": 14, "avg_attention": 0.3172740340232849,  "confidence": "medium"},
+    {"word": "IX-1p",          "start_frame": 17, "end_frame": 17, "peak_frame": 17, "avg_attention": 0.3579559326171875,  "confidence": "medium"}
+  ],
+  "statistics": {
+    "avg_confidence": 0.33822485208511355,
+    "high_confidence_words": 0,
+    "medium_confidence_words": 9,
+    "low_confidence_words": 1
+  }
+}
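Note: joining this file with feature_frame_mapping.json projects each word's feature-frame span back onto original video frames, which is what the original-video timeline added to attention_analysis.py later in this diff does. A standalone sketch of that join (paths illustrative):

    import json

    with open("sample_000/frame_alignment.json") as f:
        align = json.load(f)
    with open("sample_000/feature_frame_mapping.json") as f:
        fmap = json.load(f)["mapping"]

    # For each decoded word, map its feature-index span to original frames.
    for w in align["frame_ranges"]:
        orig_start = fmap[w["start_frame"]]["frame_start"]
        orig_end = fmap[w["end_frame"]]["frame_end"]
        print(f'{w["word"]:<16} features {w["start_frame"]}-{w["end_frame"]} '
              f'-> original frames {orig_start}-{orig_end} ({w["confidence"]})')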
SignX/{detailed_prediction_20251225_193758 → detailed_prediction_20251226_161814}/sample_000/frame_alignment.png
RENAMED
File without changes
SignX/detailed_prediction_20251226_161814/sample_000/gloss_to_frames.png
ADDED
Git LFS Details
SignX/detailed_prediction_20251226_161814/sample_000/interactive_alignment.html
ADDED
@@ -0,0 +1,579 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+<meta charset="UTF-8">
+<meta name="viewport" content="width=device-width, initial-scale=1.0">
+<title>Interactive Word-Frame Alignment</title>
+<style>
+    body { font-family: 'Arial', sans-serif; margin: 20px; background-color: #f5f5f5; }
+    .container { max-width: 1800px; margin: 0 auto; background-color: white; padding: 30px; border-radius: 8px; box-shadow: 0 2px 10px rgba(0,0,0,0.1); }
+    h1 { color: #333; border-bottom: 3px solid #4CAF50; padding-bottom: 10px; margin-bottom: 20px; }
+    .stats { background-color: #E3F2FD; padding: 15px; border-radius: 5px; margin-bottom: 20px; border-left: 4px solid #2196F3; font-size: 14px; }
+    .controls { background-color: #f9f9f9; padding: 20px; border-radius: 5px; margin-bottom: 30px; border: 1px solid #ddd; }
+    .control-group { margin-bottom: 15px; }
+    label { font-weight: bold; display: inline-block; width: 250px; color: #555; }
+    input[type="range"] { width: 400px; vertical-align: middle; }
+    .value-display { display: inline-block; width: 80px; font-family: monospace; font-size: 14px; color: #2196F3; font-weight: bold; }
+    .reset-btn { margin-top: 15px; padding: 10px 25px; background-color: #2196F3; color: white; border: none; border-radius: 5px; cursor: pointer; font-size: 14px; font-weight: bold; }
+    .reset-btn:hover { background-color: #1976D2; }
+    canvas { border: 1px solid #999; display: block; margin: 20px auto; background: white; }
+    .legend { margin-top: 20px; padding: 15px; background-color: #fff; border: 1px solid #ddd; border-radius: 5px; }
+    .legend-item { display: inline-block; margin-right: 25px; font-size: 13px; margin-bottom: 10px; }
+    .color-box { display: inline-block; width: 30px; height: 15px; margin-right: 8px; vertical-align: middle; border: 1px solid #666; }
+    .info-panel { margin-top: 20px; padding: 15px; background-color: #f9f9f9; border-radius: 5px; border: 1px solid #ddd; }
+    .confidence { display: inline-block; padding: 3px 10px; border-radius: 10px; font-weight: bold; font-size: 11px; text-transform: uppercase; }
+    .confidence.high { background-color: #4CAF50; color: white; }
+    .confidence.medium { background-color: #FF9800; color: white; }
+    .confidence.low { background-color: #f44336; color: white; }
+</style>
+</head>
+<body>
+<div class="container">
+    <h1>🎯 Interactive Word-to-Frame Alignment Visualizer</h1>
+
+    <div class="stats">
+        <strong>Translation:</strong> &lt;unk&gt; #IF FRIEND GROUP/TOGETHER DEPART PARTY IX-1p FINISH JOIN IX-1p<br>
+        <strong>Total Words:</strong> 10 |
+        <strong>Total Features:</strong> 8
+    </div>
+
+    <div class="controls">
+        <h3>⚙️ Threshold Controls</h3>
+
+        <div class="control-group">
+            <label for="peak-threshold">Peak Threshold (% of max):</label>
+            <input type="range" id="peak-threshold" min="1" max="100" value="90" step="1">
+            <span class="value-display" id="peak-threshold-value">90%</span>
+            <br>
+            <small style="margin-left: 255px; color: #666;">
+                A frame counts as "significant" when its attention weight ≥ (peak weight × threshold %)
+            </small>
+        </div>
+
+        <div class="control-group">
+            <label for="confidence-high">High Confidence (avg attn >):</label>
+            <input type="range" id="confidence-high" min="0" max="100" value="50" step="1">
+            <span class="value-display" id="confidence-high-value">0.50</span>
+        </div>
+
+        <div class="control-group">
+            <label for="confidence-medium">Medium Confidence (avg attn >):</label>
+            <input type="range" id="confidence-medium" min="0" max="100" value="20" step="1">
+            <span class="value-display" id="confidence-medium-value">0.20</span>
+        </div>
+
+        <button class="reset-btn" onclick="resetDefaults()">Reset to Defaults</button>
+    </div>
+
+    <div>
+        <h3>Word-to-Frame Alignment</h3>
+        <p style="color: #666; font-size: 13px;">
+            Each word is drawn as a colored rectangle whose width spans its feature-frame range.
+            ★ = peak frame. The attention waveform is drawn inside each rectangle.
+        </p>
+        <canvas id="alignment-canvas" width="1600" height="600"></canvas>
+
+        <h3 style="margin-top: 30px;">Timeline Progress Bar</h3>
+        <canvas id="timeline-canvas" width="1600" height="100"></canvas>
+
+        <div class="legend">
+            <strong>Legend:</strong><br><br>
+            <div class="legend-item">
+                <span class="confidence high">High</span>
+                <span class="confidence medium">Medium</span>
+                <span class="confidence low">Low</span>
+                Confidence Levels (opacity reflects confidence)
+            </div>
+            <div class="legend-item"><span style="color: red; font-size: 20px;">★</span> Peak Frame (highest attention)</div>
+            <div class="legend-item"><span style="color: blue;">━</span> Attention Waveform (within word region)</div>
+        </div>
+    </div>
+
+    <div class="info-panel">
+        <h3>Alignment Details</h3>
+        <div id="alignment-details"></div>
+    </div>
+</div>
+
+<script>
+    // Attention data from Python
+    const attentionData = [{"word": "<unk>", "word_idx": 0, "weights": [0.13272422552108765, 0.13272422552108765, 0.13272422552108765, 0.13272422552108765, 0.13272422552108765, 0.13272422552108765, 0.13272422552108765, 0.13272422552108765]}, {"word": "#IF", "word_idx": 1, "weights": [0.035573869943618774, 0.03952416777610779, 0.03952416777610779, 0.03952416777610779, 0.03952416777610779, 0.03952416777610779, 0.03952416777610779, 0.04215671867132187]}, {"word": "FRIEND", "word_idx": 2, "weights": [0.0035305300261825323, 0.06121520698070526, 0.038338951766490936, 0.03721731901168823, 0.0396437793970108, 0.06121520698070526, 0.03798501193523407, 0.06121520698070526]}, {"word": "GROUP/TOGETHER", "word_idx": 3, "weights": [0.0032519344240427017, 0.06749927252531052, 0.06769093871116638, 0.06797394156455994, 0.05660902336239815, 0.0032519344240427017, 0.06791087239980698, 0.06749927252531052]}, {"word": "DEPART", "word_idx": 4, "weights": [0.1269645392894745, 0.011337662115693092, 0.011169591918587685, 0.011129447259008884, 0.011337662115693092, 0.14123021066188812, 0.007393963634967804, 0.011116056703031063]}, {"word": "PARTY", "word_idx": 5, "weights": [0.08145920932292938, 0.003315121866762638, 0.24030542373657227, 0.0034980459604412317, 0.0034834302496165037, 0.16299134492874146, 0.003404060145840049, 0.0034867869690060616]}, {"word": "IX-1p", "word_idx": 6, "weights": [0.08820953965187073, 0.14399001002311707, 0.00422912510111928, 0.09592650085687637, 0.004478602670133114, 0.004475735127925873, 0.00422912510111928, 0.00439990172162652]}, {"word": "FINISH", "word_idx": 7, "weights": [0.0013184626586735249, 0.0013184626586735249, 0.0013184626586735249, 0.1313352882862091, 0.0013184626586735249, 0.14886727929115295, 0.14913317561149597, 0.004780622664839029]}, {"word": "JOIN", "word_idx": 8, "weights": [0.00555413169786334, 0.006608393043279648, 0.00555413169786334, 0.1470479518175125, 0.13233929872512817, 0.09560006111860275, 0.00555413169786334, 0.08190114796161652]}, {"word": "IX-1p", "word_idx": 9, "weights": [0.03655996546149254, 0.03403225541114807, 0.27225977182388306, 0.35054582357406616, 0.03655996546149254, 0.27422454953193665, 0.03655996546149254, 0.09239426255226135]}];
+    const numGlosses = 10;
+    const numFeatures = 8;
+
+    // Colors for different words (matching matplotlib tab20)
+    const colors = [
+        '#1f77b4', '#ff7f0e', '#2ca02c', '#d62728', '#9467bd',
+        '#8c564b', '#e377c2', '#7f7f7f', '#bcbd22', '#17becf',
+        '#aec7e8', '#ffbb78', '#98df8a', '#ff9896', '#c5b0d5',
+        '#c49c94', '#f7b6d2', '#c7c7c7', '#dbdb8d', '#9edae5'
+    ];
+
+    // Get controls
+    const peakThresholdSlider = document.getElementById('peak-threshold');
+    const peakThresholdValue = document.getElementById('peak-threshold-value');
+    const confidenceHighSlider = document.getElementById('confidence-high');
+    const confidenceHighValue = document.getElementById('confidence-high-value');
+    const confidenceMediumSlider = document.getElementById('confidence-medium');
+    const confidenceMediumValue = document.getElementById('confidence-medium-value');
+    const alignmentCanvas = document.getElementById('alignment-canvas');
+    const timelineCanvas = document.getElementById('timeline-canvas');
+    const alignmentCtx = alignmentCanvas.getContext('2d');
+    const timelineCtx = timelineCanvas.getContext('2d');
+
+    // Update displays when sliders change
+    peakThresholdSlider.oninput = function() {
+        peakThresholdValue.textContent = this.value + '%';
+        updateVisualization();
+    };
+
+    confidenceHighSlider.oninput = function() {
+        confidenceHighValue.textContent = (this.value / 100).toFixed(2);
+        updateVisualization();
+    };
+
+    confidenceMediumSlider.oninput = function() {
+        confidenceMediumValue.textContent = (this.value / 100).toFixed(2);
+        updateVisualization();
+    };
+
+    function resetDefaults() {
+        peakThresholdSlider.value = 90;
+        confidenceHighSlider.value = 50;
+        confidenceMediumSlider.value = 20;
+        peakThresholdValue.textContent = '90%';
+        confidenceHighValue.textContent = '0.50';
+        confidenceMediumValue.textContent = '0.20';
+        updateVisualization();
+    }
+
+    function calculateAlignment(weights, peakThreshold) {
+        // Find peak
+        let peakIdx = 0;
+        let peakWeight = weights[0];
+        for (let i = 1; i < weights.length; i++) {
+            if (weights[i] > peakWeight) {
+                peakWeight = weights[i];
+                peakIdx = i;
+            }
+        }
+
+        // Find significant frames
+        const threshold = peakWeight * (peakThreshold / 100);
+        let startIdx = peakIdx;
+        let endIdx = peakIdx;
+        let sumWeight = 0;
+        let count = 0;
+
+        for (let i = 0; i < weights.length; i++) {
+            if (weights[i] >= threshold) {
+                if (i < startIdx) startIdx = i;
+                if (i > endIdx) endIdx = i;
+                sumWeight += weights[i];
+                count++;
+            }
+        }
+
+        const avgWeight = count > 0 ? sumWeight / count : peakWeight;
+
+        return {
+            startIdx: startIdx,
+            endIdx: endIdx,
+            peakIdx: peakIdx,
+            peakWeight: peakWeight,
+            avgWeight: avgWeight,
+            threshold: threshold
+        };
+    }
+
+    function getConfidenceLevel(avgWeight, highThreshold, mediumThreshold) {
+        if (avgWeight > highThreshold) return 'high';
+        if (avgWeight > mediumThreshold) return 'medium';
+        return 'low';
+    }
+
+    function drawAlignmentChart() {
+        const peakThreshold = parseInt(peakThresholdSlider.value);
+        const highThreshold = parseInt(confidenceHighSlider.value) / 100;
+        const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;
+
+        // Canvas dimensions
+        const width = alignmentCanvas.width;
+        const height = alignmentCanvas.height;
+        const leftMargin = 180;
+        const rightMargin = 50;
+        const topMargin = 60;
+        const bottomMargin = 80;
+
+        const plotWidth = width - leftMargin - rightMargin;
+        const plotHeight = height - topMargin - bottomMargin;
+        const rowHeight = plotHeight / numGlosses;
+        const featureWidth = plotWidth / numFeatures;
+
+        // Clear canvas and draw titles
+        alignmentCtx.clearRect(0, 0, width, height);
+        alignmentCtx.fillStyle = '#333';
+        alignmentCtx.font = 'bold 18px Arial';
+        alignmentCtx.textAlign = 'center';
+        alignmentCtx.fillText('Word-to-Frame Alignment', width / 2, 30);
+        alignmentCtx.font = '13px Arial';
+        alignmentCtx.fillText('(based on attention peaks, ★ = peak frame)', width / 2, 48);
+
+        // Calculate alignments
+        const alignments = [];
+        for (let wordIdx = 0; wordIdx < numGlosses; wordIdx++) {
+            const data = attentionData[wordIdx];
+            const alignment = calculateAlignment(data.weights, peakThreshold);
+            alignment.word = data.word;
+            alignment.wordIdx = wordIdx;
+            alignment.weights = data.weights;
+            alignments.push(alignment);
+        }
+
+        // Draw vertical grid
+        alignmentCtx.strokeStyle = '#e0e0e0';
+        alignmentCtx.lineWidth = 0.5;
+        for (let i = 0; i <= numFeatures; i++) {
+            const x = leftMargin + i * featureWidth;
+            alignmentCtx.beginPath();
+            alignmentCtx.moveTo(x, topMargin);
+            alignmentCtx.lineTo(x, topMargin + plotHeight);
+            alignmentCtx.stroke();
+        }
+
+        // Draw word regions
+        for (let wordIdx = 0; wordIdx < numGlosses; wordIdx++) {
+            const alignment = alignments[wordIdx];
+            const confidence = getConfidenceLevel(alignment.avgWeight, highThreshold, mediumThreshold);
+            const y = topMargin + wordIdx * rowHeight;
+
+            // Alpha based on confidence
+            const alpha = confidence === 'high' ? 0.9 : confidence === 'medium' ? 0.7 : 0.5;
+
+            // Rectangle for the word's feature range
+            const startX = leftMargin + alignment.startIdx * featureWidth;
+            const rectWidth = (alignment.endIdx - alignment.startIdx + 1) * featureWidth;
+            alignmentCtx.fillStyle = colors[wordIdx % 20];
+            alignmentCtx.globalAlpha = alpha;
+            alignmentCtx.fillRect(startX, y, rectWidth, rowHeight * 0.8);
+            alignmentCtx.globalAlpha = 1.0;
+            alignmentCtx.strokeStyle = '#000';
+            alignmentCtx.lineWidth = 2;
+            alignmentCtx.strokeRect(startX, y, rectWidth, rowHeight * 0.8);
+
+            // Attention waveform inside the rectangle
+            alignmentCtx.strokeStyle = 'rgba(0, 0, 255, 0.8)';
+            alignmentCtx.lineWidth = 1.5;
+            alignmentCtx.beginPath();
+            for (let i = alignment.startIdx; i <= alignment.endIdx; i++) {
+                const x = leftMargin + i * featureWidth + featureWidth / 2;
+                const normalizedWeight = alignment.weights[i] / (alignment.peakWeight * 1.2); // Scale for visibility
+                const waveY = y + rowHeight * 0.8 - (normalizedWeight * rowHeight * 0.6);
+                if (i === alignment.startIdx) {
+                    alignmentCtx.moveTo(x, waveY);
+                } else {
+                    alignmentCtx.lineTo(x, waveY);
+                }
+            }
+            alignmentCtx.stroke();
+
+            // Word label inside the rectangle
+            const labelX = startX + rectWidth / 2;
+            const labelY = y + rowHeight * 0.4;
+            alignmentCtx.fillStyle = 'rgba(0, 0, 0, 0.7)';
+            alignmentCtx.fillRect(labelX - 60, labelY - 12, 120, 24);
+            alignmentCtx.fillStyle = '#fff';
+            alignmentCtx.font = 'bold 13px Arial';
+            alignmentCtx.textAlign = 'center';
+            alignmentCtx.textBaseline = 'middle';
+            alignmentCtx.fillText(alignment.word, labelX, labelY);
+
+            // Mark the peak frame with a star
+            const peakX = leftMargin + alignment.peakIdx * featureWidth + featureWidth / 2;
+            const peakY = y + rowHeight * 0.4;
+            alignmentCtx.fillStyle = '#ff0000';
+            alignmentCtx.strokeStyle = '#ffff00';
+            alignmentCtx.lineWidth = 1.5;
+            alignmentCtx.font = '20px Arial';
+            alignmentCtx.textAlign = 'center';
+            alignmentCtx.strokeText('★', peakX, peakY);
+            alignmentCtx.fillText('★', peakX, peakY);
+
+            // Y-axis label (word name)
+            alignmentCtx.fillStyle = '#333';
+            alignmentCtx.font = '12px Arial';
+            alignmentCtx.textAlign = 'right';
+            alignmentCtx.textBaseline = 'middle';
+            alignmentCtx.fillText(alignment.word, leftMargin - 10, y + rowHeight * 0.4);
+        }
+
+        // Horizontal grid lines
+        alignmentCtx.strokeStyle = '#ccc';
+        alignmentCtx.lineWidth = 0.5;
+        for (let i = 0; i <= numGlosses; i++) {
+            const y = topMargin + i * rowHeight;
+            alignmentCtx.beginPath();
+            alignmentCtx.moveTo(leftMargin, y);
+            alignmentCtx.lineTo(leftMargin + plotWidth, y);
+            alignmentCtx.stroke();
+        }
+
+        // Axes
+        alignmentCtx.strokeStyle = '#000';
+        alignmentCtx.lineWidth = 2;
+        alignmentCtx.strokeRect(leftMargin, topMargin, plotWidth, plotHeight);
+
+        // X-axis labels (feature indices)
+        alignmentCtx.fillStyle = '#000';
+        alignmentCtx.font = '11px Arial';
+        alignmentCtx.textAlign = 'center';
+        alignmentCtx.textBaseline = 'top';
+        for (let i = 0; i < numFeatures; i++) {
+            const x = leftMargin + i * featureWidth + featureWidth / 2;
+            alignmentCtx.fillText(i.toString(), x, topMargin + plotHeight + 10);
+        }
+
+        // Axis titles
+        alignmentCtx.fillStyle = '#333';
+        alignmentCtx.font = 'bold 14px Arial';
+        alignmentCtx.textAlign = 'center';
+        alignmentCtx.fillText('Feature Frame Index', leftMargin + plotWidth / 2, height - 20);
+        alignmentCtx.save();
+        alignmentCtx.translate(30, topMargin + plotHeight / 2);
+        alignmentCtx.rotate(-Math.PI / 2);
+        alignmentCtx.fillText('Generated Word', 0, 0);
+        alignmentCtx.restore();
+
+        return alignments;
+    }
+
+    function drawTimeline(alignments) {
+        const highThreshold = parseInt(confidenceHighSlider.value) / 100;
+        const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;
+
+        const width = timelineCanvas.width;
+        const height = timelineCanvas.height;
+        const leftMargin = 180;
+        const rightMargin = 50;
+        const plotWidth = width - leftMargin - rightMargin;
+        const featureWidth = plotWidth / numFeatures;
+
+        timelineCtx.clearRect(0, 0, width, height);
+
+        // Background bar
+        timelineCtx.fillStyle = '#ddd';
+        timelineCtx.fillRect(leftMargin, 30, plotWidth, 40);
+        timelineCtx.strokeStyle = '#000';
+        timelineCtx.lineWidth = 2;
+        timelineCtx.strokeRect(leftMargin, 30, plotWidth, 40);
+
+        // Word regions on the timeline
+        for (let wordIdx = 0; wordIdx < alignments.length; wordIdx++) {
+            const alignment = alignments[wordIdx];
+            const confidence = getConfidenceLevel(alignment.avgWeight, highThreshold, mediumThreshold);
+            const alpha = confidence === 'high' ? 0.9 : confidence === 'medium' ? 0.7 : 0.5;
+            const startX = leftMargin + alignment.startIdx * featureWidth;
+            const rectWidth = (alignment.endIdx - alignment.startIdx + 1) * featureWidth;
+            timelineCtx.fillStyle = colors[wordIdx % 20];
+            timelineCtx.globalAlpha = alpha;
+            timelineCtx.fillRect(startX, 30, rectWidth, 40);
+            timelineCtx.globalAlpha = 1.0;
+            timelineCtx.strokeStyle = '#000';
+            timelineCtx.lineWidth = 0.5;
+            timelineCtx.strokeRect(startX, 30, rectWidth, 40);
+        }
+
+        // Title
+        timelineCtx.fillStyle = '#333';
+        timelineCtx.font = 'bold 13px Arial';
+        timelineCtx.textAlign = 'left';
+        timelineCtx.fillText('Timeline Progress Bar', leftMargin, 20);
+    }
+
+    function updateDetailsPanel(alignments, highThreshold, mediumThreshold) {
+        const panel = document.getElementById('alignment-details');
+        let html = '<table style="width: 100%; border-collapse: collapse;">';
+        html += '<tr style="background: #f0f0f0; font-weight: bold;">';
+        html += '<th style="padding: 8px; border: 1px solid #ddd;">Word</th>';
+        html += '<th style="padding: 8px; border: 1px solid #ddd;">Feature Range</th>';
+        html += '<th style="padding: 8px; border: 1px solid #ddd;">Peak</th>';
+        html += '<th style="padding: 8px; border: 1px solid #ddd;">Span</th>';
+        html += '<th style="padding: 8px; border: 1px solid #ddd;">Avg Attention</th>';
+        html += '<th style="padding: 8px; border: 1px solid #ddd;">Confidence</th>';
+        html += '</tr>';
+
+        for (const align of alignments) {
+            const confidence = getConfidenceLevel(align.avgWeight, highThreshold, mediumThreshold);
+            const span = align.endIdx - align.startIdx + 1;
+            html += '<tr>';
+            html += `<td style="padding: 8px; border: 1px solid #ddd;"><strong>${align.word}</strong></td>`;
+            html += `<td style="padding: 8px; border: 1px solid #ddd;">${align.startIdx} → ${align.endIdx}</td>`;
+            html += `<td style="padding: 8px; border: 1px solid #ddd;">${align.peakIdx}</td>`;
+            html += `<td style="padding: 8px; border: 1px solid #ddd;">${span}</td>`;
+            html += `<td style="padding: 8px; border: 1px solid #ddd;">${align.avgWeight.toFixed(4)}</td>`;
+            html += `<td style="padding: 8px; border: 1px solid #ddd;"><span class="confidence ${confidence}">${confidence}</span></td>`;
+            html += '</tr>';
+        }
+
+        html += '</table>';
+        panel.innerHTML = html;
+    }
+
+    function updateVisualization() {
+        const alignments = drawAlignmentChart();
+        drawTimeline(alignments);
+        const highThreshold = parseInt(confidenceHighSlider.value) / 100;
+        const mediumThreshold = parseInt(confidenceMediumSlider.value) / 100;
+        updateDetailsPanel(alignments, highThreshold, mediumThreshold);
+    }
+
+    // Slider handlers are attached above via the oninput assignments.
+    // Initial visualization
+    updateVisualization();
+</script>
+</body>
+</html>
SignX/detailed_prediction_20251226_161814/sample_000/translation.txt
ADDED
@@ -0,0 +1,2 @@
+With BPE: <unk> #IF FRIEND GROUP/TOGE@@ TH@@ E@@ R DEPART PARTY IX-1p FINISH JO@@ I@@ N IX-1p
+Clean: <unk> #IF FRIEND GROUP/TOGETHER DEPART PARTY IX-1p FINISH JOIN IX-1p
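Note: the "Clean" line is the BPE line with the subword continuation markers merged back; the `@@ ` suffix looks like the standard subword-nmt convention. A minimal detokenization sketch:

    def remove_bpe(text: str) -> str:
        """Merge subword pieces: 'GROUP/TOGE@@ TH@@ E@@ R' -> 'GROUP/TOGETHER'."""
        return text.replace("@@ ", "").replace("@@", "")

    bpe = "<unk> #IF FRIEND GROUP/TOGE@@ TH@@ E@@ R DEPART PARTY IX-1p FINISH JO@@ I@@ N IX-1p"
    print(remove_bpe(bpe))
    # <unk> #IF FRIEND GROUP/TOGETHER DEPART PARTY IX-1p FINISH JOIN IX-1p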
SignX/eval/attention_analysis.py
CHANGED
@@ -121,8 +121,8 @@ class AttentionAnalyzer:
         peak_frame = int(np.argmax(attn_weights))
         peak_weight = attn_weights[peak_frame]

-        # Compute significant frame range (weight >= max *
-        threshold = peak_weight * 0.
+        # Compute significant frame range (weight >= 90% of max)
+        threshold = peak_weight * 0.9
         significant_frames = np.where(attn_weights >= threshold)[0]

         if len(significant_frames) > 0:
@@ -253,8 +253,24 @@
             print("    Skipping alignment plot: matplotlib not installed")
             return

-
-
+        # Try to load feature-to-frame mapping
+        feature_mapping = None
+        output_dir = Path(output_path).parent
+        mapping_file = output_dir / "feature_frame_mapping.json"
+        if mapping_file.exists():
+            try:
+                with open(mapping_file, 'r') as f:
+                    feature_mapping = json.load(f)
+            except Exception as e:
+                print(f"    Warning: Failed to load feature mapping: {e}")
+
+        # Adjust layout based on whether we have feature mapping
+        if feature_mapping:
+            fig = plt.figure(figsize=(18, 10))
+            gs = GridSpec(4, 1, height_ratios=[4, 1, 1, 0.5], hspace=0.4)
+        else:
+            fig = plt.figure(figsize=(18, 8))
+            gs = GridSpec(3, 1, height_ratios=[4, 1, 0.5], hspace=0.4)

         # === Top plot: word-to-frame alignment ===
         ax1 = fig.add_subplot(gs[0])
@@ -299,7 +315,7 @@
         ax1.set_yticks(range(len(self.words)))
         ax1.set_yticklabels([w['word'] for w in self.word_frame_ranges], fontsize=10)

-        # === Middle plot: timeline progress bar ===
+        # === Middle plot 1: SMKD feature-frame timeline bar ===
         ax2 = fig.add_subplot(gs[1])

         # Background
@@ -318,18 +334,58 @@
         ax2.set_xlim(-2, self.video_frames + 2)
         ax2.set_ylim(-0.4, 0.4)
-        ax2.set_xlabel('Frame Index', fontsize=12, fontweight='bold')
+        ax2.set_xlabel('SMKD Feature Frame Index', fontsize=12, fontweight='bold')
         ax2.set_yticks([])
-        ax2.set_title('
+        ax2.set_title('SMKD Feature Timeline', fontsize=13, fontweight='bold')
         ax2.grid(True, alpha=0.3, axis='x', linestyle='--')

+        # === Middle plot 2: original-video-frame timeline (if a feature mapping exists) ===
+        if feature_mapping:
+            ax3 = fig.add_subplot(gs[2])
+
+            original_frame_count = feature_mapping['original_frame_count']
+
+            # Background
+            ax3.barh(0, original_frame_count, height=0.6, color='lightgray',
+                     edgecolor='black', linewidth=2)
+
+            # Original-frame region for each word
+            for i, word_info in enumerate(self.word_frame_ranges):
+                feat_start = word_info['start_frame']
+                feat_end = word_info['end_frame']
+                confidence = word_info['confidence']
+                alpha = 0.9 if confidence == 'high' else 0.7 if confidence == 'medium' else 0.5
+
+                # Look up the original frame range from the feature mapping,
+                # using the word's start and end feature indices
+                mapping_list = feature_mapping['mapping']
+                if feat_start < len(mapping_list) and feat_end < len(mapping_list):
+                    orig_start = mapping_list[feat_start]['frame_start']
+                    orig_end = mapping_list[feat_end]['frame_end']
+
+                    ax3.barh(0, orig_end - orig_start, left=orig_start, height=0.6,
+                             color=colors[i % 20], alpha=alpha, edgecolor='black', linewidth=0.5)
+
+            ax3.set_xlim(-2, original_frame_count + 2)
+            ax3.set_ylim(-0.4, 0.4)
+            ax3.set_xlabel('Original Video Frame Index', fontsize=12, fontweight='bold')
+            ax3.set_yticks([])
+            ax3.set_title(f'Original Video Timeline ({original_frame_count} frames, '
+                          f'{feature_mapping["downsampling_ratio"]:.2f}x downsampling)',
+                          fontsize=13, fontweight='bold')
+            ax3.grid(True, alpha=0.3, axis='x', linestyle='--')
+
+            legend_row = 3
+        else:
+            legend_row = 2
+
         # === Bottom plot: confidence legend ===
-
-
+        ax_legend = fig.add_subplot(gs[legend_row])
+        ax_legend.axis('off')

         legend_text = "Confidence: ■ High (avg attn > 0.5)   ■ Medium (0.2-0.5)   ■ Low (< 0.2)"
-
-        fontsize=11, transform=
+        ax_legend.text(0.5, 0.5, legend_text, ha='center', va='center',
+                       fontsize=11, transform=ax_legend.transAxes)

         plt.tight_layout()
         plt.savefig(output_path, dpi=150, bbox_inches='tight')
@@ -580,10 +636,10 @@
             print("  ⓘ No video frames extracted, skipping visualization")
             return

-        # Create figure
         n_words = len(self.words)
-        fig = plt.figure(figsize=(
-        gs = gridspec.GridSpec(n_words,
+        # Create figure (4-column layout: Gloss | Feature Index | Frame Info | Video Frames)
+        fig = plt.figure(figsize=(24, 3 * n_words))
+        gs = gridspec.GridSpec(n_words, 4, width_ratios=[1.5, 1.5, 2, 6], hspace=0.3, wspace=0.2)

         for row_idx, (word, word_info) in enumerate(zip(self.words, self.word_frame_ranges)):
             # Column 1: gloss text
@@ -592,23 +648,36 @@
                          ha='center', va='center', wrap=True)
             ax_gloss.axis('off')

-            # Column 2:
-
+            # Column 2: feature-index info (Feature Index Layer)
+            ax_feature = fig.add_subplot(gs[row_idx, 1])

             # Feature-frame info
             feat_start = word_info['start_frame']
             feat_end = word_info['end_frame']
             feat_peak = word_info['peak_frame']

+            feature_text = f"SMKD Feature Index\n"
+            feature_text += f"{'='*20}\n\n"
+            feature_text += f"Range:\n  {feat_start} → {feat_end}\n\n"
+            feature_text += f"Peak:\n  {feat_peak}\n\n"
+            feature_text += f"Count:\n  {feat_end - feat_start + 1} features\n\n"
+            feature_text += f"Position:\n  {(feat_start/self.video_frames)*100:.1f}% - {(feat_end/self.video_frames)*100:.1f}%"
+
+            ax_feature.text(0.5, 0.5, feature_text, fontsize=11, family='monospace',
+                            va='center', ha='center',
+                            bbox=dict(boxstyle='round,pad=0.8', facecolor='lightblue',
+                                      edgecolor='darkblue', linewidth=2, alpha=0.7))
+            ax_feature.axis('off')
+
+            # Column 3: original-video-frame info
+            ax_info = fig.add_subplot(gs[row_idx, 2])
+
             # Relative time (0-100%)
             rel_start = (feat_start / self.video_frames) * 100
             rel_end = (feat_end / self.video_frames) * 100
             rel_peak = (feat_peak / self.video_frames) * 100

-            info_text = f"
-            info_text += f"  Range: {feat_start}-{feat_end}\n"
-            info_text += f"  Peak: {feat_peak}\n\n"
-            info_text += f"Relative Time:\n"
+            info_text = f"Relative Time:\n"
             info_text += f"  Range: {rel_start:.1f}%-{rel_end:.1f}%\n"
             info_text += f"  Peak: {rel_peak:.1f}%\n"
@@ -617,17 +686,18 @@
             orig_start = self._map_feature_frame_to_original(feat_start)
             orig_end = self._map_feature_frame_to_original(feat_end)
             orig_peak = self._map_feature_frame_to_original(feat_peak)
-            info_text += f"\nOriginal Video:\n"
+            info_text += f"\nOriginal Video Frames:\n"
             info_text += f"  Total: {self.original_video_total_frames} frames\n"
             info_text += f"  Range: {orig_start}-{orig_end}\n"
             info_text += f"  Peak: {orig_peak}\n"
+            info_text += f"  Count: {orig_end - orig_start} frames\n"

             ax_info.text(0.05, 0.5, info_text, fontsize=10, family='monospace',
                          va='center', ha='left')
             ax_info.axis('off')

-            # Column
-            ax_frames = fig.add_subplot(gs[row_idx,
+            # Column 4: video-frame thumbnails
+            ax_frames = fig.add_subplot(gs[row_idx, 3])

             # Frames to display: start, peak, end
             frames_to_show = []
@@ -656,7 +726,7 @@

         ax_frames.axis('off')

-        plt.suptitle(f"
+        plt.suptitle(f"Three-Layer Alignment: Gloss ↔ Feature Index ↔ Original Frames\nTranslation: {self.translation}",
                      fontsize=16, weight='bold', y=0.995)

         plt.savefig(output_path, dpi=150, bbox_inches='tight', facecolor='white')
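Note: the core of the raised threshold is a one-liner; all frames within 90% of a word's peak attention form its span. A minimal standalone sketch (the function name `word_frame_range` is mine, assuming NumPy), exercised on the 8-value "FINISH" weight vector embedded in interactive_alignment.html above:

    import numpy as np

    def word_frame_range(attn_weights, peak_ratio=0.9):
        """Feature-frame span for one decoded word: keep frames whose attention
        is >= peak_ratio * max (the 90% threshold this commit raises it to)."""
        peak_frame = int(np.argmax(attn_weights))
        threshold = attn_weights[peak_frame] * peak_ratio
        significant = np.where(attn_weights >= threshold)[0]  # never empty: the peak qualifies
        return int(significant.min()), int(significant.max()), peak_frame

    w = np.array([0.0013, 0.0013, 0.0013, 0.1313, 0.0013, 0.1489, 0.1491, 0.0048])
    print(word_frame_range(w))  # (5, 6, 6): frame 3 falls just below 90% of the peak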
SignX/eval/generate_feature_mapping.py
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#!/usr/bin/env python
|
| 2 |
+
"""
|
| 3 |
+
生成特征-帧映射文件
|
| 4 |
+
|
| 5 |
+
Usage:
|
| 6 |
+
python generate_feature_mapping.py <sample_dir> <video_path>
|
| 7 |
+
|
| 8 |
+
Example:
|
| 9 |
+
python generate_feature_mapping.py detailed_prediction_20251226_155113/sample_000 \\
|
| 10 |
+
eval/tiny_test_data/videos/632051.mp4
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
import sys
|
| 14 |
+
import os
|
| 15 |
+
import json
|
| 16 |
+
import numpy as np
|
| 17 |
+
from pathlib import Path
|
| 18 |
+
|
| 19 |
+
def generate_feature_mapping(sample_dir, video_path):
|
| 20 |
+
"""为指定样本生成特征-帧映射文件"""
|
| 21 |
+
sample_dir = Path(sample_dir)
|
| 22 |
+
|
| 23 |
+
# Check if attention_weights.npy exists
|
| 24 |
+
attn_file = sample_dir / "attention_weights.npy"
|
| 25 |
+
if not attn_file.exists():
|
| 26 |
+
print(f"错误: 找不到attention_weights.npy: {attn_file}")
|
| 27 |
+
return False
|
| 28 |
+
|
| 29 |
+    # Load attention weights to get feature count
+    attn_weights = np.load(attn_file)
+    feature_count = attn_weights.shape[2]  # Shape: (time, beam, features)
+
+    print(f"Feature count: {feature_count}")
+
+    # Get original frame count from video
+    try:
+        import cv2
+        cap = cv2.VideoCapture(str(video_path))
+        if not cap.isOpened():
+            print(f"Error: cannot open video file: {video_path}")
+            return False
+
+        original_frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
+        fps = cap.get(cv2.CAP_PROP_FPS)
+        cap.release()
+
+        print(f"Original frame count: {original_frame_count}, FPS: {fps}")
+
+    except ImportError:
+        print("Warning: OpenCV unavailable, falling back to estimated values")
+        # Assume 30 fps and estimate from the feature count
+        original_frame_count = feature_count * 3  # default 3x downsampling
+        fps = 30.0
+
+    # Calculate uniform mapping: feature i -> frames [start, end]
+    frame_mapping = []
+    for feat_idx in range(feature_count):
+        start_frame = int(feat_idx * original_frame_count / feature_count)
+        end_frame = int((feat_idx + 1) * original_frame_count / feature_count)
+        frame_mapping.append({
+            "feature_index": feat_idx,
+            "frame_start": start_frame,
+            "frame_end": end_frame,
+            "frame_count": end_frame - start_frame
+        })
+
+    # Save mapping
+    mapping_data = {
+        "original_frame_count": original_frame_count,
+        "feature_count": feature_count,
+        "downsampling_ratio": original_frame_count / feature_count,
+        "fps": fps,
+        "mapping": frame_mapping
+    }
+
+    output_file = sample_dir / "feature_frame_mapping.json"
+    with open(output_file, 'w') as f:
+        json.dump(mapping_data, f, indent=2)
+
+    print(f"\n✓ Mapping file generated: {output_file}")
+    print(f"  Original frame count: {original_frame_count}")
+    print(f"  Feature count: {feature_count}")
+    print(f"  Downsampling ratio: {mapping_data['downsampling_ratio']:.2f}x")
+
+    # Print sample mappings
+    print("\nSample mappings:")
+    for i in range(min(3, len(frame_mapping))):
+        mapping = frame_mapping[i]
+        print(f"  Feature {mapping['feature_index']}: frames {mapping['frame_start']}-{mapping['frame_end']} "
+              f"({mapping['frame_count']} frames)")
+    if len(frame_mapping) > 3:
+        print("  ...")
+        mapping = frame_mapping[-1]
+        print(f"  Feature {mapping['feature_index']}: frames {mapping['frame_start']}-{mapping['frame_end']} "
+              f"({mapping['frame_count']} frames)")
+
+    return True
+
+
+if __name__ == "__main__":
+    if len(sys.argv) != 3:
+        print("Usage: python generate_feature_mapping.py <sample_dir> <video_path>")
+        print("\nExample:")
+        print("  python generate_feature_mapping.py detailed_prediction_20251226_155113/sample_000 \\")
+        print("        eval/tiny_test_data/videos/632051.mp4")
+        sys.exit(1)
+
+    sample_dir = sys.argv[1]
+    video_path = sys.argv[2]
+
+    if not os.path.exists(sample_dir):
+        print(f"Error: sample directory does not exist: {sample_dir}")
+        sys.exit(1)
+
+    if not os.path.exists(video_path):
+        print(f"Error: video file does not exist: {video_path}")
+        sys.exit(1)
+
+    success = generate_feature_mapping(sample_dir, video_path)
+    sys.exit(0 if success else 1)
SignX/eval/generate_interactive_alignment.py CHANGED

@@ -205,8 +205,8 @@ def generate_interactive_html(sample_dir, output_path):

             <div class="control-group">
                 <label for="peak-threshold">Peak Threshold (% of max):</label>
-                <input type="range" id="peak-threshold" min="1" max="100" value="
-                <span class="value-display" id="peak-threshold-value">
+                <input type="range" id="peak-threshold" min="1" max="100" value="90" step="1">
+                <span class="value-display" id="peak-threshold-value">90%</span>
                 <br>
                 <small style="margin-left: 255px; color: #666;">
                     A frame counts as a "significant frame" when its attention weight ≥ (peak weight × threshold%)
@@ -308,10 +308,10 @@ def generate_interactive_html(sample_dir, output_path):
         }};

         function resetDefaults() {{
-            peakThresholdSlider.value =
+            peakThresholdSlider.value = 90;
             confidenceHighSlider.value = 50;
             confidenceMediumSlider.value = 20;
-            peakThresholdValue.textContent = '
+            peakThresholdValue.textContent = '90%';
             confidenceHighValue.textContent = '0.50';
             confidenceMediumValue.textContent = '0.20';
             updateVisualization();
SignX/eval/regenerate_visualizations.py ADDED

@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+"""
+Regenerate all visualizations (using the latest attention_analysis.py code)
+
+Usage:
+    python regenerate_visualizations.py <detailed_prediction_dir> <video_path>
+
+Example:
+    python regenerate_visualizations.py detailed_prediction_20251226_161117 ./eval/tiny_test_data/videos/632051.mp4
+"""
+
+import sys
+import os
+from pathlib import Path
+
+# Add the project root directory to the path
+SCRIPT_DIR = Path(__file__).parent.parent
+sys.path.insert(0, str(SCRIPT_DIR))
+
+from eval.attention_analysis import AttentionAnalyzer
+import numpy as np
+
+
+def regenerate_sample_visualizations(sample_dir, video_path):
+    """Regenerate all visualizations for a single sample"""
+    sample_dir = Path(sample_dir)
+
+    if not sample_dir.exists():
+        print(f"Error: sample directory does not exist: {sample_dir}")
+        return False
+
+    # Load data
+    attn_file = sample_dir / "attention_weights.npy"
+    trans_file = sample_dir / "translation.txt"
+
+    if not attn_file.exists() or not trans_file.exists():
+        print(f"  Skipping {sample_dir.name}: required files missing")
+        return False
+
+    # Read data
+    attention_weights = np.load(attn_file)
+    with open(trans_file, 'r') as f:
+        lines = f.readlines()
+    # Find the translation after "Clean:"
+    translation = None
+    for line in lines:
+        if line.startswith('Clean:'):
+            translation = line.replace('Clean:', '').strip()
+            break
+    if translation is None:
+        translation = lines[0].strip()  # fallback
+
+    # Get the feature count (video_frames)
+    if len(attention_weights.shape) == 4:
+        video_frames = attention_weights.shape[3]
+    elif len(attention_weights.shape) == 3:
+        video_frames = attention_weights.shape[2]
+    else:
+        video_frames = attention_weights.shape[1]
+
+    print(f"  Sample: {sample_dir.name}")
+    print(f"    Attention shape: {attention_weights.shape}")
+    print(f"    Translation: {translation}")
+    print(f"    Features: {video_frames}")
+
+    # Create the analyzer
+    analyzer = AttentionAnalyzer(
+        attentions=attention_weights,
+        translation=translation,
+        video_frames=video_frames,
+        video_path=str(video_path) if video_path else None
+    )
+
+    # Regenerate frame_alignment.png (with the original-frame layer)
+    print(f"    Regenerating frame_alignment.png...")
+    analyzer.plot_frame_alignment(sample_dir / "frame_alignment.png")
+
+    # Regenerate gloss_to_frames.png (with the feature-index layer)
+    if video_path and Path(video_path).exists():
+        print(f"    Regenerating gloss_to_frames.png...")
+        try:
+            analyzer.generate_gloss_to_frames_visualization(sample_dir / "gloss_to_frames.png")
+        except Exception as e:
+            print(f"    Warning: gloss_to_frames generation failed: {e}")
+
+    return True
+
+
+def main():
+    if len(sys.argv) < 2:
+        print("Usage: python regenerate_visualizations.py <detailed_prediction_dir> [<video_path>]")
+        print("\nExample:")
+        print("  python regenerate_visualizations.py detailed_prediction_20251226_161117 ./eval/tiny_test_data/videos/632051.mp4")
+        sys.exit(1)
+
+    pred_dir = Path(sys.argv[1])
+    video_path = Path(sys.argv[2]) if len(sys.argv) > 2 else None
+
+    if not pred_dir.exists():
+        print(f"Error: prediction directory does not exist: {pred_dir}")
+        sys.exit(1)
+
+    if video_path and not video_path.exists():
+        print(f"Warning: video file does not exist: {video_path}")
+        video_path = None
+
+    print(f"Regenerating visualizations:")
+    print(f"  Prediction directory: {pred_dir}")
+    print(f"  Video path: {video_path if video_path else 'N/A'}")
+    print()
+
+    # Process all samples
+    success_count = 0
+    for sample_dir in sorted(pred_dir.glob("sample_*")):
+        if sample_dir.is_dir():
+            if regenerate_sample_visualizations(sample_dir, video_path):
+                success_count += 1
+
+    print(f"\n✓ Done! Successfully processed {success_count} samples")
+
+
+if __name__ == "__main__":
+    main()
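The rank handling in regenerate_visualizations.py assumes the feature axis is always the last one; a small sketch with made-up shapes:

import numpy as np

# Made-up attention shapes; in each layout the feature axis is last,
# which is what the shape[3] / shape[2] / shape[1] branches above rely on.
for shape in [(5, 1, 10, 24), (5, 1, 24), (5, 24)]:
    a = np.zeros(shape)
    if a.ndim == 4:
        video_frames = a.shape[3]
    elif a.ndim == 3:
        video_frames = a.shape[2]
    else:
        video_frames = a.shape[1]
    print(a.ndim, "->", video_frames)  # prints 4 -> 24, 3 -> 24, 2 -> 24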
SignX/inference.sh CHANGED

@@ -247,33 +247,50 @@ if [ -f "$TEMP_DIR/prediction.txt" ]; then
         sample_count=$(find "$dest_path" -maxdepth 1 -type d -name "sample_*" | wc -l)
         echo " ✓ Saved detailed analysis for $sample_count samples to: $dest_path"

-        #
+        # Step 1: generate the feature-to-frame mapping
         echo ""
-        echo -e "${BLUE}Generating
-        if [ -f "$SCRIPT_DIR/eval/
-        # Switch
+        echo -e "${BLUE}Generating feature-to-frame mapping...${NC}"
+        if [ -f "$SCRIPT_DIR/eval/generate_feature_mapping.py" ]; then
+            # Switch to the signx-slt environment (it has cv2)
            conda activate signx-slt
+            for sample_dir in "$dest_path"/sample_*; do
+                if [ -d "$sample_dir" ]; then
+                    python "$SCRIPT_DIR/eval/generate_feature_mapping.py" "$sample_dir" "$VIDEO_PATH" 2>&1 | grep -E "(Feature count|Original frame count|Mapping file generated|Error)"
+                fi
+            done
+        else
+            echo " ⓘ generate_feature_mapping.py not found, skipping feature mapping generation"
+        fi
+
+        # Step 2: regenerate all visualizations (using the latest code)
+        echo ""
+        echo -e "${BLUE}Regenerating visualizations (using the latest code)...${NC}"
+        if [ -f "$SCRIPT_DIR/eval/regenerate_visualizations.py" ]; then
+            # Already in the signx-slt environment
+            python "$SCRIPT_DIR/eval/regenerate_visualizations.py" "$dest_path" "$VIDEO_PATH"
+        else
+            echo " ⓘ regenerate_visualizations.py not found, falling back to the old version"
+            if [ -f "$SCRIPT_DIR/eval/generate_gloss_frames.py" ]; then
+                python "$SCRIPT_DIR/eval/generate_gloss_frames.py" "$dest_path" "$VIDEO_PATH"
             fi
+        fi

+        # Step 3: generate the interactive HTML visualization
+        echo ""
+        echo -e "${BLUE}Generating interactive HTML visualization...${NC}"
+        if [ -f "$SCRIPT_DIR/eval/generate_interactive_alignment.py" ]; then
+            # Process all samples
+            for sample_dir in "$dest_path"/sample_*; do
+                if [ -d "$sample_dir" ]; then
+                    python "$SCRIPT_DIR/eval/generate_interactive_alignment.py" "$sample_dir"
+                fi
+            done
         else
+            echo " ⓘ generate_interactive_alignment.py not found, skipping interactive HTML generation"
         fi
+
+        # Switch back to the slt_tf1 environment
+        conda activate slt_tf1
     done
 fi
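For reference, the per-sample loop added to inference.sh could equally be driven from Python; a rough equivalent sketch, assuming the same script locations and output layout (the paths here are illustrative, not hard-coded in the repo):

import subprocess
from pathlib import Path

# Illustrative paths; inference.sh derives these from $dest_path and $VIDEO_PATH.
dest_path = Path("detailed_prediction_20251226_155113")
video_path = "eval/tiny_test_data/videos/632051.mp4"

for sample_dir in sorted(dest_path.glob("sample_*")):
    if sample_dir.is_dir():
        # Step 1: feature-to-frame mapping, then Step 3: interactive HTML.
        subprocess.run(["python", "eval/generate_feature_mapping.py",
                        str(sample_dir), video_path], check=False)
        subprocess.run(["python", "eval/generate_interactive_alignment.py",
                        str(sample_dir)], check=False)

# Step 2 runs once per prediction directory rather than per sample.
subprocess.run(["python", "eval/regenerate_visualizations.py",
                str(dest_path), video_path], check=False)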
SignX/inference_output.txt CHANGED

@@ -1 +1 @@
-<unk>
+<unk> #IF FRIEND GROUP/TOGE@@ TH@@ E@@ R DEPART PARTY IX-1p FINISH JO@@ I@@ N IX-1p

SignX/inference_output.txt.clean CHANGED

@@ -1 +1 @@
-<unk>
+<unk> #IF FRIEND GROUP/TOGETHER DEPART PARTY IX-1p FINISH JOIN IX-1p
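The difference between inference_output.txt and its .clean twin is just BPE detokenization: removing the "@@ " continuation markers rejoins the subword pieces. A one-liner reproduces the clean line from the raw one:

import re

bpe = "<unk> #IF FRIEND GROUP/TOGE@@ TH@@ E@@ R DEPART PARTY IX-1p FINISH JO@@ I@@ N IX-1p"
clean = re.sub(r"@@ ", "", bpe)  # strip BPE continuation markers
print(clean)  # <unk> #IF FRIEND GROUP/TOGETHER DEPART PARTY IX-1p FINISH JOIN IX-1p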
SignX/models/evalu.py CHANGED

@@ -386,6 +386,48 @@ def dump_detailed_attention_output(tranes, output, indices, attentions, video_pa
         f.write(f"With BPE: {trans}\n")
         f.write(f"Clean: {trans_clean}\n")

+        # Calculate and save feature-to-frame mapping
+        if video_path and os.path.exists(video_path):
+            try:
+                import cv2
+                import json
+
+                # Get original frame count from video
+                cap = cv2.VideoCapture(video_path)
+                original_frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
+                cap.release()
+
+                # Feature count from attention matrix
+                feature_count = sample_attn.shape[2]
+
+                # Calculate uniform mapping: feature i -> frames [start, end]
+                frame_mapping = []
+                for feat_idx in range(feature_count):
+                    start_frame = int(feat_idx * original_frame_count / feature_count)
+                    end_frame = int((feat_idx + 1) * original_frame_count / feature_count)
+                    frame_mapping.append({
+                        "feature_index": feat_idx,
+                        "frame_start": start_frame,
+                        "frame_end": end_frame,
+                        "frame_count": end_frame - start_frame
+                    })
+
+                # Save mapping
+                mapping_data = {
+                    "original_frame_count": original_frame_count,
+                    "feature_count": feature_count,
+                    "downsampling_ratio": original_frame_count / feature_count,
+                    "mapping": frame_mapping
+                }
+
+                with open(sample_dir / "feature_frame_mapping.json", 'w') as f:
+                    json.dump(mapping_data, f, indent=2)
+
+                tf.logging.info(f"  ✓ Feature-to-frame mapping saved ({original_frame_count} frames → {feature_count} features)")
+
+            except Exception as e:
+                tf.logging.warning(f"Failed to generate feature-to-frame mapping: {e}")
+
         # Generate visualizations with the attention_analysis module
         try:
             # Add the eval directory to the path
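Downstream, the saved feature_frame_mapping.json lets a significant feature index be traced back to original video frames. A hypothetical consumer sketch (the file layout follows this commit; the timestamp math relies on the fps field, which generate_feature_mapping.py writes but the evalu.py variant above does not):

import json

# Hypothetical sample path following this commit's output layout.
with open("detailed_prediction_20251226_155113/sample_000/feature_frame_mapping.json") as f:
    mapping_data = json.load(f)

feat_idx = 12  # e.g. a feature flagged as significant by the attention analysis
entry = mapping_data["mapping"][feat_idx]
print(f"feature {feat_idx} covers frames {entry['frame_start']}-{entry['frame_end']}")

fps = mapping_data.get("fps")  # present only in generate_feature_mapping.py output
if fps:
    print(f"≈ {entry['frame_start'] / fps:.2f}s to {entry['frame_end'] / fps:.2f}s")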