Pranov888 committed on
Commit
2ea16fa
·
verified ·
1 Parent(s): 77ac706

Upload folder using huggingface_hub

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. config/data_config.yaml +76 -0
  2. config/model_config.yaml +52 -0
  3. config/training_config.yaml +67 -0
  4. models/final/cnn_final.pt +3 -0
  5. models/final/svm_and_scaler_final.pkl +3 -0
  6. models/final/xgboost_final.json +0 -0
  7. models/fold_00_H_S1/cnn_weights.pt +3 -0
  8. models/fold_00_H_S1/svm_and_scaler.pkl +3 -0
  9. models/fold_00_H_S1/xgboost.json +0 -0
  10. models/fold_01_H_S10/cnn_weights.pt +3 -0
  11. models/fold_01_H_S10/svm_and_scaler.pkl +3 -0
  12. models/fold_01_H_S10/xgboost.json +0 -0
  13. models/fold_02_H_S11/cnn_weights.pt +3 -0
  14. models/fold_02_H_S11/svm_and_scaler.pkl +3 -0
  15. models/fold_02_H_S11/xgboost.json +0 -0
  16. models/fold_03_H_S12/cnn_weights.pt +3 -0
  17. models/fold_03_H_S12/svm_and_scaler.pkl +3 -0
  18. models/fold_03_H_S12/xgboost.json +0 -0
  19. models/fold_04_H_S13/cnn_weights.pt +3 -0
  20. models/fold_04_H_S13/svm_and_scaler.pkl +3 -0
  21. models/fold_04_H_S13/xgboost.json +0 -0
  22. models/fold_05_H_S14/cnn_weights.pt +3 -0
  23. models/fold_05_H_S14/svm_and_scaler.pkl +3 -0
  24. models/fold_05_H_S14/xgboost.json +0 -0
  25. models/fold_06_H_S15/cnn_weights.pt +3 -0
  26. models/fold_06_H_S15/svm_and_scaler.pkl +3 -0
  27. models/fold_06_H_S15/xgboost.json +0 -0
  28. models/fold_07_H_S16/cnn_weights.pt +3 -0
  29. models/fold_07_H_S16/svm_and_scaler.pkl +3 -0
  30. models/fold_07_H_S16/xgboost.json +0 -0
  31. models/fold_08_H_S17/cnn_weights.pt +3 -0
  32. models/fold_08_H_S17/svm_and_scaler.pkl +3 -0
  33. models/fold_08_H_S17/xgboost.json +0 -0
  34. models/fold_09_H_S18/cnn_weights.pt +3 -0
  35. models/fold_09_H_S18/svm_and_scaler.pkl +3 -0
  36. models/fold_09_H_S18/xgboost.json +0 -0
  37. models/fold_10_H_S19/cnn_weights.pt +3 -0
  38. models/fold_10_H_S19/svm_and_scaler.pkl +3 -0
  39. models/fold_10_H_S19/xgboost.json +0 -0
  40. models/fold_11_H_S2/cnn_weights.pt +3 -0
  41. models/fold_11_H_S2/svm_and_scaler.pkl +3 -0
  42. models/fold_11_H_S2/xgboost.json +0 -0
  43. models/fold_12_H_S20/cnn_weights.pt +3 -0
  44. models/fold_12_H_S20/svm_and_scaler.pkl +3 -0
  45. models/fold_12_H_S20/xgboost.json +0 -0
  46. models/fold_13_H_S21/cnn_weights.pt +3 -0
  47. models/fold_13_H_S21/svm_and_scaler.pkl +3 -0
  48. models/fold_13_H_S21/xgboost.json +0 -0
  49. models/fold_14_H_S22/cnn_weights.pt +3 -0
  50. models/fold_14_H_S22/svm_and_scaler.pkl +3 -0
config/data_config.yaml ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Data Configuration
2
+
3
+ # Primary Dataset: Figshare MDD
4
+ figshare:
5
+ url: "https://figshare.com/articles/dataset/EEG_Data_New/4244171"
6
+ data_dir: "data/raw/figshare"
7
+ n_subjects: 64
8
+ n_mdd: 34
9
+ n_healthy: 30
10
+ n_channels: 19
11
+ sampling_rate: 256
12
+ duration_sec: 300 # 5 minutes
13
+ conditions: ["EC", "EO"] # Eyes Closed, Eyes Open
14
+
15
+ # Secondary Dataset: Kaggle (for benchmarking)
16
+ kaggle:
17
+ url: "https://www.kaggle.com/datasets/tocodeforsoul/depression-rest-eeg-features"
18
+ data_dir: "data/raw/kaggle"
19
+ n_subjects: 232
20
+ n_features: 31
21
+ note: "Pre-extracted features only"
22
+
23
+ # Preprocessing
24
+ preprocessing:
25
+ target_sampling_rate: 250 # Resample to this rate
26
+ bandpass:
27
+ low: 1
28
+ high: 45
29
+ notch:
30
+ freqs: [50, 60] # Power line noise (50Hz EU, 60Hz US)
31
+ filter_order: 101
32
+ ica:
33
+ method: "picard"
34
+ n_components: 0.99 # Variance explained
35
+ max_iter: 500
36
+ reference: "average"
37
+
38
+ # Segmentation
39
+ segmentation:
40
+ epoch_length_sec: 4.0
41
+ overlap_ratio: 0.5 # 50% overlap
42
+ min_epochs_per_subject: 10
43
+
44
+ # 10-20 Electrode Montage
45
+ electrodes:
46
+ names: ["Fp1", "Fp2", "F7", "F3", "Fz", "F4", "F8",
47
+ "T3", "C3", "Cz", "C4", "T4",
48
+ "T5", "P3", "Pz", "P4", "T6",
49
+ "O1", "O2"]
50
+ # Standard 10-20 3D positions (normalized)
51
+ positions:
52
+ Fp1: [-0.31, 0.95, -0.03]
53
+ Fp2: [0.31, 0.95, -0.03]
54
+ F7: [-0.81, 0.59, -0.03]
55
+ F3: [-0.55, 0.67, 0.50]
56
+ Fz: [0.00, 0.71, 0.70]
57
+ F4: [0.55, 0.67, 0.50]
58
+ F8: [0.81, 0.59, -0.03]
59
+ T3: [-1.00, 0.00, -0.03]
60
+ C3: [-0.71, 0.00, 0.71]
61
+ Cz: [0.00, 0.00, 1.00]
62
+ C4: [0.71, 0.00, 0.71]
63
+ T4: [1.00, 0.00, -0.03]
64
+ T5: [-0.81, -0.59, -0.03]
65
+ P3: [-0.55, -0.67, 0.50]
66
+ Pz: [0.00, -0.71, 0.70]
67
+ P4: [0.55, -0.67, 0.50]
68
+ T6: [0.81, -0.59, -0.03]
69
+ O1: [-0.31, -0.95, -0.03]
70
+ O2: [0.31, -0.95, -0.03]
71
+
72
+ # Graph Construction
73
+ graph:
74
+ spatial_k_neighbors: 6 # K-nearest neighbors for spatial adjacency
75
+ functional_threshold: 0.5 # Correlation threshold for functional connectivity
76
+ combine_method: "union" # How to combine spatial and functional edges
config/model_config.yaml ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Model Configuration for EEG Depression Detection
2
+ # Optimized for <8GB VRAM
3
+
4
+ # Transformer Encoder (for CWT scalograms)
5
+ transformer:
6
+ d_model: 128
7
+ nhead: 4
8
+ num_layers: 4
9
+ dim_ff: 512
10
+ dropout: 0.1
11
+ patch_size: [8, 16] # [freq, time]
12
+ max_seq_len: 65 # 64 patches + 1 CLS token
13
+
14
+ # Graph Attention Network (for electrode relationships)
15
+ gnn:
16
+ num_nodes: 19 # 10-20 montage electrodes
17
+ node_feat_dim: 576 # WPD features per electrode
18
+ hidden_dim: 128
19
+ num_heads: 4
20
+ num_layers: 3
21
+ dropout: 0.3
22
+ pooling_ratio: 0.5
23
+ adjacency_type: "hybrid" # spatial + functional
24
+
25
+ # Attention-Based Fusion
26
+ fusion:
27
+ trans_dim: 128
28
+ gnn_dim: 128
29
+ fusion_dim: 128
30
+ num_heads: 4
31
+ dropout: 0.1
32
+ use_gating: true
33
+ strategy: "cross_attention"
34
+
35
+ # Classification Head
36
+ classifier:
37
+ input_dim: 128
38
+ hidden_dims: [64, 32]
39
+ dropout_rates: [0.5, 0.3]
40
+ num_classes: 1 # Binary classification
41
+
42
+ # Wavelet Feature Extraction
43
+ wpd:
44
+ wavelets: ["db4", "sym5", "coif3"]
45
+ level: 5
46
+ features: ["energy", "entropy", "log_energy", "mean", "std", "skewness", "kurtosis"]
47
+
48
+ cwt:
49
+ wavelet: "cmor1.5-1.0"
50
+ freq_range: [1, 45]
51
+ num_scales: 64
52
+ output_size: [64, 128] # [freq, time]
config/training_config.yaml ADDED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Training Configuration
2
+ # Optimized for <8GB VRAM
3
+
4
+ # Optimizer
5
+ optimizer:
6
+ name: "AdamW"
7
+ lr: 0.0001
8
+ weight_decay: 0.01
9
+ betas: [0.9, 0.999]
10
+
11
+ # Learning Rate Scheduler
12
+ scheduler:
13
+ name: "CosineAnnealingWarmRestarts"
14
+ T_0: 10
15
+ T_mult: 2
16
+ eta_min: 0.000001
17
+
18
+ # Training
19
+ training:
20
+ epochs: 100
21
+ batch_size: 16
22
+ gradient_accumulation_steps: 2 # Effective batch size = 32
23
+ gradient_clip: 1.0
24
+ early_stopping_patience: 15
25
+ early_stopping_delta: 0.001
26
+
27
+ # Memory Optimization
28
+ memory:
29
+ mixed_precision: true # FP16 training
30
+ gradient_checkpointing: true # Trade compute for memory
31
+ pin_memory: true
32
+ num_workers: 4
33
+ prefetch_factor: 2
34
+
35
+ # Loss Function
36
+ loss:
37
+ name: "BCEWithLogitsLoss"
38
+ label_smoothing: 0.1
39
+ class_weight: "balanced" # Compute from training data
40
+
41
+ # Data Augmentation
42
+ augmentation:
43
+ enabled: true
44
+ time_shift_ratio: 0.1 # Max 10% shift
45
+ time_warp_sigma: 0.2
46
+ channel_dropout_prob: 0.1
47
+ noise_snr_db: 20
48
+
49
+ # Cross-Validation
50
+ cross_validation:
51
+ strategy: "loso" # Leave-One-Subject-Out
52
+ n_splits: 5 # For stratified k-fold fallback
53
+ shuffle: true
54
+ random_state: 42
55
+
56
+ # Checkpointing
57
+ checkpointing:
58
+ save_best: true
59
+ save_last: true
60
+ save_every_n_epochs: 10
61
+ checkpoint_dir: "checkpoints"
62
+
63
+ # Logging
64
+ logging:
65
+ log_every_n_steps: 10
66
+ log_dir: "logs"
67
+ use_wandb: false # Set to true if using Weights & Biases
models/final/cnn_final.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0435c9662f2b12f66b929c4f44015197bf1cbba359225fd23c369f5f72f4bf2b
3
+ size 132
models/final/svm_and_scaler_final.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a0f670e0896728c2422594572b7c3be3d47bc0ec467339cad7ee7a6a6974af71
3
+ size 133
models/final/xgboost_final.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_00_H_S1/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9b42758731b7e0f242723e98ed688689c177dc3e56e75e36c5494789564d65b7
3
+ size 132
models/fold_00_H_S1/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:804531a4ea9e4e7bb7d050e0d6c99e54efa4c02529aab19c9737ef6c30285170
3
+ size 133
models/fold_00_H_S1/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_01_H_S10/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e60135ed134a2959b65deb5f72f3df250f426465661d11ffe8e3c9b080073f3f
3
+ size 132
models/fold_01_H_S10/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:47cae4f580ac69afe895ee205763173ba05206a45f67c13bd653b4d50d9f7c56
3
+ size 133
models/fold_01_H_S10/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_02_H_S11/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:01560172fbeb7f90b638746e48661df0ff823a4f540dce476b0d6ab924469705
3
+ size 132
models/fold_02_H_S11/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6ee20ebb79fed0ece21c103789bdcadf40d2adf112cee90aa9761cefa4d64a47
3
+ size 133
models/fold_02_H_S11/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_03_H_S12/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:74eddffb4544d44e3e1b71d5cff3e2e7282cd71b14a6211f673219b57e3fdeb4
3
+ size 132
models/fold_03_H_S12/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a714e728dc842cd9d26301db693455cde3c6d008f64b941663e01d1778ba2fbd
3
+ size 133
models/fold_03_H_S12/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_04_H_S13/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2d42083a44bb3736111ea9c15111049dd56f3b9f1c32ed60ca8688d0b6150da9
3
+ size 132
models/fold_04_H_S13/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:90dab45c5ad913128dc43dc903e54508572410aa53d74549d2e45e7fd1295e0b
3
+ size 133
models/fold_04_H_S13/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_05_H_S14/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:716f26287d68a63c1115ebdb847ac6becac88c8ba4d6a1f3446dfa37c58df6fe
3
+ size 132
models/fold_05_H_S14/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:819f53ffce03c1e0e9e17247127213246931ae7e09c09ceb65483958ebbd8f59
3
+ size 133
models/fold_05_H_S14/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_06_H_S15/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5d145da358f03bc3ba74fa06d2a1dbb9599948e68dbdaf3dcd9696f00679a6b1
3
+ size 132
models/fold_06_H_S15/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:12747aec85dbbf4d5a84986f3ca68e40a94ea6a2a247c04e4901c10cd95e91b0
3
+ size 133
models/fold_06_H_S15/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_07_H_S16/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d745a96a85e6d8e1951642143a1934ed7327697102c6d9b0a42e3f9bd38b2ca8
3
+ size 132
models/fold_07_H_S16/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cb3c0babee5477b376860bb3c732815d4f7f41499d896866a83bb7b5eab1ffd9
3
+ size 133
models/fold_07_H_S16/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_08_H_S17/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:56f3d55182ef69e07a156141b5f909b7e1d6bfa21cd5c17e54e1a7eb844068d6
3
+ size 132
models/fold_08_H_S17/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:40b640f853a11a910be9bc98eace35f49badf4f34f15f1798400ccf8491bd7af
3
+ size 133
models/fold_08_H_S17/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_09_H_S18/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a31d867094b72289ed2b79544cff18a64c5ccf9d73f72468b1fcd572a0bfd4ff
3
+ size 132
models/fold_09_H_S18/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dc66f488928e8445500eeccd01936768b08c7dc243a388ced4a298611d580a7a
3
+ size 133
models/fold_09_H_S18/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_10_H_S19/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:29f03b63340ccd8a62e7eef3efcb0bf8b626cca7eb5c2a8c3b8869479153a98a
3
+ size 132
models/fold_10_H_S19/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:83b09a2f3011274ee6264f4162f1f667966d345280a24535e04820572b7ba4e1
3
+ size 133
models/fold_10_H_S19/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_11_H_S2/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8806d1cd437642964c3a2e0eaef9a1b7ea09faa413e55993636f3ec232f16d5f
3
+ size 132
models/fold_11_H_S2/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c2b7408eb529ab87fb06db8a49a04df6bc17b404649e672a1b218e3afbb6f805
3
+ size 133
models/fold_11_H_S2/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_12_H_S20/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:595ebeda65cd10892db572fb4428a6f69fb52087162aaacbf27b01c9724a3d68
3
+ size 132
models/fold_12_H_S20/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aa0360b6d8c6a8dadfdc27df53fe91199d64a5546b03b14f0134548eb85d0729
3
+ size 133
models/fold_12_H_S20/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_13_H_S21/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:86fe4c5a1fa4305a1de75dacdffdbc4ac78399365cd5b743f548bf720b24f191
3
+ size 132
models/fold_13_H_S21/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6638e537a7d27a51f3bff5e8aae9caa4e1d19a780e8e47d10001c0bff2fe0db3
3
+ size 133
models/fold_13_H_S21/xgboost.json ADDED
The diff for this file is too large to render. See raw diff
 
models/fold_14_H_S22/cnn_weights.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cddb547d0de648df2fc0e0c8174f2996af9e6a0cabe94f73b971a33c5d4f8a84
3
+ size 132
models/fold_14_H_S22/svm_and_scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c4d9a81ae75c1c8c28e899208bd075a936dd9342021c38bd2463db3f3d54439d
3
+ size 133