2026-04-15 16:56:39,772 - INFO - train_pipeline - Logging to ./output_checkpoints/graphcodebert-rdrop/training.log
2026-04-15 16:56:39,774 - INFO - train_pipeline - Loading model & tokenizer for 'microsoft/graphcodebert-base'
2026-04-15 16:56:42,689 - INFO - train_pipeline - Model placed on cuda
2026-04-15 16:56:42,692 - INFO - train_pipeline - Base model weights frozen – only classifier head will be trained.
2026-04-15 16:56:42,694 - INFO - train_pipeline - ===== Model Architecture =====
2026-04-15 16:56:42,697 - INFO - train_pipeline - 
RobertaForSequenceClassification(
  (roberta): RobertaModel(
    (embeddings): RobertaEmbeddings(
      (word_embeddings): Embedding(50265, 768, padding_idx=1)
      (position_embeddings): Embedding(514, 768, padding_idx=1)
      (token_type_embeddings): Embedding(1, 768)
      (LayerNorm): LayerNorm((768,), eps=1e-05, elementwise_affine=True)
      (dropout): Dropout(p=0.2, inplace=False)
    )
    (encoder): RobertaEncoder(
      (layer): ModuleList(
        (0-11): 12 x RobertaLayer(
          (attention): RobertaAttention(
            (self): RobertaSdpaSelfAttention(
              (query): Linear(in_features=768, out_features=768, bias=True)
              (key): Linear(in_features=768, out_features=768, bias=True)
              (value): Linear(in_features=768, out_features=768, bias=True)
              (dropout): Dropout(p=0.2, inplace=False)
            )
            (output): RobertaSelfOutput(
              (dense): Linear(in_features=768, out_features=768, bias=True)
              (LayerNorm): LayerNorm((768,), eps=1e-05, elementwise_affine=True)
              (dropout): Dropout(p=0.2, inplace=False)
            )
          )
          (intermediate): RobertaIntermediate(
            (dense): Linear(in_features=768, out_features=3072, bias=True)
            (intermediate_act_fn): GELUActivation()
          )
          (output): RobertaOutput(
            (dense): Linear(in_features=3072, out_features=768, bias=True)
            (LayerNorm): LayerNorm((768,), eps=1e-05, elementwise_affine=True)
            (dropout): Dropout(p=0.2, inplace=False)
          )
        )
      )
    )
  )
  (classifier): RobertaClassificationHead(
    (dense): Linear(in_features=768, out_features=768, bias=True)
    (dropout): Dropout(p=0.2, inplace=False)
    (out_proj): Linear(in_features=768, out_features=2, bias=True)
  )
)
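
The dump above is what print(model) produces for a RobertaForSequenceClassification, and the "weights frozen" line indicates the encoder's requires_grad flags were cleared before training. A minimal sketch of that setup, assuming the standard transformers API; the dropout kwargs are an assumption inferred from the p=0.2 values in the dump (the RoBERTa config default is 0.1), not the pipeline's actual code:

from transformers import AutoModelForSequenceClassification

# Assumption: dropout raised from the 0.1 default to the 0.2 seen in the dump
model = AutoModelForSequenceClassification.from_pretrained(
    "microsoft/graphcodebert-base",
    num_labels=2,
    hidden_dropout_prob=0.2,
    attention_probs_dropout_prob=0.2,
)
model.to("cuda")

# Freeze the RoBERTa encoder; only the classification head stays trainable
for param in model.roberta.parameters():
    param.requires_grad = False

print(model)  # yields an architecture dump like the one logged above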
2026-04-15 16:56:42,702 - INFO - train_pipeline - ===== Parameter Summary =====
2026-04-15 16:56:42,703 - INFO - train_pipeline - Total Parameters:         124,647,170
2026-04-15 16:56:42,705 - INFO - train_pipeline - Trainable Parameters:     592,130
2026-04-15 16:56:42,706 - INFO - train_pipeline - Non-trainable Parameters: 124,055,040
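
The split is consistent with only the classification head being trainable: the head's dense layer holds 768*768 + 768 = 590,592 parameters and out_proj holds 768*2 + 2 = 1,538, which sums to the 592,130 trainable parameters logged. A sketch of how such a summary can be computed (the helper name is illustrative, not taken from the pipeline):

def count_parameters(model):
    # numel() counts elements per tensor; requires_grad marks trainable ones
    total = sum(p.numel() for p in model.parameters())
    trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
    return total, trainable

total, trainable = count_parameters(model)
print(f"Total Parameters:         {total:,}")
print(f"Trainable Parameters:     {trainable:,}")
print(f"Non-trainable Parameters: {total - trainable:,}")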
2026-04-15 16:56:42,709 - INFO - train_pipeline - ===== Tokenizer Summary =====
2026-04-15 16:56:42,725 - INFO - train_pipeline - Vocab size: 50265 | Special tokens: ['<s>', '</s>', '<unk>', '<pad>', '<mask>']
2026-04-15 16:56:42,726 - INFO - train_pipeline - ===== End of Architecture Log =====
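
The tokenizer numbers match the stock GraphCodeBERT (RoBERTa) vocabulary; a sketch of how to reproduce them with standard transformers calls, not necessarily the pipeline's own logging code:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("microsoft/graphcodebert-base")
print(tokenizer.vocab_size)          # 50265
print(tokenizer.all_special_tokens)  # ['<s>', '</s>', '<unk>', '<pad>', '<mask>']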
2026-04-15 16:56:49,788 - INFO - train_pipeline - === Starting training ===
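
The run directory name (graphcodebert-rdrop) together with the raised dropout suggests R-Drop regularization: each batch is forwarded through the model twice so dropout yields two distinct predictive distributions, and a symmetric KL term pulls them together. A minimal sketch of that loss under the standard R-Drop formulation; the alpha weight and function name are assumptions, as the log does not show the loss code:

import torch.nn.functional as F

def rdrop_loss(logits1, logits2, labels, alpha=1.0):
    # Cross-entropy averaged over the two stochastic forward passes
    ce = 0.5 * (F.cross_entropy(logits1, labels) + F.cross_entropy(logits2, labels))
    # Symmetric KL divergence between the two predictive distributions
    log_p = F.log_softmax(logits1, dim=-1)
    log_q = F.log_softmax(logits2, dim=-1)
    kl = 0.5 * (
        F.kl_div(log_p, log_q, reduction="batchmean", log_target=True)
        + F.kl_div(log_q, log_p, reduction="batchmean", log_target=True)
    )
    return ce + alpha * kl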