File size: 2,954 Bytes
bf31071
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b121266
bf31071
 
 
b121266
bf31071
 
 
 
 
 
 
 
b121266
bf31071
 
 
 
b121266
bf31071
b121266
 
 
 
 
 
 
 
 
 
 
bf31071
b121266
 
bf31071
 
 
b121266
bf31071
b121266
bf31071
 
 
 
b121266
bf31071
 
 
 
 
 
 
b121266
bf31071
 
 
 
b121266
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
"""Prediction heads for SwipeTransformer."""

import torch
import torch.nn as nn


class CharacterPredictionHead(nn.Module):
    """Map transformer hidden states to character-vocabulary logits.

    A BERT-style masked-LM head: dense transform, GELU, layer norm,
    then a linear projection into the vocabulary space.
    """

    def __init__(self, d_model: int, vocab_size: int):
        super().__init__()
        # Attribute names are kept stable so checkpoints load unchanged.
        self.dense = nn.Linear(d_model, d_model)
        self.layer_norm = nn.LayerNorm(d_model)
        self.decoder = nn.Linear(d_model, vocab_size)
        self.activation = nn.GELU()

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        """Project hidden states to vocabulary logits.

        Args:
            hidden_states: [batch, seq_len, d_model] encoder outputs.

        Returns:
            [batch, seq_len, vocab_size] unnormalized logits.
        """
        transformed = self.layer_norm(self.activation(self.dense(hidden_states)))
        return self.decoder(transformed)


class PathPredictionHead(nn.Module):
    """Regress masked path-coordinate features from hidden states.

    Shares the dense/GELU/layer-norm transform shape of the character
    head, then decodes to ``output_dim`` path features. When the output
    width is the canonical 6, each feature group is squashed into its
    valid range; otherwise the raw regression is returned as-is.
    """

    def __init__(self, d_model: int, output_dim: int = 6):
        super().__init__()
        # Attribute names are kept stable so checkpoints load unchanged.
        self.dense = nn.Linear(d_model, d_model)
        self.layer_norm = nn.LayerNorm(d_model)
        self.decoder = nn.Linear(d_model, output_dim)
        self.activation = nn.GELU()

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        """Decode hidden states into (optionally constrained) path features.

        Args:
            hidden_states: [batch, seq_len, d_model] encoder outputs.

        Returns:
            [batch, seq_len, output_dim] path features.
        """
        hidden = self.dense(hidden_states)
        hidden = self.activation(hidden)
        hidden = self.layer_norm(hidden)
        raw = self.decoder(hidden)

        # Non-standard output width: leave the regression unconstrained.
        if raw.shape[-1] != 6:
            return raw

        # Per-feature constraints:
        #   x, y   -> sigmoid: normalized coordinates in [0, 1]
        #   dx, dy -> tanh: signed deltas, roughly [-1, 1]
        #   ds     -> softplus: non-negative
        #   log_dt -> softplus: non-negative
        pos = torch.sigmoid(raw[..., 0:2])
        delta = torch.tanh(raw[..., 2:4])
        step = torch.nn.functional.softplus(raw[..., 4:5])
        log_dt = torch.nn.functional.softplus(raw[..., 5:6])
        return torch.cat([pos, delta, step, log_dt], dim=-1)


class LengthPredictionHead(nn.Module):
    """Regress sequence length (e.g., swipable character count) from CLS embedding.

    Dense transform with GELU and layer norm, followed by a single-unit
    linear regressor. The output is unconstrained (may be negative);
    clamping/rounding is left to the caller or the loss.
    """

    def __init__(self, d_model: int):
        super().__init__()
        self.dense = nn.Linear(d_model, d_model)
        self.activation = nn.GELU()
        self.norm = nn.LayerNorm(d_model)
        self.regressor = nn.Linear(d_model, 1)  # predict expected length directly

    def forward(self, cls_features: torch.Tensor) -> torch.Tensor:
        """
        Args:
            cls_features: [batch, d_model] CLS embeddings

        Returns:
            [batch] predicted lengths (the trailing singleton regressor
            dimension is squeezed away).
        """
        x = self.dense(cls_features)
        x = self.activation(x)
        x = self.norm(x)
        # squeeze(-1) collapses [batch, 1] -> [batch]; the previous
        # docstring incorrectly advertised a [batch, 1] return.
        return self.regressor(x).squeeze(-1)