Columns:

column            type      value range
repo              string    length 2 – 152
file              string    length 15 – 239
code              string    length 0 – 58.4M
file_length       int64     0 – 58.4M
avg_line_length   float64   0 – 1.81M
max_line_length   int64     0 – 12.7M
extension_type    string    364 distinct values
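The three numeric columns read like simple statistics over the code text. Below is a minimal Python sketch of how such values could be computed, assuming file_length is the character count of code, avg_line_length the mean length of its newline-separated lines, and max_line_length the longest single line; the dump itself does not define these, and the helper file_stats is hypothetical.

```python
from pathlib import Path


def file_stats(path: str, code: str) -> dict:
    """Recompute the derived columns for one source file.

    The definitions below are assumptions, not documentation: they are
    merely consistent with the sample rows (e.g. an empty __init__.py
    yields 0 for all three statistics).
    """
    lines = code.split("\n")
    return {
        "file": path,
        "file_length": len(code),                                    # total characters
        "avg_line_length": sum(len(l) for l in lines) / len(lines),  # mean line length
        "max_line_length": max(len(l) for l in lines),               # longest single line
        "extension_type": Path(path).suffix.lstrip(".") or None,     # "py", "cpp", ...
    }
```

These formulas are only a sketch; the dataset's actual extraction code is not part of this dump.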
Each record below lists its column values in the order given above: repo (null for every record shown), file, code (a truncated preview), file_length, avg_line_length, max_line_length, extension_type.

null
DA-Transformer-main/fairseq/models/roberta/hub_interface.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import numpy as np import torch import torch.nn as nn import torch.nn.functional as F from fairseq import utils from fairseq.data import encod...
8,857
36.533898
114
py
null
DA-Transformer-main/fairseq/models/roberta/model.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """ RoBERTa: A Robustly Optimized BERT Pretraining Approach. """ import logging import torch import torch.nn as nn import torch.nn.functional...
26,608
36.958631
117
py
null
DA-Transformer-main/fairseq/models/roberta/model_camembert.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """ CamemBERT: a Tasty French Language Model """ from fairseq.models import register_model from .hub_interface import RobertaHubInterface fro...
1,942
37.098039
127
py
null
DA-Transformer-main/fairseq/models/roberta/model_gottbert.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """ GottBERT: a pure German Language Model """ from fairseq.models import register_model from .hub_interface import RobertaHubInterface from ...
1,376
26.54
90
py
null
DA-Transformer-main/fairseq/models/roberta/model_xlmr.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """ Unsupervised Cross-lingual Representation Learning at Scale """ from fairseq.models import register_model from .hub_interface import Robe...
1,442
29.702128
92
py
null
DA-Transformer-main/fairseq/models/speech_to_speech/__init__.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from .modules import * # noqa from .s2s_transformer import * # noqa
248
30.125
65
py
null
DA-Transformer-main/fairseq/models/speech_to_speech/modules.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch from torch import nn from fairseq.models import FairseqEncoder from fairseq.models.transformer import Linear class CTCDecoder(...
2,214
35.916667
88
py
null
DA-Transformer-main/fairseq/models/speech_to_speech/s2s_transformer.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import logging from pathlib import Path from typing import Any, Dict, List, Optional import torch from torch import Tensor from fairseq impo...
25,037
34.565341
110
py
null
DA-Transformer-main/fairseq/models/speech_to_text/__init__.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from .berard import * # noqa from .convtransformer import * # noqa from .s2t_transformer import * # noqa from .xm_transformer import * # n...
361
31.909091
65
py
null
DA-Transformer-main/fairseq/models/speech_to_text/berard.py
#!/usr/bin/env python3 from ast import literal_eval from typing import List, Tuple import torch import torch.nn as nn import torch.nn.functional as F from fairseq import checkpoint_utils, utils from fairseq.data.data_utils import lengths_to_padding_mask from fairseq.models import ( FairseqEncoder, FairseqEnco...
23,124
37.097199
88
py
null
DA-Transformer-main/fairseq/models/speech_to_text/convtransformer.py
#!/usr/bin/env python3 import logging import math from typing import Dict, List, Optional, Tuple import torch import torch.nn as nn import torch.nn.functional as F from fairseq import checkpoint_utils, utils from fairseq.data.data_utils import lengths_to_padding_mask from fairseq.models import ( FairseqEncoder, ...
16,567
35.899777
101
py
null
DA-Transformer-main/fairseq/models/speech_to_text/hub_interface.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from argparse import Namespace import logging from typing import Union, Tuple, Optional import torch import torch.nn as nn import torch.nn.fu...
4,716
36.141732
83
py
null
DA-Transformer-main/fairseq/models/speech_to_text/s2t_conformer.py
import logging import torch from fairseq.models.speech_to_text.s2t_transformer import ( S2TTransformerEncoder, S2TTransformerModel, Conv1dSubsampler, base_architecture as transformer_base_architecture, ) from fairseq.data.data_utils import lengths_to_padding_mask from fairseq.modules.conformer_layer imp...
6,492
39.58125
99
py
null
DA-Transformer-main/fairseq/models/speech_to_text/s2t_transformer.py
#!/usr/bin/env python3 import logging import math from pathlib import Path from typing import Dict, List, Optional, Tuple import torch import torch.nn as nn from torch import Tensor from fairseq import checkpoint_utils, utils from fairseq.data.data_utils import lengths_to_padding_mask from fairseq.models import ( ...
20,008
35.579525
87
py
null
DA-Transformer-main/fairseq/models/speech_to_text/utils.py
# Copyright (c) 2017-present, Facebook, Inc. # All rights reserved. # # This source code is licensed under the license found in the LICENSE file in # the root directory of this source tree. An additional grant of patent rights # can be found in the PATENTS file in the same directory. import logging from collections.a...
18,585
31.953901
88
py
null
DA-Transformer-main/fairseq/models/speech_to_text/xm_transformer.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import copy import logging from typing import Dict, List, Optional, Tuple import numpy as np import torch import torch.nn as nn from torch im...
24,443
34.684672
88
py
null
DA-Transformer-main/fairseq/models/speech_to_text/modules/augmented_memory_attention.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from typing import Tuple, List import torch import torch.nn.functional as F from fairseq.models import FairseqEncoder from fairseq.models.spe...
16,097
31.920245
88
py
null
DA-Transformer-main/fairseq/models/speech_to_text/modules/emformer.py
#!/usr/bin/env python3 # Copyright (c) 2017-present, Facebook, Inc. # All rights reserved. # # This source code is licensed under the license found in the LICENSE file in # the root directory of this source tree. An additional grant of patent rights # can be found in the PATENTS file in the same directory. import mat...
68,457
36.104607
97
py
null
DA-Transformer-main/fairseq/models/text_to_speech/__init__.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from .tacotron2 import * # noqa from .tts_transformer import * # noqa from .fastspeech2 import * # noqa
285
30.777778
65
py
null
DA-Transformer-main/fairseq/models/text_to_speech/codehifigan.py
from argparse import Namespace import torch import torch.nn as nn from fairseq.models.text_to_speech.fastspeech2 import VariancePredictor from fairseq.models.text_to_speech.hifigan import Generator class CodeGenerator(Generator): def __init__(self, cfg): super().__init__(cfg) self.dict = nn.Embed...
3,637
36.895833
92
py
null
DA-Transformer-main/fairseq/models/text_to_speech/fastspeech2.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import logging import torch from torch import nn from fairseq import utils from fairseq.data.data_utils import lengths_to_padding_mask from ...
15,718
34.008909
87
py
null
DA-Transformer-main/fairseq/models/text_to_speech/hifigan.py
import torch import torch.nn as nn import torch.nn.functional as F from torch.nn import Conv1d, ConvTranspose1d from torch.nn.utils import remove_weight_norm, weight_norm LRELU_SLOPE = 0.1 def init_weights(m, mean=0.0, std=0.01): classname = m.__class__.__name__ if classname.find("Conv") != -1: m.wei...
5,648
30.383333
76
py
null
DA-Transformer-main/fairseq/models/text_to_speech/hub_interface.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import logging import random from pathlib import Path from typing import Dict, Optional, Tuple import torch import torch.nn as nn logger = l...
4,678
32.421429
88
py
null
DA-Transformer-main/fairseq/models/text_to_speech/tacotron2.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import logging import torch from torch import nn from torch.nn import functional as F from fairseq.models import ( FairseqEncoder, F...
15,041
38.480315
87
py
null
DA-Transformer-main/fairseq/models/text_to_speech/tts_transformer.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import logging from typing import List, Optional import torch from torch import nn from fairseq import utils from fairseq.data.data_utils im...
16,971
36.301099
104
py
null
DA-Transformer-main/fairseq/models/text_to_speech/vocoder.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import logging import json from typing import Dict import numpy as np import torch from torch import nn import torch.nn.functional as F from...
9,046
33.796154
88
py
null
DA-Transformer-main/fairseq/models/transformer/__init__.py
# Copyright (c) Facebook Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """isort:skip_file""" from .transformer_config import ( TransformerConfig, DEFAULT_MAX_SOURCE_POSITIONS, DEFAULT_MAX_TARGET_POSITIO...
1,488
28.196078
83
py
null
DA-Transformer-main/fairseq/models/transformer/transformer_base.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from typing import Dict, List, Optional, Tuple import torch import torch.nn as nn from torch import Tensor from fairseq import utils from fa...
6,757
36.337017
102
py
null
DA-Transformer-main/fairseq/models/transformer/transformer_config.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import re from dataclasses import dataclass, field, fields from typing import List, Optional from omegaconf import II from fairseq import u...
13,154
39.229358
175
py
null
DA-Transformer-main/fairseq/models/transformer/transformer_decoder.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import math from typing import Any, Dict, List, Optional import torch import torch.nn as nn from torch import Tensor from fairseq import uti...
17,874
35.931818
104
py
null
DA-Transformer-main/fairseq/models/transformer/transformer_encoder.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import math from typing import Dict, List, Optional import torch import torch.nn as nn from fairseq import utils from fairseq.distributed imp...
13,864
37.407202
88
py
null
DA-Transformer-main/fairseq/models/transformer/transformer_legacy.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from fairseq.dataclass.utils import gen_parser_from_dataclass from fairseq.models import ( register_model, register_model_architecture...
13,586
48.228261
159
py
null
DA-Transformer-main/fairseq/models/wav2vec/__init__.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from .wav2vec import * # noqa from .wav2vec2 import * # noqa from .wav2vec2_asr import * # noqa
277
29.888889
65
py
null
DA-Transformer-main/fairseq/models/wav2vec/utils.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import math import torch.nn.functional as F def pad_to_multiple(x, multiple, dim=-1, value=0): # Inspired from https://github.com/lucidr...
680
29.954545
116
py
null
DA-Transformer-main/fairseq/models/wav2vec/wav2vec.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from dataclasses import dataclass, field import logging import math from typing import Optional, Tuple from omegaconf import II import sys im...
20,928
32.167987
179
py
null
DA-Transformer-main/fairseq/models/wav2vec/wav2vec2.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import math from dataclasses import dataclass, field from typing import List, Tuple import numpy as np import torch import torch.nn as nn imp...
43,514
32.863813
120
py
null
DA-Transformer-main/fairseq/models/wav2vec/wav2vec2_asr.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import contextlib import copy import logging import math import re from argparse import Namespace from dataclasses import dataclass, field fro...
26,724
34.680908
96
py
null
DA-Transformer-main/fairseq/modules/__init__.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """isort:skip_file""" from .adaptive_input import AdaptiveInput from .adaptive_softmax import AdaptiveSoftmax from .base_layer import BaseLaye...
3,602
33.314286
79
py
null
DA-Transformer-main/fairseq/modules/adaptive_input.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from typing import List import torch from torch import nn from fairseq.modules.quant_noise import quant_noise class AdaptiveInput(nn.Modu...
2,565
30.292683
82
py
null
DA-Transformer-main/fairseq/modules/adaptive_softmax.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import functools import operator import torch import torch.nn.functional as F from fairseq.modules.fairseq_dropout import FairseqDropout from...
8,789
31.67658
85
py
null
DA-Transformer-main/fairseq/modules/base_layer.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch.nn as nn import torch import sys from fairseq import utils from fairseq.distributed import utils as distributed_utils from fairse...
6,693
38.146199
118
py
null
DA-Transformer-main/fairseq/modules/beamable_mm.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch import torch.nn as nn class BeamableMM(nn.Module): """This module provides an optimized MM for beam decoding with attention...
1,763
34.28
80
py
null
DA-Transformer-main/fairseq/modules/character_token_embedder.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import logging from typing import List, Tuple import torch import torch.nn.functional as F from fairseq.data import Dictionary from torch imp...
6,974
31.44186
97
py
null
DA-Transformer-main/fairseq/modules/checkpoint_activations.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import functools from typing import Any, Dict, List, Tuple, Union import torch import torch.utils.checkpoint as checkpoint from fairseq impor...
8,825
35.320988
98
py
null
DA-Transformer-main/fairseq/modules/conformer_layer.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch from typing import Optional from fairseq.modules import ( LayerNorm, MultiheadAttention, ESPNETMultiHeadedAttention,...
9,130
29.436667
131
py
null
DA-Transformer-main/fairseq/modules/conv_tbc.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch from torch import nn from torch.nn.modules.utils import _single from torch import Tensor class ConvTBC(torch.nn.Module): ""...
1,683
30.185185
80
py
null
DA-Transformer-main/fairseq/modules/cross_entropy.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import logging import torch import torch.nn.functional as F logger = logging.getLogger(__name__) def _cross_entropy_pytorch(logits, target...
1,871
30.2
82
py
null
DA-Transformer-main/fairseq/modules/downsampled_multihead_attention.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # import math import torch import torch.nn as nn import torch.nn.functional as F from fairseq.modules.fairseq_dropout import FairseqDropout ...
10,672
32.562893
106
py
null
DA-Transformer-main/fairseq/modules/dynamic_convolution.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch import torch.nn as nn import torch.nn.functional as F from fairseq import utils from fairseq.incremental_decoding_utils import wi...
11,802
36.951768
132
py
null
DA-Transformer-main/fairseq/modules/dynamic_crf_layer.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """ This file is to re-implemented the low-rank and beam approximation of CRF layer Proposed by: Sun, Zhiqing, et al. Fast Structured Decodin...
7,717
39.621053
99
py
null
DA-Transformer-main/fairseq/modules/ema_module.py
#!/usr/bin/env python3 """ Used for EMA tracking a given pytorch module. The user is responsible for calling step() and setting the appropriate decay """ import copy from dataclasses import dataclass, field import logging import torch from fairseq.dataclass import FairseqDataclass @dataclass class EMAModuleConfig...
4,678
31.493056
88
py
null
DA-Transformer-main/fairseq/modules/espnet_multihead_attention.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright 2019 Shigeki Karita # Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0) """Multi-Head Attention layer definition.""" import math import torch from torch import nn from fairseq.modules.rotary_positional_embedding import ( RotaryPositionalEmbedding...
9,673
36.937255
84
py
null
DA-Transformer-main/fairseq/modules/fairseq_dropout.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import logging from typing import List, Optional import torch.nn as nn import torch.nn.functional as F logger = logging.getLogger(__name__)...
1,703
31.769231
83
py
null
DA-Transformer-main/fairseq/modules/fp32_batch_norm.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """ batch norm done in fp32 (for fp16 training) """ import torch import torch.nn as nn class Fp32BatchNorm(nn.Module): def __init__(self,...
1,320
28.355556
67
py
null
DA-Transformer-main/fairseq/modules/fp32_group_norm.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """ Layer norm done in fp32 (for fp16 training) """ import torch.nn as nn import torch.nn.functional as F class Fp32GroupNorm(nn.GroupNorm):...
727
27
69
py
null
DA-Transformer-main/fairseq/modules/fp32_instance_norm.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """ Layer norm done in fp32 (for fp16 training) """ import torch.nn as nn import torch.nn.functional as F class Fp32InstanceNorm(nn.Instance...
1,238
33.416667
85
py
null
DA-Transformer-main/fairseq/modules/gelu.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """ See "Gaussian Error Linear Units (GELUs)" by Dan Hendrycks and Kevin Gimpel with the corresponding GitHub repo: https://github.com/hendryck...
706
26.192308
87
py
null
DA-Transformer-main/fairseq/modules/grad_multiply.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch class GradMultiply(torch.autograd.Function): @staticmethod def forward(ctx, x, scale): ctx.scale = scale ...
442
22.315789
65
py
null
DA-Transformer-main/fairseq/modules/gumbel_vector_quantizer.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch import torch.nn as nn import torch.nn.functional as F class GumbelVectorQuantizer(nn.Module): def __init__( self, ...
6,891
32.784314
117
py
null
DA-Transformer-main/fairseq/modules/kmeans_attention.py
import math from functools import reduce, wraps from inspect import isfunction from operator import mul import torch import torch.nn as nn import torch.nn.functional as F from aml.multimodal_video.utils.einops.lib import rearrange, repeat from aml.multimodal_video.utils.einops.lib.layers.torch import Rearrange from f...
21,840
28.316779
88
py
null
DA-Transformer-main/fairseq/modules/kmeans_vector_quantizer.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch import torch.nn as nn from fairseq.modules import Fp32GroupNorm class KmeansVectorQuantizer(nn.Module): def __init__( ...
4,170
31.585938
83
py
null
DA-Transformer-main/fairseq/modules/layer_drop.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """ LayerDrop as described in https://arxiv.org/abs/1909.11556. """ import torch import torch.nn as nn class LayerDropModuleList(nn.ModuleLi...
1,409
30.333333
71
py
null
DA-Transformer-main/fairseq/modules/layer_norm.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch import torch.nn as nn import torch.nn.functional as F try: from apex.normalization import FusedLayerNorm as _FusedLayerNorm ...
1,524
30.122449
81
py
null
DA-Transformer-main/fairseq/modules/learned_positional_embedding.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from typing import Dict, Optional import torch import torch.nn as nn import torch.nn.functional as F from fairseq import utils from torch imp...
2,259
35.451613
94
py
null
DA-Transformer-main/fairseq/modules/lightweight_convolution.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch import torch.nn as nn import torch.nn.functional as F from fairseq import utils from fairseq.incremental_decoding_utils import wi...
10,919
34.11254
103
py
null
DA-Transformer-main/fairseq/modules/linearized_convolution.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch import torch.nn.functional as F from fairseq import utils from fairseq.incremental_decoding_utils import with_incremental_state ...
4,744
36.65873
83
py
null
DA-Transformer-main/fairseq/modules/location_attention.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch.nn as nn import torch import torch.nn.functional as F class LocationAttention(nn.Module): """ Attention-Based Models fo...
2,909
33.642857
80
py
null
DA-Transformer-main/fairseq/modules/lstm_cell_with_zoneout.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch.nn as nn class LSTMCellWithZoneOut(nn.Module): """ Zoneout: Regularizing RNNs by Randomly Preserving Hidden Activations...
1,220
31.131579
86
py
null
DA-Transformer-main/fairseq/modules/multihead_attention.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import math from typing import Dict, List, Optional, Tuple import torch import torch.nn.functional as F from torch import Tensor, nn from tor...
24,958
38.059468
104
py
null
DA-Transformer-main/fairseq/modules/positional_embedding.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch.nn as nn from .learned_positional_embedding import LearnedPositionalEmbedding from .sinusoidal_positional_embedding import Sinus...
1,293
34.944444
83
py
null
DA-Transformer-main/fairseq/modules/positional_encoding.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch.nn as nn import math import torch class PositionalEncoding(nn.Module): """Positional encoding. Args: d_model: ...
4,950
37.084615
84
py
null
DA-Transformer-main/fairseq/modules/quant_noise.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch import torch.nn as nn def quant_noise(module, p, block_size): """ Wraps modules and applies quantization noise to the w...
4,005
36.092593
87
py
null
DA-Transformer-main/fairseq/modules/rotary_positional_embedding.py
import torch class RotaryPositionalEmbedding(torch.nn.Module): def __init__(self, dim, base=10000, precision=torch.half): """Rotary positional embedding Reference : https://blog.eleuther.ai/rotary-embeddings/ Paper: https://arxiv.org/pdf/2104.09864.pdf Args: dim: Dimens...
1,851
34.615385
81
py
null
DA-Transformer-main/fairseq/modules/same_pad.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from torch import nn class SamePad(nn.Module): def __init__(self, kernel_size, causal=False): super().__init__() if cau...
552
24.136364
65
py
null
DA-Transformer-main/fairseq/modules/scalar_bias.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # import torch class ScalarBias(torch.autograd.Function): """ Adds a vector of scalars, used in self-attention mechanism to allow ...
888
26.78125
74
py
null
DA-Transformer-main/fairseq/modules/sinusoidal_positional_embedding.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import math from typing import Any, Optional import torch import torch.onnx.operators from fairseq import utils from torch import Tensor, nn ...
3,914
35.933962
87
py
null
DA-Transformer-main/fairseq/modules/sparse_multihead_attention.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import math import torch from .multihead_attention import MultiheadAttention class SparseMultiheadAttention(MultiheadAttention): """Sp...
4,931
33.978723
86
py
null
DA-Transformer-main/fairseq/modules/sparse_transformer_sentence_encoder.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch.nn as nn from fairseq.modules import TransformerSentenceEncoder from fairseq.modules.sparse_transformer_sentence_encoder_layer im...
3,155
31.536082
71
py
null
DA-Transformer-main/fairseq/modules/sparse_transformer_sentence_encoder_layer.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from fairseq.modules import TransformerSentenceEncoderLayer from fairseq.modules.sparse_multihead_attention import SparseMultiheadAttention ...
1,563
29.076923
80
py
null
DA-Transformer-main/fairseq/modules/transformer_layer.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from typing import Dict, List, Optional import torch import torch.nn as nn from fairseq import utils from fairseq.modules import LayerNorm, M...
21,515
37.352941
88
py
null
DA-Transformer-main/fairseq/modules/transformer_sentence_encoder.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from typing import Optional, Tuple import torch import torch.nn as nn from fairseq.modules import ( FairseqDropout, LayerDropModuleLi...
10,162
33.804795
88
py
null
DA-Transformer-main/fairseq/modules/transformer_sentence_encoder_layer.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from typing import Callable, Optional import torch import torch.nn as nn from fairseq import utils from fairseq.modules import LayerNorm, Mul...
4,326
29.907143
84
py
null
DA-Transformer-main/fairseq/modules/transpose_last.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. """ transpose last 2 dimensions of the input """ import torch.nn as nn class TransposeLast(nn.Module): def __init__(self, deconstruct_id...
550
25.238095
65
py
null
DA-Transformer-main/fairseq/modules/unfold.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import torch.nn.functional as F def unfold1d(x, kernel_size, padding_l, pad_value=0): """unfold T x B x C to T x B x C x K""" if ker...
596
28.85
84
py
null
DA-Transformer-main/fairseq/modules/vggblock.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from __future__ import absolute_import, division, print_function, unicode_literals from collections.abc import Iterable from itertools import...
4,057
33.683761
88
py
null
DA-Transformer-main/fairseq/modules/dynamicconv_layer/__init__.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from .dynamicconv_layer import DynamicconvLayer # noqa
234
32.571429
65
py
null
DA-Transformer-main/fairseq/modules/dynamicconv_layer/cuda_function_gen.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. def gen_forward(): kernels = [3, 5, 7, 15, 31, 63, 127, 255] blocks = [32, 64, 128, 256] head = """ /** * Copyright (c) Facebo...
6,866
29.65625
126
py
null
DA-Transformer-main/fairseq/modules/dynamicconv_layer/dynamicconv_cuda.cpp
/** * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ #include <torch/extension.h> #include <vector> std::vector<at::Tensor> dynamicconv_cuda_forward(at::Tensor input, at::Tensor filt...
1,429
26.5
78
cpp
null
DA-Transformer-main/fairseq/modules/dynamicconv_layer/dynamicconv_layer.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import dynamicconv_cuda import torch import torch.nn.functional as F from fairseq import utils from fairseq.incremental_decoding_utils import ...
8,922
38.135965
117
py
null
DA-Transformer-main/fairseq/modules/dynamicconv_layer/dynamiconv_cpu.cpp
#include <torch/torch.h> #include <vector> std::vector<float*> dynamicconv_cpu_forward(float* input, float* filters, int padding_l); std::vector<float*> dynamicconv_cpu_backward( float* gradOutput, int padding_l, float* input, float* filters); std::vector<float*> dynamicconv_forward(float* input, flo...
805
25.866667
73
cpp
null
DA-Transformer-main/fairseq/modules/dynamicconv_layer/setup.py
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from setuptools import setup from torch.utils.cpp_extension import BuildExtension, CUDAExtension setup( name="dyn...
602
24.125
67
py
null
DA-Transformer-main/fairseq/modules/lightconv_layer/__init__.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from .lightconv_layer import LightconvLayer # noqa
230
32
65
py
null
DA-Transformer-main/fairseq/modules/lightconv_layer/cuda_function_gen.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. def gen_forward(): kernels = [3, 5, 7, 15, 31, 63, 127, 255] seqs = [32 * x for x in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,...
9,642
32.251724
141
py
null
DA-Transformer-main/fairseq/modules/lightconv_layer/lightconv_cuda.cpp
/** * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ #include <torch/extension.h> #include <vector> std::vector<at::Tensor> lightconv_cuda_forward(at::Tensor input, at::Tensor filter...
1,407
26.076923
76
cpp
null
DA-Transformer-main/fairseq/modules/lightconv_layer/lightconv_layer.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import lightconv_cuda import torch import torch.nn.functional as F from fairseq import utils from fairseq.incremental_decoding_utils import wi...
4,799
33.782609
86
py
null
DA-Transformer-main/fairseq/modules/lightconv_layer/setup.py
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from setuptools import setup from torch.utils.cpp_extension import BuildExtension, CUDAExtension setup( name="lig...
581
23.25
67
py
null
DA-Transformer-main/fairseq/modules/quantization/__init__.py
0
0
0
py
null
DA-Transformer-main/fairseq/modules/quantization/quantization_options.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. def parse_config_yaml(yaml_data): # Initialize to default options. quantization_options = { "n_centroids": { "Lin...
1,647
35.622222
84
py
null
DA-Transformer-main/fairseq/modules/quantization/pq/__init__.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. from .utils import SizeTracker, get_param, attrsetter, quantize_model_ # NOQA
257
35.857143
78
py
null
DA-Transformer-main/fairseq/modules/quantization/pq/em.py
# Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import logging import os import random from collections import Counter import torch class EM: """ EM algorithm used to quantize the...
7,333
33.59434
92
py