python_code (string · lengths 0–83.2k)

Each row below is one Python source file from the dataset; rows longer than the preview limit are cut off (`...`).
from charformer_pytorch.charformer_pytorch import GBST
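A minimal usage sketch for the `GBST` tokenizer exported above, following the charformer-pytorch README; the hyperparameters are illustrative, not prescribed:

```python
import torch
from charformer_pytorch import GBST

# gradient-based subword tokenizer: scores and pools blocks of byte tokens
tokenizer = GBST(
    num_tokens = 257,             # byte vocabulary plus one special token
    dim = 512,
    max_block_size = 4,
    downsample_factor = 4,        # output length = input length / 4
    score_consensus_attn = True
)

tokens = torch.randint(0, 257, (1, 1023))
mask = torch.ones(1, 1023).bool()

tokens, mask = tokenizer(tokens, mask = mask)  # downsampled sequence and mask
```

---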
import math
from math import gcd
import functools
import torch
import torch.nn.functional as F
from torch import nn, einsum
from einops import rearrange, reduce, repeat
from einops.layers.torch import Rearrange
# helpers
def exists(val):
    return val is not None

def lcm(*numbers):
    return int(functools.reduce(...
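The `lcm` helper is cut off by the preview; given the `gcd` import it presumably folds the identity lcm(a, b) = a·b / gcd(a, b) over its arguments. A presumed completion:

```python
import functools
from math import gcd

def lcm(*numbers):
    # fold pairwise lcm over all arguments, starting from the identity 1
    return int(functools.reduce(lambda x, y: int((x * y) / gcd(x, y)), numbers, 1))
```

---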
"""
Bonito Aligner
"""
from threading import Thread
from functools import partial
from mappy import Aligner, ThreadBuffer
from bonito.multiprocessing import ThreadMap, ProcessMap
def align_map(aligner, sequences, n_thread=4):
    """
    Align `sequences` with minimap using `n_thread` threads.
    """
    return Th...
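A hedged sketch of how the mappy objects imported above are typically driven; the index path and query are hypothetical, and `ThreadBuffer` gives each worker thread its own buffer so concurrent `map` calls don't contend:

```python
from mappy import Aligner, ThreadBuffer

aligner = Aligner("reference.mmi", preset="map-ont")  # hypothetical index file
buf = ThreadBuffer()                                  # one per worker thread

for hit in aligner.map("ACGTACGTACGT", buf=buf):
    print(hit.ctg, hit.r_st, hit.r_en, hit.strand)
```

---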
"""
Bonito Fast5 Utils
"""
import sys
from glob import glob
from pathlib import Path
from functools import partial
from multiprocessing import Pool
from itertools import chain, starmap
import torch
import numpy as np
from scipy.signal import find_peaks
from ont_fast5_api.fast5_interface import get_fast5_file
class ...

---
"""
Bonito utils
"""
import os
import re
import sys
import random
from glob import glob
from itertools import groupby
from operator import itemgetter
from importlib import import_module
from collections import deque, defaultdict, OrderedDict
import toml
import torch
import parasail
import numpy as np
from torch.cuda ...

---
"""
Bonito nn modules.
"""
import torch
from torch import nn
from torch.nn import Module
from torch.nn.init import orthogonal_
layers = {}
def register(layer):
    layer.name = layer.__name__.lower()
    layers[layer.name] = layer
    return layer

register(torch.nn.ReLU)
register(torch.nn.Tanh)

@register
class...
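The decorator above keys each layer class by its lowercased class name, so a config file can refer to layers as plain strings; a small lookup sketch (the config value is hypothetical):

```python
# instantiate a layer from a name found in, e.g., a TOML config
name = "relu"                  # hypothetical config value
activation = layers[name]()    # -> torch.nn.ReLU()
```

---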
"""
Bonito Input/Output
"""
import os
import sys
import csv
import pandas as pd
from warnings import warn
from threading import Thread
from logging import getLogger
from contextlib import contextmanager
from os.path import realpath, splitext, dirname
import numpy as np
from mappy import revcomp
import bonito
from bo...

---
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
from bonito.cli import basecaller, train, evaluate, view, convert, download, export, duplex
modules = [
'basecaller', 'train', 'evaluate', 'view', 'convert', 'download', 'export', 'duplex',
]
__version__ = '0.4.0'
def main():
    parser = Argume...

---
"""
Bonito Multiprocessing
"""
import queue
from itertools import count
from threading import Thread
from functools import partial
from collections import deque
from signal import signal, SIGINT
from multiprocessing import Process, Queue, Event, Lock, cpu_count
def process_iter(iterator, maxsize=1):
    """
    Take ...
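The helper is cut off, but the imports (bounded `Queue`, `Thread`/`Process`, signal handling) suggest the usual producer/consumer shape: run an iterator on a worker and hand items back through a bounded queue. A thread-based sketch of that pattern, with names and details assumed rather than taken from Bonito:

```python
import queue
from threading import Thread

def thread_iter(iterator, maxsize=1):
    # run `iterator` on a worker thread; the bounded queue applies backpressure
    q = queue.Queue(maxsize)
    done = object()  # sentinel marking exhaustion

    def run():
        for item in iterator:
            q.put(item)
        q.put(done)

    Thread(target=run, daemon=True).start()
    while (item := q.get()) is not done:
        yield item
```

---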
"""
Bonito train
"""
import os
import re
from glob import glob
from functools import partial
from time import perf_counter
from collections import OrderedDict
from datetime import datetime
from bonito.util import accuracy, decode_ref, permute, concat, match_names
import bonito
import torch
import numpy as np
import ...

---
"""
Bonito Download
"""
import os
import re
from shutil import rmtree
from zipfile import ZipFile
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
from bonito.util import __data__, __models__
from bonito.cli.convert import main as convert
from bonito.cli.convert import argparser as cargparser
impor...

---
#!/usr/bin/env python
"""
Convert a Taiyaki chunkify training file to set of Bonito CTC .npy files
"""
import os
import h5py
import random
import numpy as np
from argparse import ArgumentParser
from collections import OrderedDict
from itertools import islice as take
from argparse import ArgumentDefaultsHelpFormatter
...

---
"""
Bonito Export
"""
import os
import re
import sys
import json
import torch
import bonito
import hashlib
import numpy as np
from glob import glob
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
class JsonEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, np.integer...
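The encoder is truncated right where the NumPy-to-JSON shim usually begins; a presumed completion in the standard pattern (integers, floats, and arrays converted to native Python types):

```python
class JsonEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        return super().default(obj)
```

---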
"""
Bonito model viewer - display a model architecture for a given config.
"""
import toml
import argparse
from bonito.util import load_symbol
def main(args):
    config = toml.load(args.config)
    Model = load_symbol(config, "Model")
    model = Model(config)
    print(model)
    print("Total parameters in model",...
"""
Bonito Basecaller
"""
import sys
import torch
import numpy as np
from tqdm import tqdm
from time import perf_counter
from datetime import timedelta
from itertools import islice as take
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
from bonito.aligner import Aligner
from bonito.io import CTCWr...

---
"""
Bonito Duplex consensus decoding.
https://www.biorxiv.org/content/10.1101/2020.02.25.956771v1
"""
import os
import sys
import json
from glob import glob
from pathlib import Path
from os.path import basename
from functools import partial
from time import perf_counter
from datetime import timedelta
from multiproces...

---
#!/usr/bin/env python3
"""
Bonito training.
"""
import os
from argparse import ArgumentParser
from argparse import ArgumentDefaultsHelpFormatter
from bonito.util import __models__, default_config, default_data
from bonito.util import load_data, load_model, load_symbol, init, half_supported
from bonito.training impor...

---
"""
Bonito model evaluator
"""
import os
import time
import torch
import numpy as np
from itertools import starmap
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
from bonito.training import ChunkDataSet
from bonito.util import accuracy, poa, decode_ref, half_supported
from bonito.util import init,...

---
from .model import Model
from .basecall import basecall
---
"""
Bonito CTC-CRF Model.
"""
import torch
import numpy as np
from bonito.nn import Module, Convolution, SHABlock, LinearCRFEncoder, Serial, Permute, layers, from_dict
import seqdist.sparse
from seqdist.ctc_simple import logZ_cupy, viterbi_alignments
from seqdist.core import SequenceDist, Max, Log, semiring
def get...

---
"""
Bonito CRF basecall
"""
import torch
import numpy as np
from kbeam import beamsearch
from itertools import groupby
from functools import partial
from operator import itemgetter
import bonito
from bonito.io import Writer
from bonito.fast5 import get_reads
from bonito.aligner import align_map
from bonito.multiproce...

---
from .model import Model
from .basecall import basecall
---
"""
Bonito Model template
"""
import numpy as np
from bonito.nn import Permute, layers
import torch
from torch.nn.functional import log_softmax, ctc_loss
from torch.nn import Module, ModuleList, Sequential, Conv1d, BatchNorm1d, Dropout
from fast_ctc_decode import beam_search, viterbi_search
class Model(Module):
    ...
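A hedged sketch of the two decoders imported above, following the fast-ctc-decode README; the posterior matrix and alphabet are illustrative:

```python
import numpy as np
from fast_ctc_decode import beam_search, viterbi_search

alphabet = "NACGT"   # CTC blank first
posteriors = np.random.rand(100, len(alphabet)).astype(np.float32)
posteriors /= posteriors.sum(axis=1, keepdims=True)  # rows must be probabilities

seq, path = viterbi_search(posteriors, alphabet)
seq, path = beam_search(posteriors, alphabet, beam_size=5, beam_cut_threshold=0.1)
```

---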
"""
Bonito basecall
"""
import torch
import numpy as np
from functools import partial
from bonito.fast5 import ReadChunk
from bonito.aligner import align_map
from bonito.multiprocessing import process_map, thread_map
from bonito.util import mean_qscore_from_qstring, half_supported
from bonito.util import chunk, stitch...

---
from bs_roformer.bs_roformer import BSRoformer
---
from functools import wraps
from packaging import version
from collections import namedtuple
import torch
from torch import nn, einsum
import torch.nn.functional as F
from einops import rearrange, reduce
# constants
FlashAttentionConfig = namedtuple('FlashAttentionConfig', ['enable_flash', 'enable_math', 'enable_me...

---
import torch
from torch import nn, einsum, Tensor
from torch.nn import Module, ModuleList
import torch.nn.functional as F
from bs_roformer.attend import Attend
from beartype.typing import Tuple, Optional, List
from beartype import beartype
from rotary_embedding_torch import RotaryEmbedding
from einops import rearra...

---
import random
import torch
import torch.linalg
import numpy as np
class BlackHole(object):
    def __setattr__(self, name, value):
        pass

    def __call__(self, *args, **kwargs):
        return self

    def __getattr__(self, name):
        return self

def seed_all(seed):
    torch.backends.cudnn.determinist...
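`BlackHole` is a do-nothing stand-in (every attribute read and call returns the object itself, writes are swallowed), handy for disabling a logger or writer without branching. The truncated `seed_all` presumably pins every RNG imported at the top of the module; a usage line and a presumed completion:

```python
writer = BlackHole()
writer.log("ignored").flush()   # attribute access and calls all return the BlackHole

def seed_all(seed):
    # presumed completion: seed torch, numpy and the stdlib RNG
    torch.backends.cudnn.deterministic = True
    torch.manual_seed(seed)
    np.random.seed(seed)
    random.seed(seed)
```

---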
import warnings
import torch
from Bio import BiopythonWarning
from Bio.PDB import Selection
from Bio.PDB.PDBParser import PDBParser
from Bio.PDB.Polypeptide import three_to_one, three_to_index, is_aa
NON_STANDARD_SUBSTITUTIONS = {
    '2AS':'ASP', '3AH':'HIS', '5HP':'GLU', 'ACL':'ARG', 'AGM':'ARG', 'AIB':'ALA', 'ALM'...

---
import math
import torch
from torch.utils.data._utils.collate import default_collate
from .protein import ATOM_CA, parse_pdb
class PaddingCollate(object):
    def __init__(self, length_ref_key='mutation_mask', pad_values={'aa': 20, 'pos14': float('999'), 'icode': ' ', 'chain_id': '-'}, donot_pad={'foldx'}, eight=Fa...

---
import torch
import torch.nn as nn
import torch.nn.functional as F
from models.residue import PerResidueEncoder
from models.attention import GAEncoder
from models.common import get_pos_CB, construct_3d_basis
from utils.protein import ATOM_N, ATOM_CA, ATOM_C
class ComplexEncoder(nn.Module):
    def __init__(self, cf...

---
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from .common import mask_zero, global_to_local, local_to_global, normalize_vector
def _alpha_from_logits(logits, mask, inf=1e5):
    """
    Args:
        logits: Logit matrices, (N, L_i, L_j, num_heads).
        mask: Masks, (N,...
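The body is cut off, but the signature points at a standard masked attention softmax: pairs where either residue is masked get a large negative logit before normalizing over L_j. A sketch under that assumption:

```python
def _alpha_from_logits(logits, mask, inf=1e5):
    # mask: (N, L) -> pairwise validity (N, L_i, L_j, 1), broadcast over heads
    mask = mask.float()
    mask_pair = (mask[:, :, None] * mask[:, None, :]).unsqueeze(-1)
    logits = torch.where(mask_pair > 0, logits, logits - inf)
    alpha = torch.softmax(logits, dim=2)   # normalize over L_j
    return alpha * mask_pair               # zero rows that are fully masked
```

---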
import torch
import torch.nn as nn
from models.common import PositionalEncoding, construct_3d_basis, global_to_local
class PerResidueEncoder(nn.Module):
    def __init__(self, feat_dim):
        super().__init__()
        self.aatype_embed = nn.Embedding(21, feat_dim)
        self.torsion_embed = PositionalEncoding...

---
import torch
import torch.nn as nn
from utils.protein import ATOM_CA, ATOM_CB
def get_pos_CB(pos14, atom_mask):
    """
    Args:
        pos14: (N, L, 14, 3)
        atom_mask: (N, L, 14)
    """
    N, L = pos14.shape[:2]
    mask_CB = atom_mask[:, :, ATOM_CB]  # (N, L)
    mask_CB = mask_CB[:, :, None].expand(N...
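The tail is truncated; the `expand` presumably broadcasts the mask over xyz so the function can fall back to CA wherever CB is absent (glycine has no CB). A presumed completion:

```python
    mask_CB = mask_CB[:, :, None].expand(N, L, 3)   # broadcast mask over xyz
    return torch.where(
        mask_CB.bool(),
        pos14[:, :, ATOM_CB],   # use CB where the atom is present
        pos14[:, :, ATOM_CA],   # otherwise fall back to CA
    )
```

---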
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
import argparse
import torch
from models.predictor import DDGPredictor
from utils.misc import *
from utils.data import *
from utils.protein import *
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argume...

---
from aoa_pytorch.aoa_pytorch import AttentionOnAttention
AoA = AttentionOnAttention
---
import torch
from torch import nn, einsum
import torch.nn.functional as F
from einops import rearrange
def exists(val):
    return val is not None

def default(val, d):
    return val if exists(val) else d

class AttentionOnAttention(nn.Module):
    def __init__(
        self,
        *,
        dim,
        dim_head...
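A hedged usage sketch following the aoa-pytorch README; the shapes and hyperparameters are illustrative:

```python
import torch
from aoa_pytorch import AoA   # alias for AttentionOnAttention

attn = AoA(
    dim = 512,
    heads = 8
)

x = torch.randn(1, 1024, 512)
out = attn(x) + x   # residual add, (1, 1024, 512)
```

---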
from adjacent_attention_network.adjacent_attention_network import AdjacentAttentionNetwork
---
import torch
import torch.nn.functional as F
from torch import nn, einsum
from einops import rearrange, repeat
from isab_pytorch import ISAB
# helpers
def exists(val):
    return val is not None

def batched_index_select(values, indices):
    last_dim = values.shape[-1]
    return values.gather(1, indices[:, :, None...
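The `gather` is cut mid-expression; the usual form of this helper expands the indices over the feature dimension so each batch row selects its own positions. A presumed completion:

```python
def batched_index_select(values, indices):
    # values: (B, N, D), indices: (B, K) -> selected: (B, K, D)
    last_dim = values.shape[-1]
    return values.gather(1, indices[:, :, None].expand(-1, -1, last_dim))
```

---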
import torch
import os
import logging
from transformers import AutoTokenizer, AutoModelForMaskedLM, logging
from tf_bind_transformer.cache_utils import cache_fn, run_once
logging.set_verbosity_error()
def exists(val):
    return val is not None

def map_values(fn, dictionary):
    return {k: fn(v) for k, v in diction...
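The comprehension is cut off; it evidently applies `fn` to every value of a dict. A presumed completion with a hypothetical usage line:

```python
def map_values(fn, dictionary):
    return {k: fn(v) for k, v in dictionary.items()}

# e.g. move every tensor in a (hypothetical) batch dict onto the GPU
batch = map_values(lambda t: t.cuda(), batch)
```

---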
from chroma_pytorch.chroma_pytorch import Chroma