code stringlengths 281 23.7M |
|---|
def filter_framework_files(files: List[Union[(str, os.PathLike)]], frameworks: Optional[List[str]]=None) -> List[Union[(str, os.PathLike)]]:
if (frameworks is None):
frameworks = get_default_frameworks()
framework_to_file = {}
others = []
for f in files:
parts = Path(f).name.split('_')
... |
def precondition(x_samp, y_samp, x_bl, y_bl, y_data, Rinv, baseline_as_mean=False, **kwargs):
    """Compute the preconditioned state update increment from ensemble samples.

    Forms the sample cross-covariance between state and observation anomalies,
    applies the (inverse) observation-error weighting ``Rinv``, and projects it
    onto the innovation vector (the Kalman-gain-times-innovation product used in
    ensemble-based inversion/assimilation).

    Parameters
    ----------
    x_samp, y_samp : array-like
        Ensemble samples of the state and of the predicted observations.
    x_bl, y_bl : array-like
        Baseline state and baseline predicted observations.
    y_data : array-like
        Observed data.
    Rinv : array-like
        Inverse of the observation-error covariance matrix.
    baseline_as_mean : bool, optional
        Forwarded to ``_preproc``; presumably selects whether anomalies are
        taken about the baseline or the ensemble mean — TODO confirm against
        ``_preproc``.
    **kwargs
        Accepted for interface compatibility; unused here.

    Returns
    -------
    array-like
        The update increment ``(Cxy @ Rinv) @ innovation``.
    """
    (nsamples, nx, ny, x_bl, y_bl, y_data, delta_x, delta_y, innovation) = _preproc(
        x_samp, y_samp, x_bl, y_bl, y_data, baseline_as_mean)
    # Sample cross-covariance of state/observation anomalies.
    # BUGFIX: the original text was missing the matrix-multiplication
    # operators ("delta_x) delta_y.T" and "(Cxy Rinv) innovation"),
    # which is a syntax error; restore them with '@'.
    Cxy = ((1.0 / nsamples) * delta_x) @ delta_y.T
    return (Cxy @ Rinv) @ innovation
class PyCoreInProjectsTest(unittest.TestCase):
def setUp(self):
super().setUp()
self.project = testutils.sample_project()
self.pycore = self.project.pycore
samplemod = testutils.create_module(self.project, 'samplemod')
code = dedent(' class SampleClass(object):\n ... |
class ListChoiceDialog(Gtk.Dialog):
def __init__(self, parent, rows):
Gtk.Dialog.__init__(self, title=_('Completion'), transient_for=parent, flags=0)
self.set_default_size(400, 250)
listbox = Gtk.ListBox()
listbox.set_selection_mode(Gtk.SelectionMode.SINGLE)
for (i, (name, de... |
def make_billing_address(wallet, num, addr_type):
(long_id, short_id) = wallet.get_user_id()
xpub = make_xpub(get_billing_xpub(), long_id)
usernode = BIP32Node.from_xkey(xpub)
child_node = usernode.subkey_at_public_derivation([num])
pubkey = child_node.eckey.get_public_key_bytes(compressed=True)
... |
def test_due_date_enforcement(monkeypatch):
class _MyDeprecation(SetuptoolsDeprecationWarning):
_SUMMARY = 'Summary'
_DETAILS = 'Lorem ipsum'
_DUE_DATE = (2000, 11, 22)
_SEE_DOCS = 'some_page.html'
monkeypatch.setenv('SETUPTOOLS_ENFORCE_DEPRECATION', 'true')
with pytest.raise... |
def get_running_servers():
temp_list = []
with suppress(Exception):
honeypots = ['QDNSServer', 'QFTPServer', 'QHTTPProxyServer', 'QHTTPServer', 'QHTTPSServer', 'QIMAPServer', 'QMysqlServer', 'QPOP3Server', 'QPostgresServer', 'QRedisServer', 'QSMBServer', 'QSMTPServer', 'QSOCKS5Server', 'QSSHServer', 'QT... |
def read_geojson(fn, cols=[], dtype=None, crs='EPSG:4326'):
if (os.path.getsize(fn) > 0):
return gpd.read_file(fn)
else:
df = gpd.GeoDataFrame(columns=cols, geometry=[], crs=crs)
if isinstance(dtype, dict):
for (k, v) in dtype.items():
df[k] = df[k].astype(v)
... |
_test
def test_sequential_pop():
model = Sequential()
model.add(Dense(num_hidden, input_dim=input_dim))
model.add(Dense(num_class))
model.compile(loss='mse', optimizer='sgd')
x = np.random.random((batch_size, input_dim))
y = np.random.random((batch_size, num_class))
model.fit(x, y, epochs=1)... |
def intword(value: NumberOrString, format: str='%.1f') -> str:
try:
if (not math.isfinite(float(value))):
return _format_not_finite(float(value))
value = int(value)
except (TypeError, ValueError):
return str(value)
if (value < 0):
value *= (- 1)
negative_p... |
def tune_mnist(data_dir, num_samples=10, num_epochs=10, num_workers=1, use_gpu=False):
config = {'layer_1': tune.choice([32, 64, 128]), 'layer_2': tune.choice([64, 128, 256]), 'lr': tune.loguniform(0.0001, 0.1), 'batch_size': tune.choice([32, 64, 128])}
metrics = {'loss': 'ptl/val_loss', 'acc': 'ptl/val_accurac... |
def _convert_attn_layers(params):
new_params = {}
processed_attn_layers = []
for (k, v) in params.items():
if ('attn.' in k):
base = k[:(k.rindex('attn.') + 5)]
if (base in processed_attn_layers):
continue
processed_attn_layers.append(base)
... |
class SendVoice():
async def send_voice(self: 'pyrogram.Client', chat_id: Union[(int, str)], voice: Union[(str, BinaryIO)], caption: str='', parse_mode: Optional['enums.ParseMode']=None, caption_entities: List['types.MessageEntity']=None, duration: int=0, disable_notification: bool=None, reply_to_message_id: int=No... |
class Migration(migrations.Migration):
dependencies = [('questions', '0075_data_migration')]
operations = [migrations.AlterModelOptions(name='question', options={'ordering': ('page', 'questionset', 'order'), 'verbose_name': 'Question', 'verbose_name_plural': 'Questions'}), migrations.AlterModelOptions(name='que... |
def fix_Yahoo_returning_live_separate(quotes, interval, tz_exchange):
n = quotes.shape[0]
if (n > 1):
dt1 = quotes.index[(n - 1)]
dt2 = quotes.index[(n - 2)]
if (quotes.index.tz is None):
dt1 = dt1.tz_localize('UTC')
dt2 = dt2.tz_localize('UTC')
dt1 = dt1.... |
def build_generator(latent_size):
cnn = Sequential()
cnn.add(Dense(1024, input_dim=latent_size, activation='relu'))
cnn.add(Dense(((128 * 7) * 7), activation='relu'))
cnn.add(Reshape((128, 7, 7)))
cnn.add(UpSampling2D(size=(2, 2)))
cnn.add(Conv2D(256, 5, padding='same', activation='relu', kernel... |
class GlobalPreModel_NN(nn.Module):
def __init__(self, input_dim, output_dim):
super(GlobalPreModel_NN, self).__init__()
self.dense = nn.Sequential(nn.Linear(input_dim, 600), nn.ReLU(), nn.Linear(600, 300), nn.ReLU(), nn.Linear(300, 100), nn.ReLU(), nn.Linear(100, output_dim))
def forward(self, ... |
def rename_in_file(path, in_list, out_list, is_interactive):
print(('-- %s' % path))
org_text = ''
new_text = None
if os.path.isdir(path):
print(('%s is a directory. You should use the --recursive option.' % path))
sys.exit()
with open(path, 'r') as fil:
org_text = fil.read()... |
def atom_features(atom: Chem.rdchem.Atom, functional_groups: List[int]=None) -> List[Union[(bool, int, float)]]:
features = (((((((onek_encoding_unk((atom.GetAtomicNum() - 1), ATOM_FEATURES['atomic_num']) + onek_encoding_unk(atom.GetTotalDegree(), ATOM_FEATURES['degree'])) + onek_encoding_unk(atom.GetFormalCharge()... |
class TestDatabaseFixtures():
(params=['db', 'transactional_db', 'django_db_reset_sequences', 'django_db_serialized_rollback'])
def all_dbs(self, request: pytest.FixtureRequest) -> None:
if (request.param == 'django_db_reset_sequences'):
request.getfixturevalue('django_db_reset_sequences')
... |
def predict_cli(toml_path):
toml_path = Path(toml_path)
cfg = config.parse.from_toml_path(toml_path)
if (cfg.predict is None):
raise ValueError(f'predict called with a config.toml file that does not have a PREDICT section: {toml_path}')
timenow = datetime.now().strftime('%y%m%d_%H%M%S')
mode... |
def filter_tests_by_category(args, testlist):
answer = list()
if (args.category and testlist):
test_ids = list()
for catg in set(args.category):
if (catg == '+c'):
continue
print('considering category {}'.format(catg))
for tc in testlist:
... |
def require(*requirements, none_on_failure=False):
def inner(f):
(f)
def wrapper(*args, **kwargs):
for req in requirements:
if none_on_failure:
if (not req.is_available):
return None
else:
req... |
class RandomResize(object):
def __init__(self, min_size, max_size=None):
self.min_size = min_size
if (max_size is None):
max_size = min_size
self.max_size = max_size
def __call__(self, image, target):
size = random.randint(self.min_size, self.max_size)
image =... |
def ground_filter_comparative(i_op, qdmr, grounding_out):
assert (qdmr.ops[i_op] in ['filter', 'comparative'])
args = qdmr.args[i_op]
i_arg_distinct = (1 if (qdmr.ops[i_op] == 'filter') else 2)
text_arg = args[i_arg_distinct]
(content_str, has_distinct, tokens_without_sw) = extract_distinct_and_cont... |
class RslLexer(RegexLexer):
name = 'RSL'
url = '
aliases = ['rsl']
filenames = ['*.rsl']
mimetypes = ['text/rsl']
version_added = '2.0'
flags = (re.MULTILINE | re.DOTALL)
tokens = {'root': [(words(('Bool', 'Char', 'Int', 'Nat', 'Real', 'Text', 'Unit', 'abs', 'all', 'always', 'any', 'as',... |
def test_emanet_head():
head = EMAHead(in_channels=32, ema_channels=24, channels=16, num_stages=3, num_bases=16, num_classes=19)
for param in head.ema_mid_conv.parameters():
assert (not param.requires_grad)
assert hasattr(head, 'ema_module')
inputs = [torch.randn(1, 32, 45, 45)]
if torch.cud... |
def raise_status(status: Status):
if (status.status_code == StatusCode.CANCELLED):
raise CancelledError(status.status_message)
elif (status.status_code == StatusCode.UNKNOWN):
raise UnknownError(status.status_message)
elif (status.status_code == StatusCode.INVALID_ARGUMENT):
raise In... |
def parse_args():
parser = argparse.ArgumentParser(description='Simple example of a training script.')
parser.add_argument('--model', '-m', default='sd', choices=['sd', 'ldm'], help='Model for the pipeline: Stable Diffusion or Latent Diffusion')
parser.add_argument('--train_data_dir', type=str, required=Tru... |
def sortfunc(option):
if (option[0] == '~'):
return (option[1:] + '~')
if ((option.find('important') > (- 1)) or (option.find('first-party') > (- 1)) or (option.find('strict1p') > (- 1)) or (option.find('third-party') > (- 1)) or (option.find('strict3p') > (- 1))):
return ('0' + option)
if (... |
def BuildDistributions(MinSupport):
VPrint('building distributions')
for Source in Count:
for Target in Count[Source].keys():
if (Count[Source][Target] < MinSupport):
Count[Source][Target] = 0
for Target in Count[Source]:
if (Count[Source][Target] > 0):
... |
class TestMultiheadAttention(unittest.TestCase):
def test_append_prev_key_padding_mask(self):
bsz = 1
src_len = 4
cases = [(None, None, None), (torch.tensor([[1]]).bool(), None, torch.tensor([[0, 0, 0, 1]]).bool()), (None, torch.tensor([[0, 1, 0]]).bool(), torch.tensor([[0, 1, 0, 0]]).bool()... |
class ImplReturn(NamedTuple):
return_value: Value
constraint: AbstractConstraint = NULL_CONSTRAINT
no_return_unless: AbstractConstraint = NULL_CONSTRAINT
def unite_impl_rets(cls, rets: Sequence['ImplReturn']) -> 'ImplReturn':
if (not rets):
return ImplReturn(NO_RETURN_VALUE)
... |
class Conv2dBlock(nn.Module):
def __init__(self, input_dim, output_dim, kernel_size, stride, padding=0, norm='none', activation='relu', pad_type='zero'):
super(Conv2dBlock, self).__init__()
self.use_bias = True
if (pad_type == 'reflect'):
self.pad = nn.ReflectionPad2d(padding)
... |
class MyPluginMixin(_Base):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.register_preloop_hook(self.cmd2_myplugin_preloop_hook)
self.register_postloop_hook(self.cmd2_myplugin_postloop_hook)
self.register_postparsing_hook(self.cmd2_myplugin_postparsing_h... |
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [migrations.CreateModel(name='Answer', fields=[('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', mo... |
def test_channelstate_get_unlock_proof():
number_of_transfers = 100
lock_amounts = cycle([1, 3, 5, 7, 11])
lock_secrets = [make_secret(i) for i in range(number_of_transfers)]
block_number = 1000
locked_amount = 0
settle_timeout = 8
pending_locks = make_empty_pending_locks_state()
locked_... |
def test_cli(monkeypatch, tmpfolder):
fake_content = ' myproj_path\n --name myproj\n --license gpl3\n --no-config\n # --namespace myns\n # ^ test commented options\n '
fake_edit = (tmpfolder / 'pyscaffold.args')
fake_edit.write_text(dedent(fake_content), 'utf-8')
monkeypatch.setatt... |
def test_issue940_metaclass_subclass_property() -> None:
node = builder.extract_node("\n class BaseMeta(type):\n \n def __members__(cls):\n return ['a', 'property']\n class Parent(metaclass=BaseMeta):\n pass\n class Derived(Parent):\n pass\n Derived.__members__\n ... |
def lyapunov_exponent_naive(eq, rtol=0.001, atol=1e-10, n_samples=1000, traj_length=5000, max_walltime=None, **kwargs):
all_ic = sample_initial_conditions(eq, n_samples, traj_length=max(traj_length, n_samples), pts_per_period=15)
pts_per_period = 100
eps = atol
eps_max = rtol
all_lyap = []
all_c... |
class Shortcuts(Gtk.Window):
def __init__(self, app, *args, **kwargs):
super().__init__(*args, **kwargs, title=_('Help'))
self.set_transient_for(app.win)
self.set_modal(True)
self.set_titlebar(Adw.HeaderBar(css_classes=['flat']))
sect_main = Gtk.Box(margin_top=10, margin_star... |
def train(args, train_env, val_envs, aug_env=None, rank=(- 1)):
default_gpu = is_default_gpu(args)
if default_gpu:
with open(os.path.join(args.log_dir, 'training_args.json'), 'w') as outf:
json.dump(vars(args), outf, indent=4)
writer = SummaryWriter(log_dir=args.log_dir)
reco... |
class MetaICLData(object):
def __init__(self, logger=None, tokenizer=None, method='channel', use_demonstrations=True, k=16, max_length=1024, max_length_per_example=256, do_tensorize=False, tensorize_dir=None, n_process=None, n_gpu=None, local_rank=(- 1)):
self.logger = logger
self.tokenizer = tokeni... |
class LogPipe(threading.Thread):
def __init__(self, level):
threading.Thread.__init__(self)
self.daemon = False
self.level = level
(self.fd_read, self.fd_write) = os.pipe()
self.pipe_reader = os.fdopen(self.fd_read)
self.start()
def fileno(self):
return se... |
def test_keep_alive_argument(capture):
n_inst = ConstructorStats.detail_reg_inst()
with capture:
p = m.Parent()
assert (capture == 'Allocating parent.')
with capture:
p.addChild(m.Child())
assert (ConstructorStats.detail_reg_inst() == (n_inst + 1))
assert (capture == '\n ... |
def ConvTBC(in_channels, out_channels, kernel_size, dropout=0.0, **kwargs):
from fairseq.modules import ConvTBC
m = ConvTBC(in_channels, out_channels, kernel_size, **kwargs)
std = math.sqrt(((4 * (1.0 - dropout)) / (m.kernel_size[0] * in_channels)))
m.weight.data.normal_(mean=0, std=std)
m.bias.data... |
def get_config():
config = get_default_configs()
training = config.training
training.sde = 'vesde'
training.continuous = False
sampling = config.sampling
sampling.method = 'pc'
sampling.predictor = 'none'
sampling.corrector = 'ald'
sampling.n_steps_each = 5
sampling.snr = 0.128
... |
def list_sequences():
sequences = pymiere.objects.app.project.sequences
print("Found {} sequences in project '{}'\n".format(len(sequences), pymiere.objects.app.project.name))
for sequence in sequences:
print("Name : '{}'\nPath : '{}'".format(sequence.name, sequence.projectItem.treePath))
pri... |
def check_target_poss_unique(env_instances, env_rand_vecs):
for (env_name, rand_vecs) in env_rand_vecs.items():
if (env_name in set(['hammer-v2', 'sweep-into-v2', 'bin-picking-v2', 'basketball-v2'])):
continue
env = env_instances[env_name]
state_goals = []
for rand_vec in... |
def _add_mask(masks: Dict[(str, torch.Tensor)], word: str, mask: torch.Tensor, simplify80: bool=False) -> Dict[(str, torch.Tensor)]:
if simplify80:
word = COCO80_TO_27.get(word, word)
if (word in masks):
masks[word] = (masks[word.lower()] + mask)
masks[word].clamp_(0, 1)
else:
... |
def test_bronchus_segmentation(bronchus_data):
patient_path = bronchus_data.joinpath('LCTSC-Test-S1-201')
ct_path = next(patient_path.glob('IMAGES/*.nii.gz'))
img = sitk.ReadImage(str(ct_path))
working_dir = tempfile.mkdtemp()
lung_mask = generate_lung_mask(img)
bronchus_mask = generate_airway_m... |
class QtObserverTestBase(ObserverTestBase):
def setup(self):
self.qtcore = pytest.importorskip('{0}.QtCore'.format(self.BINDING_NAME))
def create_observer(self, monitor):
name = self.BINDING_NAME.lower()
mod = __import__('pyudev.{0}'.format(name), None, None, [name])
self.observe... |
class LayerFreeze(HookBase):
def __init__(self, model, freeze_layers, freeze_iters, fc_freeze_iters):
self._logger = logging.getLogger(__name__)
if isinstance(model, DistributedDataParallel):
model = model.module
self.model = model
self.freeze_layers = freeze_layers
... |
def evaluate_batch_e2e(args, rag_model, questions):
with torch.no_grad():
inputs_dict = rag_model.retriever.question_encoder_tokenizer.batch_encode_plus(questions, return_tensors='pt', padding=True, truncation=True)
input_ids = inputs_dict.input_ids.to(args.device)
attention_mask = inputs_di... |
def input(label: str='', type: str=TEXT, *, validate: Callable[([Any], Optional[str])]=None, name: str=None, value: Union[(str, int)]=None, action: Tuple[(str, Callable[([Callable], None)])]=None, onchange: Callable[([Any], None)]=None, placeholder: str=None, required: bool=None, readonly: bool=None, datalist: List[str... |
class Migration(migrations.Migration):
dependencies = [('questions', '0011_path')]
operations = [migrations.AlterField(model_name='questionentity', name='path', field=models.CharField(blank=True, help_text='The path part of the URI of this question/questionset (auto-generated).', max_length=512, null=True, verb... |
class Seq2Seq(object):
def calc_running_avg_loss(self, loss, running_avg_loss, decay=0.99):
if (running_avg_loss == 0):
running_avg_loss = loss
else:
running_avg_loss = ((running_avg_loss * decay) + ((1 - decay) * loss))
running_avg_loss = min(running_avg_loss, 12)
... |
def plot_rolling_beta(returns, benchmark, window1=126, window1_label='', window2=None, window2_label='', title='', hlcolor='red', figsize=(10, 6), grayscale=False, fontname='Arial', lw=1.5, ylabel=True, subtitle=True, savefig=None, show=True):
(colors, _, _) = _get_colors(grayscale)
(fig, ax) = _plt.subplots(fi... |
def openai_exception_handler(request: Request, exc: OpenAIHTTPException):
assert isinstance(exc, OpenAIHTTPException), f'Unable to handle invalid exception {type(exc)}'
if (exc.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR):
message = f'Internal Server Error (Request ID: {request.state.request_id... |
def test_binary_3() -> None:
div3 = Fsm(alphabet={Charclass('0'), Charclass('1'), (~ Charclass('01'))}, states={(- 2), (- 1), 0, 1, 2, 3}, initial=(- 2), finals={(- 1), 0}, map={(- 2): {Charclass('0'): (- 1), Charclass('1'): 1, (~ Charclass('01')): 3}, (- 1): {Charclass('0'): 3, Charclass('1'): 3, (~ Charclass('01'... |
def remap(image, old_values, new_values):
assert (isinstance(image, Image.Image) or isinstance(image, np.ndarray)), 'image must be of type PIL.Image or numpy.ndarray'
assert (type(new_values) is tuple), 'new_values must be of type tuple'
assert (type(old_values) is tuple), 'old_values must be of type tuple'... |
def normalize_text(seed: str) -> str:
seed = unicodedata.normalize('NFKD', seed)
seed = seed.lower()
seed = u''.join([c for c in seed if (not unicodedata.combining(c))])
seed = u' '.join(seed.split())
seed = u''.join([seed[i] for i in range(len(seed)) if (not ((seed[i] in string.whitespace) and is_C... |
.parametrize('pattern, expected_pattern', [('I do some stuff', 'Then I do some stuff'), (re.compile('I do some stuff'), re.compile('Then I do some stuff'))], ids=['Step with Step Pattern', 'Step with Regex'])
def test_registering_step_function_via_then_decorator(pattern, expected_pattern, stepregistry):
(pattern)
... |
class PhaseState():
def __init__(self, *, dataloader: Iterable[Any], max_epochs: Optional[int]=None, max_steps: Optional[int]=None, max_steps_per_epoch: Optional[int]=None, evaluate_every_n_steps: Optional[int]=None, evaluate_every_n_epochs: Optional[int]=None) -> None:
_check_loop_condition('max_epochs', m... |
((sys.version_info < (3, 8)), 'open_code only present since Python 3.8')
class FakeFilePatchedOpenCodeTest(FakeFileOpenTestBase):
def setUp(self):
super(FakeFilePatchedOpenCodeTest, self).setUp()
if self.use_real_fs():
self.open_code = io.open_code
else:
self.filesyst... |
class TaskHelper(ABC):
def __init__(self, wrapper):
self.wrapper = wrapper
self.output = None
def train_step(self, batch: Dict[(str, torch.Tensor)], **kwargs) -> Optional[torch.Tensor]:
pass
def eval_step(self, batch: Dict[(str, torch.Tensor)], **kwargs) -> Optional[torch.Tensor]:
... |
def imply(*args):
assert (len(args) == 2)
(cnd, tp) = args
if isinstance(tp, (tuple, list, set, frozenset)):
tp = list(tp)
assert (len(tp) >= 1)
return (imply(cnd, tp[0]) if (len(tp) == 1) else [imply(cnd, v) for v in tp])
if isinstance(tp, PartialConstraint):
tp = Node.b... |
class RagelLexer(RegexLexer):
name = 'Ragel'
url = '
aliases = ['ragel']
filenames = []
version_added = '1.1'
tokens = {'whitespace': [('\\s+', Whitespace)], 'comments': [('\\#.*$', Comment)], 'keywords': [('(access|action|alphtype)\\b', Keyword), ('(getkey|write|machine|include)\\b', Keyword), ... |
def _run_and_wait(command, error_allowed=False):
outputter = SpinOutputter(('Running command %s' % command))
outputter.start()
output = b''
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
for line in iter(process.stdout.readline, b''):
output += line
... |
def make_parser():
parser = argparse.ArgumentParser('YOLOX Eval')
parser.add_argument('-expn', '--experiment-name', type=str, default=None)
parser.add_argument('-n', '--name', type=str, default=None, help='model name')
parser.add_argument('--dist-backend', default='nccl', type=str, help='distributed bac... |
class WriteMultipleRegistersResponse(ModbusResponse):
function_code = 16
_rtu_frame_size = 8
def __init__(self, address=None, count=None, **kwargs):
super().__init__(**kwargs)
self.address = address
self.count = count
def encode(self):
return struct.pack('>HH', self.addre... |
class Hartmann3(object):
def __init__(self):
self._dim = 3
self._search_domain = numpy.repeat([[0.0, 1.0]], self._dim, axis=0)
self._num_init_pts = 3
self._sample_var = 0.0
self._min_value = (- 3.86278)
self._observations = []
self._num_fidelity = 0
def ev... |
class M2M100Tokenizer(PreTrainedTokenizer):
vocab_files_names = VOCAB_FILES_NAMES
max_model_input_sizes = PRETRAINED_POSITIONAL_EMBEDDINGS_SIZES
pretrained_vocab_files_map = PRETRAINED_VOCAB_FILES_MAP
model_input_names = ['input_ids', 'attention_mask']
prefix_tokens: List[int] = []
suffix_tokens... |
def test_dns_compression_generic_failure(caplog):
packet = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x06domain\x05local\x00\x00\x01\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x05-\x0c\x00\x01\x80\x01\x00\x00\x00\x01\x00\x04\xc0\xa8\xd0\x06'
parsed = r.DNSIncoming(packet, ('1.2.3.4', 5353))
assert ... |
class IndexedDataset(FairseqDataset):
_HDR_MAGIC = b'TNTIDX\x00\x00'
def __init__(self, path, fix_lua_indexing=False):
super().__init__()
self.path = path
self.fix_lua_indexing = fix_lua_indexing
self.data_file = None
self.read_index(path)
def read_index(self, path):
... |
class ShaderSource():
def __init__(self, source: str, source_type: GLenum):
self._lines = source.strip().splitlines()
self._type = source_type
if (not self._lines):
raise ShaderException('Shader source is empty')
self._version = self._find_glsl_version()
if (pygle... |
class OurLoss(torch.nn.modules.loss._Loss):
def __init__(self, device, margin=0.5, sigma=2.0, T=2.0):
super(OurLoss, self).__init__()
self.T = T
self.device = device
self.margin = margin
self.softmax = torch.nn.Softmax(dim=1)
self.sigma = sigma
def forward(self, e... |
def test_add_random_edges():
    """add_random_edges: p=0 is a no-op, p=0.5 grows a star graph to 15 edges,
    and p outside [0, 1] raises ValueError."""
    graph = nx.star_graph(10)
    original_edges = list(graph.edges())

    # Probability zero must leave the edge set untouched.
    add_random_edges(graph, 0)
    assert original_edges == list(graph.edges())

    # With p=0.5 the star graph gains edges up to a total of 15,
    # and every original edge is still present (strict subset).
    add_random_edges(graph, 0.5)
    assert graph.size() == 15
    assert set(original_edges) < set(graph.edges())

    # A probability above 1 is invalid input.
    with pytest.raises(ValueError):
        add_random_edges(graph, 1.2)
def test_tensor_shared_zero():
shared_val = np.array([1.0, 3.0], dtype=np.float32)
res = pytensor.shared(value=shared_val, borrow=True)
assert isinstance(res, TensorSharedVariable)
assert (res.get_value(borrow=True) is shared_val)
res.zero(borrow=True)
new_shared_val = res.get_value(borrow=True)... |
def run_sql_query(config, client, query_func, sql_context, write_func=write_result):
try:
remove_benchmark_files()
config['start_time'] = time.time()
data_dir = config['data_dir']
results = benchmark(query_func, data_dir=data_dir, client=client, c=sql_context, config=config)
... |
class Iv(BinaryScalarOp):
nfunc_spec = ('scipy.special.iv', 2, 1)
def st_impl(v, x):
return scipy.special.iv(v, x)
def impl(self, v, x):
return self.st_impl(v, x)
def grad(self, inputs, grads):
(v, x) = inputs
(gz,) = grads
return [grad_not_implemented(self, 0, v)... |
def find_projects(group_id):
logger.debug(f'find projects {group_id}')
response = requests.get(f'{git_base_url}{gitlab_api_url}/groups/{group_id}/projects', headers={'PRIVATE-TOKEN': git_token, 'Content-Type': 'application/json'})
projects = json.loads(response.text)
logger.debug(f'project count = {len(... |
class ListViewWinFormTestCases32(unittest.TestCase):
path = os.path.join(winforms_folder_32, u'ListView_TestApp.exe')
def setUp(self):
Timings.defaults()
app = Application()
app.start(self.path)
self.dlg = app.ListViewEx
self.ctrl = self.dlg.ListView.find()
def tearDo... |
class DataGenerator():
def __init__(self, args):
self.args = args
self.seprate_ratio = (0.7, 0.2, 0.1)
self.mixture_dir = self.args.task_path
self.mixture_filename = 'mixture.npy'
self.base_dir = os.path.join(self.args.task_path, self.args.task)
self.did_to_dname = {0... |
class AlphaDropout(Layer):
def __init__(self, rate, noise_shape=None, seed=None, **kwargs):
super(AlphaDropout, self).__init__(**kwargs)
self.rate = rate
self.noise_shape = noise_shape
self.seed = seed
self.supports_masking = True
def _get_noise_shape(self, inputs):
... |
class Migration(migrations.Migration):
dependencies = [('digest', '0044_auto__2128')]
operations = [migrations.AlterField(model_name='autoimportresource', name='excl', field=models.TextField(blank=True, default='', help_text='List of exceptions, indicate by ", "', verbose_name='Exceptions'), preserve_default=Fa... |
def test_update_merge_request_approval_rule(project, resp_mr_approval_rules):
approval_rules = project.mergerequests.get(1, lazy=True).approval_rules
ar_1 = approval_rules.list()[0]
ar_1.user_ids = updated_approval_rule_user_ids
ar_1.approvals_required = updated_approval_rule_approvals_required
ar_1... |
class CythonParser(PythonParser):
_extensions = ['pyi', '.pyx', '.pxd']
_keywords = (pythonKeywords | cythonExtraKeywords)
def _identifierState(self, identifier=None):
if (identifier is None):
state = 0
try:
if (self._idsCounter > 0):
state... |
()
def backport_task(commit_hash, branch, *, issue_number, created_by, merged_by, installation_id):
    """Synchronous entry point that runs the async backport workflow to completion.

    Thin bridge for task runners that expect a plain callable: delegates all
    arguments unchanged to ``backport_task_asyncio`` and blocks until it
    finishes.

    Parameters
    ----------
    commit_hash : str
        Commit to backport.
    branch : str
        Target branch for the backport.
    issue_number, created_by, merged_by, installation_id
        Forwarded verbatim to ``backport_task_asyncio``.
    """
    # asyncio.run() creates, runs, and cleanly closes a fresh event loop per
    # call; it replaces the deprecated get_event_loop()/run_until_complete()
    # pattern (get_event_loop() is deprecated outside a running loop since
    # Python 3.10).
    asyncio.run(
        backport_task_asyncio(
            commit_hash,
            branch,
            issue_number=issue_number,
            created_by=created_by,
            merged_by=merged_by,
            installation_id=installation_id,
        )
    )
class XORModel(LightningModule):
def __init__(self, input_dim=2, output_dim=1):
super(XORModel, self).__init__()
self.save_hyperparameters()
self.lin1 = torch.nn.Linear(input_dim, 8)
self.lin2 = torch.nn.Linear(8, output_dim)
def forward(self, features):
x = features.floa... |
def test_skip_using_reason_works_ok(pytester: Pytester) -> None:
p = pytester.makepyfile('\n import pytest\n\n def test_skipping_reason():\n pytest.skip(reason="skippedreason")\n ')
result = pytester.runpytest(p)
result.stdout.no_fnmatch_line('*PytestDeprecationWarning*')
... |
class RedirectModel(keras.callbacks.Callback):
def __init__(self, callback, model):
super(RedirectModel, self).__init__()
self.callback = callback
self.redirect_model = model
def on_epoch_begin(self, epoch, logs=None):
self.callback.on_epoch_begin(epoch, logs=logs)
def on_epo... |
.slow
def test_pinnacle_cli_output(data):
output_path = tempfile.mkdtemp()
for pinn_dir in data.joinpath('Pt1').joinpath('Pinnacle').iterdir():
command = (([str(pmp_test_utils.get_executable_even_when_embedded()), '-m'] + 'pymedphys pinnacle export'.split()) + ['-o', output_path, '-m', 'CT', '-m', 'RTST... |
def FRSKD(net, inputs, targets, criterion_cls, criterion_div):
loss_div = torch.tensor(0.0).cuda()
loss_cls = torch.tensor(0.0).cuda()
(logit, features, bi_feats, bi_logits) = net(inputs)
loss_cls += criterion_cls(logit, targets)
loss_cls += criterion_cls(bi_logits, targets)
loss_div += (2 * cri... |
def _create_segmentations_and_labels(video_data: List[VideoDatum], all_video_outputs: List[VideoOutputs]):
labels_are_valid = [video_datum.valid_labels(Task.SEGMENTATION) for video_datum in video_data]
valid_segmentations = [video_outputs[OUTPUT_SEGMENTATION] for (video_outputs, valid) in zip(all_video_outputs,... |
class ModbusSerialClient(ModbusBaseSyncClient):
state = ModbusTransactionState.IDLE
inter_char_timeout: float = 0
silent_interval: float = 0
def __init__(self, port: str, framer: Framer=Framer.RTU, baudrate: int=19200, bytesize: int=8, parity: str='N', stopbits: int=1, **kwargs: Any) -> None:
su... |
class CommonLispLexer(RegexLexer):
name = 'Common Lisp'
url = '
aliases = ['common-lisp', 'cl', 'lisp']
filenames = ['*.cl', '*.lisp']
mimetypes = ['text/x-common-lisp']
version_added = '0.9'
flags = (re.IGNORECASE | re.MULTILINE)
nonmacro = '\\\\.|[\\w!$%&*+-/<=>?\\[\\]^{}~]'
consti... |
def test_cov_min_float_value(testdir):
script = testdir.makepyfile(SCRIPT)
result = testdir.runpytest('-v', f'--cov={script.dirpath()}', '--cov-report=term-missing', '--cov-fail-under=88.88', script)
assert (result.ret == 0)
result.stdout.fnmatch_lines(['Required test coverage of 88.88% reached. Total c... |
def make_concept(res) -> Optional[MathConcept]:
if (not res.get('arity').isdigit()):
flash('Arity must be non-negative integer.')
return None
else:
arity = int(res.get('arity'))
description = res.get('description')
if (len(description) == 0):
flash('Description must be fi... |
def _ask_user_to_verify(description):
failure_description = None
print()
print(description)
while True:
response = input('Passed [Yn]: ')
if (not response):
break
elif (response in 'Nn'):
failure_description = input('Enter failure description: ')
... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.