code stringlengths 281 23.7M |
|---|
class ContentType(str, Enum):
    """Closed set of MIME content-type strings used by this module.

    Mixes in ``str`` so each member compares equal to (and serializes as)
    its plain MIME-string value, e.g. ``ContentType.CSV == 'text/csv'``.
    """
    CSV = 'text/csv'
    JSON = 'application/json'
    TSV = 'text/tsv'  # tab-separated values
    PSV = 'text/psv'  # pipe-separated values
    PARQUET = 'application/parquet'
    ORC = 'application/orc'
    FEATHER = 'application/feather'
    # Vendor-specific (x-amzn) media types.
    UNESCAPED_TSV = 'application/x-amzn-unescaped-tsv'
    ION = 'application/x-amzn-ion'
def pretix_questions():
return {'count': 3, 'next': None, 'previous': None, 'results': [{'id': 1, 'question': {'en': 'Vat number', 'it': 'Codice Fiscale'}, 'type': 'S', 'required': True, 'items': [1, 2], 'options': [], 'position': 0, 'ask_during_checkin': False, 'identifier': 'ZZZ', 'dependency_question': None, 'de... |
def count_sparsity(model):
total_num_weights = 0
layer_names = get_all_layer_names(model, (torch.nn.Conv2d, torch.nn.Linear))
for layer_name in layer_names:
module = get_layer_by_name(model, layer_name)
total_num_weights += module.weight.data.numel()
weights = torch.zeros(total_num_weigh... |
def test_fermi_hubbard_3x3_spinless():
hubbard_model = fermi_hubbard(3, 3, 1.0, 4.0, chemical_potential=0.5, spinless=True)
assert (str(hubbard_model).strip() == '\n-0.5 [0^ 0] +\n4.0 [0^ 0 1^ 1] +\n4.0 [0^ 0 3^ 3] +\n-1.0 [0^ 1] +\n-1.0 [0^ 2] +\n-1.0 [0^ 3] +\n-1.0 [0^ 6] +\n-1.0 [1^ 0] +\n-0.5 [1^ 1] +\n4.0 ... |
def __CharLowerBuff(ql: Qiling, address: int, params, wstring: bool):
lpBuffer = params['lpBuffer']
cchLength = params['cchLength']
data = ql.mem.read(lpBuffer, cchLength)
enc = ('utf-16le' if wstring else 'utf-8')
data = data.decode(enc)
data = data.lower()
data = data.encode(enc)
ql.me... |
def deprecated(replacement_description):
def decorate(fn_or_class):
if isinstance(fn_or_class, type):
pass
else:
try:
fn_or_class.__doc__ = ('This API point is obsolete. %s\n\n%s' % (replacement_description, fn_or_class.__doc__))
except AttributeEr... |
class CassandraTests(unittest.TestCase):
def setUp(self):
REQUEST_TIME.clear()
REQUEST_ACTIVE.clear()
REQUEST_TOTAL.clear()
def test_prom__on_execute_complete(self):
result = mock.MagicMock()
span = mock.MagicMock()
event = mock.MagicMock()
start_time = 1.... |
class CachedProxy(object):
def __init__(self, proxy, cacheid):
self.proxy = proxy
self.cacheid = cacheid
def details(self):
return {}
def json(self):
return {'proxyname': 'CachedProxy', 'proxy': self.proxy.json(), 'cacheid': self.cacheid}
def fromJSON(cls, data, deseriali... |
def test_fetch_toml_pass_with_string(fs):
in_path = './tests/testfiles/test.toml'
fs.create_file(in_path, contents='key1 = "value1"\nkey2 = "value2"\nkey3 = "value3"\n')
context = Context({'ok1': 'ov1', 'fetchToml': in_path})
tomlfetcher.run_step(context)
assert context, "context shouldn't be None"
... |
def backtranslate_samples(samples, collate_fn, generate_fn, cuda=True):
collated_samples = collate_fn(samples)
s = (utils.move_to_cuda(collated_samples) if cuda else collated_samples)
generated_sources = generate_fn(s)
id_to_src = {sample['id']: sample['source'] for sample in samples}
return [{'id':... |
(name='tests-ssh')
(name='tests-randomorder')
(name='tests-nocoverage')
def tests(session: nox.Session) -> None:
extras = 'test'
if (session.name == 'tests-ssh'):
extras += ',ssh'
if (session.name == 'tests-randomorder'):
extras += ',test-randomorder'
prof_location = ((pathlib.Path('.') ... |
class GameDetailsTab():
def __init__(self, parent: QtWidgets.QWidget, game: RandovaniaGame):
self.game_enum = game
def widget(self) -> QtWidgets.QWidget:
raise NotImplementedError
def tab_title(self) -> str:
raise NotImplementedError
def update_content(self, configuration: BaseCo... |
class PlyElement(object):
def __init__(self, name, properties, count, comments=[]):
self._name = str(name)
self._check_name()
self._count = count
self._properties = tuple(properties)
self._index()
self.comments = list(comments)
self._have_list = any((isinstanc... |
def test_remove_random_edges():
G = nx.star_graph(10)
edges = list(G.edges())
remove_random_edges(G, 0)
assert (edges == list(G.edges()))
remove_random_edges(G, 0.5)
assert (G.size() == 5)
assert (set(G.edges()) < set(edges))
with pytest.raises(ValueError):
remove_random_edges(G,... |
def rouge_n(evaluated_sentences, reference_sentences, n=2):
if ((len(evaluated_sentences) <= 0) or (len(reference_sentences) <= 0)):
return (0.0, 0.0, 0.0)
(evaluated_ngrams, evaluated_count) = _get_word_ngrams(n, evaluated_sentences)
(reference_ngrams, reference_count) = _get_word_ngrams(n, referen... |
class SetupCallback(Callback):
def __init__(self, resume, now, logdir, ckptdir, cfgdir, config, lightning_config):
super().__init__()
self.resume = resume
self.now = now
self.logdir = logdir
self.ckptdir = ckptdir
self.cfgdir = cfgdir
self.config = config
... |
def test_notebook_input(workspace):
doc_str = "\nprint('hi')\nimport os\ndef f():\n a = 2\n"
doc_uri = uris.from_fs_path(os.path.join(workspace.root_path, 'Untitled.ipynb'))
workspace.put_document(doc_uri, doc_str)
doc = workspace.get_document(doc_uri)
diags = ruff_lint.pylsp_lint(workspace, doc)... |
class EchoesHintDetailsTab(GameDetailsTab):
def __init__(self, parent: QtWidgets.QWidget, game: RandovaniaGame):
super().__init__(parent, game)
self.tree_widget = QtWidgets.QTreeWidget(parent)
def widget(self) -> QtWidgets.QWidget:
return self.tree_widget
def tab_title(self) -> str:
... |
def _makeTags(tagStr, xml, suppress_LT=Suppress('<'), suppress_GT=Suppress('>')):
if isinstance(tagStr, str_type):
resname = tagStr
tagStr = Keyword(tagStr, caseless=(not xml))
else:
resname = tagStr.name
tagAttrName = Word(alphas, (alphanums + '_-:'))
if xml:
tagAttrValu... |
def import_elements(elements, save=True, user=None):
for element in elements:
model = element.get('model')
element.update({'warnings': defaultdict(list), 'errors': [], 'created': False, 'updated': False})
if (model == 'conditions.condition'):
import_condition(element, save, user)... |
def test_specific_location(hatch, helpers, temp_dir_data, dist_name):
install_dir = (((temp_dir_data / 'foo') / 'bar') / 'baz')
helpers.write_distribution(install_dir, dist_name)
compatible_distributions = get_compatible_distributions()
installed_distribution = compatible_distributions.pop(dist_name)
... |
class BilibiliRealUrlExtractor(RealUrlExtractor):
def _extract_real_url(self):
try:
self.real_url = BiliBili(self.room).get_real_url()
except:
self.real_url = 'None'
super()._extract_real_url()
def _is_url_valid(self, url):
return ((url is not None) and (l... |
def test_filereplace_pass_out_encoding_in_to_out(fs):
payload = 'this {k1} X1 is line 1\nthis is line 2 REPLACEME2\nthis is line 3\nthis rm3 RM3 is RM4 line 4\nthis !$% * is rm5 line 5\n'
in_path = '/testreplace.txt'
fs.create_file(in_path, contents=payload, encoding='utf-32')
context = Context({'k1': ... |
def convert_arg(state, arg, typ, size, base):
szdiff = (size - arg.size())
if (szdiff > 0):
if (typ == SINT):
arg = z3.SignExt(szdiff, arg)
else:
arg = z3.ZeroExt(szdiff, arg)
elif (szdiff < 0):
arg = z3.Extract((size - 1), 0, arg)
arg = state.evalcon(arg)... |
def load_one_char(f, data_dict):
first_unit = f.read(4)
if (first_unit == ''):
return False
sample_size = st.unpack('i', first_unit)
c1 = st.unpack('c', f.read(1))[0]
c2 = st.unpack('c', f.read(1))[0]
u = unicode((c1 + c2), 'gbk')
width = st.unpack('H', f.read(2))[0]
height = st.... |
def driver_kwargs(request, test, capabilities, **kwargs):
provider = SauceLabs(request.config.getini('saucelabs_data_center'))
_capabilities = capabilities
if (os.getenv('SAUCELABS_W3C') == 'true'):
_capabilities = capabilities.setdefault('sauce:options', {})
_capabilities.setdefault('username',... |
class SingleLoader(BaseLoader):
def __init__(self, opt):
BaseLoader.__init__(self, opt)
self.dir = opt.dir
self.paths = file_utils.load_paths(self.dir)
self.index = 0
def __len__(self):
return len(self.paths)
def __iter__(self):
return self
def __next__(se... |
def authentication_required(url, authenticator, abort_on):
realm = authenticator.realm()
if realm:
msg = '<b>{}</b> says:<br/>{}'.format(html.escape(url.toDisplayString()), html.escape(realm))
else:
msg = '<b>{}</b> needs authentication'.format(html.escape(url.toDisplayString()))
urlstr ... |
class SpotLightHelper(Line):
def __init__(self, color=None):
self._color = color
positions = [[0, 0, 0], [0, 0, (- 1)], [0, 0, 0], [1, 0, (- 1)], [0, 0, 0], [(- 1), 0, (- 1)], [0, 0, 0], [0, 1, (- 1)], [0, 0, 0], [0, (- 1), (- 1)]]
for i in range(32):
p1 = (((i / 32) * math.pi) *... |
def inphp():
if (system == 'termux'):
os.system((pac + ' update'))
os.system((pac + ' install php -y'))
os.system((pac + ' install php-mysqli -y'))
else:
os.system((pac + ' update'))
os.system((pac + ' install php -y'))
os.system((pac + ' install php5 -y'))
... |
def main(hparams):
results_dir = get_results_directory(hparams.output_dir)
writer = SummaryWriter(log_dir=str(results_dir))
ds = get_dataset(hparams.dataset, root=hparams.data_root)
(input_size, num_classes, train_dataset, test_dataset) = ds
hparams.seed = set_seed(hparams.seed)
if (hparams.n_in... |
def get_grad(optimizer, X_Sk, y_Sk, opfun, ghost_batch=128):
if torch.cuda.is_available():
obj = torch.tensor(0, dtype=torch.float).cuda()
else:
obj = torch.tensor(0, dtype=torch.float)
Sk_size = X_Sk.shape[0]
optimizer.zero_grad()
for idx in np.array_split(np.arange(Sk_size), max(in... |
class BuildPo(Command):
description = 'update and copy .po files to the build dir'
user_options = []
def initialize_options(self):
self.build_base: (str | None) = None
self.po_build_dir: (Path | None) = None
def finalize_options(self):
self.set_undefined_options('build', ('build_... |
def get_chunk(start_byte=None, end_byte=None, full_path=None):
file_size = os.stat(full_path).st_size
if end_byte:
length = ((end_byte + 1) - start_byte)
else:
length = (file_size - start_byte)
with open(full_path, 'rb') as f:
f.seek(start_byte)
chunk = f.read(length)
... |
def vgg_arg_scope(weight_decay=0.0005):
with slim.arg_scope([slim.conv2d, slim.fully_connected], activation_fn=tf.nn.relu, weights_regularizer=slim.l2_regularizer(weight_decay), biases_initializer=tf.zeros_initializer()):
with slim.arg_scope([slim.conv2d], padding='SAME') as arg_sc:
return arg_s... |
class MSEOperator(ops.PixelComparisonOperator):
def image_to_repr(self, image):
return image
def input_image_to_repr(self, image, ctx):
return image
def target_image_to_repr(self, image):
return (image, None)
def calculate_score(self, input_repr, target_repr, ctx):
return... |
.parametrize('masked, secret', [('secret-token', 'secret-token'), (re.compile('ghp_.+?(?=\\s|$)'), ('ghp_' + _random_string(15)))])
.parametrize('use_named_masks', (True, False))
def test_mask_applied(use_named_masks, masked, secret):
masker = MaskingFilter(_use_named_masks=use_named_masks)
masker.add_mask_for(... |
class AccountTerminationQueue(models.Model):
class State(models.TextChoices):
NO_TRACE = ('NT', _('delete account completely'))
LEGACY = ('LE', _('delete account with legacy'))
FROZEN = ('FZ', _('freeze account'))
author = models.OneToOneField(Author, on_delete=models.CASCADE)
state ... |
def monkeypatch_or_replace_safeloras(models, safeloras):
loras = parse_safeloras(safeloras)
for (name, (lora, ranks, target)) in loras.items():
model = getattr(models, name, None)
if (not model):
print(f'No model provided for {name}, contained in Lora')
continue
m... |
def calc_spectral_mismatch_field(sr, e_sun, e_ref=None):
if (e_ref is None):
e_ref = get_am15g(wavelength=e_sun.T.index)
sr_sun = np.interp(e_sun.T.index, sr.index, sr, left=0.0, right=0.0)
sr_ref = np.interp(e_ref.T.index, sr.index, sr, left=0.0, right=0.0)
def integrate(e):
return np.t... |
class Application(StreamHandler):
APP_STATE_PROFILE_SENSE = 'Profile Sensing'
APP_STATE_INIT = 'Initializing'
APP_STATE_FIRMWARE_DOWNLOAD = 'Firmware Download'
APP_STATE_WAITING_FOR_BUTTON = 'Push button'
APP_STATE_CHECKING_UPDATE = 'Cloud'
APP_STATE_PROGRAMMING = 'Programming'
APP_STATE_SUC... |
def test_prepare_inputs_from_poa_arrays_missing_column(sapm_dc_snl_ac_system_Array, location, weather, total_irrad):
mc = ModelChain(sapm_dc_snl_ac_system_Array, location)
poa = pd.concat([weather, total_irrad], axis=1)
with pytest.raises(ValueError, match='Incomplete input data\\. Data needs to contain .*\... |
def test_decode(patches_with_data, default_echoes_configuration):
(encoded, expected) = patches_with_data
game = expected.game
pool = pool_creator.calculate_pool_results(default_echoes_configuration, game)
decoded = game_patches_serializer.decode_single(0, {0: pool}, game, encoded, default_echoes_config... |
def test_failing_command(tmp_path):
    """A failing CIBW_BEFORE_ALL command must abort the run with CalledProcessError."""
    work_dir = tmp_path / 'project'
    test_projects.new_c_project().generate(work_dir)
    # 'false' (POSIX) / 'exit /b 1' (Windows) both exit non-zero on purpose.
    failing_env = {'CIBW_BEFORE_ALL': 'false', 'CIBW_BEFORE_ALL_WINDOWS': 'exit /b 1'}
    with pytest.raises(subprocess.CalledProcessError):
        utils.cibuildwheel_run(work_dir, add_env=failing_env)
def get_group_handler(null_avatar):
(netloc='fakegitlab', path='/api/v4/groups/2$')
def group_handler(_, request):
if (not (request.headers.get('Authorization') == 'Bearer foobar')):
return {'status_code': 401}
return {'status_code': 200, 'headers': {'Content-Type': 'application/json... |
class SentWebAppMessage(Object):
    """High-level wrapper for a message sent on behalf of a Web App.

    Attributes:
        inline_message_id: Opaque identifier of the sent inline message,
            needed to edit or reference the message later.
    """

    def __init__(self, *, inline_message_id: str):
        super().__init__()
        self.inline_message_id = inline_message_id

    @staticmethod
    def _parse(obj: 'raw.types.WebViewMessageSent') -> 'SentWebAppMessage':
        # Convert the raw Telegram type into this wrapper. Declared as a
        # staticmethod: without the decorator, an instance-level call
        # (inst._parse(obj)) would bind the instance as `obj` and fail;
        # class-level calls behave identically either way.
        return SentWebAppMessage(inline_message_id=utils.pack_inline_message_id(obj.msg_id))
def setup_logger(ql: Qiling, log_file: Optional[str], console: bool, log_override: Optional[Logger], log_plain: bool):
global QL_INSTANCE_ID
if (log_override is not None):
log = log_override
else:
log = logging.getLogger(f'qiling{QL_INSTANCE_ID}')
QL_INSTANCE_ID += 1
log.prop... |
class struct__EFI_HII_AIBT_OVERLAY_IMAGES_BLOCK(ctypes.Structure):
    """ctypes layout for the UEFI ``EFI_HII_AIBT_OVERLAY_IMAGES_BLOCK`` structure.

    Generated-style binding: field names and widths mirror the C definition
    from the UEFI HII animation headers.
    """
    _pack_ = True  # packed layout (no compiler padding), matching the firmware ABI
    _functions_ = []  # binding-generator bookkeeping; no member functions exposed
    # AnimationCell is modeled as a length-1 array — presumably a C flexible
    # array member whose real length is CellCount; TODO confirm against the spec.
    _fields_ = [('DftImageId', ctypes.c_uint16), ('Width', ctypes.c_uint16), ('Height', ctypes.c_uint16), ('CellCount', ctypes.c_uint16), ('AnimationCell', (struct__EFI_HII_ANIMATION_CELL * 1))]
def _create_mnv3(model_kwargs, variant, pretrained=False):
features_only = False
model_cls = MobileNetV3
if model_kwargs.pop('features_only', False):
features_only = True
model_kwargs.pop('num_classes', 0)
model_kwargs.pop('num_features', 0)
model_kwargs.pop('head_conv', None... |
class LineComp():
def __init__(self) -> None:
self.stringio = StringIO()
def assert_contains_lines(self, lines2: Sequence[str]) -> None:
__tracebackhide__ = True
val = self.stringio.getvalue()
self.stringio.truncate(0)
self.stringio.seek(0)
lines1 = val.split('\n'... |
class Resolver():
def __init__(self, config: utils.RepositoryConfig, input: CredentialInput) -> None:
self.config = config
self.input = input
def choose(cls, interactive: bool) -> Type['Resolver']:
return (cls if interactive else Private)
_cache()
def username(self) -> Optional[s... |
def errorhook(exc_info=None):
global _error_lock, _errorhook_enabled
if (not _errorhook_enabled):
return
if (exc_info is None):
exc_info = sys.exc_info()
if (exc_info[0] is None):
print_e('no active exception!')
return
print_exc(exc_info)
if (not _error_lock.acqui... |
class TestWordInformationPreserved(unittest.TestCase):
def test_word_information_preserved_with_valid_input(self) -> None:
torch.testing.assert_close(word_information_preserved('hello meta', 'hi metaverse'), torch.tensor(0.0, dtype=torch.float64))
torch.testing.assert_close(word_information_preserve... |
def get_binarized_kneighbors_graph(features, topk, mask=None, device=None):
assert (features.requires_grad is False)
features_norm = features.div(torch.norm(features, p=2, dim=(- 1), keepdim=True))
attention = torch.matmul(features_norm, features_norm.transpose((- 1), (- 2)))
if (mask is not None):
... |
class CaptionMergeAllKeywordDataset(CaptionKeywordProbDataset):
def __init__(self, features: Dict, transforms: Dict, caption: str, vocabulary: str, keyword_prob: str, load_into_mem: bool, keyword_encoder: str, dropout_prob: float):
assert (dropout_prob > 0)
super().__init__(features, transforms, cap... |
_vcs_handler('git', 'pieces_from_vcs')
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
if (not os.path.exists(os.path.join(root, '.git'))):
if verbose:
print(('no .git in %s' % root))
raise NotThisMethod('no .git directory')
GITS = ['git']
if (sys.pla... |
def scientific(value: NumberOrString, precision: int=2) -> str:
exponents = {'0': '0', '1': '1', '2': '2', '3': '3', '4': '4', '5': '5', '6': '6', '7': '7', '8': '8', '9': '9', '-': ''}
try:
value = float(value)
if (not math.isfinite(value)):
return _format_not_finite(value)
exce... |
class SRBlock(dict):
def __init__(self, fid, pointer):
if ((pointer != 0) and (pointer is not None)):
fid.seek(pointer)
(self['id'], reserved, self['length'], self['link_count'], self['sr_sr_next'], self['sr_data'], self['sr_cycle_count'], self['sr_interval'], self['sr_sync_type'], s... |
def extension_index(ext):
exists = True
i = 0
index =
while exists:
tag = wintab.UINT()
exists = lib.WTInfoW((wintab.WTI_EXTENSIONS + i), wintab.EXT_TAG, ctypes.byref(tag))
if (tag.value == ext):
index = i
break
i += 1
if (index != ):
... |
def get_strides_for_split_conv_ops(layer: tf.keras.layers.Layer) -> (Tuple, Tuple):
if (not isinstance(layer, tf.keras.layers.Conv2D)):
logger.error('Only Conv2d op can be split')
raise ValueError('Only Conv2d op can be split')
strides = layer.strides
conv_a_strides = (strides[0], 1)
con... |
class TestFreezeCoreTransformer(QiskitNatureTestCase):
from test.second_q.transformers.test_active_space_transformer import TestActiveSpaceTransformer
assertDriverResult = TestActiveSpaceTransformer.assertDriverResult
assertElectronicEnergy = TestActiveSpaceTransformer.assertElectronicEnergy
((not _opti... |
def get_exe_prefixes(exe_filename):
prefixes = [('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''), ('PLATLIB/', ''), ('SCRIPTS/', 'EGG-INFO/scripts/'), ('DATA/lib/site-packages', '')]
z = zipfile.ZipFile(exe_filename)
try:
for info in z.infolist():
name = info.filename
parts... |
def test_startup(terminal):
try:
terminal.cursor().assert_equal((4, 3))
terminal.current_line().assert_startswith('r$>')
terminal.write('\n')
terminal.current_line().assert_startswith('r$>')
terminal.cursor().assert_equal((4, 5))
terminal.write('a')
terminal.s... |
def macaddr_pack(data, bytes=bytes):
colon_parts = data.split(':')
dash_parts = data.split('-')
dot_parts = data.split('.')
if (len(colon_parts) == 6):
mac_parts = colon_parts
elif (len(dash_parts) == 6):
mac_parts = dash_parts
elif (len(colon_parts) == 2):
mac_parts = [c... |
()
def notify_on_ad_image_change(advertisement_id):
ad = Advertisement.objects.filter(id=advertisement_id).first()
if ((not ad) or (not ad.image)):
log.warning("Invalid ad passed to 'notify_on_ad_image_change'")
return
ad_url = generate_absolute_url(ad.get_absolute_url())
message = f'Ad ... |
def parse_number_symbols(data, tree):
number_symbols = data.setdefault('number_symbols', {})
for symbol_system_elem in tree.findall('.//numbers/symbols'):
number_system = symbol_system_elem.get('numberSystem')
if (not number_system):
continue
for symbol_element in symbol_syst... |
def test_get_pype_loader_default():
with patch('pypyr.moduleloader.get_module') as mock_get_module:
mock_get_def = Mock()
mock_get_def.return_value = Mock(spec=dict())
mock_get_module.return_value.get_pipeline_definition = mock_get_def
loader = loadercache.LoaderCache().get_pype_load... |
class RandomScaleCrop(object):
def __init__(self, base_size, crop_size, fill=255):
self.base_size = base_size
self.crop_size = crop_size
self.fill = fill
def __call__(self, sample):
img = sample['image']
mask = sample['label']
short_size = random.randint(int((self... |
@plugin.route('/tv/')
def tv() -> None:
    """Render the TV-channels directory listing in Kodi.

    Fetches the channel index from the plugin's API client and adds one
    non-folder directory item per channel, then closes the directory.
    """
    # NOTE(review): the decorator object was missing in the original
    # (bare `.route('/tv/')`, a syntax error); restored as `plugin.route`
    # since the body uses `plugin.*` throughout — confirm against the repo.
    response = plugin.client('tv/index').get()
    for channel in response['channels']:
        item = plugin.list_item(name=channel['title'], iconImage=channel['logos']['s'])
        # isFolder=False: each channel entry is a playable stream, not a directory.
        xbmcplugin.addDirectoryItem(plugin.handle, channel['stream'], item, False)
    xbmcplugin.endOfDirectory(plugin.handle)
def format_data(file_path):
results = {'ID': [], 'instruction': [], 'target': []}
with open(file_path, encoding='utf-8') as f:
content = json.load(f)
for sample in content:
try:
results['ID'].append(sample['ID'])
results['instruction'].append(sample['i... |
class PL303QMTP(PLBase):
ch_1: PLChannel = Instrument.ChannelCreator(PLChannel, '1', voltage_range=[0, 30], current_range=[0, 3])
ch_2: PLChannel = Instrument.ChannelCreator(PLChannel, '2', voltage_range=[0, 30], current_range=[0, 3])
ch_3: PLChannel = Instrument.ChannelCreator(PLChannel, '3', voltage_range... |
class L1L2(Regularizer):
def __init__(self, l1=0.0, l2=0.0):
self.l1 = K.cast_to_floatx(l1)
self.l2 = K.cast_to_floatx(l2)
def __call__(self, x):
regularization = 0.0
if self.l1:
regularization += K.sum((self.l1 * K.abs(x)))
if self.l2:
regularizat... |
def main():
parser = argparse.ArgumentParser()
parser.add_argument('hexstring')
args = parser.parse_args()
b = bytes.fromhex(args.hexstring)
(op_type, num_ops, num_addresses, keep_alive) = struct.unpack_from('>BBBB', b, 0)
print(f'> Operation type: {op_type}')
print(f'> Keep Alive: {bool(kee... |
class DebugMROMeta(FinalMeta):
def __new__(mcls, name, bases, clsdict):
try:
return super(DebugMROMeta, mcls).__new__(mcls, name, bases, clsdict)
except TypeError as e:
if ('(MRO)' in str(e)):
msg = debug_mro_failure(name, bases)
raise TypeErro... |
class PerceptualLoss(torch.nn.Module):
def __init__(self, model='net-lin', net='vgg', use_gpu=True):
print('Setting up Perceptual loss...')
self.model = dist_model.DistModel()
self.model.initialize(model=model, net=net, use_gpu=True)
print('...Done')
def forward(self, pred, targe... |
class F20_LogVol(F18_LogVol):
removedKeywords = F18_LogVol.removedKeywords
removedAttrs = F18_LogVol.removedAttrs
conflictingCommands = ['autopart', 'mount']
def _getParser(self):
op = F18_LogVol._getParser(self)
op.add_argument('--thinpool', action='store_true', version=F20, dest='thin_... |
class Environment():
def __init__(self, min_pyrogram_version: str, min_telethon_version: str, min_hydrogram_version: str, client_name: str):
self._REQUIRED_PYROGRAM_VERSION = min_pyrogram_version
self._REQUIRED_TELETHON_VERSION = min_telethon_version
self._REQUIRED_HYDROGRAM_VERSION = min_hy... |
def process_qa_para(qa_with_result, k=10000, match='string'):
global PROCESS_DB, PROCESS_TOK
(qa, result) = qa_with_result
matched_paras = {}
for para_id in result['para_id'][:k]:
p = PROCESS_DB.get_doc_text(para_id)
p = normalize(p)
if (match == 'string'):
(covered, ... |
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--data-root', type=str, required=True)
parser.add_argument('--annot-path', type=str, required=True)
parser.add_argument('--in-scale', type=float, default=None)
parser.add_argument('--no-mask', action='store_true', default=Fals... |
_flax
class VisionTextDualEncoderMixin():
def get_vision_text_model(self, config, text_config):
pass
def prepare_config_and_inputs(self):
pass
def get_pretrained_model_and_inputs(self):
pass
def assert_almost_equals(self, a: np.ndarray, b: np.ndarray, tol: float):
diff = ... |
def _fold_given_batch_norms(model, conv_bn_pairs: Iterable[Tuple[(torch.nn.Module, torch.nn.Module)]], bn_conv_pairs: Iterable[Tuple[(torch.nn.Module, torch.nn.Module)]]):
for (bn, conv) in bn_conv_pairs:
if isinstance(conv, QcQuantizeWrapper):
raise RuntimeError(f'Forward folding to scale is no... |
def get_result(auto_var):
file_name = get_file_name(auto_var)
file_format = auto_var.settings['file_format']
file_path = os.path.join(auto_var.settings['result_file_dir'], f'{file_name}.{get_ext(file_format)}')
if (not os.path.exists(file_path)):
return None
try:
if (file_format == '... |
def test_invalid_instantiation_event_payment_received_success():
kwargs = dict(token_network_registry_address=factories.UNIT_TOKEN_NETWORK_REGISTRY_ADDRESS, token_network_address=factories.UNIT_TOKEN_NETWORK_ADDRESS, identifier=factories.UNIT_TRANSFER_IDENTIFIER, initiator=factories.make_address())
with pytest.... |
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, in_channels, channels, stride=1):
super(BasicBlock, self).__init__()
layers = nn.ModuleList()
conv_layer = []
conv_layer.append(nn.Conv2d(in_channels, channels, kernel_size=3, stride=stride, padding=1, bias=False))
... |
def rmdir(path):
def on_rm_error(func, path, exc_info):
try:
os.chmod(path, stat.S_IWRITE)
except Exception:
pass
try:
if os.path.isdir(path):
return os.rmdir(path)
if os.path.isfile(path):
return os.unlink(path)... |
def merge_resource_options_provider(index: int, item: Tuple[(int, List)], num_hash_groups: int, hash_group_size_bytes: Dict[(int, int)], hash_group_num_rows: Dict[(int, int)], round_completion_info: Optional[RoundCompletionInfo]=None, compacted_delta_manifest: Optional[Manifest]=None, ray_custom_resources: Optional[Dic... |
class customData(Data.Dataset):
def __init__(self, root, transform=None, target_transform=None, loader=default_loader, rotate=0, pad=0):
(classes, class_to_idx) = find_classes(root)
IMG_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif']
imgs = make_dataset(root, class_to_... |
def possible_output_idxs_of_htlc_in_ctx(*, chan: 'Channel', pcp: bytes, subject: 'HTLCOwner', htlc_direction: 'Direction', ctx: Transaction, htlc: 'UpdateAddHtlc') -> Set[int]:
(amount_msat, cltv_expiry, payment_hash) = (htlc.amount_msat, htlc.cltv_expiry, htlc.payment_hash)
for_us = (subject == LOCAL)
(con... |
_rewriter([Scan])
def push_out_add_scan(fgraph, node):
if (not (isinstance(node.op, Scan) and (not node.op.info.as_while))):
return False
op = node.op
args = ScanArgs(node.inputs, node.outputs, op.inner_inputs, op.inner_outputs, op.info)
clients = {}
local_fgraph_topo = io_toposort(args.inne... |
class OverwriteWarning(WarningMessage):
RESPONSE_SAVE = 1
def __init__(self, parent, song):
title = _('Tag may not be accurate')
fn_format = util.bold(fsn2text(song('~basename')))
description = (_('%(file-name)s changed while the program was running. Saving without refreshing your librar... |
_optimizer('adamw')
class AdamW(ClassyOptimizer):
def __init__(self, lr: float=0.001, betas: Tuple[(float, float)]=(0.9, 0.999), eps: float=1e-08, weight_decay: float=0.01, amsgrad: bool=False) -> None:
super().__init__()
self._lr = lr
self._betas = betas
self._eps = eps
self... |
class ChangeShipTacticalMode(ContextMenuUnconditional):
def __init__(self):
self.mainFrame = gui.mainFrame.MainFrame.getInstance()
self.modeMap = {'Defense': _t('Defense'), 'Propulsion': _t('Propulsion'), 'Sharpshooter': _t('Sharpshooter')}
def display(self, callingWindow, srcContext):
i... |
class BaseFairseqModel(nn.Module):
def __init__(self):
super().__init__()
self._is_generation_fast = False
def add_args(parser):
pass
def build_model(cls, args, task):
raise NotImplementedError('Model must implement the build_model method')
def get_targets(self, sample, n... |
class ErrorCode(object):
BAD_REQUEST = 400
UNAUTHORIZED = 401
PAYMENT_REQUIRED = 402
FORBIDDEN = 403
NOT_FOUND = 404
CONFLICT = 409
GONE = 410
PRECONDITION_FAILED = 412
PAYLOAD_TOO_LARGE = 413
IM_A_TEAPOT = 418
MISDIRECTED_REQUEST = 421
UNPROCESSABLE_ENTITY = 422
LOCK... |
def zero_initializer(ref, use_locking=True, name='zero_initializer'):
loader.load_op_library(resource_loader.get_path_to_datafile('_variable_ops.so'))
if resource_variable_ops.is_resource_variable(ref):
return gen_variable_ops.zero_var_initializer(ref.handle, shape=ref.shape, dtype=ref.dtype, name=name)... |
def main(data_dir, client, bc, config):
benchmark(read_tables, data_dir, bc, dask_profile=config['dask_profile'])
query = f'''
SELECT
ss.ss_customer_sk AS cid,
CAST( count(CASE WHEN i.i_class_id=1 THEN 1 ELSE NULL END) AS DOUBLE ) AS id1,
CAST( count(CASE WHEN i.i_class_id=2 ... |
()
def splunk_logs_model_config():
conf = {'LOGS_MODEL': 'splunk', 'LOGS_MODEL_CONFIG': {'producer': 'splunk', 'splunk_config': {'host': FAKE_SPLUNK_HOST, 'port': FAKE_SPLUNK_PORT, 'bearer_token': FAKE_SPLUNK_TOKEN, 'url_scheme': ' 'verify_ssl': True, 'index_prefix': FAKE_INDEX_PREFIX, 'ssl_ca_path': 'fake/cert/pat... |
def all_gather_base_pooled(input: Tensor, group: Optional[dist.ProcessGroup]=None, codecs: Optional[QuantizedCommCodecs]=None) -> Awaitable[Tensor]:
if (group is None):
group = dist.distributed_c10d._get_default_group()
if (dist.get_world_size(group) <= 1):
return NoWait(input)
myreq = Reque... |
def weights_init_normal(m):
    """Apply DCGAN-style weight initialization to a single module.

    Modules whose class name contains ``'Conv2'`` get weights drawn from
    N(0, 0.02); ``BatchNorm2d`` modules get N(1, 0.02) weights and a zero
    bias. Any other module is left untouched. Intended to be used via
    ``model.apply(weights_init_normal)``.
    """
    name = type(m).__name__
    if 'Conv2' in name:
        torch.nn.init.normal_(m.weight.data, 0.0, 0.02)
    elif 'BatchNorm2d' in name:
        torch.nn.init.normal_(m.weight.data, 1.0, 0.02)
        torch.nn.init.constant_(m.bias.data, 0.0)
class FlatExtractor():
record_boundary_regexp = re.compile(b'(?:\\n|^)(# file: (.*?))\\n')
_record_to_object = None
def __init__(self, fileobj):
self.fileobj = fileobj
self.buf = b''
self.at_end = 0
self.blocksize = (32 * 1024)
def iterate(self):
for record in sel... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.