code stringlengths 281 23.7M |
|---|
def _prepare_yaml_file(filename, obj_type, include_all_score_objs):
    """Load an administration YAML file and convert it to EQL events.

    `filename` may be a path to a YAML file or an already-parsed dict.
    Returns a tuple `(yaml_eql_events, yaml_content)` where the events are
    `eql.Event` objects built from the (date-normalized) YAML content and
    `yaml_content` is the raw parsed YAML.
    """
    if isinstance(filename, dict):
        yaml_content = filename
    else:
        with open(filename, 'r') as yaml_file:
            yaml_content = init_yaml().load(yaml_file)

    yaml_content_eql = _traverse_modify_date(yaml_content)
    yaml_eql_events = []

    if obj_type == 'data_sources':
        # Re-load from the raw content so empty scores are preserved.
        yaml_content_eql, _, _, _, _ = load_data_sources(yaml_content, filter_empty_scores=False)
        events = _data_sources_to_events(yaml_content_eql)
        yaml_eql_events = [eql.Event(obj_type, 0, e) for e in events]
    elif obj_type in ('visibility', 'detection'):
        events = _techniques_to_events(yaml_content_eql, obj_type, include_all_score_objs)
        # Both visibility and detection objects live under the 'techniques' event type.
        yaml_eql_events = [eql.Event('techniques', 0, e) for e in events]

    return yaml_eql_events, yaml_content
_caches
def test_split_mechanism_mice_is_not_reusable(redis_cache):
    """A cached MICE must not be served once a cut severs its mechanism."""
    subsystem = examples.basic_subsystem()
    mechanism = (0, 1)
    # Populate the cache with the uncut MICE.
    mice = subsystem.find_mice(Direction.CAUSE, mechanism)
    assert subsystem._mice_cache.size() == 1
    assert mice.purview == (1, 2)
    # Build a cut subsystem sharing the same cache; the cut splits the
    # mechanism, so the cached entry must be invalidated for it.
    cut = models.Cut((0,), (1, 2))
    cut_subsystem = Subsystem(subsystem.network, subsystem.state,
                              subsystem.node_indices, cut=cut,
                              mice_cache=subsystem._mice_cache)
    cache_key = cut_subsystem._mice_cache.key(Direction.CAUSE, mechanism)
    assert cut_subsystem._mice_cache.get(cache_key) is None
def attack_Linf_PGD(input_v, ones, label_v, dis, Ld, steps, epsilon):
    """Craft an L-infinity PGD adversarial example against discriminator `dis`.

    Runs `steps` optimizer steps that ascend the loss `Ld` (by descending its
    negation), then after each step projects the perturbation back into the
    epsilon-ball around the input and clamps pixels to [-1, 1].
    Returns the adversarial Variable.
    """
    dis.eval()
    adv = Variable(input_v.data.clone(), requires_grad=True)
    opt = Linf_SGD([adv], lr=0.0078)
    for _ in range(steps):
        opt.zero_grad()
        dis.zero_grad()
        d_bin, d_multi = dis(adv)
        # Maximize the discriminator loss w.r.t. the adversarial input.
        loss = -Ld(d_bin, ones, d_multi, label_v, lam=0.5)
        loss.backward()
        opt.step()
        # Projection: clamp the perturbation, then keep pixels in range.
        delta = adv.data - input_v.data
        delta.clamp_(-epsilon, epsilon)
        adv.data.copy_((delta + input_v.data).clamp_(-1, 1))
    dis.train()
    dis.zero_grad()
    return adv
def check_solution_satisfiability(sol, list_of_subsets):
    """Check whether `sol` encodes an exact cover of the universe.

    Parameters
    ----------
    sol : sequence of 0/1 indicators, one per entry of `list_of_subsets`;
        `sol[i] == 1` means subset i is selected.
    list_of_subsets : list of iterables of hashable elements. The universe
        is the union of all subsets.

    Returns
    -------
    bool
        True iff the selected subsets cover every element of the universe
        and are pairwise disjoint.
    """
    # Universe = union of all subsets.
    universe = set()
    for subset in list_of_subsets:
        universe.update(subset)

    # Selected subsets as sets (duplicates inside a single subset are
    # collapsed, matching the original set-based comparison).
    selected = [set(subset)
                for indicator, subset in zip(sol, list_of_subsets)
                if indicator == 1]

    covered = set()
    total = 0
    for subset in selected:
        covered |= subset
        total += len(subset)

    # Must cover the whole universe ...
    if covered != universe:
        return False
    # ... and be pairwise disjoint: any overlap makes the summed sizes
    # exceed the union size. This is O(total elements) instead of the
    # original O(k^2) pairwise-intersection scan with repeated set()
    # conversions.
    return total == len(covered)
def get_grasp_poses(env):
    """Yield candidate grasp poses on the target object's surface.

    Each yielded value is a length-7 array: xyz position in the world frame
    followed by a quaternion aligning +z with the surface normal. Candidates
    are yielded in random order. Reads segmentation, depth, intrinsics and
    extrinsics from ``env.obs``.
    """
    segm = env.obs['segm']
    depth = env.obs['depth']
    K = env.obs['K']
    # Pixels belonging to the target instance that have valid depth.
    mask = ((segm == env.obs['target_instance_id']) & (~ np.isnan(depth)))
    pcd_in_camera = reorientbot.geometry.pointcloud_from_depth(depth, fx=K[(0, 0)], fy=K[(1, 1)], cx=K[(0, 2)], cy=K[(1, 2)])
    normals_in_camera = reorientbot.geometry.normals_from_pointcloud(pcd_in_camera)
    normals_on_obj = normals_in_camera.copy()
    normals_on_obj[(~ mask)] = 0
    # High Laplacian magnitude of the normal map marks surface
    # discontinuities; dilating gives the edges a safety margin.
    laplacian = cv2.Laplacian(normals_on_obj, cv2.CV_64FC3)
    magnitude = np.linalg.norm(laplacian, axis=2)
    edge_mask = (magnitude > 0.5)
    if (_utils.get_class_id(env.fg_object_id) == 2):
        # Wider margin for class id 2 -- presumably a shape that needs more
        # edge clearance; TODO(review): confirm which object class this is.
        edge_mask = (cv2.dilate((np.uint8(edge_mask) * 255), kernel=np.ones((15, 15))) == 255)
    else:
        edge_mask = (cv2.dilate((np.uint8(edge_mask) * 255), kernel=np.ones((5, 5))) == 255)
    mask = (mask & (~ edge_mask))
    pcd_in_camera = pcd_in_camera[mask]
    normals_in_camera = normals_in_camera[mask]
    if (_utils.get_class_id(env.fg_object_id) == 11):
        # Keep points near the centroid. The median filter is applied twice,
        # shrinking the candidate set to roughly a quarter of the points.
        # NOTE(review): the repetition looks intentional but is worth
        # confirming against the original intent.
        dist_from_centroid = np.linalg.norm((pcd_in_camera - pcd_in_camera.mean(axis=0)), axis=1)
        keep = (dist_from_centroid < np.median(dist_from_centroid))
        dist_from_centroid = dist_from_centroid[keep]
        pcd_in_camera = pcd_in_camera[keep]
        normals_in_camera = normals_in_camera[keep]
        keep = (dist_from_centroid < np.median(dist_from_centroid))
        dist_from_centroid = dist_from_centroid[keep]
        pcd_in_camera = pcd_in_camera[keep]
        normals_in_camera = normals_in_camera[keep]
    # Transform points and normals into the world frame; normals are moved
    # as point offsets (transform endpoint, subtract transformed origin).
    camera_to_world = np.hsplit(env.obs['camera_to_world'], [3])
    pcd_in_world = reorientbot.geometry.transform_points(pcd_in_camera, reorientbot.geometry.transformation_matrix(*camera_to_world))
    normals_in_world = (reorientbot.geometry.transform_points((pcd_in_camera + normals_in_camera), reorientbot.geometry.transformation_matrix(*camera_to_world)) - pcd_in_world)
    quaternion_in_world = reorientbot.geometry.quaternion_from_vec2vec([0, 0, 1], normals_in_world)
    # Random order over candidates.
    p = np.random.permutation(pcd_in_world.shape[0])
    # NOTE(review): `obstacles` is computed but never used below -- dead
    # code, or a leftover from a removed collision check? Verify.
    obstacles = (env.bg_objects + env.object_ids)
    obstacles.remove(env.fg_object_id)
    for pose in zip(pcd_in_world[p], quaternion_in_world[p]):
        (yield np.hstack(pose))
@pytest.mark.parametrize('iam_model,model_params', [
    ('ashrae', {'b': 0.05}),
    ('physical', {'K': 4, 'L': 0.002, 'n': 1.526}),
    ('martin_ruiz', {'a_r': 0.16}),
])
def test_PVSystem_get_iam(mocker, iam_model, model_params):
    """PVSystem.get_iam must dispatch to the matching `_iam` model function,
    forwarding the module parameters, and return a physical (< 1) IAM.

    Fix: the parametrize line had lost its `@pytest.mark` prefix, leaving a
    top-level `.parametrize(...)` syntax error.
    """
    m = mocker.spy(_iam, iam_model)
    system = pvsystem.PVSystem(module_parameters=model_params)
    thetas = 1
    iam = system.get_iam(thetas, iam_model=iam_model)
    m.assert_called_with(thetas, **model_params)
    assert iam < 1.0
def test_validate_workflow(acetone):
    """A cheap rdkit/uff workflow with the charges stage skipped must validate."""
    protocol = get_workflow_protocol(workflow_protocol='0')
    # Swap in a basis-free force-field setup.
    protocol.qc_options.program = 'rdkit'
    protocol.qc_options.method = 'uff'
    protocol.qc_options.basis = None
    running_order = WorkFlow.get_running_order(skip_stages=['charges'])
    protocol.validate_workflow(workflow=running_order, molecule=acetone)
def cheng2020_anchor(quality, metric='mse', pretrained=False, progress=True, **kwargs):
    """Build the cheng2020-anchor model for the requested quality level.

    Parameters
    ----------
    quality : int in [1, 6].
    metric : only 'mse' is supported.
    pretrained, progress : forwarded to the model loader.

    Raises
    ------
    ValueError : for an unsupported metric or an out-of-range quality.
    """
    if metric != 'mse':
        raise ValueError(f'Invalid metric "{metric}"')
    if not 1 <= quality <= 6:
        raise ValueError(f'Invalid quality "{quality}", should be between (1, 6)')
    return _load_model('cheng2020-anchor', metric, quality, pretrained, progress, **kwargs)
_dataframe_method
def inflate_currency(df: pd.DataFrame, column_name: str=None, country: str=None, currency_year: int=None, to_year: int=None, make_new_column: bool=False) -> pd.DataFrame:
    """Inflate a currency column of `df` from `currency_year` to `to_year`.

    When `make_new_column` is True the result is written to a new column
    named '<column_name>_<to_year>'; otherwise the column is overwritten
    in place. Returns the (mutated) dataframe.
    """
    inflator = _inflate_currency(country, currency_year, to_year)
    target = f'{column_name}_{to_year}' if make_new_column else column_name
    df[target] = df[column_name] * inflator
    return df
def test_run_model_solar_position_weather(pvwatts_dc_pvwatts_ac_system, location, weather, mocker):
    """run_model must forward weather pressure and air temperature to
    Location.get_solarposition."""
    mc = ModelChain(pvwatts_dc_pvwatts_ac_system, location,
                    aoi_model='no_loss', spectral_model='no_loss')
    weather['pressure'] = 90000
    weather['temp_air'] = 25
    spy = mocker.spy(location, 'get_solarposition')
    mc.run_model(weather)
    kwargs = spy.call_args[1]
    assert_series_equal(kwargs['temperature'], weather['temp_air'])
    assert_series_equal(kwargs['pressure'], weather['pressure'])
def typo_fix(slot_values, ontology, version='2.1'):
    """Normalize annotation typos in MultiWOZ slot values.

    Applies possessive-'s normalization, substring typo fixes, whole-value
    replacements (version-dependent for 2.1 vs 2.4), zero-pads short times
    ('9:30' -> '09:30'), and canonicalizes named-entity slots against the
    ontology via `check_prefix_suffix`. Empty values are dropped.

    Parameters
    ----------
    slot_values : dict mapping slot name -> raw value string.
    ontology : dict mapping slot name -> list of canonical values.
    version : '2.1' or '2.4'; selects direction of a few replacements.

    Returns
    -------
    dict : slot -> normalized value (slots whose value becomes falsy are omitted).
    """
    named_entity_slots = {'hotel-name', 'train-destination', 'train-departure',
                          'attraction-type', 'attraction-name', 'restaurant-name',
                          'taxi-departure', 'taxi-destination', 'restaurant-food'}
    # PERF FIX: these tables were rebuilt on every loop iteration in the
    # original; they are constants, so build them once up front.
    general_typos = {'fen ditton': 'fenditton', 'guesthouse': 'guest house', 'steveage': 'stevenage', 'stantsted': 'stansted', 'storthford': 'stortford', 'shortford': 'stortford', 'weish': 'welsh', 'bringham': 'birmingham', 'liverpoool': 'liverpool', 'petersborough': 'peterborough', 'el shaddai': 'el shaddia', 'wendesday': 'wednesday', 'brazliian': 'brazilian', 'graffton': 'grafton'}
    value_replacement = {'center': 'centre', 'caffe uno': 'cafe uno', 'caffee uno': 'cafe uno', 'christs college': 'christ college', 'churchill college': 'churchills college', 'sat': 'saturday', 'saint johns chop shop house': 'saint johns chop house', 'good luck chinese food takeaway': 'good luck', 'asian': 'asian oriental', 'gallery at 12': 'gallery at 12 a high street'}
    if (version == '2.1'):
        value_replacement['portuguese'] = 'portugese'
        value_replacement['museum of archaeology and anthropology'] = 'museum of archaelogy and anthropology'
    if (version == '2.4'):
        value_replacement['portugese'] = 'portuguese'
        value_replacement['museum of archaelogy and anthropology'] = 'museum of archaeology and anthropology'

    fixed = {}
    for (slot, value) in slot_values.items():
        # Normalize spaced possessives: "king s" -> "kings".
        value = value.replace(' s ', 's ')
        if value.endswith(' s'):
            value = (value[:(- 2)] + 's')
        for (k, v) in general_typos.items():
            value = value.replace(k, v)
        for (k, v) in value_replacement.items():
            if (value == k):
                value = v
        # Zero-pad short clock times, e.g. '9:30' -> '09:30'.
        if ((':' in value) and (len(value) < 5)):
            value = ('0' + value)
        if (slot in named_entity_slots):
            value = check_prefix_suffix(value, ontology[slot])
        if value:
            fixed[slot] = value
    return fixed
def format_xc_code(description):
    """Canonicalize an exchange-correlation functional description string.

    Strips spaces/newlines, upper-cases, and rewrites any range-separated
    hybrid spec ``RSH(omega,alpha,beta)`` to the internal ordering
    ``RSH(alpha;beta;omega)``. Segments without a comma-separated key are
    left unchanged.

    Fix: split only at the FIRST ')' of each RSH fragment. The original
    ``frag.split(')')`` produced more than two parts whenever the trailing
    expression contained another ')', raising ValueError on unpack.
    """
    description = description.replace(' ', '').replace('\n', '').upper()
    if ('RSH' not in description):
        return description
    frags = description.split('RSH')
    out = [frags[0]]
    for frag in frags[1:]:
        (rsh_key, rest) = frag.split(')', 1)
        if (',' in rsh_key):
            # rsh_key looks like '(omega,alpha,beta' -- reorder to a;b;w.
            (omega, alpha, beta) = rsh_key[1:].split(',')
            frag = ((('(' + ';'.join((alpha, beta, omega))) + ')') + rest)
        out.append(frag)
    return 'RSH'.join(out)
class TMid3cp(_TTools):
    """End-to-end tests for the ``mid3cp`` CLI tool (copies ID3 tags
    between MP3 files). Each test drives the tool via ``self.call`` /
    ``self.call2`` (provided by _TTools) against two temp files: a copy of
    a tagged fixture MP3 and an initially-empty blank file.
    """
    TOOL_NAME = u'mid3cp'
    def setUp(self):
        """Create a tagged source file (copy of the fixture) and an empty
        destination file; both are removed in tearDown."""
        super(TMid3cp, self).setUp()
        original = os.path.join(DATA_DIR, 'silence-44-s.mp3')
        (fd, self.filename) = mkstemp(suffix='oau.mp3')
        os.close(fd)
        shutil.copy(original, self.filename)
        (fd, self.blank_file) = mkstemp(suffix='.mp3')
        os.close(fd)
    def tearDown(self):
        super(TMid3cp, self).tearDown()
        os.unlink(self.filename)
        os.unlink(self.blank_file)
    def test_merge(self):
        """--merge combines source frames into an existing destination tag,
        keeping the destination's v2.4 version."""
        id3 = ID3(self.filename)
        id3.delete()
        id3.add(mutagen.id3.TALB(text=[u'foo']))
        id3.save(v2_version=3)
        target = ID3()
        target.add(mutagen.id3.TPE1(text=[u'bar', u'quux']))
        target.save(self.blank_file, v2_version=4)
        (res, out, err) = self.call2(self.filename, self.blank_file, '--merge')
        assert (not any([res, out, err]))
        result = ID3(self.blank_file)
        assert (result.version == (2, 4, 0))
        assert (result.getall('TALB')[0].text == [u'foo'])
        assert (result.getall('TPE1')[0].text == [u'bar', u'quux'])
    def test_merge_dst_no_tag(self):
        """--merge into a destination without an ID3 tag falls back to the
        source's tag version (v2.3 here)."""
        id3 = ID3(self.filename)
        id3.delete()
        id3.save(v2_version=3)
        with open(self.blank_file, 'wb') as h:
            h.write(b'SOMEDATA')
        (res, out, err) = self.call2(self.filename, self.blank_file, '--merge')
        assert (not any([res, out, err]))
        result = ID3(self.blank_file)
        assert (result.version == (2, 3, 0))
    def test_noop(self):
        """No arguments: non-zero exit and usage text on stderr."""
        (res, out, err) = self.call2()
        self.assertNotEqual(res, 0)
        self.failUnless(('Usage:' in err))
    def test_src_equal_dst(self):
        """Copying a file onto itself succeeds."""
        res = self.call2(self.filename, self.filename)[0]
        self.assertEqual(res, 0)
    def test_copy(self):
        """A plain copy reproduces all frames at version 2.3."""
        res = self.call(self.filename, self.blank_file)[0]
        self.failIf(res)
        original_id3 = ID3(self.filename)
        copied_id3 = ID3(self.blank_file)
        self.assertEqual(copied_id3.version, (2, 3, 0))
        # v2.3 joins multi-value text frames with '/'; split it back so the
        # frame compares equal to the original.
        frame = copied_id3['TPE1']
        frame.text = frame.text[0].split('/')
        self.failUnlessEqual(original_id3, copied_id3)
        for key in original_id3:
            self.failUnless((key in copied_id3))
            self.failUnlessEqual(copied_id3[key], original_id3[key])
    def test_include_id3v1(self):
        """--write-v1 appends a parsable 128-byte ID3v1 tag at EOF."""
        self.call('--write-v1', self.filename, self.blank_file)
        with open(self.blank_file, 'rb') as fileobj:
            fileobj.seek((- 128), 2)
            frames = ParseID3v1(fileobj.read(128))
        self.failUnless(frames)
    def test_exclude_tag_unicode(self):
        """An empty -x pattern must not crash."""
        self.call('-x', '', self.filename, self.blank_file)
    def test_exclude_single_tag(self):
        """-x TLEN drops that frame from the copy only."""
        self.call('-x', 'TLEN', self.filename, self.blank_file)
        original_id3 = ID3(self.filename)
        copied_id3 = ID3(self.blank_file)
        self.failUnless(('TLEN' in original_id3))
        self.failIf(('TLEN' in copied_id3))
    def test_exclude_multiple_tag(self):
        """Multiple -x options each exclude their frame."""
        self.call('-x', 'TLEN', '-x', 'TCON', '-x', 'TALB', self.filename, self.blank_file)
        original_id3 = ID3(self.filename)
        copied_id3 = ID3(self.blank_file)
        self.failUnless(('TLEN' in original_id3))
        self.failUnless(('TCON' in original_id3))
        self.failUnless(('TALB' in original_id3))
        self.failIf(('TLEN' in copied_id3))
        self.failIf(('TCON' in copied_id3))
        self.failIf(('TALB' in copied_id3))
    def test_no_src_header(self):
        """A tagless source reports 'No ID3 header found'."""
        (fd, blank_file2) = mkstemp(suffix='.mp3')
        os.close(fd)
        try:
            err = self.call2(self.blank_file, blank_file2)[2]
            self.failUnless(('No ID3 header found' in err))
        finally:
            os.unlink(blank_file2)
    def test_verbose(self):
        """--verbose reports frame listing and save confirmation on stderr."""
        err = self.call2(self.filename, '--verbose', self.blank_file)[2]
        self.failUnless(('mp3 contains:' in err))
        self.failUnless(('Successfully saved' in err))
    def test_quiet(self):
        """Default (non-verbose) run writes nothing to stdout."""
        out = self.call(self.filename, self.blank_file)[1]
        self.failIf(out)
    def test_exit_status(self):
        """Exit status: failure for missing/invalid args or tagless source,
        success for valid source/destination pairs."""
        (status, out, err) = self.call2(self.filename)
        self.assertTrue(status)
        (status, out, err) = self.call2(self.filename, self.filename)
        self.assertFalse(status)
        (status, out, err) = self.call2(self.blank_file, self.filename)
        self.assertTrue(status)
        (status, out, err) = self.call2('', self.filename)
        self.assertTrue(status)
        (status, out, err) = self.call2(self.filename, self.blank_file)
        self.assertFalse(status)
    def test_v23_v24(self):
        """The copy preserves the source tag version: v2.3 stays v2.3, and
        after a default save() upgrades the source to v2.4, the copy is v2.4."""
        self.assertEqual(ID3(self.filename).version, (2, 3, 0))
        self.call(self.filename, self.blank_file)
        self.assertEqual(ID3(self.blank_file).version, (2, 3, 0))
        ID3(self.filename).save()
        self.call(self.filename, self.blank_file)
        self.assertEqual(ID3(self.blank_file).version, (2, 4, 0))
def _check_has_no_phase_dynamics_shared_during_the_phase(problem_type, **kwargs):
    """Reject SHARED_DURING_THE_PHASE dynamics for non-collocation stochastic problems.

    Collocation problems are exempt; for any other problem type, passing
    phase_dynamics=PhaseDynamics.SHARED_DURING_THE_PHASE raises ValueError.
    """
    if isinstance(problem_type, SocpType.COLLOCATION):
        return
    if kwargs.get('phase_dynamics') == PhaseDynamics.SHARED_DURING_THE_PHASE:
        raise ValueError('The dynamics cannot be SHARED_DURING_THE_PHASE with a trapezoidal stochastic ocp.phase_dynamics is set to PhaseDynamics.ONE_PER_NODE by default.')
def test_geth_discover_next_available_nonce_concurrent_transactions(deploy_client: JSONRPCClient, skip_if_parity: bool) -> None:
    """100 concurrent transfers must advance the discovered nonce by exactly 100."""

    def send_transaction(to: Address) -> None:
        transfer = EthTransfer(
            to_address=to,
            value=0,
            gas_price=gas_price_for_fast_transaction(deploy_client.web3),
        )
        deploy_client.transact(transfer)

    greenlets = {gevent.spawn(send_transaction, to=make_address()) for _ in range(100)}
    gevent.joinall(set(greenlets), raise_error=True)

    nonce = geth_discover_next_available_nonce(web3=deploy_client.web3, address=deploy_client.address)
    msg = 'The nonce must increase exactly once per transaciton.'
    assert nonce == 100, msg
    assert nonce == deploy_client._available_nonce, msg
def cross_layer_equalization_depthwise_layers():
    """Run cross-layer equalization (BN fold, ReLU6 swap, scaling, high-bias
    fold) on MobileNetV2's stem conv and first inverted-residual block, on CPU
    in eval mode.
    """
    model = MobileNetV2().to(torch.device('cpu'))
    model.eval()
    # (conv, bn) pairs to fold: stem conv + the first block's depthwise and
    # pointwise convs with their following batch-norm layers.
    layer_list = [(model.features[0][0], model.features[0][1]), (model.features[1].conv[0], model.features[1].conv[1]), (model.features[1].conv[3], model.features[1].conv[4])]
    bn_dict = {}
    for conv_bn in layer_list:
        bn_dict[conv_bn[0]] = conv_bn[1]
    batch_norm_fold.fold_given_batch_norms(model, layer_list)
    # Cross-layer scaling assumes scale-equivariant activations, so replace
    # ReLU6 with plain ReLU.
    utils.replace_modules_of_type1_with_type2(model, torch.nn.ReLU6, torch.nn.ReLU)
    # One CLS set of three consecutive layers: stem conv -> depthwise -> pointwise.
    consecutive_layer_list = [(model.features[0][0], model.features[1].conv[0], model.features[1].conv[3])]
    scaling_factor_list = cross_layer_equalization.CrossLayerScaling.scale_cls_sets(consecutive_layer_list)
    ClsSetInfo = cross_layer_equalization.ClsSetInfo
    ClsPairInfo = cross_layer_equalization.ClsSetInfo.ClsSetLayerPairInfo
    # Pair infos carry the per-pair scaling factors; True flags ReLU between layers.
    cls_set_info_list = [ClsSetInfo(ClsPairInfo(model.features[0][0], model.features[1].conv[0], scaling_factor_list[0][0], True)), ClsSetInfo(ClsPairInfo(model.features[1].conv[0], model.features[1].conv[3], scaling_factor_list[0][1], True))]
    cross_layer_equalization.HighBiasFold.bias_fold(cls_set_info_list, bn_dict)
def run_experiments(config, files, aws):
    """Run one xgboost-ray fault-tolerance experiment per the given config.

    `config` keys used: condition ('calibrate' | 'fewer_workers' |
    'non_elastic' | 'elastic_no_comeback' | 'elastic_comeback'),
    num_boost_round, num_workers, affected_workers, regression, use_gpu,
    seed, xgboost_params, ray_params. `files` is the full list of data
    shards (split 80/20 into train/eval). Returns the training results dict.
    """
    os.environ['RXGB_ALLOW_ELASTIC_TUNE'] = '1'
    condition = config['condition']
    num_boost_round = config['num_boost_round']
    num_workers = config['num_workers']
    num_affected_workers = config['affected_workers']
    regression = config['regression']
    use_gpu = config['use_gpu']
    seed = config['seed']
    metric = ('eval-error' if (not regression) else 'eval-rmse')
    xgboost_params: Dict = config['xgboost_params']
    ray_params: RayParams = config['ray_params']
    # Deterministic shuffle of the shard list before the 80/20 split.
    np.random.seed(seed)
    np.random.shuffle(files)
    last_train_index = int((0.8 * len(files)))
    train_files = list(files[:last_train_index])
    eval_files = list(files[last_train_index:])
    if num_affected_workers:
        # Pick the worker ranks that will "fail"; rank 0 is never affected.
        affected_workers = np.random.choice(np.arange(1, num_workers), size=num_affected_workers, replace=False).tolist()
    else:
        affected_workers = None
    # Re-seed so downstream randomness is identical across conditions.
    np.random.seed(seed)
    sharding_mode = RayShardingMode.INTERLEAVED
    if (condition == 'calibrate'):
        # Baseline: full data, full worker count, with early stopping.
        final_files = train_files
        final_workers = num_workers
        ray_params.num_actors = final_workers
        (bst, results) = train_ray(train_files=final_files, eval_files=eval_files, num_workers=final_workers, num_boost_round=num_boost_round, regression=regression, use_gpu=use_gpu, ray_params=ray_params, xgboost_params=xgboost_params, ft_manager=None, aws=aws, early_stopping_rounds=10)
        return results
    if (condition == 'fewer_workers'):
        # Simulate permanently missing workers by dropping their data shards
        # up front and training with fewer actors.
        remove_shards = []
        if (affected_workers is not None):
            for rank in affected_workers:
                remove_shards += _get_sharding_indices(sharding=sharding_mode, rank=rank, num_actors=num_workers, n=len(train_files))
            mask = np.ones(len(train_files), dtype=bool)
            mask[remove_shards] = False
            final_files = np.array(train_files)[mask].tolist()
            final_workers = (num_workers - len(affected_workers))
        else:
            final_files = train_files
            final_workers = num_workers
        ray_params.num_actors = final_workers
        (bst, results) = train_ray(train_files=final_files, eval_files=eval_files, num_workers=final_workers, num_boost_round=num_boost_round, regression=regression, use_gpu=use_gpu, ray_params=ray_params, xgboost_params=xgboost_params, ft_manager=None, aws=aws)
        return results
    # The remaining conditions require at least one affected worker.
    if (num_affected_workers == 0):
        return {metric: float('inf')}
    # Fault-tolerance conditions: workers die halfway through training
    # (die_round_factor=0.5); the comeback factor controls if/when they return
    # (> 1.0 means never, 0.75 means at 75% of the rounds).
    if (condition == 'non_elastic'):
        ray_params.elastic_training = False
        ray_params.max_failed_actors = len(affected_workers)
        ray_params.max_actor_restarts = 1
        ft_manager = ft_setup(workers=affected_workers, num_rounds=num_boost_round, die_round_factor=0.5, comeback_round_factor=0.0)
    elif (condition == 'elastic_no_comeback'):
        ray_params.elastic_training = True
        ray_params.max_failed_actors = len(affected_workers)
        ray_params.max_actor_restarts = 1
        ft_manager = ft_setup(workers=affected_workers, num_rounds=num_boost_round, die_round_factor=0.5, comeback_round_factor=1.1)
    elif (condition == 'elastic_comeback'):
        ray_params.elastic_training = True
        ray_params.max_failed_actors = len(affected_workers)
        ray_params.max_actor_restarts = 1
        ft_manager = ft_setup(workers=affected_workers, num_rounds=num_boost_round, die_round_factor=0.5, comeback_round_factor=0.75)
    else:
        raise ValueError('Unknown condition:', condition)
    (bst, results) = train_ray(train_files=train_files, eval_files=eval_files, num_workers=num_workers, num_boost_round=num_boost_round, regression=regression, use_gpu=use_gpu, ray_params=ray_params, xgboost_params=xgboost_params, ft_manager=ft_manager, aws=aws)
    return results
def main(argv):
    """Entry point: dispatch to training or evaluation based on FLAGS.mode."""
    # Keep TensorFlow off the GPU and stop JAX from pre-allocating GPU memory.
    tf.config.experimental.set_visible_devices([], 'GPU')
    os.environ['XLA_PYTHON_CLIENT_PREALLOCATE'] = 'false'
    if FLAGS.mode == 'train':
        # Mirror the root logger into a stdout.txt inside the work directory.
        tf.io.gfile.makedirs(FLAGS.workdir)
        gfile_stream = tf.io.gfile.GFile(os.path.join(FLAGS.workdir, 'stdout.txt'), 'w')
        handler = logging.StreamHandler(gfile_stream)
        handler.setFormatter(logging.Formatter('%(levelname)s - %(filename)s - %(asctime)s - %(message)s'))
        root_logger = logging.getLogger()
        root_logger.addHandler(handler)
        root_logger.setLevel('INFO')
        run_lib.train(FLAGS.config, FLAGS.workdir)
    elif FLAGS.mode == 'eval':
        run_lib.evaluate(FLAGS.config, FLAGS.workdir, FLAGS.eval_folder)
    else:
        raise ValueError(f'Mode {FLAGS.mode} not recognized.')
def test_rename_keys():
    """RenameKeys must move values to the new keys and remove the old ones."""
    results = {
        'joints_3d': np.ones([17, 3]),
        'joints_3d_visible': np.ones([17, 3]),
    }
    cfg = dict(type='RenameKeys',
               key_pairs=[('joints_3d', 'target'),
                          ('joints_3d_visible', 'target_weight')])
    transform = build_from_cfg(cfg, PIPELINES)
    results = transform(results)
    for old_key in ('joints_3d', 'joints_3d_visible'):
        assert old_key not in results
    for new_key in ('target', 'target_weight'):
        assert new_key in results
        assert results[new_key].shape == (17, 3)
class Path(metaclass=AsyncAutoWrapperType):
    """An async wrapper around :class:`pathlib.Path`.

    The metaclass auto-generates async versions of the wrapped methods by
    running them in a worker thread; the stubs below (bodies of ``...``)
    exist for static typing only. Pure-path operations are forwarded
    synchronously via ``__getattr__``.

    NOTE(review): this source looks like it was round-tripped through a
    tool that stripped decorators -- the stacked ``async def open``
    signatures were almost certainly ``@overload``-decorated, the bare
    ``(pathlib.Path.open)`` expression looks like a stripped ``@wraps(...)``
    decorator, and the attribute stubs under ``TYPE_CHECKING`` (``parts``,
    ``drive``, ...) were presumably ``@property``. Confirm against the
    upstream (trio) source before relying on this file.
    """
    _forward: ClassVar[list[str]]
    # Class configuration consumed by the metaclass: which concrete type is
    # wrapped, which pure-path type is forwarded, which magic methods are
    # delegated, and which methods return iterables of paths.
    _wraps: ClassVar[type] = pathlib.Path
    _forwards: ClassVar[type] = pathlib.PurePath
    _forward_magic: ClassVar[list[str]] = ['__str__', '__bytes__', '__truediv__', '__rtruediv__', '__eq__', '__lt__', '__le__', '__gt__', '__ge__', '__hash__']
    _wrap_iter: ClassVar[list[str]] = ['glob', 'rglob', 'iterdir']
    def __init__(self, *args: StrPath) -> None:
        """Wrap a concrete ``pathlib.Path`` built from *args*."""
        self._wrapped = pathlib.Path(*args)
    if (not TYPE_CHECKING):
        def __getattr__(self, name):
            # Forward pure-path attributes to the wrapped path, re-wrapping
            # any returned paths as async Paths.
            if (name in self._forward):
                value = getattr(self._wrapped, name)
                return rewrap_path(value)
            raise AttributeError(name)
    def __dir__(self) -> list[str]:
        return [*super().__dir__(), *self._forward]
    def __repr__(self) -> str:
        return f'trio.Path({str(self)!r})'
    def __fspath__(self) -> str:
        return os.fspath(self._wrapped)
    # Typed open() signatures (text / binary x reading / writing / updating).
    # NOTE(review): these were presumably @overload-decorated originally.
    async def open(self, mode: OpenTextMode='r', buffering: int=(- 1), encoding: (str | None)=None, errors: (str | None)=None, newline: (str | None)=None) -> _AsyncIOWrapper[TextIOWrapper]:
        ...
    async def open(self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None=None, errors: None=None, newline: None=None) -> _AsyncIOWrapper[FileIO]:
        ...
    async def open(self, mode: OpenBinaryModeUpdating, buffering: Literal[((- 1), 1)]=(- 1), encoding: None=None, errors: None=None, newline: None=None) -> _AsyncIOWrapper[BufferedRandom]:
        ...
    async def open(self, mode: OpenBinaryModeWriting, buffering: Literal[((- 1), 1)]=(- 1), encoding: None=None, errors: None=None, newline: None=None) -> _AsyncIOWrapper[BufferedWriter]:
        ...
    async def open(self, mode: OpenBinaryModeReading, buffering: Literal[((- 1), 1)]=(- 1), encoding: None=None, errors: None=None, newline: None=None) -> _AsyncIOWrapper[BufferedReader]:
        ...
    async def open(self, mode: OpenBinaryMode, buffering: int=(- 1), encoding: None=None, errors: None=None, newline: None=None) -> _AsyncIOWrapper[BinaryIO]:
        ...
    async def open(self, mode: str, buffering: int=(- 1), encoding: (str | None)=None, errors: (str | None)=None, newline: (str | None)=None) -> _AsyncIOWrapper[IO[Any]]:
        ...
    # NOTE(review): looks like a stripped decorator, e.g. @wraps(pathlib.Path.open).
    (pathlib.Path.open)
    async def open(self, *args: Any, **kwargs: Any) -> _AsyncIOWrapper[IO[Any]]:
        # Actual implementation: run the blocking open() in a worker thread
        # and wrap the resulting file object for async use.
        func = partial(self._wrapped.open, *args, **kwargs)
        value = (await trio.to_thread.run_sync(func))
        return trio.wrap_file(value)
    if TYPE_CHECKING:
        # Static-typing stubs only; real behavior comes from __getattr__ /
        # the metaclass. NOTE(review): the attribute accessors below were
        # presumably @property originally.
        def __bytes__(self) -> bytes:
            ...
        def __truediv__(self, other: StrPath) -> Path:
            ...
        def __rtruediv__(self, other: StrPath) -> Path:
            ...
        def __lt__(self, other: (Path | pathlib.PurePath)) -> bool:
            ...
        def __le__(self, other: (Path | pathlib.PurePath)) -> bool:
            ...
        def __gt__(self, other: (Path | pathlib.PurePath)) -> bool:
            ...
        def __ge__(self, other: (Path | pathlib.PurePath)) -> bool:
            ...
        def parts(self) -> tuple[(str, ...)]:
            ...
        def drive(self) -> str:
            ...
        def root(self) -> str:
            ...
        def anchor(self) -> str:
            ...
        def name(self) -> str:
            ...
        def suffix(self) -> str:
            ...
        def suffixes(self) -> list[str]:
            ...
        def stem(self) -> str:
            ...
        def parents(self) -> Sequence[pathlib.Path]:
            ...
        def parent(self) -> Path:
            ...
        def as_posix(self) -> str:
            ...
        def as_uri(self) -> str:
            ...
        def is_absolute(self) -> bool:
            ...
        def is_reserved(self) -> bool:
            ...
        def match(self, path_pattern: str) -> bool:
            ...
        def relative_to(self, *other: StrPath) -> Path:
            ...
        def with_name(self, name: str) -> Path:
            ...
        def with_suffix(self, suffix: str) -> Path:
            ...
        def joinpath(self, *other: StrPath) -> Path:
            ...
        if (sys.version_info >= (3, 9)):
            def is_relative_to(self, *other: StrPath) -> bool:
                ...
            def with_stem(self, stem: str) -> Path:
                ...
        # The async stubs below mirror the blocking pathlib API, gated on the
        # Python version / platform that introduced each method.
        async def cwd(self) -> Path:
            ...
        if (sys.version_info >= (3, 10)):
            async def stat(self, *, follow_symlinks: bool=True) -> os.stat_result:
                ...
            async def chmod(self, mode: int, *, follow_symlinks: bool=True) -> None:
                ...
        else:
            async def stat(self) -> os.stat_result:
                ...
            async def chmod(self, mode: int) -> None:
                ...
        async def exists(self) -> bool:
            ...
        async def glob(self, pattern: str) -> Iterable[Path]:
            ...
        async def is_dir(self) -> bool:
            ...
        async def is_file(self) -> bool:
            ...
        async def is_symlink(self) -> bool:
            ...
        async def is_socket(self) -> bool:
            ...
        async def is_fifo(self) -> bool:
            ...
        async def is_block_device(self) -> bool:
            ...
        async def is_char_device(self) -> bool:
            ...
        async def iterdir(self) -> Iterable[Path]:
            ...
        async def lchmod(self, mode: int) -> None:
            ...
        async def lstat(self) -> os.stat_result:
            ...
        async def mkdir(self, mode: int=511, parents: bool=False, exist_ok: bool=False) -> None:
            ...
        if (sys.platform != 'win32'):
            async def owner(self) -> str:
                ...
            async def group(self) -> str:
                ...
            async def is_mount(self) -> bool:
                ...
        if (sys.version_info >= (3, 9)):
            async def readlink(self) -> Path:
                ...
        async def rename(self, target: StrPath) -> Path:
            ...
        async def replace(self, target: StrPath) -> Path:
            ...
        async def resolve(self, strict: bool=False) -> Path:
            ...
        async def rglob(self, pattern: str) -> Iterable[Path]:
            ...
        async def rmdir(self) -> None:
            ...
        async def symlink_to(self, target: StrPath, target_is_directory: bool=False) -> None:
            ...
        if (sys.version_info >= (3, 10)):
            async def hardlink_to(self, target: (str | pathlib.Path)) -> None:
                ...
        async def touch(self, mode: int=438, exist_ok: bool=True) -> None:
            ...
        async def unlink(self, missing_ok: bool=False) -> None:
            ...
        async def home(self) -> Path:
            ...
        async def absolute(self) -> Path:
            ...
        async def expanduser(self) -> Path:
            ...
        async def read_bytes(self) -> bytes:
            ...
        async def read_text(self, encoding: (str | None)=None, errors: (str | None)=None) -> str:
            ...
        async def samefile(self, other_path: ((bytes | int) | StrPath)) -> bool:
            ...
        async def write_bytes(self, data: bytes) -> int:
            ...
        if (sys.version_info >= (3, 10)):
            async def write_text(self, data: str, encoding: (str | None)=None, errors: (str | None)=None, newline: (str | None)=None) -> int:
                ...
        else:
            async def write_text(self, data: str, encoding: (str | None)=None, errors: (str | None)=None) -> int:
                ...
        if (sys.version_info < (3, 12)):
            async def link_to(self, target: (StrPath | bytes)) -> None:
                ...
        if (sys.version_info >= (3, 12)):
            async def is_junction(self) -> bool:
                ...
            # walk() yields tuples; typed loosely here.
            walk: Any
            async def with_segments(self, *pathsegments: StrPath) -> Path:
                ...
def setup_logger(level: int=logging.ERROR, log_filename: Optional[str]=None) -> None:
    """Configure the 'pytube' logger with console (and optional file) output.

    Parameters
    ----------
    level : logging threshold applied to the 'pytube' logger.
    log_filename : when given, records are also written to this file.
    """
    formatter = logging.Formatter(
        '[%(asctime)s] %(levelname)s in %(module)s: %(message)s',
        datefmt='%H:%M:%S',
    )
    logger = logging.getLogger('pytube')
    logger.setLevel(level)

    console_handler = logging.StreamHandler()
    console_handler.setFormatter(formatter)
    logger.addHandler(console_handler)

    if log_filename is not None:
        file_handler = logging.FileHandler(log_filename)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
class F21Handler(BaseHandler):
    """Kickstart command handler for Fedora 21.

    Maps each kickstart command name to the command class version that F21
    uses (`commandMap`) and each per-entry data class name to its version
    (`dataMap`). The class-name prefixes (FC3/F8/F21/...) record the release
    in which that implementation was introduced.
    """
    version = F21
    # command name -> handler class for F21.
    commandMap = {'auth': commands.authconfig.FC3_Authconfig, 'authconfig': commands.authconfig.FC3_Authconfig, 'autopart': commands.autopart.F21_AutoPart, 'autostep': commands.autostep.FC3_AutoStep, 'bootloader': commands.bootloader.F21_Bootloader, 'btrfs': commands.btrfs.F17_BTRFS, 'cdrom': commands.cdrom.FC3_Cdrom, 'clearpart': commands.clearpart.F21_ClearPart, 'cmdline': commands.displaymode.FC3_DisplayMode, 'device': commands.device.F8_Device, 'deviceprobe': commands.deviceprobe.FC3_DeviceProbe, 'dmraid': commands.dmraid.FC6_DmRaid, 'driverdisk': commands.driverdisk.F14_DriverDisk, 'eula': commands.eula.F20_Eula, 'fcoe': commands.fcoe.F13_Fcoe, 'firewall': commands.firewall.F20_Firewall, 'firstboot': commands.firstboot.FC3_Firstboot, 'graphical': commands.displaymode.FC3_DisplayMode, 'group': commands.group.F12_Group, 'halt': commands.reboot.F18_Reboot, 'harddrive': commands.harddrive.FC3_HardDrive, 'ignoredisk': commands.ignoredisk.F14_IgnoreDisk, 'install': commands.install.F20_Install, 'iscsi': commands.iscsi.F17_Iscsi, 'iscsiname': commands.iscsiname.FC6_IscsiName, 'keyboard': commands.keyboard.F18_Keyboard, 'lang': commands.lang.F19_Lang, 'liveimg': commands.liveimg.F19_Liveimg, 'logging': commands.logging.FC6_Logging, 'logvol': commands.logvol.F21_LogVol, 'mediacheck': commands.mediacheck.FC4_MediaCheck, 'method': commands.method.F19_Method, 'multipath': commands.multipath.FC6_MultiPath, 'network': commands.network.F21_Network, 'nfs': commands.nfs.FC6_NFS, 'ostreesetup': commands.ostreesetup.F21_OSTreeSetup, 'part': commands.partition.F20_Partition, 'partition': commands.partition.F20_Partition, 'poweroff': commands.reboot.F18_Reboot, 'raid': commands.raid.F20_Raid, 'realm': commands.realm.F19_Realm, 'reboot': commands.reboot.F18_Reboot, 'repo': commands.repo.F21_Repo, 'rescue': commands.rescue.F10_Rescue, 'rootpw': commands.rootpw.F18_RootPw, 'selinux': commands.selinux.FC3_SELinux, 'services': commands.services.FC6_Services, 'shutdown': 
commands.reboot.F18_Reboot, 'skipx': commands.skipx.FC3_SkipX, 'sshpw': commands.sshpw.F13_SshPw, 'text': commands.displaymode.FC3_DisplayMode, 'timezone': commands.timezone.F18_Timezone, 'updates': commands.updates.F7_Updates, 'upgrade': commands.upgrade.F20_Upgrade, 'url': commands.url.F18_Url, 'user': commands.user.F19_User, 'vnc': commands.vnc.F9_Vnc, 'volgroup': commands.volgroup.F21_VolGroup, 'xconfig': commands.xconfig.F14_XConfig, 'zerombr': commands.zerombr.F9_ZeroMbr, 'zfcp': commands.zfcp.F14_ZFCP}
    # data class name -> per-entry data class for F21.
    dataMap = {'BTRFSData': commands.btrfs.F17_BTRFSData, 'DriverDiskData': commands.driverdisk.F14_DriverDiskData, 'DeviceData': commands.device.F8_DeviceData, 'DmRaidData': commands.dmraid.FC6_DmRaidData, 'FcoeData': commands.fcoe.F13_FcoeData, 'GroupData': commands.group.F12_GroupData, 'IscsiData': commands.iscsi.F17_IscsiData, 'LogVolData': commands.logvol.F21_LogVolData, 'MultiPathData': commands.multipath.FC6_MultiPathData, 'NetworkData': commands.network.F21_NetworkData, 'PartData': commands.partition.F18_PartData, 'RaidData': commands.raid.F18_RaidData, 'RepoData': commands.repo.F21_RepoData, 'SshPwData': commands.sshpw.F13_SshPwData, 'UserData': commands.user.F19_UserData, 'VolGroupData': commands.volgroup.F21_VolGroupData, 'ZFCPData': commands.zfcp.F14_ZFCPData}
def score_all_questions(num_workers):
    """Score all HotpotQA train/dev questions in parallel and rebuild the corpus.

    Parameters
    ----------
    num_workers : size of the process pool used to run `assign_scores`
        over the questions.

    Fix: the original wrapped the pool iterator in a second `tqdm` while
    also updating an outer bar manually, drawing two progress bars for
    each split; keep only the explicit outer bar.
    """
    print('Loading data...')
    corpus = HotpotQuestions()
    train = corpus.get_train()
    dev = corpus.get_dev()
    workers = ProcessPool(num_workers, initializer=init, initargs=[])

    def _score(questions, label):
        # Run assign_scores over `questions` via the pool, with one progress bar.
        print('Scoring {}...'.format(label))
        scored = []
        with tqdm(total=len(questions)) as pbar:
            for question in workers.imap_unordered(assign_scores, questions):
                scored.append(question)
                pbar.update()
        return scored

    new_train = _score(train, 'train')
    new_dev = _score(dev, 'dev')
    HotpotQuestions.make_corpus(new_train, new_dev)
def validate_test(kw):
    """Validate a test declaration (keyword dict `kw`) and return a list of
    error strings; an empty list means the declaration is acceptable.

    Checks: BOOSTTEST usage restrictions, test SIZE/timeout consistency,
    FAT-only requirements, TEST-DATA path prefixes, fuzzer option syntax,
    and deprecated PYTEST_SCRIPT usage. Error strings use the build tool's
    inline [[imp]]/[[bad]]/[[rst]] markup.

    NOTE(review): uses `sys.maxint`, so this is Python 2 code.
    """
    def get_list(key):
        # Declarations arrive serialized; split into a list (missing -> []).
        return deserialize_list(kw.get(key, ''))
    errors = []
    if (kw.get('SCRIPT-REL-PATH') == 'boost.test'):
        # BOOSTTEST is only allowed for the maps/ and devtools/ trees.
        project_path = kw.get('BUILD-FOLDER-PATH', '')
        if ((not project_path.startswith('maps')) and (not project_path.startswith('devtools'))):
            errors.append('BOOSTTEST is not allowed here')
    # Max timeout per declared size, ordered ascending so a suggestion can
    # pick the smallest size that accommodates the requested timeout.
    size_timeout = collections.OrderedDict(sorted({'SMALL': 60, 'MEDIUM': 600, 'LARGE': (sys.maxint - 1), 'FAT': (sys.maxint - 2)}.items(), key=(lambda t: t[1])))
    size = kw.get('SIZE', 'SMALL')
    tags = get_list('TAG')
    in_autocheck = (('ya:not_autocheck' not in tags) and ('ya:manual' not in tags))
    if (size not in size_timeout):
        errors.append('Unknown test size: [[imp]]{}[[rst]], choose from [[imp]]{}[[rst]]'.format(size, ', '.join(size_timeout.keys())))
    else:
        try:
            # Fall back to the size default when TEST-TIMEOUT is missing or empty.
            timeout = int((kw.get('TEST-TIMEOUT', size_timeout[size]) or size_timeout[size]))
            if (timeout < 0):
                raise Exception('Timeout must be > 0')
            if ((size_timeout[size] < timeout) and in_autocheck):
                # Suggest the smallest size whose cap fits the timeout.
                suggested_size = None
                for (s, t) in size_timeout.items():
                    if (timeout <= t):
                        suggested_size = s
                        break
                errors.append('Max allowed timeout for test size [[imp]]{}[[rst]] is [[imp]]{} sec[[rst]], suggested size: [[imp]]{}[[rst]]'.format(size, size_timeout[size], suggested_size))
        except Exception as e:
            errors.append('Error when parsing test timeout: [[bad]]{}[[rst]]'.format(e))
    requirements = kw.get('REQUIREMENTS', '')
    is_fat = ((size == 'FAT') or ('ya:fat' in tags))
    # container/ram/disk requirements are reserved for FAT tests.
    for req in ['container', 'ram', 'disk']:
        if (((req + ':') in requirements) and (not is_fat)):
            errors.append('Only [[imp]]FAT[[rst]] tests can have [[imp]]{}[[rst]] requirement'.format(req))
    if (in_autocheck and (size == 'LARGE') and (not is_fat)):
        errors.append('LARGE test must have ya:fat tag')
    data = get_list('TEST-DATA')
    data_prefixes = ['arcadia', 'arcadia_tests_data', 'sbr://']
    validate_re = re.compile('^({})'.format('|'.join(data_prefixes)))
    for d in data:
        if (not validate_re.match(d)):
            errors.append('Path [[imp]]{}[[rst]] in the test data section should start with one of the following prefixes: [[imp]]{}[[rst]]'.format(d, ', '.join(data_prefixes)))
    if kw.get('FUZZ-OPTS'):
        # Fuzzer options must look like '-name=value'; some are reserved.
        # Reporting stops at the first bad option.
        for option in get_list('FUZZ-OPTS'):
            if (not option.startswith('-')):
                errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. All fuzzer options should start with '-'".format(option))
                break
            eqpos = option.find('=')
            if ((eqpos == (- 1)) or (len(option) == (eqpos + 1))):
                errors.append("Unrecognized fuzzer option '[[imp]]{}[[rst]]'. All fuzzer options should obtain value specified after '='".format(option))
                break
            if ((option[(eqpos - 1)] == ' ') or (option[(eqpos + 1)] == ' ')):
                errors.append("Spaces are not allowed: '[[imp]]{}[[rst]]'".format(option))
                break
            if (option[:eqpos] in ('-runs', '-dict', '-jobs', '-workers', '-artifact_prefix', '-print_final_stats')):
                errors.append("You can't use '[[imp]]{}[[rst]]' - it will be automatically calculated or configured during run".format(option))
                break
    if ((kw.get('USE_ARCADIA_PYTHON') == 'yes') and (kw.get('SCRIPT-REL-PATH') == 'py.test')):
        errors.append('PYTEST_SCRIPT is deprecated')
    return errors
def plot_table(tbl, columns=None, title='', title_loc='left', header=True, colWidths=None, rowLoc='right', colLoc='right', colLabels=None, edges='horizontal', orient='horizontal', figsize=(5.5, 6), savefig=None, show=False):
    """Render *tbl* (a DataFrame-like object) as a matplotlib table figure.

    ``savefig`` may be a file path or a dict of keyword arguments for
    ``matplotlib.pyplot.savefig``.  Returns the figure when ``show`` is
    falsy, otherwise shows the figure (non-blocking) and returns ``None``.
    """
    # Optionally relabel the columns; silently keep the existing labels when
    # the object does not support assignment.
    if columns is not None:
        try:
            tbl.columns = columns
        except Exception:
            pass

    fig = _plt.figure(figsize=figsize)
    ax = _plt.subplot(111, frame_on=False)
    if title != '':
        ax.set_title(title, fontweight='bold', fontsize=14, color='black', loc=title_loc)

    col_labels = tbl.columns if header else colLabels
    table_artist = ax.table(cellText=tbl.values, colWidths=colWidths,
                            rowLoc=rowLoc, colLoc=colLoc, edges=edges,
                            colLabels=col_labels, loc='center', zorder=2)
    table_artist.auto_set_font_size(False)
    table_artist.set_fontsize(12)
    table_artist.scale(1, 1)

    vertical = 'vertical' in orient
    for (row, col), cell in table_artist.get_celld().items():
        cell.set_height(0.08)
        cell.set_text_props(color='black')
        cell.set_edgecolor('#dddddd')
        if row == 0 and header:
            # Header row: heavy black frame, bold text.
            cell.set_edgecolor('black')
            cell.set_facecolor('black')
            cell.set_linewidth(2)
            cell.set_text_props(weight='bold', color='black')
        elif col == 0 and vertical:
            # First column acts as row labels in a "vertical" orientation.
            cell.set_edgecolor('#dddddd')
            cell.set_linewidth(1)
            cell.set_text_props(weight='bold', color='black')
        elif row > 1:
            cell.set_linewidth(1)

    ax.grid(False)
    ax.set_xticks([])
    ax.set_yticks([])
    # Layout tweaks are best-effort only.
    try:
        _plt.subplots_adjust(hspace=0)
    except Exception:
        pass
    try:
        fig.tight_layout(w_pad=0, h_pad=0)
    except Exception:
        pass

    if savefig:
        if isinstance(savefig, dict):
            _plt.savefig(**savefig)
        else:
            _plt.savefig(savefig)
    if show:
        _plt.show(block=False)
    _plt.close()
    return None if show else fig
def test_calibration_mat_alpha_3():
    """Regression test for matrix-based 'alpha' calibration of variable 'z'.

    The expected frames/vectors below are pre-computed reference results;
    all comparisons use absolute tolerances.
    """
    lc, nominal_values, target_beta = setup3()
    calib = ra.Calibration(lc, target_beta=target_beta, dict_nom_vals=nominal_values,
                           calib_var='z', est_method='matrix', calib_method='alpha',
                           print_output=False)
    calib.run()

    cases = ['Q1_max', 'Q2_max', 'Q3_max']
    expected_Xstar = pd.DataFrame(
        data=[[0.6194, 1.0194, 1.8722, 1.2591, 1.6108, 3.5045],
              [0.6137, 1.0202, 1.4497, 1.727, 1.7667, 3.4546],
              [0.6124, 1.0207, 1.5489, 1.3686, 1.8671, 3.3951]],
        columns=['R', 'G', 'Q1', 'Q2', 'Q3', 'z'], index=cases)
    expected_phi = pd.DataFrame(data=[[0.8005], [0.7931], [0.7915]],
                                columns=['R'], index=cases)
    expected_gamma = pd.DataFrame(
        data=[[1.0194, 1.3634, 1.1072, 1.2269],
              [1.0202, 1.3634, 1.1072, 1.2269],
              [1.0207, 1.3634, 1.1072, 1.2269]],
        columns=['G', 'Q1', 'Q2', 'Q3'], index=cases)
    expected_psi = pd.DataFrame(
        data=[[1.0, 1.0, 0.7778, 0.7997],
              [1.0, 0.8352, 1.0, 0.7997],
              [1.0, 0.8352, 0.7778, 1.0]],
        columns=['G', 'Q1', 'Q2', 'Q3'], index=cases)
    expected_design_z = np.array([3.5442, 3.4616, 3.3951])
    expected_design_beta = np.array([4.8494, 4.9144, 4.9925])

    assert pytest.approx(calib.dfXstarcal, abs=0.0001) == expected_Xstar
    assert pytest.approx(calib.df_phi, abs=0.0001) == expected_phi
    assert pytest.approx(calib.df_gamma, abs=0.0001) == expected_gamma
    assert pytest.approx(calib.df_psi, abs=0.0001) == expected_psi
    assert pytest.approx(calib.get_design_param_factor(), abs=0.001) == expected_design_z
    assert pytest.approx(calib.calc_beta_design_param(np.max(expected_design_z)), abs=0.0001) == expected_design_beta
def evaluate(dataset, LOG, **kwargs):
    """Dispatch metric evaluation to the routine matching *dataset*.

    Parameters
    ----------
    dataset : str
        Dataset key ('Inaturalist', 'sop', 'cars196', 'cub' or 'vehicle_id').
    LOG : logging helper forwarded to the evaluation routine.
    **kwargs : forwarded unchanged.

    Returns
    -------
    Whatever the selected evaluation routine returns.

    Raises
    ------
    Exception
        If no evaluation routine exists for *dataset*.
    """
    if dataset in ['Inaturalist', 'sop', 'cars196', 'cub']:
        ret = evaluate_one_dataset(LOG, **kwargs)
    elif dataset in ['vehicle_id']:
        ret = evaluate_multiple_datasets(LOG, **kwargs)
    else:
        # Bug fix: the dataset name was never interpolated into the message
        # (the original raised the literal '{}' placeholder).
        raise Exception('No implementation for dataset {} available!'.format(dataset))
    return ret
def gen_filelist(fname, fileList, folderName=''):
    """Write *fileList* to *fname* as a markdown listing.

    Single-element entries become bold section headers.  Longer entries
    become bullet links (built by ``gen_url``) followed by their summary
    and, when present and non-empty, a quoted description in which
    markdown-style ``[links]`` are resolved via ``link_filelist``.
    """
    with open(fname, 'w') as out:
        for entry in fileList:
            if len(entry) == 1:
                # Section header entry.
                out.write('\n\n**' + entry[FileName] + '**\n\n')
                continue
            out.write('- ' + gen_url(entry[FileName], folderName) + '\t\t' + entry[Summary] + '\n')
            if len(entry) >= Desc + 1 and len(entry[Desc]) > 0:
                desc = entry[Desc]
                if desc.find('[') >= 0 and desc.find(']') >= 0:
                    # Resolve embedded [link] references relative to the folder.
                    desc = link_filelist(desc, folderName)
                out.write('\n>' + desc + '\n\n')
# NOTE(review): the bare ``(scope='function')`` below looks like a decorator
# whose callable was stripped by a source transformation — presumably
# ``@pytest.fixture(scope='function')``, since ``terminal`` consumes the
# ``radian_command`` fixture; confirm against version control.
(scope='function')
def terminal(radian_command):
    # Fixture-style generator: yield a live terminal, then shut radian down.
    with Terminal.open(radian_command) as t:
        (yield t)
        # Teardown: interrupt anything running, then ask R/radian to quit.
        t.sendintr()
        t.write('q()\n')
        start_time = time.time()
        # Poll until the process exits; fail loudly after 15 seconds.
        while t.isalive():
            if ((time.time() - start_time) > 15):
                raise Exception("radian didn't quit cleanly")
            time.sleep(0.1)
class WorldbytezCom(XFSAccount):
    """Account plugin for worldbytez.com.

    All behaviour is inherited from the generic XFS account handler; this
    class only declares plugin metadata.
    """
    __name__ = 'WorldbytezCom'
    __type__ = 'account'
    __version__ = '0.06'
    __status__ = 'testing'
    __description__ = 'Worldbytez.com account plugin'
    __license__ = 'GPLv3'
    __authors__ = [('Walter Purcaro', '')]
    # Domain the XFSAccount base class uses to build account/login URLs.
    PLUGIN_DOMAIN = 'worldbytez.com'
class CustomCalloutItemDirective(Directive):
    """Sphinx/docutils directive rendering a callout box from a template.

    Options: ``header`` (required), ``description``, ``button_link`` and
    ``button_text`` (optional, defaulting to the empty string).
    """
    option_spec = {'header': directives.unchanged, 'description': directives.unchanged, 'button_link': directives.unchanged, 'button_text': directives.unchanged}

    def run(self):
        """Build the callout nodes; logs and re-raises when header is missing."""
        # Optional options default to ''.
        description = self.options.get('description', '')
        button_link = self.options.get('button_link', '')
        button_text = self.options.get('button_text', '')
        if 'header' in self.options:
            header = self.options['header']
        else:
            # Preserve the original contract: print the error, then raise.
            # (The original also had an unreachable ``return []`` after the
            # raise and a dead FileNotFoundError handler; both removed.)
            error = ValueError('header not doc found')
            print(error)
            raise error
        callout_rst = CALLOUT_TEMPLATE.format(description=description, header=header, button_link=button_link, button_text=button_text)
        callout_list = StringList(callout_rst.split('\n'))
        callout = nodes.paragraph()
        self.state.nested_parse(callout_list, self.content_offset, callout)
        return [callout]
def create_balcony(options: BalconyOptions):
    """Build a balcony on the currently selected face using building-tools.

    Registers the property groups the balcony operator depends on, seeds a
    scene-level ``BalconyProperty`` from the selected face dimensions,
    overrides it with *options*, runs the build, and removes the scene
    pointer again.  Returns whatever ``build`` returns.
    """
    from ...btools.building.array import ArrayProperty
    from ...btools.building.sizeoffset import SizeOffsetProperty
    from ...btools.building.railing import RailProperty, RailFillProperty, PostFillProperty, WallFillProperty
    # NOTE(review): registration order looks deliberate — RailProperty is
    # registered after the fill properties it presumably references via
    # PointerProperty; confirm before reordering.
    register_property(ArrayProperty)
    register_property(SizeOffsetProperty)
    register_property(RailFillProperty)
    register_property(PostFillProperty)
    register_property(WallFillProperty)
    register_property(RailProperty)
    from ...btools.building.balcony import BalconyProperty
    from ...btools.building.balcony.balcony_ops import build
    register_property(BalconyProperty)
    # Temporarily attach the property group to the scene so it can be edited.
    bpy.types.Scene.balcony_prop = bpy.props.PointerProperty(type=BalconyProperty)
    prop = bpy.context.scene.balcony_prop
    prop.init(get_selected_face_dimensions(bpy.context))
    # Apply caller overrides on top of the face-derived defaults.
    props_dict = dict_from_prop(prop)
    props_dict.update(asdict(options))
    prop_from_dict(prop, props_dict)
    result = build(bpy.context, prop)
    # Clean up the scene-level pointer once the build is done.
    del bpy.types.Scene.balcony_prop
    return result
def sync_random_seed(seed=None, device='cuda'):
    """Return a random seed shared by every process in the distributed group.

    Rank 0 draws (or is given) the seed and broadcasts it; every other rank
    adopts the broadcast value, keeping e.g. samplers in sync across ranks.
    """
    if seed is None:
        seed = np.random.randint(2 ** 31)
    assert isinstance(seed, int)

    rank, world_size = get_dist_info()
    # Single-process run: nothing to synchronise.
    if world_size == 1:
        return seed

    # Rank 0 owns the seed; other ranks receive it through the broadcast.
    if rank == 0:
        seed_tensor = torch.tensor(seed, dtype=torch.int32, device=device)
    else:
        seed_tensor = torch.tensor(0, dtype=torch.int32, device=device)
    dist.broadcast(seed_tensor, src=0)
    return seed_tensor.item()
def _binary_precision_update_input_check(input: torch.Tensor, target: torch.Tensor) -> None:
if (input.shape != target.shape):
raise ValueError(f'The `input` and `target` should have the same dimensions, got shapes {input.shape} and {target.shape}.')
if (target.ndim != 1):
raise ValueError(f'target should be a one-dimensional tensor, got shape {target.shape}.') |
def create_dataloader(opt, world_size, rank):
    """Instantiate the dataset named by ``opt.dataset_mode`` and wrap it in a
    DataLoader.

    Training runs get a ``DistributedSampler`` over *world_size* replicas;
    evaluation runs use the default (no sampler).  Batches are never
    shuffled here and the last partial batch is dropped only in training.
    """
    dataset_cls = find_dataset_using_name(opt.dataset_mode)
    dataset = dataset_cls()
    dataset.initialize(opt)
    print('dataset [%s] of size %d was created' % (type(dataset).__name__, len(dataset)))

    sampler = None
    if opt.isTrain:
        sampler = torch.utils.data.distributed.DistributedSampler(dataset, num_replicas=world_size, rank=rank)
    # sampler=None is the DataLoader default, so this single call covers both
    # the training and the evaluation branch.
    return torch.utils.data.DataLoader(
        dataset,
        batch_size=opt.batchSize,
        sampler=sampler,
        shuffle=False,
        num_workers=int(opt.nThreads),
        drop_last=opt.isTrain,
    )
class Critic(nn.Module):
    """Scalar value head on top of an arbitrary feature backbone.

    When ``actions`` is supplied to ``forward``, it is flattened and
    concatenated to the observations before the backbone runs (Q-function
    style); otherwise the backbone sees the observations alone (V-function
    style).  The backbone must expose an ``output_dim`` attribute.
    """

    def __init__(self, backbone, device='cpu'):
        super().__init__()
        self.device = torch.device(device)
        self.backbone = backbone.to(device)
        feature_dim = getattr(backbone, 'output_dim')
        self.last = nn.Linear(feature_dim, 1).to(device)

    def forward(self, obs, actions=None):
        """Return a (batch, 1) tensor of state(-action) values."""
        obs = torch.as_tensor(obs, device=self.device, dtype=torch.float32)
        if actions is not None:
            flat_actions = torch.as_tensor(actions, device=self.device, dtype=torch.float32).flatten(1)
            obs = torch.cat([obs, flat_actions], dim=1)
        features = self.backbone(obs)
        return self.last(features)
# NOTE(review): the bare ``.parametrize(...)`` below appears to be a decorator
# whose object was stripped by a source transformation — presumably
# ``@pytest.mark.parametrize('rlo,rhi', [(1, 2), ('a', 'b')])``; confirm
# against version control.
.parametrize('rlo,rhi', [(1, 2), ('a', 'b')])
def test_valueholder_ordering(rlo, rhi):
    # Ordering comparisons must behave identically whether each operand is a
    # raw value or wrapped in a ValueHolder, for ints and strings alike.
    (vlo, vhi) = (ValueHolder(rlo), ValueHolder(rhi))
    for lo in (rlo, vlo):
        for hi in (rhi, vhi):
            assert (lo < hi)
            assert (hi > lo)
            assert (lo <= lo)
            assert (not (lo < lo))
            assert (lo >= lo)
class ChainStateStateMachine(RuleBasedStateMachine):
    """Hypothesis rule-based state machine exercising Raiden chain-state
    transitions over a simulated network of clients, channels and transfers.

    NOTE(review): several decorators in this class appear to have been
    stripped by a source transformation — the bare ``(target=routes, ...)``
    before ``initialize_all`` and the bare ``()`` before
    ``chain_state_invariants`` are presumably hypothesis's
    ``@initialize(...)`` and ``@invariant()``; confirm against version
    control before running.
    """
    def __init__(self):
        # ``replay_path`` suppresses hypothesis event() bookkeeping while
        # replaying a failing example (see ``event`` below).
        self.replay_path: bool = False
        self.address_to_privkey: Dict[(Address, PrivateKey)] = {}
        self.address_to_client: Dict[(Address, Client)] = {}
        self.transfer_order = TransferOrder()
        super().__init__()
    def new_address(self) -> Address:
        """Create a fresh address and remember its private key."""
        (privkey, address) = factories.make_privkey_address()
        self.address_to_privkey[address] = privkey
        return address
    def _new_channel_state(self, our_address, partner_address):
        # Both ends start with a balance of 1000 tokens.
        identifier = factories.make_canonical_identifier(token_network_address=self.token_network_address)
        our_state = factories.NettingChannelEndStateProperties(balance=TokenAmount(1000), address=our_address)
        partner_state = factories.NettingChannelEndStateProperties(balance=TokenAmount(1000), address=partner_address)
        return factories.create(factories.NettingChannelStateProperties(our_state=our_state, partner_state=partner_state, canonical_identifier=identifier))
    def new_channel(self, client_address: Address, partner_address: Address=None) -> Address:
        """Create a channel between *client_address* and *partner_address*
        (generating the partner when omitted); returns the partner address."""
        if (not partner_address):
            partner_address = self.new_address()
        client = self.address_to_client[client_address]
        channel = self._new_channel_state(client_address, partner_address)
        client.address_to_channel[partner_address] = channel
        partner_client = self.address_to_client.get(partner_address)
        if (partner_client is not None):
            # Give the partner a mirrored copy with the end states swapped so
            # both clients see the same channel from their own perspective.
            mirrored = deepcopy(channel)
            (mirrored.our_state, mirrored.partner_state) = (mirrored.partner_state, mirrored.our_state)
            partner_client.address_to_channel[client_address] = mirrored
        return partner_address
    def _new_channel_transaction(self, client_address, partner_address):
        # Feed a ContractReceiveChannelNew state change through the node so
        # the client's chain state learns about the on-chain channel.
        client = self.address_to_client[client_address]
        channel_state = client.address_to_channel[partner_address]
        assert isinstance(channel_state, NettingChannelState)
        channel_new_state_change = ContractReceiveChannelNew(transaction_hash=factories.make_transaction_hash(), channel_state=channel_state, block_number=self.block_number, block_hash=factories.make_block_hash())
        node.state_transition(client.chain_state, channel_new_state_change)
    def new_channel_with_transaction(self, client_address: Address, partner_address: Address=None) -> Address:
        """Create a channel and apply the on-chain 'channel new' transaction
        on both ends (when the partner is also a simulated client)."""
        partner_address = self.new_channel(client_address, partner_address)
        self._new_channel_transaction(client_address, partner_address)
        if (partner_address in self.address_to_client):
            self._new_channel_transaction(partner_address, client_address)
        return partner_address
    def new_client(self) -> Address:
        """Create a client with its own ChainState and register it."""
        address = self.new_address()
        chain_state = ChainState(pseudo_random_generator=self.random, block_number=self.block_number, block_hash=self.block_hash, our_address=address, chain_id=factories.UNIT_CHAIN_ID)
        chain_state.identifiers_to_tokennetworkregistries[self.token_network_registry_address] = deepcopy(self.token_network_registry_state)
        chain_state.tokennetworkaddresses_to_tokennetworkregistryaddresses[self.token_network_address] = self.token_network_registry_address
        self.address_to_client[address] = Client(chain_state=chain_state)
        return address
    # NOTE(review): stripped decorator — presumably
    # ``@initialize(target=routes, block_number=..., random=..., random_seed=...)``.
    (target=routes, block_number=integers(min_value=(GENESIS_BLOCK_NUMBER + 1)), random=randoms(), random_seed=random_module())
    def initialize_all(self, block_number, random, random_seed):
        """Set up the token network world state before any rule runs."""
        self.random_seed = random_seed
        self.block_number = block_number
        self.block_hash = factories.make_block_hash()
        self.random = random
        self.token_network_address = factories.UNIT_TOKEN_NETWORK_ADDRESS
        self.token_id = factories.UNIT_TOKEN_ADDRESS
        self.token_network_state = TokenNetworkState(address=self.token_network_address, token_address=self.token_id)
        self.token_network_registry_address = factories.make_token_network_registry_address()
        self.token_network_registry_state = TokenNetworkRegistryState(self.token_network_registry_address, [self.token_network_state])
        return multiple(*self.create_network())
    def event(self, description):
        """Record a hypothesis event unless replaying a failing example."""
        if (not self.replay_path):
            event(description)
    # NOTE(review): stripped decorator — presumably ``@invariant()``.
    ()
    def chain_state_invariants(self):
        # Checked after every rule: per-client monotonicity and channel-state
        # consistency.
        for client in self.address_to_client.values():
            client.assert_monotonicity_invariants()
            client.assert_channel_state_invariants()
    def channel_opened(self, partner_address, client_address):
        """Return True when *client_address* has an OPEN channel with
        *partner_address*; False when the client is unknown."""
        try:
            client = self.address_to_client[client_address]
        except KeyError:
            return False
        else:
            needed_channel = client.address_to_channel[partner_address]
            return (channel.get_status(needed_channel) == ChannelState.STATE_OPENED)
    def create_network(self):
        # Subclasses define the initial topology of clients and channels.
        raise NotImplementedError('Every fuzz test needs to override this.')
def find_matching_team_invite(code, user_obj):
    """Look up the team invite for *code* and check it belongs to *user_obj*.

    Raises ``DataModelException`` when the invite is bound to a different
    user; otherwise returns the invite.
    """
    invite = lookup_team_invite(code)
    intended_user = invite.user
    if intended_user is not None and intended_user != user_obj:
        raise DataModelException(
            'This invite is intended for user "%s".\n Please login to that account and try again.'
            % intended_user.username
        )
    return invite
def setup_database_for_testing(testcase):
    """Prepare the (SQLite) test database and open a transaction/savepoint
    pair for *testcase* so its changes can be rolled back afterwards.

    Refuses to run against anything that looks like a production database.
    The expensive wipe/initialize/populate cycle happens only once per
    process, guarded by ``db_initialized_for_testing``.
    """
    # Safety net: never wipe a non-SQLite database unless explicitly testing
    # against a real one.
    if (not IS_TESTING_REAL_DATABASE) and (not isinstance(db.obj, SqliteDatabase)):
        raise RuntimeError('Attempted to wipe production database!')

    if not db_initialized_for_testing.is_set():
        logger.debug('Setting up DB for testing.')
        if os.environ.get('SKIP_DB_SCHEMA', '') != 'true':
            wipe_database()
            initialize_database()
        populate_database()
        models_missing_data = find_models_missing_data()
        if models_missing_data:
            # Bug fix: the original passed logging-style arguments to
            # RuntimeError, so the message was never formatted.
            raise RuntimeError('%s models are missing data: %s'
                               % (len(models_missing_data), models_missing_data))
        if not IS_TESTING_REAL_DATABASE:
            # SQLite does not enforce foreign keys unless asked to.
            db.obj.execute_sql('PRAGMA foreign_keys = ON;')
        db_initialized_for_testing.set()

    # Warm the kind cache so later lookups do not hit the DB mid-test.
    Repository.kind.get_id('image')

    # Open a transaction + savepoint that the teardown can roll back.
    testcases[testcase] = {}
    testcases[testcase]['transaction'] = db.transaction()
    testcases[testcase]['transaction'].__enter__()
    testcases[testcase]['savepoint'] = db.savepoint()
    testcases[testcase]['savepoint'].__enter__()
def cycleGetFreElem(preFixData, e, minsup):
    """Recursively mine frequent sequences extending prefix element *e* from
    the projected database *preFixData* (PrefixSpan-style).

    Returns a list of frequent sequences, each a list of itemsets.  Items
    containing '_' appear to mark extensions of the current itemset rather
    than the start of a new one — semantics depend on the module helpers
    ``getElem``/``useCycleGetFreElem``/``deleteNotFreElem``/
    ``getAllPrefixData``; confirm against their definitions.
    """
    # Work on a deep copy so the caller's projected database is untouched.
    copyPreFixData = list(copy.deepcopy(preFixData))
    allFreSequence = []
    allElem = getElem(copyPreFixData)
    # Split candidates into frequent / infrequent with respect to minsup,
    # then prune infrequent items from the working copy.
    (freElem, notFreElem) = useCycleGetFreElem(copyPreFixData, e, allElem, minsup)
    deleteNotFreElem(copyPreFixData, notFreElem)
    thisAllPrefixData = getAllPrefixData(freElem, e, copyPreFixData)
    for x in freElem:
        if set('_').issubset(set(x)):
            # '_x' means x belongs to the same itemset as e: emit [[e, x]].
            newElem = [[e, x[1]]]
            allFreSequence.append(newElem)
        else:
            # Otherwise x starts a new itemset after e: emit [[e], [x]].
            temp2 = [[e], [x]]
            allFreSequence.append(temp2)
    lengthFreElem = len(freElem)
    for i in range(lengthFreElem):
        # Recurse into the database projected on each frequent element and
        # prepend the current prefix to every mined suffix.
        temp = cycleGetFreElem(thisAllPrefixData[i], freElem[i], minsup)
        for x in temp:
            if set('_').issubset(x[0][0]):
                # Suffix continues e's itemset: merge e into its first itemset.
                t = copy.deepcopy(x)
                t[0] = [e, str(t[0][0])[1]]
                allFreSequence.append(t)
            else:
                # Suffix starts a new itemset: prefix with [e].
                t2 = copy.deepcopy(x)
                t2.insert(0, [e])
                allFreSequence.append(t2)
    return allFreSequence
def create_masked_lm_predictions(tokens, masked_lm_prob, max_predictions_per_seq, vocab_words, rng):
    """BERT-style masked-LM target creation.

    Picks up to ``max_predictions_per_seq`` positions (never [CLS]/[SEP])
    and replaces each with '[MASK]' (~80%), the original token (~10%) or a
    random vocabulary word (~10%).

    Returns ``(output_tokens, masked_lm_positions, masked_lm_labels)`` with
    positions sorted ascending and labels holding the original tokens.
    """
    # Candidate positions exclude the special tokens.
    cand_indexes = [i for i, token in enumerate(tokens)
                    if token not in ('[CLS]', '[SEP]')]
    rng.shuffle(cand_indexes)

    output_tokens = list(tokens)
    num_to_predict = min(max_predictions_per_seq,
                         max(1, int(round(len(tokens) * masked_lm_prob)))

                         )

    masked_lms = []
    covered_indexes = set()
    for index in cand_indexes:
        if len(masked_lms) >= num_to_predict:
            break
        if index in covered_indexes:
            continue
        covered_indexes.add(index)
        # The two rng.random() draws below are kept in the original order so
        # a seeded rng reproduces the original behaviour exactly:
        # 80% -> [MASK]; of the rest, half keep the token, half randomize.
        if rng.random() < 0.8:
            masked_token = '[MASK]'
        elif rng.random() < 0.5:
            masked_token = tokens[index]
        else:
            masked_token = vocab_words[rng.randint(0, len(vocab_words) - 1)]
        output_tokens[index] = masked_token
        masked_lms.append(MaskedLmInstance(index=index, label=tokens[index]))

    masked_lms.sort(key=lambda inst: inst.index)
    masked_lm_positions = [inst.index for inst in masked_lms]
    masked_lm_labels = [inst.label for inst in masked_lms]
    return (output_tokens, masked_lm_positions, masked_lm_labels)
class Migration(migrations.Migration):
    """Schema migration introducing playlist archiving.

    Adds ``ArchivedPlaylist`` (with a usage ``counter``), ordered
    ``PlaylistEntry`` rows linking playlists to archived songs, stored
    ``ArchivedPlaylistQuery`` search strings, and an optional ``playlist``
    foreign key on ``RequestLog``.
    """
    dependencies = [('core', '0002_auto__1707')]
    operations = [migrations.CreateModel(name='ArchivedPlaylist', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('list_id', models.CharField(max_length=200, unique=True)), ('title', models.CharField(max_length=1000)), ('counter', models.IntegerField())]), migrations.CreateModel(name='PlaylistEntry', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('index', models.IntegerField()), ('url', models.CharField(max_length=200)), ('playlist', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='entries', to='core.ArchivedPlaylist')), ('song', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='playlist_entries', to='core.ArchivedSong'))], options={'ordering': ['playlist', 'index']}), migrations.CreateModel(name='ArchivedPlaylistQuery', fields=[('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('query', models.CharField(max_length=1000)), ('playlist', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='queries', to='core.ArchivedPlaylist'))]), migrations.AddField(model_name='requestlog', name='playlist', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='core.ArchivedPlaylist'))]
def _set_tensors(obj, all_params, max_depth=20):
    """Walk *obj* (depth-limited) and overwrite every floating-point tensor
    it holds with the next tensor popped from the front of *all_params*.

    Mutates both *obj* (tensors replaced in traversal order) and
    *all_params* (consumed entries removed).
    """
    def _replace(elmt, name, objdict, key):
        # Consume one parameter per matched tensor, in traversal order.
        objdict[key] = all_params.pop(0)

    def _is_float_tensor(elmt):
        return isinstance(elmt, torch.Tensor) and elmt.dtype in torch_float_type

    _traverse_obj(obj, action=_replace, crit=_is_float_tensor, prefix='', max_depth=max_depth)
class Project(PymiereBaseObject):
    """Python wrapper for Premiere Pro's ExtendScript ``Project`` object.

    Every method proxies to the ES object identified by ``pymiere_id``
    through ``_eval_on_this_object``.  NOTE(review): this class looks
    auto-generated, and the getter/setter pairs sharing one name (e.g.
    ``documentID``) appear to be stripped ``@property``/``@<name>.setter``
    decorators.  As plain Python the later ``def`` silently replaces the
    earlier one, so e.g. ``documentID`` currently resolves to the raising
    setter — confirm against the generator's original output.
    """
    def __init__(self, pymiere_id=None):
        super(Project, self).__init__(pymiere_id)
    # --- read-only attributes: getter followed by a raising setter ---
    def documentID(self):
        return self._eval_on_this_object('documentID')
    def documentID(self, documentID):
        raise AttributeError("Attribute 'documentID' is read-only")
    def name(self):
        return self._eval_on_this_object('name')
    def name(self, name):
        raise AttributeError("Attribute 'name' is read-only")
    def path(self):
        return self._eval_on_this_object('path')
    def path(self, path):
        raise AttributeError("Attribute 'path' is read-only")
    def rootItem(self):
        kwargs = self._eval_on_this_object('rootItem')
        return (ProjectItem(**kwargs) if kwargs else None)
    def rootItem(self, rootItem):
        raise AttributeError("Attribute 'rootItem' is read-only")
    def sequences(self):
        kwargs = self._eval_on_this_object('sequences')
        return (SequenceCollection(**kwargs) if kwargs else None)
    def sequences(self, sequences):
        raise AttributeError("Attribute 'sequences' is read-only")
    # NOTE(review): stray no-op string literal — likely generator debris.
    ' f '
    def activeSequence(self):
        kwargs = self._eval_on_this_object('activeSequence')
        return (Sequence(**kwargs) if kwargs else None)
    # activeSequence is the one writable attribute on this class.
    def activeSequence(self, activeSequence):
        self._check_type(activeSequence, Sequence, 'Project.activeSequence')
        self._eval_on_this_object('activeSequence = {}'.format(_format_object_to_es(activeSequence)))
    def isCloudProject(self):
        return self._eval_on_this_object('isCloudProject')
    def isCloudProject(self, isCloudProject):
        raise AttributeError("Attribute 'isCloudProject' is read-only")
    def cloudProjectLocalID(self):
        return self._eval_on_this_object('cloudProjectLocalID')
    def cloudProjectLocalID(self, cloudProjectLocalID):
        raise AttributeError("Attribute 'cloudProjectLocalID' is read-only")
    # --- event plumbing ---
    def bind(self, eventName, function):
        self._check_type(eventName, str, 'arg "eventName" of function "Project.bind"')
        self._check_type(function, any, 'arg "function" of function "Project.bind"')
        self._eval_on_this_object('bind({}, {})'.format(_format_object_to_es(eventName), _format_object_to_es(function)))
    def unbind(self, eventName):
        self._check_type(eventName, str, 'arg "eventName" of function "Project.unbind"')
        self._eval_on_this_object('unbind({})'.format(_format_object_to_es(eventName)))
    def setTimeout(self, eventName, function, milliseconds):
        self._check_type(eventName, str, 'arg "eventName" of function "Project.setTimeout"')
        self._check_type(function, any, 'arg "function" of function "Project.setTimeout"')
        self._check_type(milliseconds, float, 'arg "milliseconds" of function "Project.setTimeout"')
        self._eval_on_this_object('setTimeout({}, {}, {})'.format(_format_object_to_es(eventName), _format_object_to_es(function), _format_object_to_es(milliseconds)))
    # --- sequence / import operations ---
    def openSequence(self, sequenceID):
        self._check_type(sequenceID, str, 'arg "sequenceID" of function "Project.openSequence"')
        return self._eval_on_this_object('openSequence({})'.format(_format_object_to_es(sequenceID)))
    def importFiles(self, arrayOfFilePathsToImport, suppressUI, targetBin, importAsNumberedStills):
        self._check_type(arrayOfFilePathsToImport, list, 'arg "arrayOfFilePathsToImport" of function "Project.importFiles"')
        self._check_type(suppressUI, bool, 'arg "suppressUI" of function "Project.importFiles"')
        self._check_type(targetBin, ProjectItem, 'arg "targetBin" of function "Project.importFiles"')
        self._check_type(importAsNumberedStills, bool, 'arg "importAsNumberedStills" of function "Project.importFiles"')
        return self._eval_on_this_object('importFiles({}, {}, {}, {})'.format(_format_object_to_es(arrayOfFilePathsToImport), _format_object_to_es(suppressUI), _format_object_to_es(targetBin), _format_object_to_es(importAsNumberedStills)))
    def importSequences(self, projectPath, sequences):
        self._check_type(projectPath, str, 'arg "projectPath" of function "Project.importSequences"')
        self._check_type(sequences, list, 'arg "sequences" of function "Project.importSequences"')
        result = self._eval_on_this_object('importSequences({}, {})'.format(_format_object_to_es(projectPath), _format_object_to_es(sequences)))
        return result
    # Imports every After Effects comp from a project (no comp-name filter).
    def importAllAEComps(self, pathOfContainingProject, optionalTargetBin):
        self._check_type(pathOfContainingProject, str, 'arg "pathOfContainingProject" of function "Project.importAEComps"')
        self._check_type(optionalTargetBin, ProjectItem, 'arg "optionalTargetBin" of function "Project.importAEComps"')
        return self._eval_on_this_object('importAEComps({},{})'.format(_format_object_to_es(pathOfContainingProject), _format_object_to_es(optionalTargetBin)))
    def importAEComps(self, pathOfContainingProject, arrayOfCompNames, optionalTargetBin):
        self._check_type(pathOfContainingProject, str, 'arg "pathOfContainingProject" of function "Project.importAEComps"')
        self._check_type(arrayOfCompNames, list, 'arg "arrayOfCompNames" of function "Project.importAEComps"')
        self._check_type(optionalTargetBin, ProjectItem, 'arg "optionalTargetBin" of function "Project.importAEComps"')
        return self._eval_on_this_object('importAEComps({},{},{})'.format(_format_object_to_es(pathOfContainingProject), _format_object_to_es(arrayOfCompNames), _format_object_to_es(optionalTargetBin)))
    def createNewSequence(self, sequenceName, placeholderID):
        self._check_type(sequenceName, str, 'arg "sequenceName" of function "Project.createNewSequence"')
        self._check_type(placeholderID, str, 'arg "placeholderID" of function "Project.createNewSequence"')
        self._eval_on_this_object('createNewSequence({}, {})'.format(_format_object_to_es(sequenceName), _format_object_to_es(placeholderID)))
    def createNewSequenceFromClips(self, sequenceName, arrayOfProjectItems, destinationBin=None):
        return Sequence(**self._eval_on_this_object('createNewSequenceFromClips({}, {}, {})'.format(_format_object_to_es(sequenceName), _format_object_to_es(arrayOfProjectItems), _format_object_to_es(destinationBin))))
    def deleteSequence(self, sequence):
        self._check_type(sequence, Sequence, 'arg "sequence" of function "Project.deleteSequence"')
        return self._eval_on_this_object('deleteSequence({})'.format(_format_object_to_es(sequence)))
    # --- export operations ---
    def exportFinalCutProXML(self, exportPath, suppressUI):
        self._check_type(exportPath, str, 'arg "exportPath" of function "Project.exportFinalCutProXML"')
        self._check_type(suppressUI, float, 'arg "suppressUI" of function "Project.exportFinalCutProXML"')
        return self._eval_on_this_object('exportFinalCutProXML({}, {})'.format(_format_object_to_es(exportPath), _format_object_to_es(suppressUI)))
    def exportTimeline(self, exportControllerName):
        self._check_type(exportControllerName, str, 'arg "exportControllerName" of function "Project.exportTimeline"')
        return self._eval_on_this_object('exportTimeline({})'.format(_format_object_to_es(exportControllerName)))
    def exportOMF(self, sequence, filePath, OMFTitle, sampleRate, bitsPerSample, audioEncapsulated, audioFileFormat, trimAudioFiles, handleFrames, includePan):
        self._check_type(sequence, Sequence, 'arg "sequence" of function "Project.exportOMF"')
        self._check_type(filePath, str, 'arg "filePath" of function "Project.exportOMF"')
        self._check_type(OMFTitle, str, 'arg "OMFTitle" of function "Project.exportOMF"')
        self._check_type(sampleRate, float, 'arg "sampleRate" of function "Project.exportOMF"')
        self._check_type(bitsPerSample, float, 'arg "bitsPerSample" of function "Project.exportOMF"')
        self._check_type(audioEncapsulated, float, 'arg "audioEncapsulated" of function "Project.exportOMF"')
        self._check_type(audioFileFormat, float, 'arg "audioFileFormat" of function "Project.exportOMF"')
        self._check_type(trimAudioFiles, float, 'arg "trimAudioFiles" of function "Project.exportOMF"')
        self._check_type(handleFrames, float, 'arg "handleFrames" of function "Project.exportOMF"')
        return self._eval_on_this_object('exportOMF({}, {}, {}, {}, {}, {}, {}, {}, {}, {})'.format(_format_object_to_es(sequence), _format_object_to_es(filePath), _format_object_to_es(OMFTitle), _format_object_to_es(sampleRate), _format_object_to_es(bitsPerSample), _format_object_to_es(audioEncapsulated), _format_object_to_es(audioFileFormat), _format_object_to_es(trimAudioFiles), _format_object_to_es(handleFrames), _format_object_to_es(includePan)))
    def exportAAF(self, sequence, filePath, mixDownVideo, explodeToMono, sampleRate, bitsPerSample, embedAudio, audioFileFormat, trimSources, handleFrames):
        self._check_type(sequence, Sequence, 'arg "sequence" of function "Project.exportAAF"')
        self._check_type(filePath, str, 'arg "filePath" of function "Project.exportAAF"')
        self._check_type(mixDownVideo, float, 'arg "mixDownVideo" of function "Project.exportAAF"')
        self._check_type(explodeToMono, float, 'arg "explodeToMono" of function "Project.exportAAF"')
        self._check_type(sampleRate, float, 'arg "sampleRate" of function "Project.exportAAF"')
        self._check_type(bitsPerSample, float, 'arg "bitsPerSample" of function "Project.exportAAF"')
        self._check_type(embedAudio, float, 'arg "embedAudio" of function "Project.exportAAF"')
        self._check_type(audioFileFormat, float, 'arg "audioFileFormat" of function "Project.exportAAF"')
        self._check_type(trimSources, float, 'arg "trimSources" of function "Project.exportAAF"')
        self._check_type(handleFrames, float, 'arg "handleFrames" of function "Project.exportAAF"')
        return self._eval_on_this_object('exportAAF({}, {}, {}, {}, {}, {}, {}, {}, {}, {})'.format(_format_object_to_es(sequence), _format_object_to_es(filePath), _format_object_to_es(mixDownVideo), _format_object_to_es(explodeToMono), _format_object_to_es(sampleRate), _format_object_to_es(bitsPerSample), _format_object_to_es(embedAudio), _format_object_to_es(audioFileFormat), _format_object_to_es(trimSources), _format_object_to_es(handleFrames)))
    # --- project file and misc operations ---
    def saveAs(self, saveAsPath):
        self._check_type(saveAsPath, str, 'arg "saveAsPath" of function "Project.saveAs"')
        return self._eval_on_this_object('saveAs({})'.format(_format_object_to_es(saveAsPath)))
    def save(self):
        self._eval_on_this_object('save()')
    def pauseGrowing(self, pausedOrNot):
        self._check_type(pausedOrNot, float, 'arg "pausedOrNot" of function "Project.pauseGrowing"')
        return self._eval_on_this_object('pauseGrowing({})'.format(_format_object_to_es(pausedOrNot)))
    def closeDocument(self, save=True):
        # NOTE(review): the context string says "Project.save" — looks like a
        # copy/paste slip in the generated code; left unchanged here.
        self._check_type(save, bool, 'arg "save" of function "Project.save"')
        return self._eval_on_this_object('closeDocument({})'.format(_format_object_to_es(save)))
    def placeAsset(self, arg1):
        self._check_type(arg1, any, 'arg "arg1" of function "Project.placeAsset"')
        return self._eval_on_this_object('placeAsset({})'.format(_format_object_to_es(arg1)))
    def addPropertyToProjectMetadataSchema(self, name, label, type):
        self._check_type(name, str, 'arg "name" of function "Project.addPropertyToProjectMetadataSchema"')
        self._check_type(label, str, 'arg "label" of function "Project.addPropertyToProjectMetadataSchema"')
        self._check_type(type, float, 'arg "type" of function "Project.addPropertyToProjectMetadataSchema"')
        return self._eval_on_this_object('addPropertyToProjectMetadataSchema({}, {}, {})'.format(_format_object_to_es(name), _format_object_to_es(label), _format_object_to_es(type)))
    def getInsertionBin(self):
        return ProjectItem(**self._eval_on_this_object('getInsertionBin()'))
    def getProjectPanelMetadata(self):
        self._eval_on_this_object('getProjectPanelMetadata()')
    def setProjectPanelMetadata(self):
        self._eval_on_this_object('setProjectPanelMetadata()')
    def setScratchDiskPath(self, value, type):
        self._check_type(value, str, 'arg "value" of function "Project.setScratchDiskPath"')
        self._check_type(type, str, 'arg "type" of function "Project.setScratchDiskPath"')
        self._eval_on_this_object('setScratchDiskPath({}, {})'.format(_format_object_to_es(value), _format_object_to_es(type)))
    def consolidateDuplicates(self):
        self._eval_on_this_object('consolidateDuplicates()')
    def setEnableTranscodeOnIngest(self, inEnable):
        self._check_type(inEnable, bool, 'arg "inEnable" of function "Project.setEnableTranscodeOnIngest"')
        return self._eval_on_this_object('setEnableTranscodeOnIngest({})'.format(_format_object_to_es(inEnable)))
class ChoiceFeedbackSerializer(serializers.Serializer):
    """Validates that ``(id, value_id)`` names an existing multiple-choice
    feedback question and one of its associated values."""
    id = serializers.IntegerField()
    value_id = serializers.IntegerField()

    def validate(self, data):
        """Accept only when the question exists and owns the given value."""
        question_exists = object_exists(ChoiceFeedbackQuestion, pk=data['id'])
        # Short-circuit keeps the value query from running for an unknown question.
        if question_exists and ChoiceFeedbackQuestionValue.objects.filter(question_id=data['id'], pk=data['value_id']).exists():
            return data
        raise serializers.ValidationError("The multiple choice value isn't associated with question")
class AverageMeter():
    """Tracks the most recent value plus a running sum, count and average."""

    def __init__(self):
        # Starting fresh is the same as clearing.
        self.clear()

    def update(self, val, n=1):
        """Record *val* observed *n* times and refresh the running average."""
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count

    def clear(self):
        """Reset all statistics to zero."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0
def extract_few_shot_feature(cfg, clip_model, train_loader_cache):
    """Build and persist the Tip-Adapter-style few-shot cache.

    Encodes the cache loader with CLIP for ``cfg['augment_epoch']``
    augmentation epochs, averages and L2-normalises the image features
    (cache keys, stored transposed), one-hot encodes the labels (cache
    values, half precision), and saves both under ``cfg['cache_dir']``.
    """
    cache_keys = []
    cache_values = []
    with torch.no_grad():
        for augment_idx in range(cfg['augment_epoch']):
            epoch_features = []
            print('Augment Epoch: {:} / {:}'.format(augment_idx, cfg['augment_epoch']))
            for i, (images, target) in enumerate(tqdm(train_loader_cache)):
                images = images.cuda()
                epoch_features.append(clip_model.encode_image(images))
                # Labels repeat identically every augment epoch; keep them once.
                if augment_idx == 0:
                    cache_values.append(target.cuda())
            cache_keys.append(torch.cat(epoch_features, dim=0).unsqueeze(0))
    # Average over augmentation epochs, normalise, and store as (dim, N).
    cache_keys = torch.cat(cache_keys, dim=0).mean(dim=0)
    cache_keys /= cache_keys.norm(dim=-1, keepdim=True)
    cache_keys = cache_keys.permute(1, 0)
    cache_values = F.one_hot(torch.cat(cache_values, dim=0)).half()
    torch.save(cache_keys, cfg['cache_dir'] + '/keys_' + str(cfg['shots']) + 'shots.pt')
    torch.save(cache_values, cfg['cache_dir'] + '/values_' + str(cfg['shots']) + 'shots.pt')
    return
def _find_chrome_win() -> Optional[str]:
    """Locate chrome.exe via the Windows registry (HKCU first, then HKLM).

    Returns the executable path, or ``None`` when Chrome is not registered
    or the registered path does not exist on disk.  Windows-only: imports
    ``winreg`` lazily so the module stays importable elsewhere.
    """
    import winreg as reg
    reg_path = 'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\App Paths\\chrome.exe'
    chrome_path: Optional[str] = None
    for install_type in (reg.HKEY_CURRENT_USER, reg.HKEY_LOCAL_MACHINE):
        try:
            reg_key = reg.OpenKey(install_type, reg_path, 0, reg.KEY_READ)
            chrome_path = reg.QueryValue(reg_key, None)
            reg_key.Close()
            if (not os.path.isfile(chrome_path)):
                # Bug fix: previously a stale non-existent path set on the
                # last iteration leaked out of the loop and was returned.
                chrome_path = None
                continue
        except WindowsError:
            chrome_path = None
        else:
            break
    return chrome_path
def getExceptionMessage(exceptionDetails: dict) -> str:
    """Render a Chrome DevTools Protocol ``exceptionDetails`` payload as text.

    Prefers the exception's own ``description`` (or ``value``); otherwise falls
    back to the ``text`` field followed by one ``at func (url:line:col)`` line
    per stack frame.
    """
    exception = exceptionDetails.get('exception')
    if exception:
        return (exception.get('description') or exception.get('value'))
    message = exceptionDetails.get('text', '')
    stackTrace = exceptionDetails.get('stackTrace', dict())
    if stackTrace:
        for callframe in stackTrace.get('callFrames'):
            # BUG FIX: 'columnNumber' now defaults to '' like its sibling keys
            # ('url', 'lineNumber') instead of rendering the string 'None'
            # when the frame omits it.
            location = ((((str(callframe.get('url', '')) + ':') + str(callframe.get('lineNumber', ''))) + ':') + str(callframe.get('columnNumber', '')))
            functionName = callframe.get('functionName', '<anonymous>')
            message = message + f'\n    at {functionName} ({location})'
    return message
class CIFARSEPreResNet(nn.Module):
    """SE-PreResNet for CIFAR: a 3x3 stem conv, three residual stages of
    SEPreResUnit blocks, 8x8 average pooling and a linear classifier."""

    def __init__(self, channels, init_block_channels, bottleneck, in_channels=3, in_size=(32, 32), num_classes=10):
        super(CIFARSEPreResNet, self).__init__()
        self.in_size = in_size
        self.num_classes = num_classes
        self.features = nn.Sequential()
        self.features.add_module('init_block', conv3x3_block(in_channels=in_channels, out_channels=init_block_channels))
        in_channels = init_block_channels
        for stage_idx, stage_channels in enumerate(channels):
            stage = nn.Sequential()
            for unit_idx, out_channels in enumerate(stage_channels):
                # Downsample at the first unit of every stage except the first.
                stride = 2 if unit_idx == 0 and stage_idx != 0 else 1
                stage.add_module('unit{}'.format(unit_idx + 1), SEPreResUnit(in_channels=in_channels, out_channels=out_channels, stride=stride, bottleneck=bottleneck, conv1_stride=False))
                in_channels = out_channels
            self.features.add_module('stage{}'.format(stage_idx + 1), stage)
        self.features.add_module('final_pool', nn.AvgPool2d(kernel_size=8, stride=1))
        self.output = nn.Linear(in_features=in_channels, out_features=num_classes)
        self._init_params()

    def _init_params(self):
        """He-initialize every conv weight and zero its bias (if present)."""
        for name, module in self.named_modules():
            if isinstance(module, nn.Conv2d):
                init.kaiming_uniform_(module.weight)
                if module.bias is not None:
                    init.constant_(module.bias, 0)

    def forward(self, x):
        """Extract features, flatten per sample, and classify."""
        x = self.features(x)
        x = x.view(x.size(0), -1)
        return self.output(x)
def test_aws_session_class_unsigned_noboto3(monkeypatch):
    """AWSSession falls back to unsigned requests when boto3 is unavailable."""
    import rasterio.session
    monkeypatch.setenv('AWS_NO_SIGN_REQUEST', 'YES')
    # Simulate boto3 being missing from the environment.
    monkeypatch.setattr(rasterio.session, 'boto3', None)
    assert rasterio.session.boto3 is None
    session = AWSSession()
    assert session.unsigned is True
    options = session.get_credential_options()
    assert options['AWS_NO_SIGN_REQUEST'] == 'YES'
    monkeypatch.undo()
class TestSAFEGRD(unittest.TestCase):
    """Tests for the Sentinel-1 SAR-C SAFE GRD file handler."""

    # BUG FIX: the mock.patch decorators below were missing — only their
    # argument strings remained as bare no-op expressions — so the mocked_*
    # parameters of setUp and the calibration tests were never supplied and
    # every test errored with a TypeError.
    @mock.patch('rasterio.open')
    def setUp(self, mocked_rio_open):
        """Build a SAFEGRD handler backed by mocked calibration/noise/annotation files."""
        from satpy.readers.sar_c_safe import SAFEGRD
        filename_info = {'mission_id': 'S1A', 'dataset_name': 'foo', 'start_time': 0, 'end_time': 0, 'polarization': 'vv'}
        filetype_info = 'bla'
        self.noisefh = mock.MagicMock()
        self.noisefh.get_noise_correction.return_value = xr.DataArray(np.zeros((2, 2)), dims=['y', 'x'])
        self.calfh = mock.MagicMock()
        self.calfh.get_calibration_constant.return_value = 1
        self.calfh.get_calibration.return_value = xr.DataArray(np.ones((2, 2)), dims=['y', 'x'])
        self.annotationfh = mock.MagicMock()
        self.test_fh = SAFEGRD('S1A_IW_GRDH_1SDV_T024655_T024720_025730_02DC2A_AE07.SAFE/measurement/s1a-iw-grd-vv-t024655-t024720-025730-02dc2a-001.tiff', filename_info, filetype_info, self.calfh, self.noisefh, self.annotationfh)
        self.mocked_rio_open = mocked_rio_open

    def test_instantiate(self):
        """The handler records polarization and the helper file handles."""
        assert (self.test_fh._polarization == 'vv')
        assert (self.test_fh.calibration == self.calfh)
        assert (self.test_fh.noise == self.noisefh)
        self.mocked_rio_open.assert_called()

    @mock.patch('xarray.open_dataset')
    def test_read_calibrated_natural(self, mocked_xarray_open):
        """Calibrated sigma-nought in natural (linear) units."""
        calibration = mock.MagicMock()
        calibration.name = 'sigma_nought'
        mocked_xarray_open.return_value.__getitem__.return_value = xr.DataArray(da.from_array(np.array([[0, 1], [2, 3]])), dims=['y', 'x'])
        xarr = self.test_fh.get_dataset(DataQuery(name='measurement', polarization='vv', calibration=calibration, quantity='natural'), info=dict())
        np.testing.assert_allclose(xarr, [[np.nan, 2], [5, 10]])

    @mock.patch('xarray.open_dataset')
    def test_read_calibrated_dB(self, mocked_xarray_open):
        """Calibrated sigma-nought converted to decibels."""
        calibration = mock.MagicMock()
        calibration.name = 'sigma_nought'
        mocked_xarray_open.return_value.__getitem__.return_value = xr.DataArray(da.from_array(np.array([[0, 1], [2, 3]])), dims=['y', 'x'])
        xarr = self.test_fh.get_dataset(DataQuery(name='measurement', polarization='vv', calibration=calibration, quantity='dB'), info=dict())
        np.testing.assert_allclose(xarr, [[np.nan, 3.0103], [6.9897, 10]])

    def test_read_lon_lats(self):
        """Longitudes are interpolated from the GCP grid attached to the dataset."""
        class FakeGCP():
            def __init__(self, *args):
                (self.row, self.col, self.x, self.y, self.z) = args
        gcps = [FakeGCP(0, 0, 0, 0, 0), FakeGCP(0, 3, 1, 0, 0), FakeGCP(3, 0, 0, 1, 0), FakeGCP(3, 3, 1, 1, 0), FakeGCP(0, 7, 2, 0, 0), FakeGCP(3, 7, 2, 1, 0), FakeGCP(7, 7, 2, 2, 0), FakeGCP(7, 3, 1, 2, 0), FakeGCP(7, 0, 0, 2, 0), FakeGCP(0, 15, 3, 0, 0), FakeGCP(3, 15, 3, 1, 0), FakeGCP(7, 15, 3, 2, 0), FakeGCP(15, 15, 3, 3, 0), FakeGCP(15, 7, 2, 3, 0), FakeGCP(15, 3, 1, 3, 0), FakeGCP(15, 0, 0, 3, 0)]
        crs = dict(init='epsg:4326')
        self.mocked_rio_open.return_value.gcps = [gcps, crs]
        self.mocked_rio_open.return_value.shape = [16, 16]
        query = DataQuery(name='longitude', polarization='vv')
        xarr = self.test_fh.get_dataset(query, info=dict())
        expected = np.array([[3.e-16, 0., 0., 1.0, 0., 0., 0.3625, 8.e-17, (- 0.), (- 0.675), (- 0.), (- 0.), (- 0.875), (- 0.), 0., 1.0], [1., 1.324375, 1., 1., 1., 1., 0., 0.8, 0.63125, 0., 0.3578125, 0., 0., 0., 0.4834375, 0.], [1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 0., 0., 0.], [2.0, 1., 1., 2.0, 2., 2., 2.0125, 2.0, 1., 1.925, 1., 1., 1.625, 1., 1., 1.0], [1., 2., 2., 2., 2., 2., 2., 2., 2., 2.2846216, 2., 2., 1., 1., 1., 1.], [1., 1., 2.2994083, 2.5734127, 2., 2., 2., 2., 2., 2., 2.2875744, 2., 2., 1., 1., 1.], [0.7125, 1., 2., 2., 3., 3.1238485, 3., 2., 2.6890377, 2., 2.2125372, 2., 1., 1., 2., 2.], [5.e-16, 1.4, 2., 3.0, 3., 3., 3., 3.0, 2., 2., 2.0, 1., 1., 1., 2., 3.0], [(- 0.), 1., 2., 3., 3., 3., 3., 3.0, 2., 2., 1., 1., 1., 1., 2., 3.], [(- 1.275), 0., 2., 3., 3., 3., 3., 2., 2., 1., 1., 1., 1., 1., 2., 3.], [(- 1.), 0.6953125, 2., 3., 3., 3., 3., 2.75, 2.109375, 1.4780506, 0., 0., 0., 1., 2., 3.], [(- 1.), 0., 2., 3., 3., 3., 3., 2., 1., 1., 0., 0., 0., 0., 2., 3.8984127], [(- 1.375), 0., 2., 3., 3., 3., 2., 2., 1., 0., 0., 0., 0., 0., 1., 3.], [(- 0.), 1., 2., 2.7797619, 2., 2., 2., 1., 1., 0., 0., (- 0.), 0., 0., 1., 2.7797619], [0., 1., 2., 2., 2.4180506, 2., 1., 1.45, 1., 0., 0., 0., 0., 0., 0., 1.], [3.0, 2., 2., 2.0, 1., 1., 1., 1.0, 0., 0., 0., 0., 0., 0., 0., 0.0]])
        np.testing.assert_allclose(xarr.values, expected)
_module()
class I3DHead(BaseHead):
    """Classification head for I3D: 3D average pooling, optional dropout,
    and a single fully-connected classification layer."""

    def __init__(self, num_classes, in_channels, loss_cls=dict(type='CrossEntropyLoss'), spatial_type='avg', dropout_ratio=0.5, init_std=0.01, **kwargs):
        super().__init__(num_classes, in_channels, loss_cls, **kwargs)
        self.spatial_type = spatial_type
        self.dropout_ratio = dropout_ratio
        self.init_std = init_std
        # A dropout ratio of 0 disables the dropout layer entirely.
        self.dropout = nn.Dropout(p=self.dropout_ratio) if self.dropout_ratio != 0 else None
        self.fc_cls = nn.Linear(self.in_channels, self.num_classes)
        # Only 'avg' pooling is supported; anything else skips pooling.
        self.avg_pool = nn.AdaptiveAvgPool3d((1, 1, 1)) if self.spatial_type == 'avg' else None

    def init_weights(self):
        """Initialize classifier weights from N(0, init_std)."""
        normal_init(self.fc_cls, std=self.init_std)

    def forward(self, x):
        """Pool the clip features, apply dropout, flatten, and score classes."""
        if self.avg_pool is not None:
            x = self.avg_pool(x)
        if self.dropout is not None:
            x = self.dropout(x)
        x = x.view(x.shape[0], -1)
        return self.fc_cls(x)
class SyncProgress():
    """Tracks sync/processing progress of tokens flowing through a response queue.

    Two SyncEvent instances let other greenlets/threads wait for sync and
    processing milestones; counters and last-seen tokens expose progress.
    NOTE(review): the exact wake-up semantics depend on the project's
    SyncEvent/NotifyingQueue implementations, which are not visible here.
    """

    def __init__(self, response_queue: NotifyingQueue[Tuple[(UUID, JSONResponse, datetime)]]) -> None:
        # Event fired each time a token is marked synced.
        self.synced_event = SyncEvent()
        # Event fired each time a batch of tokens is marked processed.
        self.processed_event = SyncEvent()
        # Number of set_synced calls so far.
        self.sync_iteration = 0
        # Total number of tokens marked processed so far.
        self.processed_iteration = 0
        self.last_synced: Optional[UUID] = None
        self.last_processed: Optional[UUID] = None
        # Queue of (token, response, timestamp) tuples still awaiting consumption.
        self.response_queue = response_queue

    def set_synced(self, token: UUID) -> None:
        """Record *token* as synced and pulse the synced event once."""
        self.sync_iteration += 1
        self.last_synced = token
        # set() then clear() wakes current waiters without latching the event.
        self.synced_event.set([token])
        self.synced_event.clear()

    def set_processed(self, tokens: List[UUID]) -> None:
        """Record a batch of *tokens* as processed and pulse the processed event."""
        self.processed_iteration += len(tokens)
        self.last_processed = tokens[(- 1)]
        self.processed_event.set(tokens)
        self.processed_event.clear()

    def is_processed(self, response_token: UUID) -> bool:
        """Return True when *response_token* is no longer pending in the queue.

        Snapshots the underlying queue and reports False while the token is
        still present (i.e. not yet consumed/processed).
        """
        response_list = list(self.response_queue.queue.queue)
        for response_data in response_list:
            token = response_data[0]
            if (token == response_token):
                return False
        return True

    def wait_for_processed(self, token: UUID, offset: int=0) -> Optional[UUID]:
        """Block until *token* has been processed, then wait *offset* more pulses.

        Returns the most recently processed token.
        NOTE(review): processed_event.set() is called with a *list* of tokens,
        so `token == processed_token` below compares a UUID against whatever
        wait() returns — verify SyncEvent.wait()'s return type; as written this
        comparison may never be true and the loop would rely solely on
        is_processed() for termination.
        """
        counter = 0
        while (not self.is_processed(token)):
            processed_token = self.processed_event.wait()
            if (token == processed_token):
                break
        while (counter < offset):
            self.processed_event.wait()
            counter += 1
        return self.last_processed
class FunctionEmitterVisitor(OpVisitor[None]):
    """Emit C source for mypyc IR ops — one visit_* method per op kind.

    Each visitor writes C lines through ``self.emitter``; forward declarations
    go to ``self.declarations``. ``self.ops``/``self.op_index`` and
    ``self.next_block`` let visitors peek at the following op or block, which
    enables two optimizations: eliding a ``goto`` to the fall-through block,
    and merging an attribute error check with the Branch that consumes it.
    """

    def __init__(self, emitter: Emitter, declarations: Emitter, source_path: str, module_name: str) -> None:
        self.emitter = emitter
        self.names = emitter.names
        self.declarations = declarations
        self.source_path = source_path
        self.module_name = module_name
        self.literals = emitter.context.literals
        # True while emitting code on a rarely-taken path (affects refcount helpers).
        self.rare = False
        # The basic block emitted immediately after the current one, if known.
        self.next_block: (BasicBlock | None) = None
        self.ops: list[Op] = []
        self.op_index = 0

    def temp_name(self) -> str:
        return self.emitter.temp_name()

    def visit_goto(self, op: Goto) -> None:
        # Skip the goto entirely when the target is the fall-through block.
        if (op.label is not self.next_block):
            self.emit_line(('goto %s;' % self.label(op.label)))

    def visit_branch(self, op: Branch) -> None:
        """Emit a conditional branch, flipping the condition when that lets the
        true edge fall through, and annotating rare paths with likely/unlikely."""
        (true, false) = (op.true, op.false)
        negated = op.negated
        negated_rare = False
        if ((true is self.next_block) and (op.traceback_entry is None)):
            # Flip the branch so the (original) true edge becomes fall-through.
            (true, false) = (false, true)
            negated = (not negated)
            negated_rare = True
        neg = ('!' if negated else '')
        cond = ''
        if (op.op == Branch.BOOL):
            expr_result = self.reg(op.value)
            cond = f'{neg}{expr_result}'
        elif (op.op == Branch.IS_ERROR):
            typ = op.value.type
            compare = ('!=' if negated else '==')
            if isinstance(typ, RTuple):
                cond = self.emitter.tuple_undefined_check_cond(typ, self.reg(op.value), self.c_error_value, compare)
            else:
                cond = f'{self.reg(op.value)} {compare} {self.c_error_value(typ)}'
        else:
            assert False, 'Invalid branch'
        if ((op.traceback_entry is not None) or op.rare):
            if (not negated_rare):
                cond = f'unlikely({cond})'
            else:
                cond = f'likely({cond})'
        if (false is self.next_block):
            if (op.traceback_entry is None):
                if (true is not self.next_block):
                    self.emit_line(f'if ({cond}) goto {self.label(true)};')
            else:
                self.emit_line(f'if ({cond}) {{')
                self.emit_traceback(op)
                self.emit_lines(('goto %s;' % self.label(true)), '}')
        else:
            self.emit_line(f'if ({cond}) {{')
            self.emit_traceback(op)
            if (true is not self.next_block):
                self.emit_line(('goto %s;' % self.label(true)))
            self.emit_lines('} else', (' goto %s;' % self.label(false)))

    def visit_return(self, op: Return) -> None:
        value_str = self.reg(op.value)
        self.emit_line(('return %s;' % value_str))

    def visit_tuple_set(self, op: TupleSet) -> None:
        dest = self.reg(op)
        tuple_type = op.tuple_type
        self.emitter.declare_tuple_struct(tuple_type)
        if (len(op.items) == 0):
            # Empty tuples use a dummy flag field (C forbids empty structs).
            self.emit_line(f'{dest}.empty_struct_error_flag = 0;')
        else:
            for (i, item) in enumerate(op.items):
                self.emit_line(f'{dest}.f{i} = {self.reg(item)};')

    def visit_assign(self, op: Assign) -> None:
        dest = self.reg(op.dest)
        src = self.reg(op.src)
        if (dest != src):
            # clang whines about self assignment (which we might codegen)
            if (op.src.type.is_unboxed and (not op.dest.type.is_unboxed)):
                src = f'(void *){src}'
            self.emit_line(f'{dest} = {src};')

    def visit_assign_multi(self, op: AssignMulti) -> None:
        typ = op.dest.type
        assert isinstance(typ, RArray)
        dest = self.reg(op.dest)
        self.emit_line(('%s%s[%d] = %s;' % (self.emitter.ctype_spaced(typ.item_type), dest, len(op.src), c_array_initializer([self.reg(s) for s in op.src], indented=True))))

    def visit_load_error_value(self, op: LoadErrorValue) -> None:
        if isinstance(op.type, RTuple):
            values = [self.c_undefined_value(item) for item in op.type.types]
            tmp = self.temp_name()
            self.emit_line('{} {} = {{ {} }};'.format(self.ctype(op.type), tmp, ', '.join(values)))
            self.emit_line(f'{self.reg(op)} = {tmp};')
        else:
            self.emit_line(f'{self.reg(op)} = {self.c_error_value(op.type)};')

    def visit_load_literal(self, op: LoadLiteral) -> None:
        index = self.literals.literal_index(op.value)
        if (not is_int_rprimitive(op.type)):
            self.emit_line(('%s = CPyStatics[%d];' % (self.reg(op), index)), ann=op.value)
        else:
            # Tagged short ints have their low bit set to mark a boxed value.
            self.emit_line(('%s = (CPyTagged)CPyStatics[%d] | 1;' % (self.reg(op), index)), ann=op.value)

    def get_attr_expr(self, obj: str, op: (GetAttr | SetAttr), decl_cl: ClassIR) -> str:
        """Return a C expression for an attribute of ``obj``.

        For an attribute declared by a trait accessed through a trait type, the
        byte offset must be resolved at runtime via the vtable; otherwise a
        direct (possibly downcast) struct member access suffices.
        """
        cast = f'({op.class_type.struct_name(self.emitter.names)} *)'
        if (decl_cl.is_trait and op.class_type.class_ir.is_trait):
            trait_attr_index = list(decl_cl.attributes).index(op.attr)
            offset = self.emitter.temp_name()
            self.declarations.emit_line(f'size_t {offset};')
            self.emitter.emit_line('{} = {};'.format(offset, 'CPy_FindAttrOffset({}, {}, {})'.format(self.emitter.type_struct_name(decl_cl), f'({cast}{obj})->vtable', trait_attr_index)))
            attr_cast = f'({self.ctype(op.class_type.attr_type(op.attr))} *)'
            return f'*{attr_cast}((char *){obj} + {offset})'
        else:
            if op.class_type.class_ir.is_trait:
                assert (not decl_cl.is_trait)
                cast = f'({decl_cl.struct_name(self.emitter.names)} *)'
            return f'({cast}{obj})->{self.emitter.attr(op.attr)}'

    def visit_get_attr(self, op: GetAttr) -> None:
        dest = self.reg(op)
        obj = self.reg(op.obj)
        rtype = op.class_type
        cl = rtype.class_ir
        (attr_rtype, decl_cl) = cl.attr_details(op.attr)
        prefer_method = (cl.is_trait and attr_rtype.error_overlap)
        if cl.get_method(op.attr, prefer_method=prefer_method):
            # Attribute is implemented as a property: go through the vtable getter.
            version = ('_TRAIT' if cl.is_trait else '')
            self.emit_line(('%s = CPY_GET_ATTR%s(%s, %s, %d, %s, %s); /* %s */' % (dest, version, obj, self.emitter.type_struct_name(rtype.class_ir), rtype.getter_index(op.attr), rtype.struct_name(self.names), self.ctype(rtype.attr_type(op.attr)), op.attr)))
        else:
            attr_expr = self.get_attr_expr(obj, op, decl_cl)
            self.emitter.emit_line(f'{dest} = {attr_expr};')
            always_defined = cl.is_always_defined(op.attr)
            # If the following op is an error-check Branch on this value, merge
            # the AttributeError raise with that branch instead of emitting both.
            merged_branch = None
            if (not always_defined):
                self.emitter.emit_undefined_attr_check(attr_rtype, dest, '==', obj, op.attr, cl, unlikely=True)
                branch = self.next_branch()
                if (branch is not None):
                    if ((branch.value is op) and (branch.op == Branch.IS_ERROR) and (branch.traceback_entry is not None) and (not branch.negated)):
                        self.emit_attribute_error(branch, cl.name, op.attr)
                        self.emit_line(('goto %s;' % self.label(branch.true)))
                        merged_branch = branch
                        self.emitter.emit_line('}')
            if (not merged_branch):
                exc_class = 'PyExc_AttributeError'
                self.emitter.emit_line('PyErr_SetString({}, "attribute {} of {} undefined");'.format(exc_class, repr(op.attr), repr(cl.name)))
            if (attr_rtype.is_refcounted and (not op.is_borrowed)):
                if ((not merged_branch) and (not always_defined)):
                    self.emitter.emit_line('} else {')
                self.emitter.emit_inc_ref(dest, attr_rtype)
            if merged_branch:
                if (merged_branch.false is not self.next_block):
                    self.emit_line(('goto %s;' % self.label(merged_branch.false)))
                # The merged Branch has been fully handled here; skip it.
                self.op_index += 1
            elif (not always_defined):
                self.emitter.emit_line('}')

    def next_branch(self) -> (Branch | None):
        if ((self.op_index + 1) < len(self.ops)):
            next_op = self.ops[(self.op_index + 1)]
            if isinstance(next_op, Branch):
                return next_op
        return None

    def visit_set_attr(self, op: SetAttr) -> None:
        if (op.error_kind == ERR_FALSE):
            dest = self.reg(op)
        obj = self.reg(op.obj)
        src = self.reg(op.src)
        rtype = op.class_type
        cl = rtype.class_ir
        (attr_rtype, decl_cl) = cl.attr_details(op.attr)
        if cl.get_method(op.attr):
            # Attribute is a property: call the vtable setter.
            assert ((not op.is_init) and (op.error_kind == ERR_FALSE)), ('%s %d %d %s' % (op.attr, op.is_init, op.error_kind, rtype))
            version = ('_TRAIT' if cl.is_trait else '')
            self.emit_line(('%s = CPY_SET_ATTR%s(%s, %s, %d, %s, %s, %s); /* %s */' % (dest, version, obj, self.emitter.type_struct_name(rtype.class_ir), rtype.setter_index(op.attr), src, rtype.struct_name(self.names), self.ctype(rtype.attr_type(op.attr)), op.attr)))
        else:
            attr_expr = self.get_attr_expr(obj, op, decl_cl)
            if ((not op.is_init) and attr_rtype.is_refcounted):
                # Decref the old value unless it may still be undefined.
                always_defined = cl.is_always_defined(op.attr)
                if (not always_defined):
                    self.emitter.emit_undefined_attr_check(attr_rtype, attr_expr, '!=', obj, op.attr, cl)
                self.emitter.emit_dec_ref(attr_expr, attr_rtype)
                if (not always_defined):
                    self.emitter.emit_line('}')
            elif (attr_rtype.error_overlap and (not cl.is_always_defined(op.attr))):
                # Mark the attribute as defined in the defined-attribute bitmap.
                self.emitter.emit_attr_bitmap_set(src, obj, attr_rtype, cl, op.attr)
            self.emitter.emit_line(f'{attr_expr} = {src};')
            if (op.error_kind == ERR_FALSE):
                self.emitter.emit_line(f'{dest} = 1;')

    # Maps an IR static namespace to the corresponding C name prefix.
    PREFIX_MAP: Final = {NAMESPACE_STATIC: STATIC_PREFIX, NAMESPACE_TYPE: TYPE_PREFIX, NAMESPACE_MODULE: MODULE_PREFIX}

    def visit_load_static(self, op: LoadStatic) -> None:
        dest = self.reg(op)
        prefix = self.PREFIX_MAP[op.namespace]
        name = self.emitter.static_name(op.identifier, op.module_name, prefix)
        if (op.namespace == NAMESPACE_TYPE):
            name = ('(PyObject *)%s' % name)
        self.emit_line(f'{dest} = {name};', ann=op.ann)

    def visit_init_static(self, op: InitStatic) -> None:
        value = self.reg(op.value)
        prefix = self.PREFIX_MAP[op.namespace]
        name = self.emitter.static_name(op.identifier, op.module_name, prefix)
        if (op.namespace == NAMESPACE_TYPE):
            value = ('(PyTypeObject *)%s' % value)
        self.emit_line(f'{name} = {value};')
        self.emit_inc_ref(name, op.value.type)

    def visit_tuple_get(self, op: TupleGet) -> None:
        dest = self.reg(op)
        src = self.reg(op.src)
        self.emit_line(f'{dest} = {src}.f{op.index};')
        if (not op.is_borrowed):
            self.emit_inc_ref(dest, op.type)

    def get_dest_assign(self, dest: Value) -> str:
        if (not dest.is_void):
            return (self.reg(dest) + ' = ')
        else:
            return ''

    def visit_call(self, op: Call) -> None:
        dest = self.get_dest_assign(op)
        args = ', '.join((self.reg(arg) for arg in op.args))
        lib = self.emitter.get_group_prefix(op.fn)
        cname = op.fn.cname(self.names)
        self.emit_line(f'{dest}{lib}{NATIVE_PREFIX}{cname}({args});')

    def visit_method_call(self, op: MethodCall) -> None:
        """Emit a native method call: direct when the method is final,
        otherwise dispatched through the vtable."""
        dest = self.get_dest_assign(op)
        obj = self.reg(op.obj)
        rtype = op.receiver_type
        class_ir = rtype.class_ir
        name = op.method
        method = rtype.class_ir.get_method(name)
        assert (method is not None)
        is_direct = class_ir.is_method_final(name)
        obj_args = ([] if (method.decl.kind == FUNC_STATICMETHOD) else ([f'(PyObject *)Py_TYPE({obj})'] if (method.decl.kind == FUNC_CLASSMETHOD) else [obj]))
        args = ', '.join((obj_args + [self.reg(arg) for arg in op.args]))
        mtype = native_function_type(method, self.emitter)
        version = ('_TRAIT' if rtype.class_ir.is_trait else '')
        if is_direct:
            lib = self.emitter.get_group_prefix(method.decl)
            self.emit_line(f'{dest}{lib}{NATIVE_PREFIX}{method.cname(self.names)}({args});')
        else:
            method_idx = rtype.method_index(name)
            self.emit_line('{}CPY_GET_METHOD{}({}, {}, {}, {}, {})({}); /* {} */'.format(dest, version, obj, self.emitter.type_struct_name(rtype.class_ir), method_idx, rtype.struct_name(self.names), mtype, args, op.method))

    def visit_inc_ref(self, op: IncRef) -> None:
        src = self.reg(op.src)
        self.emit_inc_ref(src, op.src.type)

    def visit_dec_ref(self, op: DecRef) -> None:
        src = self.reg(op.src)
        self.emit_dec_ref(src, op.src.type, is_xdec=op.is_xdec)

    def visit_box(self, op: Box) -> None:
        self.emitter.emit_box(self.reg(op.src), self.reg(op), op.src.type, can_borrow=True)

    def visit_cast(self, op: Cast) -> None:
        branch = self.next_branch()
        handler = None
        if (branch is not None):
            # Merge the cast's error handling with the following error-check
            # Branch when it falls through on success.
            if ((branch.value is op) and (branch.op == Branch.IS_ERROR) and (branch.traceback_entry is not None) and (not branch.negated) and (branch.false is self.next_block)):
                handler = TracebackAndGotoHandler(self.label(branch.true), self.source_path, self.module_name, branch.traceback_entry)
                self.op_index += 1
        self.emitter.emit_cast(self.reg(op.src), self.reg(op), op.type, src_type=op.src.type, error=handler)

    def visit_unbox(self, op: Unbox) -> None:
        self.emitter.emit_unbox(self.reg(op.src), self.reg(op), op.type)

    def visit_unreachable(self, op: Unreachable) -> None:
        self.emitter.emit_line('CPy_Unreachable();')

    def visit_raise_standard_error(self, op: RaiseStandardError) -> None:
        if (op.value is not None):
            if isinstance(op.value, str):
                message = op.value.replace('"', '\\"')
                self.emitter.emit_line(f'PyErr_SetString(PyExc_{op.class_name}, "{message}");')
            elif isinstance(op.value, Value):
                self.emitter.emit_line('PyErr_SetObject(PyExc_{}, {});'.format(op.class_name, self.emitter.reg(op.value)))
            else:
                assert False, 'op value type must be either str or Value'
        else:
            self.emitter.emit_line(f'PyErr_SetNone(PyExc_{op.class_name});')
        self.emitter.emit_line(f'{self.reg(op)} = 0;')

    def visit_call_c(self, op: CallC) -> None:
        if op.is_void:
            dest = ''
        else:
            dest = self.get_dest_assign(op)
        args = ', '.join((self.reg(arg) for arg in op.args))
        self.emitter.emit_line(f'{dest}{op.function_name}({args});')

    def visit_truncate(self, op: Truncate) -> None:
        dest = self.reg(op)
        value = self.reg(op.src)
        # Simple assign since truncation is forced by the destination's C type.
        self.emit_line(f'{dest} = {value};')

    def visit_extend(self, op: Extend) -> None:
        dest = self.reg(op)
        value = self.reg(op.src)
        if op.signed:
            src_cast = self.emit_signed_int_cast(op.src.type)
        else:
            src_cast = self.emit_unsigned_int_cast(op.src.type)
        self.emit_line(f'{dest} = {src_cast}{value};')

    def visit_load_global(self, op: LoadGlobal) -> None:
        dest = self.reg(op)
        self.emit_line(f'{dest} = {op.identifier};', ann=op.ann)

    def visit_int_op(self, op: IntOp) -> None:
        dest = self.reg(op)
        lhs = self.reg(op.lhs)
        rhs = self.reg(op.rhs)
        if (op.op == IntOp.RIGHT_SHIFT):
            # Signed right shift (C's >> on unsigned types would be logical).
            lhs = (self.emit_signed_int_cast(op.lhs.type) + lhs)
            rhs = (self.emit_signed_int_cast(op.rhs.type) + rhs)
        self.emit_line(f'{dest} = {lhs} {op.op_str[op.op]} {rhs};')

    def visit_comparison_op(self, op: ComparisonOp) -> None:
        dest = self.reg(op)
        lhs = self.reg(op.lhs)
        rhs = self.reg(op.rhs)
        lhs_cast = ''
        rhs_cast = ''
        # Cast both operands so the C comparison matches the IR's signedness.
        if (op.op in (ComparisonOp.SLT, ComparisonOp.SGT, ComparisonOp.SLE, ComparisonOp.SGE)):
            lhs_cast = self.emit_signed_int_cast(op.lhs.type)
            rhs_cast = self.emit_signed_int_cast(op.rhs.type)
        elif (op.op in (ComparisonOp.ULT, ComparisonOp.UGT, ComparisonOp.ULE, ComparisonOp.UGE)):
            lhs_cast = self.emit_unsigned_int_cast(op.lhs.type)
            rhs_cast = self.emit_unsigned_int_cast(op.rhs.type)
        elif (isinstance(op.lhs, Integer) and (op.lhs.value < 0)):
            rhs_cast = self.emit_signed_int_cast(op.rhs.type)
        elif (isinstance(op.rhs, Integer) and (op.rhs.value < 0)):
            lhs_cast = self.emit_signed_int_cast(op.lhs.type)
        self.emit_line(f'{dest} = {lhs_cast}{lhs} {op.op_str[op.op]} {rhs_cast}{rhs};')

    def visit_float_op(self, op: FloatOp) -> None:
        dest = self.reg(op)
        lhs = self.reg(op.lhs)
        rhs = self.reg(op.rhs)
        if (op.op != FloatOp.MOD):
            self.emit_line(f'{dest} = {lhs} {op.op_str[op.op]} {rhs};')
        else:
            # C has no % operator for floats; use fmod().
            self.emit_line(f'{dest} = fmod({lhs}, {rhs});')

    def visit_float_neg(self, op: FloatNeg) -> None:
        dest = self.reg(op)
        src = self.reg(op.src)
        self.emit_line(f'{dest} = -{src};')

    def visit_float_comparison_op(self, op: FloatComparisonOp) -> None:
        dest = self.reg(op)
        lhs = self.reg(op.lhs)
        rhs = self.reg(op.rhs)
        self.emit_line(f'{dest} = {lhs} {op.op_str[op.op]} {rhs};')

    def visit_load_mem(self, op: LoadMem) -> None:
        dest = self.reg(op)
        src = self.reg(op.src)
        type = self.ctype(op.type)
        self.emit_line(f'{dest} = *({type} *){src};')

    def visit_set_mem(self, op: SetMem) -> None:
        dest = self.reg(op.dest)
        src = self.reg(op.src)
        dest_type = self.ctype(op.dest_type)
        if (dest != src):
            # clang whines about self assignment (which we might codegen)
            self.emit_line(f'*({dest_type} *){dest} = {src};')

    def visit_get_element_ptr(self, op: GetElementPtr) -> None:
        dest = self.reg(op)
        src = self.reg(op.src)
        assert isinstance(op.src_type, RStruct)
        assert (op.field in op.src_type.names), 'Invalid field name.'
        self.emit_line('{} = ({})&(({} *){})->{};'.format(dest, op.type._ctype, op.src_type.name, src, op.field))

    def visit_load_address(self, op: LoadAddress) -> None:
        typ = op.type
        dest = self.reg(op)
        if isinstance(op.src, Register):
            src = self.reg(op.src)
        elif isinstance(op.src, LoadStatic):
            prefix = self.PREFIX_MAP[op.src.namespace]
            src = self.emitter.static_name(op.src.identifier, op.src.module_name, prefix)
        else:
            src = op.src
        self.emit_line(f'{dest} = ({typ._ctype})&{src};')

    def visit_keep_alive(self, op: KeepAlive) -> None:
        # Compile-time only: keeps a value live for refcounting; emits nothing.
        pass

    def visit_unborrow(self, op: Unborrow) -> None:
        # Compile-time only: converts a borrowed reference to an owned one.
        dest = self.reg(op)
        src = self.reg(op.src)
        self.emit_line(f'{dest} = {src};')

    def label(self, label: BasicBlock) -> str:
        return self.emitter.label(label)

    def reg(self, reg: Value) -> str:
        """Return the C expression for an IR value: a literal for constants,
        otherwise the value's C variable name."""
        if isinstance(reg, Integer):
            val = reg.value
            if ((val == 0) and is_pointer_rprimitive(reg.type)):
                return 'NULL'
            s = str(val)
            if (val >= (1 << 31)):
                # Avoid overflowing a signed 32-bit int literal.
                if (val >= (1 << 63)):
                    s += 'ULL'
                else:
                    s += 'LL'
            elif (val == (- (1 << 63))):
                # BUG FIX: -9223372036854775808 can't be written directly as a
                # C literal (the literal part overflows before negation), so
                # spell it as (-INT64_MAX - 1). The numeric literal had been
                # lost here, producing invalid C: '(-LL - 1)'.
                s = '(-9223372036854775807LL - 1)'
            elif (val <= (- (1 << 31))):
                s += 'LL'
            return s
        elif isinstance(reg, Float):
            r = repr(reg.value)
            if (r == 'inf'):
                return 'INFINITY'
            elif (r == '-inf'):
                return '-INFINITY'
            elif (r == 'nan'):
                return 'NAN'
            return r
        else:
            return self.emitter.reg(reg)

    def ctype(self, rtype: RType) -> str:
        return self.emitter.ctype(rtype)

    def c_error_value(self, rtype: RType) -> str:
        return self.emitter.c_error_value(rtype)

    def c_undefined_value(self, rtype: RType) -> str:
        return self.emitter.c_undefined_value(rtype)

    def emit_line(self, line: str, *, ann: object=None) -> None:
        self.emitter.emit_line(line, ann=ann)

    def emit_lines(self, *lines: str) -> None:
        self.emitter.emit_lines(*lines)

    def emit_inc_ref(self, dest: str, rtype: RType) -> None:
        self.emitter.emit_inc_ref(dest, rtype, rare=self.rare)

    def emit_dec_ref(self, dest: str, rtype: RType, is_xdec: bool) -> None:
        self.emitter.emit_dec_ref(dest, rtype, is_xdec=is_xdec, rare=self.rare)

    def emit_declaration(self, line: str) -> None:
        self.declarations.emit_line(line)

    def emit_traceback(self, op: Branch) -> None:
        if (op.traceback_entry is not None):
            self.emitter.emit_traceback(self.source_path, self.module_name, op.traceback_entry)

    def emit_attribute_error(self, op: Branch, class_name: str, attr: str) -> None:
        """Emit a CPy_AttributeError call carrying source location info."""
        assert (op.traceback_entry is not None)
        globals_static = self.emitter.static_name('globals', self.module_name)
        self.emit_line(('CPy_AttributeError("%s", "%s", "%s", "%s", %d, %s);' % (self.source_path.replace('\\', '\\\\'), op.traceback_entry[0], class_name, attr, op.traceback_entry[1], globals_static)))
        if DEBUG_ERRORS:
            self.emit_line('assert(PyErr_Occurred() != NULL && "failure w/o err!");')

    def emit_signed_int_cast(self, type: RType) -> str:
        if is_tagged(type):
            return '(Py_ssize_t)'
        else:
            return ''

    def emit_unsigned_int_cast(self, type: RType) -> str:
        if is_int32_rprimitive(type):
            return '(uint32_t)'
        elif is_int64_rprimitive(type):
            return '(uint64_t)'
        else:
            return ''
def _handle_first_parameter(pyobject, parameters):
    """Fill in the implicit first argument (self/cls) of a method-like object.

    Ensures *parameters* has at least one entry when the object declares
    parameters, then replaces the first entry according to the object's kind.
    """
    kind = pyobject.get_kind()
    if not parameters:
        if not pyobject.get_param_names(special_args=False):
            return
        parameters.append(pyobjects.get_unknown())
    if kind == 'method':
        # Instance methods receive an instance of the defining class.
        parameters[0] = pyobjects.PyObject(pyobject.parent)
    elif kind == 'classmethod':
        # Classmethods receive the class object itself.
        parameters[0] = pyobject.parent
def upload_to_pypi(version: str, dry_run: bool=True) -> None:
    """Fetch the GitHub release assets for *version*, sanity-check them, and
    upload the distribution to PyPI (unless *dry_run*)."""
    assert re.match(r'v?[1-9]\.[0-9]+\.[0-9](\+\S+)?$', version)
    if 'dev' in version:
        assert dry_run, 'Must use --dry-run with dev versions of mypy'
    # Normalize a leading 'v' tag prefix away from the bare version number.
    if version.startswith('v'):
        version = version[1:]
    target_dir = tempfile.mkdtemp()
    dist = Path(target_dir) / 'dist'
    dist.mkdir()
    print(f'Temporary target directory: {target_dir}')
    release = get_release_for_tag(f'v{version}')
    download_all_release_assets(release, dist)
    spot_check_dist(dist, version)
    check_sdist(dist, version)
    upload_dist(dist, dry_run)
    print('<< All done! >>')
class FittingsTreeView(wx.Panel):
    """Tree of ESI fittings grouped by ship hull; selecting a fit displays
    its item list in the parent's fit view."""

    def __init__(self, parent):
        wx.Panel.__init__(self, parent, id=wx.ID_ANY)
        self.parent = parent
        pmainSizer = wx.BoxSizer(wx.VERTICAL)
        tree = self.fittingsTreeCtrl = wx.TreeCtrl(self, wx.ID_ANY, style=(wx.TR_DEFAULT_STYLE | wx.TR_HIDE_ROOT))
        pmainSizer.Add(tree, 1, (wx.EXPAND | wx.ALL), 0)
        self.root = tree.AddRoot('Fits')
        self.populateSkillTree(None)
        self.Bind(wx.EVT_TREE_SEL_CHANGED, self.displayFit)
        self.SetSizer(pmainSizer)
        self.Layout()

    def populateSkillTree(self, data):
        """Rebuild the tree from *data*, a list of ESI fit dicts (None = no-op).

        Fits deleted on the ESI side and fits whose hull cannot be resolved
        are skipped; remaining fits are grouped under one node per ship name.
        """
        if (data is None):
            return
        root = self.root
        tree = self.fittingsTreeCtrl
        tree.DeleteChildren(root)
        sEsi = Esi.getInstance()
        # FIX: local renamed from `dict` — it shadowed the builtin.
        fits_by_ship = {}
        for fit in data:
            if (fit['fitting_id'] in sEsi.fittings_deleted):
                continue
            ship = getItem(fit['ship_type_id'])
            if (ship is None):
                pyfalog.debug('Cannot find ship type id: {}'.format(fit['ship_type_id']))
                continue
            fits_by_ship.setdefault(ship.name, []).append(fit)
        for (name, fits) in fits_by_ship.items():
            shipID = tree.AppendItem(root, name)
            for fit in fits:
                fitId = tree.AppendItem(shipID, fit['name'])
                # Store the raw fit JSON on the node for displayFit().
                tree.SetItemData(fitId, json.dumps(fit))
        tree.SortChildren(root)

    def displayFit(self, event):
        """Show the cargo items of the selected fit in the parent's fit view."""
        selection = self.fittingsTreeCtrl.GetSelection()
        data = self.fittingsTreeCtrl.GetItemData(selection)
        if (data is None):
            event.Skip()
            return
        fit = json.loads(data)
        # FIX: local renamed from `list` — it shadowed the builtin.
        cargo_list = []
        for item in fit['items']:
            try:
                cargo = Cargo(getItem(item['type_id']))
                cargo.amount = item['quantity']
                cargo_list.append(cargo)
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                # Best-effort: log and keep building the rest of the list.
                pyfalog.critical('Exception caught in displayFit')
                pyfalog.critical(e)
        self.parent.fitView.fitSelection = selection
        self.parent.fitView.update(cargo_list)
def test_folding(workspace):
    # Folding ranges reported for the fixture DOC must match its known structure
    # exactly (one dict per foldable region, 0-based line numbers).
    doc = Document(DOC_URI, workspace, DOC)
    ranges = pylsp_folding_range(doc)
    expected = [{'startLine': 1, 'endLine': 6}, {'startLine': 2, 'endLine': 3}, {'startLine': 5, 'endLine': 6}, {'startLine': 8, 'endLine': 11}, {'startLine': 12, 'endLine': 20}, {'startLine': 13, 'endLine': 14}, {'startLine': 15, 'endLine': 16}, {'startLine': 17, 'endLine': 18}, {'startLine': 19, 'endLine': 20}, {'startLine': 22, 'endLine': 35}, {'startLine': 23, 'endLine': 35}, {'startLine': 24, 'endLine': 25}, {'startLine': 27, 'endLine': 29}, {'startLine': 28, 'endLine': 29}, {'startLine': 30, 'endLine': 31}, {'startLine': 32, 'endLine': 34}, {'startLine': 33, 'endLine': 34}, {'startLine': 38, 'endLine': 39}, {'startLine': 41, 'endLine': 43}, {'startLine': 42, 'endLine': 43}, {'startLine': 45, 'endLine': 54}, {'startLine': 47, 'endLine': 51}, {'startLine': 49, 'endLine': 51}, {'startLine': 50, 'endLine': 51}, {'startLine': 52, 'endLine': 54}, {'startLine': 53, 'endLine': 54}, {'startLine': 56, 'endLine': 57}, {'startLine': 59, 'endLine': 65}, {'startLine': 60, 'endLine': 61}, {'startLine': 62, 'endLine': 63}, {'startLine': 64, 'endLine': 65}, {'startLine': 67, 'endLine': 68}]
    if (sys.version_info[:2] >= (3, 9)):
        # Python 3.9+'s parser yields one extra foldable region in the fixture.
        expected.insert(4, {'startLine': 9, 'endLine': 10})
    assert (ranges == expected)
def create(config_file: str) -> TrackerBase:
    """Build a FsspecTracker from a config file.

    The config must contain 'protocol' and 'root_path'; all remaining keys
    are forwarded to ``fsspec.filesystem`` as keyword arguments.

    Raises:
        ValueError: if a required key is missing. (ValueError subclasses
            Exception, so existing callers catching Exception still work.)
    """
    config = _read_config(config_file)
    if ('protocol' not in config) or ('root_path' not in config):
        raise ValueError(f"Please specify 'protocol' and 'root_path' in {config_file}")
    # pop() removes the consumed keys so only fs-specific options remain.
    protocol = config.pop('protocol')
    root = config.pop('root_path')
    fs = fsspec.filesystem(protocol, **config)
    return FsspecTracker(fs, root)
class MJVGEOM(Structure):
    """ctypes mirror of MuJoCo's mjvGeom struct: one abstract visualization
    geom with its appearance (texture, material, color) and pose fields."""
    _fields_ = [
        ('type', c_int),
        ('dataid', c_int),
        ('objtype', c_int),
        ('objid', c_int),
        ('category', c_int),
        ('texid', c_int),
        ('texuniform', c_int),
        ('texrepeat', c_float * 2),
        ('size', c_float * 3),
        ('pos', c_float * 3),
        ('mat', c_float * 9),
        ('rgba', c_float * 4),
        ('emission', c_float),
        ('specular', c_float),
        ('shininess', c_float),
        ('reflectance', c_float),
        ('label', c_char * 100),
        ('camdist', c_float),
        ('rbound', c_float),
        ('transparent', c_ubyte),
    ]
# NOTE(review): the bare call below is almost certainly a mangled decorator
# (`@_HEADS_REGISTRY.register()`) for the class that follows; confirm
# against the original source.
_HEADS_REGISTRY.register()
class EmbeddingHead(nn.Module):
    """Re-ID style embedding head: global pooling -> optional 1x1 bottleneck
    with BN neck -> classifier, fully configured from ``cfg.MODEL.HEADS.*``.
    """
    def __init__(self, cfg):
        super().__init__()
        feat_dim = cfg.MODEL.BACKBONE.FEAT_DIM
        embedding_dim = cfg.MODEL.HEADS.EMBEDDING_DIM
        num_classes = cfg.MODEL.HEADS.NUM_CLASSES
        neck_feat = cfg.MODEL.HEADS.NECK_FEAT
        pool_type = cfg.MODEL.HEADS.POOL_LAYER
        cls_type = cfg.MODEL.HEADS.CLS_LAYER
        with_bnneck = cfg.MODEL.HEADS.WITH_BNNECK
        norm_type = cfg.MODEL.HEADS.NORM
        # Select the global pooling layer by config key; unknown keys fail fast.
        if (pool_type == 'fastavgpool'):
            self.pool_layer = FastGlobalAvgPool2d()
        elif (pool_type == 'avgpool'):
            self.pool_layer = nn.AdaptiveAvgPool2d(1)
        elif (pool_type == 'maxpool'):
            self.pool_layer = nn.AdaptiveMaxPool2d(1)
        elif (pool_type == 'gempoolP'):
            self.pool_layer = GeneralizedMeanPoolingP()
        elif (pool_type == 'gempool'):
            self.pool_layer = GeneralizedMeanPooling()
        elif (pool_type == 'avgmaxpool'):
            self.pool_layer = AdaptiveAvgMaxPool2d()
        elif (pool_type == 'clipavgpool'):
            self.pool_layer = ClipGlobalAvgPool2d()
        elif (pool_type == 'identity'):
            self.pool_layer = nn.Identity()
        elif (pool_type == 'flatten'):
            self.pool_layer = Flatten()
        else:
            raise KeyError(f'{pool_type} is not supported!')
        self.neck_feat = neck_feat
        # NOTE(review): `mask_layer` is never used in forward() below --
        # possibly used by a subclass or dead code; verify before relying on it.
        self.mask_layer = nn.Linear(feat_dim, 1, bias=False)
        bottleneck = []
        if (embedding_dim > 0):
            # 1x1 conv projects backbone features down to embedding_dim.
            bottleneck.append(nn.Conv2d(feat_dim, embedding_dim, 1, 1, bias=False))
            feat_dim = embedding_dim
        if with_bnneck:
            bottleneck.append(get_norm(norm_type, feat_dim, bias_freeze=True))
        self.bottleneck = nn.Sequential(*bottleneck)
        # Select the classification layer by config key; unknown keys fail fast.
        if (cls_type == 'linear'):
            self.classifier = nn.Linear(feat_dim, num_classes, bias=False)
        elif (cls_type == 'arcSoftmax'):
            self.classifier = ArcSoftmax(cfg, feat_dim, num_classes)
        elif (cls_type == 'circleSoftmax'):
            self.classifier = CircleSoftmax(cfg, feat_dim, num_classes)
        elif (cls_type == 'cosSoftmax'):
            self.classifier = CosSoftmax(cfg, feat_dim, num_classes)
        else:
            raise KeyError(f'{cls_type} is not supported!')
        self.bottleneck.apply(weights_init_kaiming)
        self.classifier.apply(weights_init_classifier)
    def forward(self, features, targets=None):
        """Return bare BN-neck features in eval mode, or a dict of
        cls_outputs / pred_class_logits / features in training mode.
        """
        global_feat = self.pool_layer(features)
        bn_feat = self.bottleneck(global_feat)
        # Squeeze the trailing spatial dims -- assumes pooled features are
        # (N, C, 1, 1); TODO confirm for 'identity'/'flatten' pool types.
        bn_feat = bn_feat[(..., 0, 0)]
        if (not self.training):
            return bn_feat
        if (self.classifier.__class__.__name__ == 'Linear'):
            cls_outputs = self.classifier(bn_feat)
            pred_class_logits = F.linear(bn_feat, self.classifier.weight)
        else:
            # NOTE(review): gating margin-softmax outputs on `len(targets) > 56`
            # looks like a magic batch-size threshold; comparable heads usually
            # check `targets is not None` -- confirm this is intentional.
            if (len(targets) > 56):
                cls_outputs = self.classifier(bn_feat, targets)
            else:
                cls_outputs = []
            pred_class_logits = (self.classifier.s * F.linear(F.normalize(bn_feat), F.normalize(self.classifier.weight)))
        if (self.neck_feat == 'before'):
            feat = global_feat[(..., 0, 0)]
        elif (self.neck_feat == 'after'):
            feat = bn_feat
        else:
            raise KeyError(f'{self.neck_feat} is invalid for MODEL.HEADS.NECK_FEAT')
        return {'cls_outputs': cls_outputs, 'pred_class_logits': pred_class_logits, 'features': feat}
# NOTE(review): bare name statement -- almost certainly a mangled decorator
# (`@_series_method`) for the function below; confirm against the original.
_series_method
def logit(s: 'Series', error: str='warn') -> 'Series':
    """Element-wise logit (log(p / (1 - p))) of a Series.

    Values outside the open interval (0, 1) are replaced with NaN before
    the transform. Depending on ``error`` they additionally trigger a
    RuntimeWarning ('warn', the default), a RuntimeError ('raise'), or
    nothing at all (any other value, e.g. 'ignore').
    """
    import numpy as np
    # BUG FIX: `import scipy` does not guarantee the `scipy.special`
    # submodule is loaded; import it explicitly before using it below.
    import scipy.special
    s = s.copy()
    outside_support = ((s <= 0) | (s >= 1))
    if outside_support.any():
        msg = f'{outside_support.sum()} value(s) are outside of (0, 1)'
        # elif makes 'warn' and 'raise' explicitly mutually exclusive
        # instead of re-testing `error` independently.
        if (error.lower() == 'warn'):
            warnings.warn(msg, RuntimeWarning)
        elif (error.lower() == 'raise'):
            raise RuntimeError(msg)
        s[outside_support] = np.nan
    return scipy.special.logit(s)
def _migrate_v7(json_dict: dict) -> dict:
renamed_items = {'3HP Life Capsule': 'Small Life Capsule', '4HP Life Capsule': 'Medium Life Capsule', '5HP Life Capsule': 'Large Life Capsule', 'Missile Expansion (24)': 'Large Missile Expansion'}
for game in json_dict['game_modifications']:
if (game['game'] != 'cave_story'):
continue
for (world, locations) in game['locations'].items():
game['locations'][world] = {k: renamed_items.get(v, v) for (k, v) in locations.items()}
game['starting_items']['Missiles'] = 5
return json_dict |
# NOTE(review): the bare tuple below looks like a mangled pytest decorator --
# presumably `@pytest.fixture(scope='session')`; confirm against the original.
(scope='session')
def unicode_images():
    """Session fixture: a parent image plus a child image whose config
    contains non-ASCII (mojibake) text, for exercising unicode handling.
    """
    parent_bytes = layer_bytes_for_contents(b'parent contents')
    image_bytes = layer_bytes_for_contents(b'some contents')
    # NOTE(review): the comment string below is mojibake (probably a UTF-8
    # "Pawel Kaminski" read as latin-1); kept verbatim since tests may rely
    # on these exact bytes.
    return [Image(id='parentid', bytes=parent_bytes, parent_id=None), Image(id='someid', bytes=image_bytes, parent_id='parentid', config={'comment': 'the PaweA\x82 KamiA\x84ski image', 'author': 'Some guy'})]
class Sampler(torch.utils.data.sampler.Sampler):
    """Class-balanced random batch sampler.

    Each yielded batch contains ``bs // samples_per_class`` randomly
    chosen classes with ``samples_per_class`` random sample indices each.
    """
    def __init__(self, opt, image_dict, image_list, **kwargs):
        self.pars = opt
        self.image_dict = image_dict
        self.image_list = image_list
        self.classes = list(self.image_dict.keys())
        self.batch_size = opt.bs
        self.samples_per_class = opt.samples_per_class
        # One epoch yields as many batches as fit into the dataset.
        self.sampler_length = len(image_list) // opt.bs
        assert self.batch_size % self.samples_per_class == 0, '#Samples per class must divide batchsize!'
        self.name = 'class_random_sampler'
        self.requires_storage = False
    def __iter__(self):
        classes_per_batch = self.batch_size // self.samples_per_class
        for _ in range(self.sampler_length):
            batch = []
            for _ in range(classes_per_batch):
                chosen_class = random.choice(self.classes)
                for _ in range(self.samples_per_class):
                    # Entries of image_dict values are tuples whose last
                    # element is the global sample index.
                    batch.append(random.choice(self.image_dict[chosen_class])[-1])
            yield batch
    def __len__(self):
        return self.sampler_length
def get_parser():
    """Build the CLI parser for the flac manifest-creation tool."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'root', metavar='DIR',
        help='root directory containing flac files to index')
    parser.add_argument(
        '--valid-percent', default=0.01, type=float, metavar='D',
        help='percentage of data to use as validation set (between 0 and 1)')
    parser.add_argument(
        '--dest', default='.', type=str, metavar='DIR',
        help='output directory')
    parser.add_argument(
        '--ext', default='flac', type=str, metavar='EXT',
        help='extension to look for')
    parser.add_argument(
        '--seed', default=42, type=int, metavar='N',
        help='random seed')
    parser.add_argument(
        '--path-must-contain', default=None, type=str, metavar='FRAG',
        help='if set, path must contain this substring for a file to be included in the manifest')
    return parser
def configure_output() -> None:
    """Force rich terminal output and route the root logger through a
    RichHandler with a per-level color theme.

    NOTE(review): reads ``args`` from an enclosing/module scope (only the
    optional ``no_color`` attribute) -- confirm it exists before this runs.
    """
    rich.reconfigure(force_terminal=True, no_color=getattr(args, 'no_color', False), highlight=False, theme=rich.theme.Theme({'logging.level.debug': 'green', 'logging.level.info': 'blue', 'logging.level.warning': 'yellow', 'logging.level.error': 'red', 'logging.level.critical': 'reverse red'}))
    # disable_existing_loggers=False keeps loggers created before this call alive.
    logging.config.dictConfig({'disable_existing_loggers': False, 'version': 1, 'handlers': {'console': {'class': 'rich.logging.RichHandler', 'show_time': False, 'show_path': False, 'highlighter': rich.highlighter.NullHighlighter()}}, 'root': {'handlers': ['console']}})
class Definition():
    """Container describing a console command's input: its positional
    arguments and its options (with shortcut aliases).

    Enforces argument-ordering rules on insertion: required arguments must
    precede optional ones, and nothing may follow a list argument.
    """
    def __init__(self, definition: (Sequence[(Argument | Option)] | None)=None) -> None:
        self._arguments: dict[(str, Argument)] = {}
        self._required_count = 0
        self._has_list_argument = False
        self._has_optional = False
        self._options: dict[(str, Option)] = {}
        self._shortcuts: dict[(str, str)] = {}
        self.set_definition((definition or []))
    def arguments(self) -> list[Argument]:
        """Return the arguments, in declaration order."""
        return list(self._arguments.values())
    def argument_count(self) -> int:
        """Return the maximum number of accepted arguments (unbounded once
        a list argument is present)."""
        if self._has_list_argument:
            return sys.maxsize
        return len(self._arguments)
    def required_argument_count(self) -> int:
        return self._required_count
    def argument_defaults(self) -> dict[(str, Any)]:
        """Map each argument name to its default value."""
        values = {}
        for argument in self._arguments.values():
            values[argument.name] = argument.default
        return values
    def options(self) -> list[Option]:
        """Return the options, in declaration order."""
        return list(self._options.values())
    def option_defaults(self) -> dict[(str, Any)]:
        return {o.name: o.default for o in self._options.values()}
    def set_definition(self, definition: Sequence[(Argument | Option)]) -> None:
        """Replace both arguments and options from a mixed sequence."""
        arguments = []
        options = []
        for item in definition:
            if isinstance(item, Option):
                options.append(item)
            else:
                arguments.append(item)
        self.set_arguments(arguments)
        self.set_options(options)
    def set_arguments(self, arguments: list[Argument]) -> None:
        """Replace all arguments, resetting the ordering bookkeeping."""
        self._arguments = {}
        self._required_count = 0
        self._has_list_argument = False
        self._has_optional = False
        self.add_arguments(arguments)
    def add_arguments(self, arguments: list[Argument]) -> None:
        for argument in arguments:
            self.add_argument(argument)
    def add_argument(self, argument: Argument) -> None:
        """Append one argument, enforcing uniqueness and ordering rules.

        :raises CleoLogicError: on duplicate name, on anything after a list
            argument, or on a required argument after an optional one.
        """
        if (argument.name in self._arguments):
            raise CleoLogicError(f'An argument with name "{argument.name}" already exists')
        if self._has_list_argument:
            raise CleoLogicError('Cannot add an argument after a list argument')
        if (argument.is_required() and self._has_optional):
            raise CleoLogicError('Cannot add a required argument after an optional one')
        if argument.is_list():
            self._has_list_argument = True
        if argument.is_required():
            self._required_count += 1
        else:
            self._has_optional = True
        self._arguments[argument.name] = argument
    def argument(self, name: (str | int)) -> Argument:
        """Look up an argument by name or positional index.

        :raises ValueError: if the argument does not exist.
        """
        if (not self.has_argument(name)):
            raise ValueError(f'The "{name}" argument does not exist')
        if isinstance(name, int):
            arguments = list(self._arguments.values())
            return arguments[name]
        return self._arguments[name]
    def has_argument(self, name: (str | int)) -> bool:
        if isinstance(name, int):
            # Accept negative indices; abs(name + (name < 0)) maps e.g. -1 to
            # 0 and -len to len-1, so valid negative indices pass the bound.
            return (abs((name + (name < 0))) < len(self._arguments))
        return (name in self._arguments)
    def set_options(self, options: list[Option]) -> None:
        """Replace all options and their shortcut table."""
        self._options = {}
        self._shortcuts = {}
        self.add_options(options)
    def add_options(self, options: list[Option]) -> None:
        for option in options:
            self.add_option(option)
    def add_option(self, option: Option) -> None:
        """Append one option and register its shortcut aliases.

        Re-adding an identical option is allowed; a different option with
        the same name or a clashing shortcut raises CleoLogicError.
        """
        if ((option.name in self._options) and (option != self._options[option.name])):
            raise CleoLogicError(f'An option named "{option.name}" already exists')
        if option.shortcut:
            for shortcut in option.shortcut.split('|'):
                if ((shortcut in self._shortcuts) and (option.name != self._shortcuts[shortcut])):
                    raise CleoLogicError(f'An option with shortcut "{shortcut}" already exists')
        self._options[option.name] = option
        if option.shortcut:
            for shortcut in option.shortcut.split('|'):
                self._shortcuts[shortcut] = option.name
    def option(self, name: str) -> Option:
        """Look up an option by long name.

        :raises ValueError: if the option does not exist.
        """
        if (not self.has_option(name)):
            # BUG FIX: message previously read 'The option "--x" option does
            # not exist' (duplicated word).
            raise ValueError(f'The "--{name}" option does not exist')
        return self._options[name]
    def has_option(self, name: str) -> bool:
        return (name in self._options)
    def has_shortcut(self, shortcut: str) -> bool:
        return (shortcut in self._shortcuts)
    def option_for_shortcut(self, shortcut: str) -> Option:
        return self._options[self.shortcut_to_name(shortcut)]
    def shortcut_to_name(self, shortcut: str) -> str:
        """Resolve a shortcut to its option's long name.

        :raises ValueError: if the shortcut is not registered.
        """
        if (shortcut not in self._shortcuts):
            raise ValueError(f'The "-{shortcut}" option does not exist')
        return self._shortcuts[shortcut]
    def synopsis(self, short: bool=False) -> str:
        """Render a usage synopsis like ``[-f|--foo FOO] [--] <bar>...``.

        With ``short=True`` all options collapse to ``[options]``.
        """
        elements = []
        if (short and self._options):
            elements.append('[options]')
        elif (not short):
            for option in self._options.values():
                value = ''
                if option.accepts_value():
                    formatted = (option.name.upper() if option.requires_value() else f'[{option.name.upper()}]')
                    value = f' {formatted}'
                shortcut = ''
                if option.shortcut:
                    shortcut = f'-{option.shortcut}|'
                elements.append(f'[{shortcut}--{option.name}{value}]')
        if (elements and self._arguments):
            # '--' separates options from positional arguments.
            elements.append('[--]')
        tail = ''
        for argument in self._arguments.values():
            element = f'<{argument.name}>'
            if argument.is_list():
                element += '...'
            if (not argument.is_required()):
                # Optional arguments open a bracket here; the matching close
                # brackets are accumulated in `tail` and appended at the end.
                element = ('[' + element)
                tail += ']'
            elements.append(element)
        return (' '.join(elements) + tail)
def run_step(context):
    """pypyr step: add `add.addMe` to the set named or held by `add.set`.

    When `add.unpack` is unset, list payloads are unpacked element-wise and
    scalars are added as single members.
    """
    logger.debug('started')
    context.assert_key_has_value(key='add', caller=__name__)
    step_input = context.get_formatted('add')
    assert_key_is_truthy(obj=step_input, key='set', caller=__name__, parent='add')
    assert_key_exists(obj=step_input, key='addMe', caller=__name__, parent='add')
    target_set = step_input['set']
    payload = step_input['addMe']
    unpack = step_input.get('unpack', None)
    if unpack is None:
        # Default: unpack lists, add anything else as a single member.
        unpack = isinstance(payload, list)
    if not isinstance(target_set, str):
        # `set` already holds the set object itself; mutate it directly.
        add_or_update_set(target_set, payload, unpack)
    else:
        # `set` names a context key; create the set there if absent/empty.
        existing = context.get(target_set, None)
        if existing:
            add_or_update_set(existing, payload, unpack)
        else:
            context[target_set] = set(payload) if unpack else {payload}
    logger.debug('done')
class TestImageFolder():
    """Tests for the ImageFolder dataset (uses pytest's tmpdir fixture)."""
    def test_init_ok(self, tmpdir):
        """Empty train/test dirs yield empty datasets."""
        tmpdir.mkdir('train')
        tmpdir.mkdir('test')
        train_dataset = ImageFolder(tmpdir, split='train')
        test_dataset = ImageFolder(tmpdir, split='test')
        assert (len(train_dataset) == 0)
        assert (len(test_dataset) == 0)
    def test_count_ok(self, tmpdir):
        """Dataset length matches the number of image files in the split."""
        tmpdir.mkdir('train')
        ((tmpdir / 'train') / 'img1.jpg').write('')
        ((tmpdir / 'train') / 'img2.jpg').write('')
        ((tmpdir / 'train') / 'img3.jpg').write('')
        train_dataset = ImageFolder(tmpdir, split='train')
        assert (len(train_dataset) == 3)
    def test_invalid_dir(self, tmpdir):
        """A directory without the expected split layout raises RuntimeError."""
        with pytest.raises(RuntimeError):
            ImageFolder(tmpdir)
    def test_load(self, tmpdir):
        """Without a transform, items come back as PIL images."""
        tmpdir.mkdir('train')
        save_fake_image(((tmpdir / 'train') / 'img0.jpeg').strpath)
        train_dataset = ImageFolder(tmpdir, split='train')
        assert isinstance(train_dataset[0], Image.Image)
    def test_load_transforms(self, tmpdir):
        """A torchvision transform pipeline produces tensors of the cropped size."""
        tmpdir.mkdir('train')
        save_fake_image(((tmpdir / 'train') / 'img0.jpeg').strpath)
        transform = transforms.Compose([transforms.CenterCrop((128, 128)), transforms.ToTensor()])
        train_dataset = ImageFolder(tmpdir, split='train', transform=transform)
        assert isinstance(train_dataset[0], torch.Tensor)
        assert (train_dataset[0].size() == (3, 128, 128))
class TestVariable(QiskitOptimizationTestCase):
    """Tests for Variable construction and its defaults."""
    def test_init(self):
        """Explicit bounds and an INTEGER type are stored verbatim."""
        quadratic_program = QuadraticProgram()
        name = 'variable'
        lowerbound = 0
        upperbound = 10
        vartype = Variable.Type.INTEGER
        variable = Variable(quadratic_program, name, lowerbound, upperbound, vartype)
        self.assertEqual(variable.name, name)
        self.assertEqual(variable.lowerbound, lowerbound)
        self.assertEqual(variable.upperbound, upperbound)
        self.assertEqual(variable.vartype, Variable.Type.INTEGER)
    def test_init_default(self):
        """Defaults: bounds [0, INFINITY) and CONTINUOUS type."""
        quadratic_program = QuadraticProgram()
        name = 'variable'
        variable = Variable(quadratic_program, name)
        self.assertEqual(variable.name, name)
        self.assertEqual(variable.lowerbound, 0)
        self.assertEqual(variable.upperbound, INFINITY)
        self.assertEqual(variable.vartype, Variable.Type.CONTINUOUS)
class TestInlineQueryResultCachedAudioBase():
    """Shared fixture values for InlineQueryResultCachedAudio tests.

    NOTE(review): presumably subclassed (or mixed into) the actual test
    class elsewhere in the original file -- confirm.
    """
    id_ = 'id'  # trailing underscore avoids shadowing builtin id()
    type_ = 'audio'
    audio_file_id = 'audio file id'
    caption = 'caption'
    parse_mode = 'HTML'
    caption_entities = [MessageEntity(MessageEntity.ITALIC, 0, 7)]
    input_message_content = InputTextMessageContent('input_message_content')
    reply_markup = InlineKeyboardMarkup([[InlineKeyboardButton('reply_markup')]])
class Scenario(ScenarioGenerator):
    """OpenDRIVE example: a straight road decorated with roadside and
    on-road objects (guard rail, delineators, call box, jersey barriers).
    """
    def __init__(self):
        super().__init__()
    def road(self, **kwargs):
        """Build and return the OpenDrive document for this scenario."""
        # Straight 1000 m road with 2 lanes per direction.
        road = xodr.create_road([xodr.Line(1000)], 0, 2, 2)
        odr = xodr.OpenDrive('myroad')
        odr.add_road(road)
        odr.adjust_roads_and_lanes()
        # Continuous guard rail along both road sides.
        guardrail = xodr.Object(0, 0, height=0.3, zOffset=0.4, Type=xodr.ObjectType.barrier, name='guardRail')
        road.add_object_roadside(guardrail, 0, 0, tOffset=0.8)
        # Delineator poles repeated every 50 m.
        delineator = xodr.Object(0, 0, height=1, zOffset=0, Type=xodr.ObjectType.pole, name='delineator')
        road.add_object_roadside(delineator, 50, sOffset=25, tOffset=0.85)
        # Single emergency call box at s=30, t=-6.
        emergencyCallbox = xodr.Object(30, (- 6), Type=xodr.ObjectType.pole, name='emergencyCallBox')
        road.add_object(emergencyCallbox)
        # 25 m stretch of jersey barrier starting at s=240.
        jerseyBarrier = xodr.Object(0, 0, height=0.75, zOffset=0, Type=xodr.ObjectType.barrier, name='jerseyBarrier')
        jerseyBarrier.repeat(repeatLength=25, repeatDistance=0, sStart=240)
        road.add_object(jerseyBarrier)
        return odr
def startredir(redirport, target, port):
    """Start a dsz TCP redirect: listen locally on `redirport`, forward to
    `target:port`.

    Returns the recorded dsz command id on success, or 0 on failure (after
    echoing an error message).
    """
    # NOTE(review): echo is turned Off only while the command string is
    # built, then back On before it runs -- presumably deliberate output
    # suppression; confirm intent.
    dsz.control.echo.Off()
    cmd = ('redirect -tcp -lplisten %s -target %s %s' % (redirport, target, port))
    dsz.control.echo.On()
    (succ, redircmdid) = dsz.cmd.RunEx(cmd, dsz.RUN_FLAG_RECORD)
    if (not succ):
        dsz.ui.Echo(('Failed: redirect -tcp -lplisten %s -target %s %s' % (redirport, target, port)), dsz.ERROR)
        return 0
    return redircmdid
def test_doc_inherit():
    """Check that `_inherit` propagates a docstring and warns (or not)
    depending on `warn_class`.

    NOTE(review): `_inherit` is called both with a function (`func_a`) and
    with the raw docstring (`doc`) -- presumably these calls stand in for
    decorator applications that a source round-trip mangled; confirm
    against the original file.
    """
    # Build a long random-but-deterministic docstring.
    chars = tuple((string.ascii_letters + string.digits))
    random = np.random.default_rng(seed=42)
    doc = ''.join(random.choice(chars, 1000))
    def func_a():
        ...
    func_a.__doc__ = doc
    _inherit(func_a)
    def func_b():
        ...
    _inherit(doc)
    def func_c():
        ...
    # All three functions should end up with the same inherited docstring.
    assert (doc == func_a.__doc__ == func_b.__doc__ == func_c.__doc__)
    # Inheriting onto a class warns when warn_class=True ...
    with pytest.warns(UserWarning):
        _inherit(doc, warn_class=True)
    class A():
        pass
    # ... and must stay silent when warn_class=False (errors on any warning).
    with warnings.catch_warnings():
        warnings.simplefilter('error')
        _inherit(doc, warn_class=False)
        class A():
            pass
def override_module_args(args: Namespace) -> Tuple[(List[str], List[str])]:
    """Convert legacy argparse args into hydra-style overrides/deletes.

    For each registered component (task, criterion, optimizer, lr_scheduler
    and, if present, the model arch) that has a dataclass, emit
    ``name=value``, ``name._name=value`` plus per-attribute overrides;
    components without a dataclass are marked for deletion instead.

    :param args: parsed argparse namespace (may be None).
    :return: ``(overrides, deletes)`` lists of hydra override strings.
    """
    overrides: List[str] = []
    deletes: List[str] = []
    if (args is not None):
        assert (hasattr(args, 'task') and hasattr(args, 'criterion') and hasattr(args, 'optimizer') and hasattr(args, 'lr_scheduler'))

        def _handle_component(name: str, registry) -> None:
            # Shared path for the four identically-shaped dataclass registries.
            value = getattr(args, name)
            if (value in registry):
                overrides.append('{}={}'.format(name, value))
                overrides.append('{}._name={}'.format(name, value))
                overrides.extend(_override_attr(name, registry[value], args))
            else:
                deletes.append(name)

        _handle_component('task', TASK_DATACLASS_REGISTRY)
        _handle_component('criterion', CRITERION_DATACLASS_REGISTRY)
        _handle_component('optimizer', OPTIMIZER_DATACLASS_REGISTRY)
        _handle_component('lr_scheduler', LR_SCHEDULER_DATACLASS_REGISTRY)
        # The model arch lives in a different registry and exposes its
        # dataclass via the `__dataclass` attribute, so it keeps its own path.
        no_dc = True
        if hasattr(args, 'arch'):
            if (args.arch in ARCH_MODEL_REGISTRY):
                m_cls = ARCH_MODEL_REGISTRY[args.arch]
                dc = getattr(m_cls, '__dataclass', None)
                if (dc is not None):
                    overrides.append('model={}'.format(args.arch))
                    overrides.append('model._name={}'.format(args.arch))
                    overrides.extend(_override_attr('model', dc, args))
                    no_dc = False
        if no_dc:
            deletes.append('model')
    return (overrides, deletes)
# NOTE(review): the bare tuples below are mangled click decorators --
# presumably `@click.group(...)`, a stack of `@click.option(...)` calls,
# `@click.version_option(...)` and `@click.pass_context` for the command
# defined underneath; confirm against the original source.
(context_settings={'help_option_names': ['-h', '--help'], 'max_content_width': 120}, invoke_without_command=True)
('--env', '-e', 'env_name', envvar=AppEnvVars.ENV, default='default', help='The name of the environment to use [env var: `HATCH_ENV`]')
('--project', '-p', envvar=ConfigEnvVars.PROJECT, help='The name of the project to work on [env var: `HATCH_PROJECT`]')
('--verbose', '-v', envvar=AppEnvVars.VERBOSE, count=True, help='Increase verbosity (can be used additively) [env var: `HATCH_VERBOSE`]')
('--quiet', '-q', envvar=AppEnvVars.QUIET, count=True, help='Decrease verbosity (can be used additively) [env var: `HATCH_QUIET`]')
('--color/--no-color', default=None, help='Whether or not to display colored output (default is auto-detection) [env vars: `FORCE_COLOR`/`NO_COLOR`]')
('--interactive/--no-interactive', envvar=AppEnvVars.INTERACTIVE, default=None, help='Whether or not to allow features like prompts and progress bars (default is auto-detection) [env var: `HATCH_INTERACTIVE`]')
('--data-dir', envvar=ConfigEnvVars.DATA, help='The path to a custom directory used to persist data [env var: `HATCH_DATA_DIR`]')
('--cache-dir', envvar=ConfigEnvVars.CACHE, help='The path to a custom directory used to cache data [env var: `HATCH_CACHE_DIR`]')
('--config', 'config_file', envvar=ConfigEnvVars.CONFIG, help='The path to a custom config file to use [env var: `HATCH_CONFIG`]')
_option(version=__version__, prog_name='Hatch')
_context
def hatch(ctx: click.Context, env_name, project, verbose, quiet, color, interactive, data_dir, cache_dir, config_file):
    """Root CLI entry point: build the Application, load config/styles,
    resolve the target project according to the configured mode, and hand
    the app to subcommands via ``ctx.obj``.
    """
    # Color auto-detection honors the NO_COLOR / FORCE_COLOR conventions.
    if (color is None):
        if (os.environ.get(AppEnvVars.NO_COLOR) == '1'):
            color = False
        elif (os.environ.get(AppEnvVars.FORCE_COLOR) == '1'):
            color = True
    if ((interactive is None) and running_in_ci()):
        interactive = False
    app = Application(ctx.exit, verbosity=(verbose - quiet), enable_color=color, interactive=interactive)
    app.env_active = os.environ.get(AppEnvVars.ENV_ACTIVE)
    # An active environment wins only when --env was not given explicitly.
    if (app.env_active and ((param_source := ctx.get_parameter_source('env_name')) is not None) and (param_source.name == 'DEFAULT')):
        app.env = app.env_active
    else:
        app.env = env_name
    if config_file:
        app.config_file.path = Path(config_file).resolve()
        if (not app.config_file.path.is_file()):
            app.abort(f'The selected config file `{app.config_file.path}` does not exist.')
    elif (not app.config_file.path.is_file()):
        # No explicit config and none on disk: create one with defaults.
        if app.verbose:
            app.display_waiting('No config file found, creating one with default settings now...')
        try:
            app.config_file.restore()
            if app.verbose:
                app.display_success('Success! Please see `hatch config`.')
        except OSError:
            app.abort(f'Unable to create config file located at `{app.config_file.path}`. Please check your permissions.')
    # With no subcommand, print help and stop.
    if (not ctx.invoked_subcommand):
        app.display_info(ctx.get_help())
        return
    ctx.obj = app
    try:
        app.config_file.load()
    except OSError as e:
        app.abort(f'Error loading configuration: {e}')
    app.config.terminal.styles.parse_fields()
    errors = app.initialize_styles(app.config.terminal.styles.raw_data)
    if (errors and (color is not False) and (not app.quiet)):
        for error in errors:
            app.display_warning(error)
    app.data_dir = Path((data_dir or app.config.dirs.data)).expand()
    app.cache_dir = Path((cache_dir or app.config.dirs.cache)).expand()
    # --project always wins; failure to locate it is fatal.
    if project:
        app.project = Project.from_config(app.config, project)
        if ((app.project is None) or (app.project.root is None)):
            app.abort(f'Unable to locate project {project}')
        return
    app.project = Project(Path.cwd())
    if (app.config.mode == 'local'):
        return
    # 'project' mode: always use the configured project (warn + cwd fallback).
    if (app.config.mode == 'project'):
        if (not app.config.project):
            app.display_warning('Mode is set to `project` but no project is set, defaulting to the current directory')
            return
        possible_project = Project.from_config(app.config, app.config.project)
        if (possible_project is None):
            app.display_warning(f'Unable to locate project {app.config.project}, defaulting to the current directory')
        else:
            app.project = possible_project
        return
    # 'aware' mode: only fall back to the configured project when the cwd
    # is not itself inside a project.
    if ((app.config.mode == 'aware') and (app.project.root is None)):
        if (not app.config.project):
            app.display_warning('Mode is set to `aware` but no project is set, defaulting to the current directory')
            return
        possible_project = Project.from_config(app.config, app.config.project)
        if (possible_project is None):
            app.display_warning(f'Unable to locate project {app.config.project}, defaulting to the current directory')
        else:
            app.project = possible_project
        return
def main():
    """Entry point for CLIP-based retrieval training/evaluation.

    Builds the model and dataloaders from CLI args, optionally freezes the
    lower CLIP layers, then either trains (tracking the best R@1 checkpoint)
    or runs a single evaluation pass.
    """
    global logger
    args = get_args()
    args = set_seed_logger(args)
    (device, n_gpu) = init_device(args, args.local_rank)
    tokenizer = ClipTokenizer()
    assert (args.task_type == 'retrieval')
    model = init_model(args, device, n_gpu, args.local_rank)
    assert ((args.freeze_layer_num <= 12) and (args.freeze_layer_num >= (- 1)))
    # Freeze CLIP parameters below freeze_layer_num; the top projections and
    # final norms always stay trainable.
    if (hasattr(model, 'clip') and (args.freeze_layer_num > (- 1))):
        for (name, param) in model.clip.named_parameters():
            if ((name.find('ln_final.') == 0) or (name.find('text_projection') == 0) or (name.find('logit_scale') == 0) or (name.find('visual.ln_post.') == 0) or (name.find('visual.proj') == 0)):
                continue
            elif ((name.find('visual.transformer.resblocks.') == 0) or (name.find('transformer.resblocks.') == 0)):
                layer_num = int(name.split('.resblocks.')[1].split('.')[0])
                if (layer_num >= args.freeze_layer_num):
                    continue
            # BUG FIX: the original tested the raw `str.find` result
            # (`name.find('conv2.')`), which is truthy for -1 (absent) and any
            # non-zero index -- i.e. the opposite of "starts with 'conv2.'".
            # Compare with 0 as in the sibling checks above.
            if ((args.linear_patch == '3d') and (name.find('conv2.') == 0)):
                continue
            else:
                param.requires_grad = False
    assert (args.datatype in DATALOADER_DICT)
    assert ((DATALOADER_DICT[args.datatype]['test'] is not None) or (DATALOADER_DICT[args.datatype]['val'] is not None))
    # Fall back between test/val splits so both loaders are always defined.
    (test_dataloader, test_length) = (None, 0)
    if (DATALOADER_DICT[args.datatype]['test'] is not None):
        (test_dataloader, test_length) = DATALOADER_DICT[args.datatype]['test'](args, tokenizer)
    if (DATALOADER_DICT[args.datatype]['val'] is not None):
        (val_dataloader, val_length) = DATALOADER_DICT[args.datatype]['val'](args, tokenizer, subset='val')
    else:
        (val_dataloader, val_length) = (test_dataloader, test_length)
    if (test_dataloader is None):
        (test_dataloader, test_length) = (val_dataloader, val_length)
    # Only rank 0 logs, to keep distributed output readable.
    if (args.local_rank == 0):
        logger.info('***** Running test *****')
        logger.info('  Num examples = %d', test_length)
        logger.info('  Batch size = %d', args.batch_size_val)
        logger.info('  Num steps = %d', len(test_dataloader))
        logger.info('***** Running val *****')
        logger.info('  Num examples = %d', val_length)
    if args.do_train:
        (train_dataloader, train_length, train_sampler) = DATALOADER_DICT[args.datatype]['train'](args, tokenizer)
        num_train_optimization_steps = ((int(((len(train_dataloader) + args.gradient_accumulation_steps) - 1)) / args.gradient_accumulation_steps) * args.epochs)
        coef_lr = args.coef_lr
        (optimizer, scheduler, model) = prep_optimizer(args, model, num_train_optimization_steps, device, n_gpu, args.local_rank, coef_lr=coef_lr)
        if (args.local_rank == 0):
            logger.info('***** Running training *****')
            logger.info('  Num examples = %d', train_length)
            logger.info('  Batch size = %d', args.batch_size)
            logger.info('  Num steps = %d', (num_train_optimization_steps * args.gradient_accumulation_steps))
        best_score = 1e-05
        best_output_model_file = 'None'
        resumed_epoch = 0
        # Optionally resume optimizer state and epoch counter from checkpoint.
        if args.resume_model:
            checkpoint = torch.load(args.resume_model, map_location='cpu')
            optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
            resumed_epoch = (checkpoint['epoch'] + 1)
            resumed_loss = checkpoint['loss']
        global_step = 0
        for epoch in range(resumed_epoch, args.epochs):
            train_sampler.set_epoch(epoch)
            (tr_loss, global_step) = train_epoch(epoch, args, model, train_dataloader, device, n_gpu, optimizer, scheduler, global_step, local_rank=args.local_rank)
            if (args.local_rank == 0):
                logger.info('Epoch %d/%s Finished, Train Loss: %f', (epoch + 1), args.epochs, tr_loss)
                output_model_file = save_model(epoch, args, model, optimizer, tr_loss, type_name='')
                # Track the checkpoint with the best R@1 on the test split.
                R1 = eval_epoch(args, model, test_dataloader, device, n_gpu)
                if (best_score <= R1):
                    best_score = R1
                    best_output_model_file = output_model_file
                logger.info('The best model is: {}, the R1 is: {:.4f}'.format(best_output_model_file, best_score))
    elif args.do_eval:
        if (args.local_rank == 0):
            eval_epoch(args, model, test_dataloader, device, n_gpu)
class Buhne():
def __init__(self):
self._core = CoreStage()
self._core.facade = self
self._core.sprite_facade_class = Figur
def fuge_eine_figur_hinzu(self, costume='default'):
return self._core.pystage_createsprite(costume=costume)
def abspielen(self):
self._core.pystage_play()
def erzeuge_klon_von(self, sprite='_myself_'):
return self._core.control_create_clone_of(sprite)
def stoppe_alles(self):
return self._core.control_stop_all()
def stoppe_andere_skripte_der_figur(self):
return self._core.control_stop_other()
def stoppe_dieses_skript(self):
return self._core.control_stop_this()
def warte_sekunden(self, secs):
return self._core.control_wait(secs)
def andere_um(self, name, value):
return self._core.data_changevariableby(name, value)
def verstecke_variable(self, name):
return self._core.data_hidevariable(name)
def setze_auf(self, name, value):
return self._core.data_setvariableto(name, value)
def zeige_variable(self, name):
return self._core.data_showvariable(name)
def data_variable(self, name):
return self._core.data_variable(name)
def sende_an_alle(self, message):
return self._core.event_broadcast(message)
def sende_an_alle_und_warte(self, message):
return self._core.event_broadcastandwait(message)
def wenn_das_buhnenbild_zu_wechselt(self, backdrop, generator_function, name='', no_refresh=False):
return self._core.event_whenbackdropswitchesto(backdrop, generator_function, name, no_refresh)
def wenn_ich_empfange(self, message, generator_function, name='', no_refresh=False):
return self._core.event_whenbroadcastreceived(message, generator_function, name, no_refresh)
def wenn_GREENFLAG_angeklickt_wird(self, generator_function, name='', no_refresh=False):
return self._core.event_whenflagclicked(generator_function, name, no_refresh)
def wenn_lautstarke_GREATERTHAN(self, value, generator_function, name='', no_refresh=False):
return self._core.event_whengreaterthan_loudness(value, generator_function, name, no_refresh)
def wenn_stoppuhr_GREATERTHAN(self, value, generator_function, name='', no_refresh=False):
return self._core.event_whengreaterthan_timer(value, generator_function, name, no_refresh)
def wenn_taste_gedruckt_wird(self, key, generator_function, name='', no_refresh=False):
return self._core.event_whenkeypressed(key, generator_function, name, no_refresh)
def wenn_diese_figur_angeklickt_wird(self, generator_function, name='', no_refresh=False):
return self._core.event_whenthisspriteclicked(generator_function, name, no_refresh)
def buhnenbild_name(self):
return self._core.looks_backdropnumbername_name()
def buhnenbild_nummer(self):
return self._core.looks_backdropnumbername_number()
def andere_effekt_helligkeit_um(self, value):
return self._core.looks_changeeffectby_brightness(value)
def andere_effekt_farbe_um(self, value):
return self._core.looks_changeeffectby_color(value)
def andere_effekt_fischauge_um(self, value):
return self._core.looks_changeeffectby_fisheye(value)
def andere_effekt_durchsichtigkeit_um(self, value):
return self._core.looks_changeeffectby_ghost(value)
def andere_effekt_mosaik_um(self, value):
return self._core.looks_changeeffectby_mosaic(value)
def andere_effekt_pixel_um(self, value):
return self._core.looks_changeeffectby_pixelate(value)
def andere_effekt_wirbel_um(self, value):
return self._core.looks_changeeffectby_whirl(value)
def schalte_grafikeffekte_aus(self):
return self._core.looks_cleargraphiceffects()
def nachstes_buhnenbild(self):
return self._core.looks_nextbackdrop()
def setze_effekt_helligkeit_auf(self, value):
return self._core.looks_seteffectto_brightness(value)
def setze_effekt_farbe_auf(self, value):
return self._core.looks_seteffectto_color(value)
def setze_effekt_fischauge_auf(self, value):
return self._core.looks_seteffectto_fisheye(value)
def setze_effekt_durchsichtigkeit_auf(self, value):
return self._core.looks_seteffectto_ghost(value)
def setze_effekt_mosaik_auf(self, value):
return self._core.looks_seteffectto_mosaic(value)
def setze_effekt_pixel_auf(self, value):
return self._core.looks_seteffectto_pixelate(value)
def setze_effekt_wirbel_auf(self, value):
return self._core.looks_seteffectto_whirl(value)
def wechsle_zu_buhnenbild(self, backdrop):
return self._core.looks_switchbackdropto(backdrop)
def wechsle_zu_buhnenbild_und_warte(self, backdrop):
return self._core.looks_switchbackdroptoandwait(backdrop)
def von(self, operator, number):
return self._core.operator_mathop(operator, number)
def zufallszahl_von_bis(self, start, end):
return self._core.operator_random(start, end)
def pystage_addbackdrop(self, name, center_x=None, center_y=None):
return self._core.pystage_addbackdrop(name, center_x, center_y)
def pystage_addsound(self, name):
return self._core.pystage_addsound(name)
def pystage_createsprite(self, costume='default'):
return self._core.pystage_createsprite(costume)
def pystage_insertbackdrop(self, index, name, center_x=None, center_y=None):
return self._core.pystage_insertbackdrop(index, name, center_x, center_y)
def pystage_makevariable(self, name, all_sprites=True):
return self._core.pystage_makevariable(name, all_sprites)
def pystage_play(self):
return self._core.pystage_play()
def pystage_replacebackdrop(self, index, name, center_x=None, center_y=None):
return self._core.pystage_replacebackdrop(index, name, center_x, center_y)
def pystage_setmonitorposition(self, name, x, y):
return self._core.pystage_setmonitorposition(name, x, y)
def pystage_setmonitorstyle_large(self, name):
return self._core.pystage_setmonitorstyle_large(name)
def pystage_setmonitorstyle_normal(self, name):
return self._core.pystage_setmonitorstyle_normal(name)
def pystage_setmonitorstyle_slider(self, name):
return self._core.pystage_setmonitorstyle_slider(name)
# Sensing-palette wrappers (German aliases); each forwards to self._core.
def antwort(self):
    # "answer" - the reply from the most recent "ask and wait".
    return self._core.sensing_answer()
def frage_und_warte(self, question):
    return self._core.sensing_askandwait(question)
def datum_im_moment(self):
    return self._core.sensing_current_date()
def wochentag_im_moment(self):
    return self._core.sensing_current_dayofweek()
def stunde_im_moment(self):
    return self._core.sensing_current_hour()
def minute_im_moment(self):
    return self._core.sensing_current_minute()
def monat_im_moment(self):
    return self._core.sensing_current_month()
def sekunde_im_moment(self):
    return self._core.sensing_current_second()
def jahr_im_moment(self):
    return self._core.sensing_current_year()
def tage_seit(self):
    # "days since 2000".
    return self._core.sensing_dayssince2000()
def taste_gedruckt(self, key):
    return self._core.sensing_keypressed(key)
def lautstarke(self):
    # "loudness" (microphone level).
    # NOTE(review): this name is re-defined later in the class (the
    # sound_volume wrapper), which shadows this definition at
    # class-creation time - confirm intended.
    return self._core.sensing_loudness()
def maustaste_gedruckt(self):
    return self._core.sensing_mousedown()
def maus_x_position(self):
    return self._core.sensing_mousex()
def maus_y_position(self):
    return self._core.sensing_mousey()
def buhnenbildname_von(self, stage='_stage_'):
    return self._core.sensing_of_backdropname(stage)
def buhnenbildnummer_von(self, stage='_stage_'):
    return self._core.sensing_of_backdropnumber(stage)
def kostumname_von(self, sprite):
    return self._core.sensing_of_costumename(sprite)
def kostumnummer_von(self, sprite):
    return self._core.sensing_of_costumenumber(sprite)
def richtung_von(self, sprite):
    return self._core.sensing_of_direction(sprite)
def groe_von(self, sprite):
    return self._core.sensing_of_size(sprite)
def von(self, variable, sprite='_stage_'):
    # "<variable> of <sprite>".
    # NOTE(review): re-defines the earlier `von` (operator_mathop wrapper);
    # this later definition wins - confirm the collision is intended.
    return self._core.sensing_of_variable(variable, sprite)
def lautstarke_von(self, sprite='_stage_'):
    return self._core.sensing_of_volume(sprite)
def x_position_von(self, sprite):
    return self._core.sensing_of_xposition(sprite)
def y_position_von(self, sprite):
    return self._core.sensing_of_yposition(sprite)
def setze_stoppuhr_zuruck(self):
    return self._core.sensing_resettimer()
def setze_ziehbarkeit_auf_ziehbar(self):
    return self._core.sensing_setdragmode_draggable()
def setze_ziehbarkeit_auf_nicht_ziehbar(self):
    return self._core.sensing_setdragmode_notdraggable()
def stoppuhr(self):
    return self._core.sensing_timer()
def benutzername(self):
    return self._core.sensing_username()
# Sound-palette wrappers (German aliases); each forwards to self._core.
def andere_effekt_aussteuern_links_rechts_um(self, value):
    # "change pan left/right effect by <value>".
    return self._core.sound_changeeffectby_pan(value)
def andere_effekt_hohe_um(self, value):
    # "change pitch effect by <value>".
    return self._core.sound_changeeffectby_pitch(value)
def andere_lautstarke_um(self, value):
    return self._core.sound_changevolumeby(value)
def schalte_klangeffekte_aus(self):
    return self._core.sound_cleareffects()
def spiele_klang(self, name, loop=0):
    return self._core.sound_play(name, loop)
def spiele_klang_ganz(self, name):
    # "play sound until done".
    return self._core.sound_playuntildone(name)
def setze_effekt_aussteuern_links_rechts_auf(self, value):
    return self._core.sound_seteffectto_pan(value)
def setze_effekt_hohe_auf(self, value):
    return self._core.sound_seteffectto_pitch(value)
def setze_lautstarke_auf(self, value):
    return self._core.sound_setvolumeto(value)
def stoppe_alle_klange(self):
    return self._core.sound_stopallsounds()
def lautstarke(self):
    # "volume".
    # NOTE(review): shadows the earlier `lautstarke` (sensing_loudness
    # wrapper) defined above in this class - confirm intended.
    return self._core.sound_volume()
class AnswerSerializer(serializers.ModelSerializer):
    """DRF serializer for Answer objects.

    Adds read-only computed fields (author name, formatted creation date,
    like count, whether the requesting user has liked the answer, and the
    parent question's slug) on top of the model fields.
    """
    author = serializers.StringRelatedField()  # serialized as str(author)
    created_at = serializers.SerializerMethodField()
    likes_count = serializers.SerializerMethodField()
    user_has_liked_answer = serializers.SerializerMethodField()
    question_slug = serializers.SerializerMethodField()

    class Meta():
        model = Answer
        # Everything except these model fields is serialized.
        exclude = ['id', 'question', 'voters', 'updated_at']

    def get_created_at(self, instance):
        # Human-readable date, e.g. "January 01, 2024".
        return instance.created_at.strftime('%B %d, %Y')

    def get_likes_count(self, instance):
        return instance.voters.count()

    def get_user_has_liked_answer(self, instance):
        # NOTE(review): assumes 'request' is always present in the serializer
        # context (AttributeError on None otherwise) - confirm all callers
        # pass the request in context.
        request = self.context.get('request')
        return instance.voters.filter(pk=request.user.pk).exists()

    def get_question_slug(self, instance):
        return instance.question.slug
class Migration(migrations.Migration):
    """Options app migration: set Option ordering/verbose names, rename
    OptionSet.key to uri_path, and widen/redefine the URI fields."""
    dependencies = [('options', '0027_meta')]
    operations = [migrations.AlterModelOptions(name='option', options={'ordering': ('uri',), 'verbose_name': 'Option', 'verbose_name_plural': 'Options'}), migrations.RenameField(model_name='optionset', old_name='key', new_name='uri_path'), migrations.AlterField(model_name='optionset', name='uri', field=models.URLField(blank=True, help_text='The Uniform Resource Identifier of this option set (auto-generated).', max_length=800, verbose_name='URI')), migrations.AlterField(model_name='optionset', name='uri_path', field=models.CharField(blank=True, help_text='The path for the URI of this option set.', max_length=512, verbose_name='URI Path'))]
def iterate_tfrecord_file(data: BufferedIOBase) -> Iterator[memoryview]:
    """Yield the payload of each record in a TFRecord byte stream.

    TFRecord wire format per record:
        uint64 length (little-endian), uint32 masked CRC of the length,
        `length` payload bytes, uint32 masked CRC of the payload.

    The two CRC fields are read (to advance the stream) but NOT verified.
    Each yielded memoryview aliases an internal buffer that is reused for
    the next record, so callers must copy (e.g. ``bytes(view)``) before
    advancing the iterator.

    Raises:
        RuntimeError: if the stream ends in the middle of a record.
    """
    length_bytes = bytearray(8)
    crc_bytes = bytearray(4)
    data_bytes = bytearray(1024)  # payload buffer: grown on demand, never shrunk
    while True:
        bytes_read = data.readinto(length_bytes)
        if bytes_read == 0:
            break  # clean EOF on a record boundary
        elif bytes_read != 8:
            raise RuntimeError('Invalid tfrecord file: failed to read the record size.')
        if data.readinto(crc_bytes) != 4:
            raise RuntimeError('Invalid tfrecord file: failed to read the start token.')
        (length,) = struct.unpack('<Q', length_bytes)
        if length > len(data_bytes):
            # Reallocate with 1.5x headroom.  A fresh bytearray is clearer than
            # the original zfill() trick and avoids copying stale contents
            # (the buffer is fully overwritten by readinto below anyway).
            data_bytes = bytearray(int(length * 1.5))
        data_bytes_view = memoryview(data_bytes)[:length]
        if data.readinto(data_bytes_view) != length:
            raise RuntimeError('Invalid tfrecord file: failed to read the record.')
        if data.readinto(crc_bytes) != 4:
            raise RuntimeError('Invalid tfrecord file: failed to read the end token.')
        yield data_bytes_view
class SaveEditorHandler(webBase.BaseHandler):
    """Tornado handler applying datatables-editor style create/edit/remove
    actions to a stock web table.

    WARNING(security): the SQL below is assembled by string interpolation
    from request parameters and is vulnerable to SQL injection.  Use
    parameterized queries (and whitelist table/column names) if this
    endpoint can ever receive untrusted input.
    """

    def post(self):
        # Editor action: 'create', 'edit' or 'remove'.
        action = self.get_argument('action', default=None, strip=False)
        logging.info(action)
        table_name = self.get_argument('table_name', default=None, strip=False)
        stockWeb = stock_web_dic.STOCK_WEB_DATA_MAP[table_name]
        param_map = {}
        # Datatables editor posts fields as data[<row_id>][<column>];
        # extract the column name from the bracketed suffix.
        for (item, val) in self.request.arguments.items():
            item_key = re.search('\\]\\[(.*?)\\]', item)
            if item_key:
                tmp_1 = item_key.group()
                if tmp_1:
                    tmp_1 = tmp_1.replace('][', '').replace(']', '')
                    param_map[tmp_1] = val[0].decode('utf-8')
        if (action == 'create'):
            logging.info('create')
            # Build INSERT column list and values in the table's column order.
            tmp_columns = '`, `'.join(stockWeb.columns)
            tmp_values = []
            for tmp_key in stockWeb.columns:
                tmp_values.append(param_map[tmp_key])
            tmp_values2 = "', '".join(tmp_values)
            insert_sql = (" INSERT INTO %s (`%s`) VALUES('%s'); " % (stockWeb.table_name, tmp_columns, tmp_values2))
            logging.info(insert_sql)
            try:
                self.db.execute(insert_sql)
            except Exception as e:
                # Report the DB error back to the editor and stop.
                err = {'error': str(e)}
                logging.info(err)
                self.write(err)
                return
        elif (action == 'edit'):
            logging.info('edit')
            # SET clause from all columns, WHERE clause from the primary key.
            tmp_update = genSql(stockWeb.columns, param_map, ',')
            tmp_where = genSql(stockWeb.primary_key, param_map, 'and')
            update_sql = (' UPDATE %s SET %s WHERE %s ' % (stockWeb.table_name, tmp_update, tmp_where))
            logging.info(update_sql)
            try:
                self.db.execute(update_sql)
            except Exception as e:
                err = {'error': str(e)}
                logging.info(err)
                self.write(err)
                return
        elif (action == 'remove'):
            logging.info('remove')
            tmp_where = genSql(stockWeb.primary_key, param_map, 'and')
            delete_sql = (' DELETE FROM %s WHERE %s ' % (stockWeb.table_name, tmp_where))
            logging.info(delete_sql)
            try:
                self.db.execute(delete_sql)
            except Exception as e:
                err = {'error': str(e)}
                logging.info(err)
                self.write(err)
                return
        # Success response expected by the datatables editor client.
        self.write('{"data":[{}]}')
def NASNet(input_shape=None, penultimate_filters=4032, num_blocks=6, stem_block_filters=96, skip_reduction=True, filter_multiplier=2, include_top=True, weights=None, input_tensor=None, pooling=None, classes=1000, default_size=None, params=PARAM_NONE, **kwargs):
    """Instantiate a NASNet-A model (stem, three normal-cell stages separated
    by reduction cells, optional classification head).

    Returns a tuple ``(x, model)``: the final output tensor and the Keras
    Model.  `weights` may be None, 'imagenet', or a path to a weights file;
    ImageNet weights require default_size 224 (Mobile) or 331 (Large).

    NOTE(review): `params` is threaded through every layer call
    (project-specific extension of keras-applications) - semantics defined
    elsewhere in this project.
    """
    global backend, layers, models, keras_utils
    (backend, layers, models, keras_utils) = get_submodules_from_kwargs(kwargs)
    # --- argument validation ---------------------------------------------
    if (not ((weights in {'imagenet', None}) or os.path.exists(weights))):
        raise ValueError('The `weights` argument should be either `None` (random initialization), `imagenet` (pre-training on ImageNet), or the path to the weights file to be loaded.')
    if ((weights == 'imagenet') and include_top and (classes != 1000)):
        raise ValueError('If using `weights` as `"imagenet"` with `include_top` as true, `classes` should be 1000')
    if (isinstance(input_shape, tuple) and (None in input_shape) and (weights == 'imagenet')):
        raise ValueError((('When specifying the input shape of a NASNet and loading `ImageNet` weights, the input_shape argument must be static (no None entries). Got: `input_shape=' + str(input_shape)) + '`.'))
    if (default_size is None):
        default_size = 331  # NASNetLarge input resolution
    input_shape = _obtain_input_shape(input_shape, default_size=default_size, min_size=32, data_format=backend.image_data_format(), require_flatten=True, weights=weights)
    # NASNet weights only exist in channels_last; temporarily switch and
    # restore the user's data format at the end.
    if (backend.image_data_format() != 'channels_last'):
        warnings.warn('The NASNet family of models is only available for the input data format "channels_last" (width, height, channels). However your settings specify the default data format "channels_first" (channels, width, height). You should set `image_data_format="channels_last"` in your Keras config located at ~/.keras/keras.json. The model being returned right now will expect inputs to follow the "channels_last" data format.')
        backend.set_image_data_format('channels_last')
        old_data_format = 'channels_first'
    else:
        old_data_format = None
    if (input_tensor is None):
        img_input = layers.Input(shape=input_shape)
    elif (not backend.is_keras_tensor(input_tensor)):
        img_input = layers.Input(tensor=input_tensor, shape=input_shape)
    else:
        img_input = input_tensor
    if ((penultimate_filters % (24 * (filter_multiplier ** 2))) != 0):
        raise ValueError(('For NASNet-A models, the `penultimate_filters` must be a multiple of 24 * (`filter_multiplier` ** 2). Current value: %d' % penultimate_filters))
    channel_dim = (1 if (backend.image_data_format() == 'channels_first') else (- 1))
    filters = (penultimate_filters // 24)
    # --- stem -------------------------------------------------------------
    x = layers.Conv2D(stem_block_filters, (3, 3), strides=(2, 2), padding='valid', use_bias=False, name='stem_conv1', kernel_initializer='he_normal')(img_input, params=params)
    x = layers.BatchNormalization(axis=channel_dim, momentum=0.9997, epsilon=0.001, name='stem_bn1')(x, params=params)
    p = None  # `p` carries the previous cell's output for skip connections
    (x, p) = _reduction_a_cell(x, p, (filters // (filter_multiplier ** 2)), params=params, block_id='stem_1')
    (x, p) = _reduction_a_cell(x, p, (filters // filter_multiplier), params=params, block_id='stem_2')
    # --- stage 1: normal cells, then a reduction cell ---------------------
    for i in range(num_blocks):
        (x, p) = _normal_a_cell(x, p, filters, params=params, block_id=('%d' % i))
    (x, p0) = _reduction_a_cell(x, p, (filters * filter_multiplier), params=params, block_id=('reduce_%d' % num_blocks))
    p = (p0 if (not skip_reduction) else p)
    # --- stage 2 ----------------------------------------------------------
    for i in range(num_blocks):
        (x, p) = _normal_a_cell(x, p, (filters * filter_multiplier), params=params, block_id=('%d' % ((num_blocks + i) + 1)))
    (x, p0) = _reduction_a_cell(x, p, (filters * (filter_multiplier ** 2)), params=params, block_id=('reduce_%d' % (2 * num_blocks)))
    p = (p0 if (not skip_reduction) else p)
    # --- stage 3 ----------------------------------------------------------
    for i in range(num_blocks):
        (x, p) = _normal_a_cell(x, p, (filters * (filter_multiplier ** 2)), params=params, block_id=('%d' % (((2 * num_blocks) + i) + 1)))
    x = layers.Activation('relu')(x)
    # --- head -------------------------------------------------------------
    if include_top:
        x = layers.GlobalAveragePooling2D()(x)
        x = layers.Dense(classes, activation='softmax', name='predictions')(x, params=params)
    elif (pooling == 'avg'):
        x = layers.GlobalAveragePooling2D()(x)
    elif (pooling == 'max'):
        x = layers.GlobalMaxPooling2D()(x)
    # Ensure the model accounts for any layers behind `input_tensor`.
    if (input_tensor is not None):
        inputs = keras_utils.get_source_inputs(input_tensor)
    else:
        inputs = img_input
    model = models.Model(inputs, x, name='NASNet')
    # --- weight loading ---------------------------------------------------
    if (weights == 'imagenet'):
        if (default_size == 224):
            if include_top:
                weights_path = keras_utils.get_file('nasnet_mobile.h5', NASNET_MOBILE_WEIGHT_PATH, cache_subdir='models', file_hash='020fb642bf7360b370c678b08e0adf61')
            else:
                weights_path = keras_utils.get_file('nasnet_mobile_no_top.h5', NASNET_MOBILE_WEIGHT_PATH_NO_TOP, cache_subdir='models', file_hash='1ed92395b5b598bdda52abe5c0dbfd63')
            model.load_weights(weights_path)
        elif (default_size == 331):
            if include_top:
                weights_path = keras_utils.get_file('nasnet_large.h5', NASNET_LARGE_WEIGHT_PATH, cache_subdir='models', file_hash='11577c9a518f0070763c2b964a382f17')
            else:
                weights_path = keras_utils.get_file('nasnet_large_no_top.h5', NASNET_LARGE_WEIGHT_PATH_NO_TOP, cache_subdir='models', file_hash='d81d89dc07e6e56530c4e77faddd61b5')
            model.load_weights(weights_path)
        else:
            raise ValueError('ImageNet weights can only be loaded with NASNetLarge or NASNetMobile')
    elif (weights is not None):
        model.load_weights(weights)
    if old_data_format:
        backend.set_image_data_format(old_data_format)
    # NOTE(review): unlike upstream keras-applications (which returns only
    # `model`), this variant returns the output tensor as well.
    return (x, model)
class ProjectCommitDiscussionManager(RetrieveMixin, CreateMixin, RESTManager):
    """Manager for the discussions of a single project commit.

    Supports get() and create() against
    /projects/:project_id/repository/commits/:commit_id/discussions.
    """
    _path = '/projects/{project_id}/repository/commits/{commit_id}/discussions'
    _obj_cls = ProjectCommitDiscussion
    # Path parameters resolved from attributes of the parent commit object.
    _from_parent_attrs = {'project_id': 'project_id', 'commit_id': 'id'}
    _create_attrs = RequiredOptional(required=('body',), optional=('created_at',))

    def get(self, id: Union[(str, int)], lazy: bool=False, **kwargs: Any) -> ProjectCommitDiscussion:
        # Typed override so callers get the concrete discussion class.
        return cast(ProjectCommitDiscussion, super().get(id=id, lazy=lazy, **kwargs))
# BUGFIX(review): in the extracted source the decorators below appeared as
# bare statements with their '@' markers stripped (`_small_list(...)` and
# `_safe` as standalone lines, and the factory methods lacking @staticmethod
# despite taking no `self`).  The markers are restored here to match the
# upstream (pycket-style) code - confirm against the original file.
@_small_list(immutable=True, attrname='vals', factoryname='_make', unbox_num=True, nonull=True)
class ConsEnv(Env):
    """Linked-list environment frame: a small inlined list of values (storage
    generated by the `_small_list` class decorator) plus a link `_prev` to the
    enclosing frame."""
    _immutable_ = True
    _immutable_fields_ = ['_prev']
    _attrs_ = ['_prev']

    def __init__(self, prev):
        assert isinstance(prev, Env)
        self._prev = prev

    def consenv_get_size(self):
        # _get_size_list is generated by the _small_list decorator.
        return self._get_size_list()

    @staticmethod
    def make(vals, prev):
        # Collapse empty frames: an env with no values is just `prev`.
        if vals:
            return ConsEnv._make(vals, prev)
        return prev

    @staticmethod
    def make0(prev):
        return prev

    @staticmethod
    def make1(w_val, prev):
        return ConsEnv._make1(w_val, prev)

    @staticmethod
    def make2(w_val1, w_val2, prev):
        return ConsEnv._make2(w_val1, w_val2, prev)

    @staticmethod
    def make_n(n_vals, prev):
        if n_vals:
            return ConsEnv._make_n(n_vals, prev)
        return prev

    @jit.unroll_safe  # NOTE(review): source had a bare `_safe` line here - likely a mangled decorator; verify.
    def lookup(self, sym, env_structure):
        """Find `sym` in this frame, else recurse into the enclosing frame."""
        jit.promote(env_structure)
        for (i, s) in enumerate(env_structure.elems):
            if (s is sym):
                v = self._get_list(i)
                assert (v is not None)
                return v
        prev = self.get_prev(env_structure)
        return prev.lookup(sym, env_structure.prev)

    def get_prev(self, env_structure):
        # An empty structure means this frame holds nothing: stay put.
        jit.promote(env_structure)
        if env_structure.elems:
            return self._prev
        return self

    def __repr__(self):
        return ('<%s %r %r>' % (self.__class__.__name__, [x.tostring() for x in self._get_full_list()], self._prev))
# BUGFIX(review): the fixture decorator appeared in the extracted source as a
# bare call `_fixtures(FieldFixture)` with its '@' stripped, leaving the test
# uncallable by the runner.  Restored as a decorator - confirm against the
# original file.
@_fixtures(FieldFixture)
def test_required_constraint(fixture):
    """RequiredConstraint rejects empty, None and whitespace-only input, and
    accepts any input containing at least one non-whitespace character."""
    selector = 'find me'
    required_constraint = RequiredConstraint(dependency_expression=selector)
    # The dependency expression is exposed unchanged as `parameters`.
    assert (required_constraint.parameters == selector)
    with expected(RequiredConstraint):
        required_constraint.validate_input('')
    with expected(RequiredConstraint):
        required_constraint.validate_input(None)
    space = ' '
    with expected(RequiredConstraint):
        required_constraint.validate_input(space)
    with expected(RequiredConstraint):
        required_constraint.validate_input((space * 56))
    with expected(NoException):
        required_constraint.validate_input(' something valid ')
    with expected(NoException):
        required_constraint.validate_input('something else that_is_valid')
    with expected(NoException):
        required_constraint.validate_input('.')
def train_step(state, drp_rng, **model_inputs):
    """One pmapped optimisation step for the QA + classification head model.

    Pops start/end/pooled labels from `model_inputs`, computes the loss via
    `state.loss_fn`, averages loss and gradients across the 'batch' pmap
    axis, and applies the gradients.

    Returns:
        (new_state, metrics, new_dropout_rng)
    """

    def loss_fn(params):
        start_labels = model_inputs.pop('start_labels')
        end_labels = model_inputs.pop('end_labels')
        pooled_labels = model_inputs.pop('pooled_labels')
        outputs = state.apply_fn(**model_inputs, params=params, dropout_rng=drp_rng, train=True)
        (start_logits, end_logits, pooled_logits) = outputs
        return state.loss_fn(start_logits, start_labels, end_logits, end_labels, pooled_logits, pooled_labels)

    # Fresh dropout rng per step; the split key is returned for the next step.
    (drp_rng, new_drp_rng) = jax.random.split(drp_rng)
    grad_fn = jax.value_and_grad(loss_fn)
    (loss, grads) = grad_fn(state.params)
    # Average loss and gradients across devices.
    metrics = jax.lax.pmean({'loss': loss}, axis_name='batch')
    grads = jax.lax.pmean(grads, 'batch')
    state = state.apply_gradients(grads=grads)
    return (state, metrics, new_drp_rng)

# BUGFIX: the original decorator line was garbled to the bare tuple
# `(jax.pmap, axis_name='batch')` - a syntax error.  The intent (as in the
# upstream Flax examples) is `@partial(jax.pmap, axis_name='batch')`;
# wrapping after the definition is equivalent and needs no new imports.
train_step = jax.pmap(train_step, axis_name='batch')
class YieldExpr(Expression):
    """AST node for a `yield` expression.

    `expr` is the yielded value expression, or None for a bare `yield`.
    """
    __slots__ = ('expr',)
    __match_args__ = ('expr',)
    expr: (Expression | None)

    def __init__(self, expr: (Expression | None)) -> None:
        super().__init__()
        self.expr = expr

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        # Visitor-pattern dispatch.
        return visitor.visit_yield_expr(self)
def test_upload_generic_package(tmp_path, gitlab_cli, project):
    """Upload a generic package via the CLI and expect an HTTP 201 response.

    NOTE(review): `file_name`, `file_content`, `package_name` and
    `package_version` are module-level constants defined outside this excerpt.
    """
    path = (tmp_path / file_name)
    path.write_text(file_content)
    cmd = ['-v', 'generic-package', 'upload', '--project-id', project.id, '--package-name', package_name, '--path', path, '--package-version', package_version, '--file-name', file_name]
    ret = gitlab_cli(cmd)
    assert ('201 Created' in ret.stdout)
class Transformation():
    """Isoprobabilistic transformation between correlated and independent
    standard-normal space, based on a decomposition of the correlation
    matrix Ro.

    `T` maps correlated standard normals z to independent u (u = T z) and
    `inv_T` maps back; both are set by `compute(Ro)`, which must be called
    before x_to_u / u_to_x / jacobian are used.
    """

    def __init__(self, transform_type=None):
        # Supported decompositions; default is Cholesky.
        self.transform_types = ['cholesky', 'svd']
        self.transform_type = transform_type
        if (self.transform_type is None):
            self.transform_type = 'cholesky'
        if (self.transform_type not in self.transform_types):
            raise ValueError('Undefined transformation type')
        self.T = None      # forward transform matrix (set by compute())
        self.inv_T = None  # inverse transform matrix (set by compute())

    def x_to_u(self, x, marg):
        """Map physical-space vector x to independent standard-normal u.

        `marg` is a sequence of marginal-distribution objects exposing
        x_to_u / u_to_x / jacobian (project types).
        """
        nrv = len(marg)
        u = np.zeros(nrv)
        for i in range(nrv):
            u[i] = marg[i].x_to_u(x[i])
        u = np.dot(self.T, u)
        return u

    def u_to_x(self, u, marg):
        """Map independent standard-normal vector u back to physical space."""
        nrv = len(marg)
        z = np.dot(self.inv_T, u)
        x = np.zeros(nrv)
        for i in range(nrv):
            x[i] = marg[i].u_to_x(z[i])
        return x

    def jacobian(self, u, x, marg):
        """Jacobian d(u)/d(x) of the transformation at the point (u, x)."""
        nrv = len(marg)
        u = np.dot(self.inv_T, u)
        J_u_x = np.zeros((nrv, nrv))
        for i in range(nrv):
            J_u_x[i][i] = marg[i].jacobian(u[i], x[i])
        J_u_x = np.dot(self.T, J_u_x)
        return J_u_x

    def compute(self, Ro):
        """Decompose correlation matrix Ro and cache T / inv_T."""
        if (self.transform_type == self.transform_types[0]):
            self._computeCholesky(Ro)
        elif (self.transform_type == self.transform_types[1]):
            self._computeSVD(Ro)
        else:
            raise ValueError('Transform type not set')

    def _computeCholesky(self, Ro):
        try:
            L = np.linalg.cholesky(Ro)
        except np.linalg.LinAlgError as e:
            # BUGFIX: LinAlgError has no `.message` attribute in Python 3;
            # report the error and re-raise instead of falling through to an
            # undefined `L`.
            print(f'Error: Cholesky decomposition: {e}')
            raise
        self.T = np.linalg.inv(L)
        self.inv_T = L

    def _computeSVD(self, Ro):
        try:
            (U, D, V) = np.linalg.svd(Ro)
        except np.linalg.LinAlgError as e:
            # BUGFIX: same `.message` issue as above; re-raise after logging.
            print(f'Error: singular value decomposition: {e}')
            raise
        sqrtD = (np.sqrt(D) * np.eye(len(D)))
        # BUGFIX: the original line `R = (U sqrtD)` was a syntax error; the
        # intended matrix square root of Ro is the product U @ sqrt(D).
        R = np.dot(U, sqrtD)
        self.T = np.linalg.inv(R)
        self.inv_T = R
def main(argv):
    """Score quantized-unit sequences with a pretrained LSTM language model.

    Reads tab-separated "<file_name>\\t<unit,unit,...>" lines, optionally
    resumes from a partially written output file, loads the fairseq LSTM
    checkpoint, and writes one "<file_name> <log-prob>" line per sequence.
    """
    args = parseArgs(argv)
    # Normalise all paths to absolute form up front.
    args.pathQuantizedUnits = abspath(args.pathQuantizedUnits)
    args.pathOutputFile = abspath(args.pathOutputFile)
    args.pathLSTMCheckpoint = abspath(args.pathLSTMCheckpoint)
    if (args.dict is not None):
        args.dict = abspath(args.dict)
    print('')
    print(f'Reading input file from {args.pathQuantizedUnits}')
    input_file_names = []
    # NOTE(review): `intput_file_seqs` is a typo for "input_..." kept as-is.
    intput_file_seqs = []
    with open(args.pathQuantizedUnits, 'r') as f:
        for line in f:
            # Each line: "<file_name>\t<comma-separated unit ids>".
            (file_name, file_seq) = line.strip().split('\t')
            file_seq = file_seq.replace(',', ' ')
            input_file_names.append(file_name)
            intput_file_seqs.append(file_seq)
    print(f'Found {len(input_file_names)} sequences!')
    pathOutputDir = dirname(args.pathOutputFile)
    if (pathOutputDir and (not exists(pathOutputDir))):
        print('')
        print(f'Creating the output directory at {pathOutputDir}')
        Path(pathOutputDir).mkdir(parents=True, exist_ok=True)
    if args.resume:
        # Resume mode: skip sequences already present in the output file,
        # verifying that the existing prefix matches the input order.
        if exists(args.pathOutputFile):
            existing_file_names = []
            with open(args.pathOutputFile, 'r') as f:
                lines = [line for line in f]
            for line in lines:
                (file_name, score) = line.strip().split()
                existing_file_names.append(file_name)
            assert (input_file_names[:len(existing_file_names)] == existing_file_names), 'The file names in the existing output file do not match the input file!!'
            input_file_names = input_file_names[len(existing_file_names):]
            intput_file_seqs = intput_file_seqs[len(existing_file_names):]
            print(f'Found existing output file, continue to compute scores of {len(intput_file_seqs)} sequences left!')
    else:
        assert (not exists(args.pathOutputFile)), f'Output file {args.pathOutputFile} already exists !!! If you want to continue computing scores, please check the --resume option.'
    # The fairseq dictionary (dict.txt) lives next to the checkpoint unless
    # an explicit --dict path was given.
    if (args.dict is None):
        pathData = dirname(args.pathLSTMCheckpoint)
    else:
        pathData = dirname(args.dict)
    assert exists(join(pathData, 'dict.txt')), f'Dictionary file (dict.txt) not found in {pathData}'
    print('')
    print(f'Loading LSTM model from {args.pathLSTMCheckpoint}...')
    print(f'Path data {pathData}')
    (model, task) = loadLSTMLMCheckpoint(args.pathLSTMCheckpoint, pathData)
    model.eval()  # inference only; disable dropout
    print('Model loaded !')
    print('')
    print(f'Computing log-probabilities and saving results to {args.pathOutputFile}...')
    _ = compute_proba_LSTM(intput_file_seqs, model, task, batch_size=args.batchSize, gpu=(not args.cpu), verbose=False, print_tokens=False, save_to=args.pathOutputFile, file_names=input_file_names)
def _cap_fees(x_list: List[Fraction], y_list: List[Fraction]) -> Tuple[(List[Fraction], List[Fraction])]:
    """Clamp a piecewise-linear curve (x_list, y_list) to non-negative y.

    Wherever the curve crosses zero between two adjacent knots, a new knot is
    inserted exactly at the root, so clamping to max(y, 0) does not distort
    the segments around the crossing.  `x_list` must be sorted ascending;
    the inputs are not mutated.
    """
    x_list = copy(x_list)
    y_list = copy(y_list)
    # BUGFIX: the original iterated `range(len(x_list) - 1)` computed before
    # any insertion, so each inserted root shifted the remaining pairs and the
    # final pair(s) were never examined - later zero crossings were missed.
    # An explicit index over the growing list examines every adjacent pair.
    i = 0
    while (i < (len(x_list) - 1)):
        (y1, y2) = (y_list[i], y_list[(i + 1)])
        if ((sign(y1) * sign(y2)) == (- 1)):
            (x1, x2) = (x_list[i], x_list[(i + 1)])
            # Linear interpolation for the exact zero crossing; x1 < root < x2
            # because both endpoints are nonzero with opposite signs.
            root = (x1 + ((abs(y1) / abs((y2 - y1))) * (x2 - x1)))
            x_list.insert((i + 1), root)
            y_list.insert((i + 1), Fraction(0))
            i += 2  # skip past the inserted zero knot
        else:
            i += 1
    y_list = [max(y, Fraction(0)) for y in y_list]
    return (x_list, y_list)
class AttrVI_ATTR_USB_BULK_IN_STATUS(RangeAttribute):
    """VISA attribute VI_ATTR_USB_BULK_IN_STATUS (ViInt16) for raw USB
    resources: readable/writable, spanning the full signed 16-bit range."""
    # Only applies to USB resources opened in RAW mode.
    resources = [(constants.InterfaceType.usb, 'RAW')]
    py_name = ''  # no Python-level attribute alias
    visa_name = 'VI_ATTR_USB_BULK_IN_STATUS'
    visa_type = 'ViInt16'
    default = NotAvailable
    (read, write, local) = (True, True, True)
    (min_value, max_value, values) = ((- 32768), 32767, None)
def id_to_probs(probs, ids, id_to_vocab, SOFTMAX=False):
    """Return the product of the probabilities of the tokens in `ids`.

    Stops at the first '</s>' token, skips '<s>' tokens, and raises on a
    falsy token id.  When SOFTMAX is true, `probs` is normalised with
    softmax() before multiplying.
    """
    if SOFTMAX:
        probs = softmax(probs)
    product = 1
    for token_id in ids:
        token = id_to_vocab[token_id]
        if (token == '</s>'):
            break  # end-of-sequence: ignore everything after it
        if (token == '<s>'):
            continue  # start-of-sequence marker contributes nothing
        if (not token_id):
            print('')
            raise Exception('id is empty!')
        product *= probs[token_id]
    return product
class Effect2734(BaseEffect):
    """Ship bonus: boosts all four sensor-strength jam types on fitted ECM
    modules, scaled by the hull's 'shipBonusCF' (Caldari Frigate) attribute."""
    type = 'passive'

    def handler(fit, ship, context, projectionRange, **kwargs):
        # NOTE(review): no `self` - the effect framework appears to invoke
        # handlers as plain functions; confirm against BaseEffect.
        for type in ('Gravimetric', 'Ladar', 'Radar', 'Magnetometric'):
            fit.modules.filteredItemBoost((lambda mod: (mod.item.group.name == 'ECM')), 'scan{0}StrengthBonus'.format(type), ship.getModifiedItemAttr('shipBonusCF'), skill='Caldari Frigate', **kwargs)
def input_fn_builder(features, seq_length, is_training, drop_remainder):
    """Build an `input_fn` closure feeding pre-tokenised features to an
    Estimator.

    The per-feature fields are materialised once here; the returned
    `input_fn(params)` constructs the tf.data pipeline on each call, with
    `params['batch_size']` supplied by the Estimator runtime.
    """
    all_input_ids = [feature.input_ids for feature in features]
    all_input_mask = [feature.input_mask for feature in features]
    all_segment_ids = [feature.segment_ids for feature in features]
    all_label_ids = [feature.label_id for feature in features]

    def input_fn(params):
        """Create the dataset of (input_ids, input_mask, segment_ids, label_ids)."""
        batch_size = params['batch_size']
        num_examples = len(features)
        dataset = tf.data.Dataset.from_tensor_slices({
            'input_ids': tf.constant(all_input_ids, shape=[num_examples, seq_length], dtype=tf.int32),
            'input_mask': tf.constant(all_input_mask, shape=[num_examples, seq_length], dtype=tf.int32),
            'segment_ids': tf.constant(all_segment_ids, shape=[num_examples, seq_length], dtype=tf.int32),
            'label_ids': tf.constant(all_label_ids, shape=[num_examples], dtype=tf.int32),
        })
        if is_training:
            # Repeat indefinitely and shuffle within a small buffer.
            dataset = dataset.repeat().shuffle(buffer_size=100)
        return dataset.batch(batch_size=batch_size, drop_remainder=drop_remainder)

    return input_fn
class FixFilter(fixer_base.ConditionalFix):
    """lib2to3 fixer: rewrite Python 2 `filter(...)` calls for Python 3.

    `filter(lambda x: p, seq)` becomes a list comprehension,
    `filter(None, seq)` becomes `[_f for _f in seq if _f]`, and any other
    call is wrapped as `list(filter(...))`.  Skipped entirely when the
    module imports `future_builtins.filter`.
    """
    BM_compatible = True
    # Three alternatives: a lambda form, the filter(None, ...) form, and the
    # generic call form; trailing trailers (calls/subscripts) are preserved.
    PATTERN = "\n    filter_lambda=power<\n        'filter'\n        trailer<\n            '('\n            arglist<\n                lambdef< 'lambda'\n                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any\n                >\n                ','\n                it=any\n            >\n            ')'\n        >\n        [extra_trailers=trailer*]\n    >\n    |\n    power<\n        'filter'\n        trailer< '(' arglist< none='None' ',' seq=any > ')' >\n        [extra_trailers=trailer*]\n    >\n    |\n    power<\n        'filter'\n        args=trailer< '(' [any] ')' >\n        [extra_trailers=trailer*]\n    >\n    "
    skip_on = 'future_builtins.filter'

    def transform(self, node, results):
        if self.should_skip(node):
            return
        # Clone any trailers that followed the filter(...) call so they can be
        # re-attached to the replacement node.
        trailers = []
        if ('extra_trailers' in results):
            for t in results['extra_trailers']:
                trailers.append(t.clone())
        if ('filter_lambda' in results):
            # filter(lambda fp: xp, it)  ->  [fp for fp in it if xp]
            xp = results.get('xp').clone()
            if (xp.type == syms.test):
                # Conditional expressions need parentheses inside the comp.
                xp.prefix = ''
                xp = parenthesize(xp)
            new = ListComp(results.get('fp').clone(), results.get('fp').clone(), results.get('it').clone(), xp)
            new = Node(syms.power, ([new] + trailers), prefix='')
        elif ('none' in results):
            # filter(None, seq)  ->  [_f for _f in seq if _f]
            new = ListComp(Name('_f'), Name('_f'), results['seq'].clone(), Name('_f'))
            new = Node(syms.power, ([new] + trailers), prefix='')
        else:
            # Generic call: wrap as list(filter(...)) unless the result is
            # already consumed by an iterating context.
            if in_special_context(node):
                return None
            args = results['args'].clone()
            new = Node(syms.power, [Name('filter'), args], prefix='')
            new = Node(syms.power, ([Name('list'), ArgList([new])] + trailers))
            new.prefix = ''
        new.prefix = node.prefix
        return new
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.