code stringlengths 281 23.7M |
|---|
class OptionSeriesWordcloudDataDatalabelsFilter(Options):
    """Filter options for wordcloud series data labels.

    NOTE(review): each getter/setter pair shared a name with no decorators,
    so the setter definition silently shadowed the getter. Restored the
    @property / @<name>.setter pattern used by Options-style config classes
    -- confirm against the sibling option classes in this package.
    """

    @property
    def operator(self):
        """Comparison operator applied by the filter (no default)."""
        return self._config_get(None)

    @operator.setter
    def operator(self, value: Any):
        self._config(value, js_type=False)

    @property
    def property(self):
        """Name of the data-point property the filter inspects (no default)."""
        return self._config_get(None)

    @property.setter
    def property(self, text: str):
        self._config(text, js_type=False)
def delete_obsolete_rows(source: ETLObjectBase, destination: ETLWritableObjectBase) -> int:
    """Delete destination rows that no longer exist in the source.

    Rows are matched on the destination's key columns; returns the row
    count reported by the DML helper.
    """
    raw_template = '\n        delete from {destination_object_representation}\n        where not exists (\n            select from {source_object_representation} s where {join}\n        )\n        '
    join_condition = primatives.make_join_to_table_conditional(
        destination.key_columns, 's', destination.object_representation
    )
    statement = SQL(raw_template).format(
        destination_object_representation=destination.object_representation,
        source_object_representation=source.object_representation,
        join=join_condition,
    )
    return sql_helpers.execute_dml_sql(statement)
def test_requeuing_checkpointable(tmp_path: Path, fast_forward_clock) -> None:
    """Checkpointable jobs are requeued on timeout until max_num_timeout is hit.

    Drives a mocked Slurm job through three signals: the first two
    (30 and 80 minutes into a 60-minute allocation) must checkpoint and
    requeue; the third must give up with UncompletedJobError.
    """
    usr_sig = submitit.JobEnvironment._usr_sig()
    fs0 = helpers.FunctionSequence()
    fs0.add(test_core._three_time, 10)
    # FunctionSequence implements the checkpoint protocol, so it can requeue.
    assert isinstance(fs0, helpers.Checkpointable)
    with mocked_slurm():
        # max_num_timeout=1: one requeue-on-timeout allowed on top of
        # requeues attributed to preemption.
        executor = slurm.SlurmExecutor(folder=tmp_path, max_num_timeout=1)
        executor.update_parameters(time=60)
        job = executor.submit(fs0)
        sig = get_signal_handler(job)
        fast_forward_clock(minutes=30)
        # Only half the allocation elapsed: treated as preemption -> requeue.
        with pytest.raises(SystemExit), mock_requeue(called_with=1):
            sig.checkpoint_and_try_requeue(usr_sig)
        sig = get_signal_handler(job)
        fast_forward_clock(minutes=50)
        # Past the 60-minute allocation: counts as a timeout, still allowed.
        with pytest.raises(SystemExit), mock_requeue(called_with=0):
            sig.checkpoint_and_try_requeue(usr_sig)
        sig = get_signal_handler(job)
        fast_forward_clock(minutes=55)
        usr_sig = slurm.SlurmJobEnvironment._usr_sig()
        # Second timeout exceeds max_num_timeout=1: no requeue, job fails.
        with mock_requeue(not_called=True), pytest.raises(utils.UncompletedJobError, match='timed-out too many times.'):
            sig.checkpoint_and_try_requeue(usr_sig)
def query_cache(cls, pk=None, pks=None):
    """Query the in-memory collection cache for *cls*.

    With *pk*, return the single cached object (or None); with *pks*,
    return a list of cached objects for the given keys; with neither,
    return the whole cached list for *cls*.
    """
    cached_lists, cached_maps = _get_cached_collections()
    # Refresh when either cache view for this class is empty.
    if not (cached_lists[cls] and cached_maps[cls]):
        update_cache(cls)
        cached_lists, cached_maps = _get_cached_collections()
    if pk is not None:
        return cached_maps[cls].get(str(pk))
    if pks is not None:
        return [cached_maps[cls].get(str(key)) for key in split_pks(pks)]
    return list(cached_lists[cls])
def test():
    """Validate the learner's spaCy Matcher pattern: ADJ NOUN NOUN?."""
    assert (len(pattern) == 3), 'The pattern should describe three tokens (three dictionaries).'
    assert (isinstance(pattern[0], dict) and isinstance(pattern[1], dict) and isinstance(pattern[2], dict)), 'Each entry in a pattern should be a dictionary.'
    assert ((len(pattern[0]) == 1) and (len(pattern[1]) == 1)), 'The first two pattern entries should have only one key.'
    # The third entry needs both the POS constraint and the 'OP' operator.
    assert (len(pattern[2]) == 2), 'The third pattern entry should have two keys.'
    # Accept either casing of the POS key.
    assert any(((pattern[0].get(key) == 'ADJ') for key in ['pos', 'POS'])), "Are you matching on the first token's part-of-speech tag with the correct label?"
    assert any(((pattern[1].get(key) == 'NOUN') for key in ['pos', 'POS'])), "Are you matching on the second token's part-of-speech tag with the correct label?"
    assert any(((pattern[2].get(key) == 'NOUN') for key in ['pos', 'POS'])), "Are you matching on the third token's part-of-speech tag with the correct label?"
    # '?' makes the third token optional.
    assert (pattern[2].get('OP') == '?'), 'Are you using the correct operator for the third token?'
    __msg__.good("Great work those were some pretty complex patterns! Let's move on to the next chapter and take a look at how to use spaCy for more advanced text analysis.")
class PatchEmbed(nn.Module):
    """Image-to-patch embedding via a strided convolution.

    Splits a square image into non-overlapping patch_size x patch_size
    patches and projects each patch to an embed_dim vector.
    """

    def __init__(self, img_size=224, patch_size=16, in_chans=3, embed_dim=768, norm_layer=None, flatten=True):
        super().__init__()
        self.img_size = img_size
        self.patch_size = patch_size
        # Assumes a square image: patches per side in each dimension.
        self.grid_size = ((img_size // patch_size), (img_size // patch_size))
        self.num_patches = (self.grid_size[0] * self.grid_size[1])
        self.flatten = flatten
        self.embed_dim = embed_dim
        # NOTE(review): Conv2dBiasFewChannels is presumably a CUDA kernel
        # specialized for small in_chans (e.g. RGB) -- confirm with backend.
        conv_op = (nn.Conv2dBiasFewChannels if (detect_target().name() == 'cuda') else nn.Conv2dBias)
        self.proj = conv_op(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size)
        # Optional normalization after projection; identity when unset.
        self.proj_norm = (norm_layer(embed_dim) if norm_layer else nn.Identity())

    def forward(self, x):
        # Input appears to be channels-last: (B, H, W, C).
        (B, H, W, C) = get_shape(x)
        x = self.proj(x)
        if self.flatten:
            # Collapse the spatial grid: (B, num_patches, embed_dim).
            x = ops.reshape()(x, [B, (- 1), self.embed_dim])
        x = self.proj_norm(x)
        return x
class _BlockStore():
    """Filesystem-backed store of data blocks, keyed by BlockUid.

    Blocks live under a two-level fan-out directory derived from the MD5 of
    the filename, to keep directory sizes manageable.
    """

    # Set of uids this store believes it has written (in-memory only).
    _block_present: Set[BlockUid]

    def __init__(self, directory: str) -> None:
        self._directory = directory
        self._block_present = set()

    def _cache_filename(self, block_uid: BlockUid) -> str:
        """Map a block uid to its on-disk path (fan-out by MD5 prefix)."""
        assert ((block_uid.left is not None) and (block_uid.right is not None))
        filename = '{:016x}-{:016x}'.format(block_uid.left, block_uid.right)
        # MD5 used only for bucketing, not for integrity.
        digest = hashlib.md5(filename.encode('ascii')).hexdigest()
        return os.path.join(self._directory, '{}/{}/{}'.format(digest[0:2], digest[2:4], filename))

    def read(self, block_uid: BlockUid, offset: int=0, length: int=None) -> bytes:
        """Read the whole block, or *length* bytes starting at *offset*.

        Raises FileNotFoundError when the block is not on disk.
        """
        filename = self._cache_filename(block_uid)
        # buffering=0: unbuffered binary I/O, reads go straight to the OS.
        with open(filename, 'rb', buffering=0) as f:
            f.seek(offset)
            if (length is None):
                return f.read()
            else:
                return f.read(length)

    def write(self, block_uid: BlockUid, data) -> None:
        """Write a complete block, creating the fan-out directory lazily."""
        filename = self._cache_filename(block_uid)
        try:
            with open(filename, 'wb', buffering=0) as f:
                f.write(data)
        except FileNotFoundError:
            # First block in this bucket: create the directory and retry.
            os.makedirs(os.path.dirname(filename), exist_ok=True)
            with open(filename, 'wb', buffering=0) as f:
                f.write(data)
        self._block_present.add(block_uid)

    def update(self, block_uid: BlockUid, offset: int, data: bytes) -> None:
        """Overwrite part of an existing block in place at *offset*."""
        filename = self._cache_filename(block_uid)
        with open(filename, 'r+b', buffering=0) as f:
            f.seek(offset)
            f.write(data)

    def rm(self, block_uid: BlockUid) -> None:
        """Remove a block; missing membership or file is not an error."""
        try:
            self._block_present.remove(block_uid)
        except KeyError:
            pass
        filename = self._cache_filename(block_uid)
        try:
            os.unlink(filename)
        except FileNotFoundError:
            pass

    def present(self, block_uid: BlockUid) -> bool:
        """True if this instance wrote the block (in-memory set, not disk)."""
        return (block_uid in self._block_present)
def transform_award_data(worker: TaskSpec, records: List[dict]) -> List[dict]:
    """Prepare award records for indexing.

    Declares the per-field JSON converters, the aggregation-key builders and
    the fields to drop, then delegates to transform_data.
    """
    json_field_converters = {
        'covid_spending_by_defc': convert_json_data_to_dict,
        'iija_spending_by_defc': convert_json_data_to_dict,
    }
    # Aggregation keys: mostly straight copies of a source column.
    agg_key_builders = {
        'funding_subtier_agency_agg_key': lambda rec: rec['funding_subtier_agency_code'],
        'funding_toptier_agency_agg_key': lambda rec: rec['funding_toptier_agency_code'],
        'pop_congressional_agg_key': lambda rec: rec['pop_congressional_code'],
        'pop_congressional_cur_agg_key': lambda rec: rec['pop_congressional_code_current'],
        'pop_county_agg_key': lambda rec: rec['pop_county_code'],
        'pop_state_agg_key': lambda rec: rec['pop_state_code'],
        'recipient_agg_key': funcs.award_recipient_agg_key,
        'recipient_location_congressional_agg_key': lambda rec: rec['recipient_location_congressional_code'],
        'recipient_location_congressional_cur_agg_key': lambda rec: rec['recipient_location_congressional_code_current'],
        'recipient_location_county_agg_key': lambda rec: rec['recipient_location_county_code'],
        'recipient_location_state_agg_key': lambda rec: rec['recipient_location_state_code'],
    }
    # Intermediate columns only needed to derive the keys above.
    fields_to_drop = [
        'recipient_levels',
        'funding_toptier_agency_id',
        'funding_subtier_agency_id',
        'recipient_location_state_name',
        'recipient_location_state_fips',
        'recipient_location_state_population',
        'recipient_location_county_population',
        'recipient_location_congressional_population',
        'pop_state_name',
        'pop_state_fips',
        'pop_state_population',
        'pop_county_population',
        'pop_congressional_population',
    ]
    return transform_data(worker, records, json_field_converters, agg_key_builders, fields_to_drop, settings.ES_ROUTING_FIELD)
class FifteenAPI(BikeShareSystem):
    """PyBikes adapter for the Fifteen bike-share feed."""

    sync = True
    meta = {'system': 'Fifteen', 'company': ['Fifteen SAS']}

    def __init__(self, tag, feed_url, meta):
        super(FifteenAPI, self).__init__(tag, meta)
        self.feed_url = feed_url

    def update(self, scraper=None):
        """Fetch the feed and rebuild self.stations.

        The feed is either a bare list of stations or a dict wrapping the
        list under data.stations with a statusCode field.
        """
        scraper = (scraper or PyBikesScraper())
        response = json.loads(scraper.request(self.feed_url))
        stations = []
        if isinstance(response, dict):
            if (response['statusCode'] != 200):
                raise Exception(('response status: %d' % response['statusCode']))
            data = response['data']['stations']
        else:
            data = response
        # Stations can appear multiple times; keep the first per parent_id.
        seen_ids = set()
        for s in data:
            if (s['parent_id'] in seen_ids):
                continue
            seen_ids.add(s['parent_id'])
            # GeoJSON-style coordinates: [lng, lat].
            lat = float(s['location']['coordinates'][1])
            lng = float(s['location']['coordinates'][0])
            name = s['label']
            bikes = int(s['info']['number_of_bikes'])
            # bike_state_of_charge may be absent; default to 0.
            extra = {'bike_state_of_charge': int(s['info'].get('bike_state_of_charge', 0)), 'bike_autonomy': int(s['info']['bike_autonomy']), 'uid': s['parent_id'], 'distance': int(s['distance'])}
            # Free-slot count is unknown for this feed (None).
            station = BikeShareStation(name, lat, lng, bikes, None, extra)
            stations.append(station)
        self.stations = stations
class RelationshipServices(ModelNormal):
    """Relationship to multiple service objects (OpenAPI-generated model).

    NOTE(review): the decorator lines in this generated model were mangled
    into bare expressions (`_property`, `_js_args_to_python_args`), which
    raise NameError when the class body executes. Restored the
    @cached_property / @convert_js_args_to_python_args pattern emitted by
    OpenAPI Python generators -- confirm against the generator template.
    """

    allowed_values = {}
    validations = {}

    @cached_property
    def additional_properties_type():
        """Types accepted for properties not listed in attribute_map."""
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        """Map of attribute name -> tuple of accepted types."""
        lazy_import()
        return {'services': (RelationshipServicesServices,)}

    @cached_property
    def discriminator():
        # No polymorphic discriminator for this model.
        return None

    attribute_map = {'services': 'services'}
    read_only_vars = {}
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing server payloads."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard unknown keys when configured to do so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct the model from keyword args; rejects positional args."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Direct construction must not set read-only (server-assigned) vars.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def test_with_initialize() -> None:
    """Compose the Hydra config with an override and check the full result."""
    # version_base=None: accept the default Hydra versioning behavior.
    with initialize(version_base=None, config_path='../hydra_app/conf'):
        cfg = compose(config_name='config', overrides=['app.user=test_user'])
        # The override replaces app.user; everything else comes from the files.
        assert (cfg == {'app': {'user': 'test_user', 'num1': 10, 'num2': 20}, 'db': {'host': 'localhost', 'port': 3306}})
def test_trees_to_dot():
    """Convert a small random forest and render it to Graphviz dot text."""
    (X, Y) = datasets.make_classification(n_classes=2, n_samples=10, random_state=1)
    model = RandomForestClassifier(n_estimators=3, max_depth=5, random_state=1)
    model.fit(X, Y)
    trees = emlearn.convert(model)
    dot = trees.to_dot(name='ffoo')
    # Fix: the hard-coded 'tmp' directory may not exist on a fresh checkout,
    # which made the write fail with FileNotFoundError before this change.
    import os
    os.makedirs('tmp', exist_ok=True)
    with open('tmp/trees.dot', 'w') as f:
        f.write(dot)
def test_websocket(dash_duo, server):
    """End-to-end check that typing into the input echoes over the websocket."""
    app = DashProxy(blueprint=ws_example_bp(), prevent_initial_callbacks=True)
    dash_duo.start_server(app)
    # Brief pause to let the websocket connection establish.
    time.sleep(0.01)
    assert (dash_duo.find_element('#msg').text == '')
    name = 'x'
    dash_duo.find_element('#input').send_keys(name)
    # The server should push back ws_response(name) within the timeout.
    dash_duo.wait_for_text_to_equal('#msg', ws_response(name), timeout=2)
class stackoverflow_question__test_case(unittest.TestCase):
    """Builds a nested taxonomy tree from flat CSV rows using benedict."""

    def test_stackoverflow_question_(self):
        from benedict import benedict as bdict
        data_source = '\nRecordID,kingdom,phylum,class,order,family,genus,species\n1,Animalia,Chordata,Mammalia,Primates,Hominidae,Homo,Homo sapiens\n2,Animalia,Chordata,Mammalia,Carnivora,Canidae,Canis,Canis\n3,Plantae,nan,Magnoliopsida,Brassicales,Brassicaceae,Arabidopsis,Arabidopsis thaliana\n4,Plantae,nan,Magnoliopsida,Fabales,Fabaceae,Phaseoulus,Phaseolus vulgaris\n'
        data_input = bdict.from_csv(data_source)
        data_output = bdict()
        ancestors_hierarchy = ['kingdom', 'phylum', 'class', 'order', 'family', 'genus', 'species']
        # Create one dotted keypath per row: kingdom.phylum.....species.
        for value in data_input['values']:
            data_output['.'.join([value[ancestor] for ancestor in ancestors_hierarchy])] = bdict()
        # Deepest paths first, so children are moved before their parents.
        keypaths = sorted(data_output.keypaths(), key=(lambda item: len(item.split('.'))), reverse=True)
        data_output['children'] = []

        def transform_data(d, key, value):
            # Give every nested dict a display name and a children list.
            if isinstance(value, dict):
                value.update({'name': key, 'children': []})
        data_output.traverse(transform_data)
        # Re-home each node under its parent's 'children' list.
        for keypath in keypaths:
            target_keypath = '.'.join((keypath.split('.')[:(- 1)] + ['children']))
            data_output[target_keypath].append(data_output.pop(keypath))
@registry.reg('rocm.groupnorm.gen_profiler')
def groupnorm_gen_profiler(func_attrs: Dict[(str, Any)], workdir: str, indent: str='  ', use_swish: bool=False) -> str:
    """Generate the ROCm profiler source for a groupnorm op.

    NOTE(review): the registration decorator had been mangled into a bare
    no-op string literal; restored the @registry.reg(...) form used for
    backend function registration -- confirm the registry import.

    func_attrs: op attributes; inputs[0] must have static dims 1..3.
    Returns the rendered profiler source via norm_common.gen_profiler.
    """
    shapes = func_attrs['inputs'][0]._attrs['shape']
    # Reduction dimensions must be compile-time constants for profiling.
    for dim_idx in (1, 2, 3):
        assert isinstance(shapes[dim_idx], IntImm), f'groupnorm requires reduction dim dim_idx={dim_idx!r} to be static'
    return norm_common.gen_profiler(func_attrs, workdir, 5, SHAPE_EVAL_TEMPLATE, EXEC_TEMPLATE, TENSOR_DECL_TEMPLATE, EXTRA_HEADERS, get_func_signature_profiler, EXTRA_CODE_TEMPLATE.render(use_swish=use_swish), PROFILER_FUNC_CALL_TEMPLATE, indent)
class TestAllTypesFromS3MockYAMLMissingS3():
    """Loading an s3:// config without an S3 handler must raise ValueError."""

    def test_missing_s3_raise(self, monkeypatch, s3):
        with monkeypatch.context() as m:
            (aws_session, s3_client) = s3
            mock_s3_bucket = 'spock-test'
            mock_s3_object = 'fake/test/bucket/pytest.load.yaml'
            # Stage a real config file in the mocked bucket.
            s3_client.create_bucket(Bucket=mock_s3_bucket)
            s3_client.upload_file('./tests/conf/yaml/test.yaml', mock_s3_bucket, mock_s3_object)
            # Point argv at the s3:// path; no S3 handler is passed to the builder.
            m.setattr(sys, 'argv', ['', '--config', f's3://{mock_s3_bucket}/{mock_s3_object}'])
            with pytest.raises(ValueError):
                config = ConfigArgBuilder(*all_configs, desc='Test Builder')
def test_wfo_compare_butadiene_cc2():
    """Wavefunction-overlap self-comparison for butadiene at the CC2 level."""
    in_path = (THIS_DIR / 'butadiene_cc2')
    geom = geom_from_xyz_file((in_path / 'buta.xyz'))
    # track=True enables excited-state tracking; wfo needs its own basis.
    turbo = Turbomole(in_path, track=True, wfo_basis='def2-svp')
    geom.set_calculator(turbo)
    # A single optimization cycle is enough to populate the wavefunction data.
    opt_kwargs = {'max_cycles': 1, 'dump': True}
    opt = SteepestDescent(geom, **opt_kwargs)
    opt.run()
    wfow = turbo.wfow
    # Comparing the overlap object against itself exercises the machinery.
    wfow.compare(wfow)
class CustomStringField(marshmallow.fields.String):
    """String field that additionally accepts integers, coercing them to str."""

    def _deserialize(self, value: object, attr: Optional[str], data: Optional[Mapping[(str, object)]], **kwargs: Dict[(str, object)]) -> str:
        # Coerce ints up front; all other values pass through unchanged.
        coerced = str(value) if isinstance(value, int) else value
        return super()._deserialize(coerced, attr, data, **kwargs)
def test_text_store__write_version_incompatible(tmp_path) -> None:
    """Writing a store whose seed_version is from the future must fail."""
    wallet_path = os.path.join(tmp_path, 'database')
    store = TextStore(wallet_path)
    try:
        # One past the newest supported version -> incompatible on write.
        store.put('seed_version', (TextStore.FINAL_SEED_VERSION + 1))
        with pytest.raises(IncompatibleWalletError):
            store.write()
    finally:
        # Always release the store, even when the expected error fires.
        store.close()
def debug_flag():
    # NOTE: deliberately no docstring -- debug_flag.__doc__ is passed as the
    # ArgumentParser description below, and the original exposed None there.
    logging.basicConfig(format=consts.LOGGING_FORMAT)
    arg_parser = argparse.ArgumentParser(description=debug_flag.__doc__)
    add_debug(arg_parser)
    parsed, _unknown = arg_parser.parse_known_args()
    # Apply the requested level to this package's root logger only.
    root_package = __package__.split('.')[0]
    logging.getLogger(root_package).setLevel(parsed.debug)
def test_stdout_report(monkeypatch):
    """The stdout report aggregates issue counts per source file by severity."""
    # The pipeline requires an API key in the environment.
    monkeypatch.setenv('MYTHX_API_KEY', 'foo')
    submission = SubmissionPipeline({'test': {'sourcePath': 'contracts/SafeMath.sol', 'contractName': 'SafeMath'}})
    submission.reports = {'contracts/SafeMath.sol': DetectedIssuesResponse.from_dict(TEST_REPORT)}
    submission.generate_stdout_report()
    # TEST_REPORT contains three LOW-severity findings for this file.
    assert (submission.stdout_report == {'contracts/SafeMath.sol': {'LOW': 3}})
    monkeypatch.delenv('MYTHX_API_KEY')
class OptionYaxisTitleStyle(Options):
    """CSS style options for the y-axis title.

    NOTE(review): the getter/setter pairs shared a name without decorators,
    so each setter shadowed its getter. Restored the @property / @<name>.setter
    pattern used by Options-style config classes.
    """

    @property
    def color(self):
        """Title text color (default '#666666')."""
        return self._config_get('#666666')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def fontSize(self):
        """Title font size (default '0.8em')."""
        return self._config_get('0.8em')

    @fontSize.setter
    def fontSize(self, num: float):
        self._config(num, js_type=False)
class BaseTenantForm(CSRFBaseForm):
    """Admin form for creating or editing a tenant."""

    name = StringField('Name', validators=[validators.InputRequired()])
    registration_allowed = BooleanField('Registration allowed', default=True)
    # require_tld=False lets intranet / localhost URLs validate.
    logo_url = URLField('Logo URL', validators=[validators.Optional(), validators.URL(require_tld=False)], filters=[empty_string_to_none], description='It will be shown on the top left of authentication pages.')
    application_url = URLField('Application URL', validators=[validators.Optional(), validators.URL(require_tld=False)], filters=[empty_string_to_none], description='URL to your application. Used to show a link going back to your application on the user dashboard.')
    # Theme is referenced by UUID and chosen from the themes endpoint.
    theme = ComboboxSelectField('UI Theme', query_endpoint_path='/admin/customization/themes/', validators=[validators.Optional(), validators.UUID()], filters=[empty_string_to_none], description='If left empty, the default theme will be used.')
    # validate_choice=False: options are loaded dynamically from the endpoint.
    oauth_providers = ComboboxSelectMultipleField('OAuth Providers', query_endpoint_path='/admin/oauth-providers/', label_attr='display_name', choices=[], validate_choice=False, description='OAuth Providers users should be allowed to use to login.')
def matmul_tile(gemmini):
    """Apply the matmul tiling schedule: split j/i/io, then reorder loops."""
    # Perfect loop splits: (loop, factor, new loop names).
    splits = (
        ('j', 4, ['jo', 'ji']),
        ('i', 8, ['io', 'i']),
        ('io', 2, ['ioo', 'io']),
    )
    for loop_name, factor, new_names in splits:
        gemmini = divide_loop(gemmini, loop_name, factor, new_names, perfect=True)
    # Hoist the jo loop above both i and io.
    for pair in ('i jo', 'io jo'):
        gemmini = old_reorder(gemmini, pair)
    return gemmini
def decode_i2c(frames):
    """Decode I2C transactions from two captured waveforms.

    frames[0] is SCL (clock), frames[1] is SDA (data); both are converted
    to TTL (0/1) sample arrays. Returns a list of I2C message objects.
    """
    results = []
    # Time axis: sample index -> x via x_trans + x_scale * index.
    x_scale = frames[0].sx
    x_trans = frames[0].tx
    scl = frames[0].to_ttl()
    sda = frames[1].to_ttl()
    # First differences: nonzero where the line changes level.
    scl_diff = (scl[1:] - scl[:(- 1)])
    sda_diff = (sda[1:] - sda[:(- 1)])
    # SDA transitions while SCL is high and stable: START/STOP candidates.
    edges = [i for i in range(len(sda_diff)) if (sda_diff[i] and scl[i] and (not scl_diff[i]))]
    for (j, start) in enumerate(edges):
        # SDA falling while SCL high -> START condition.
        if (sda_diff[start] < 0):
            try:
                stop = edges[(j + 1)]
            except IndexError:
                # No further edge: decode to the end of the capture.
                stop = len(scl_diff)
            # Sample SDA on SCL falling edges -- NOTE(review): I2C data is
            # conventionally sampled on the rising edge; confirm the capture
            # convention used here.
            mbits = [sda[i] for i in range(start, stop) if (scl_diff[i] < 0)]
            # Pack 8 data bits out of each 9-bit frame (9th bit is the ACK);
            # starting at 1 presumably skips the sample at the START edge.
            mbytes = [_pack_msb(mbits, i, 8) for i in range(1, len(mbits), 9)]
            x_start = (x_trans + (x_scale * start))
            x_stop = (x_trans + (x_scale * stop))
            if len(mbytes):
                # NOTE(review): bytes(mbits) stores one byte per *bit*; looks
                # like a raw bit record, but confirm the I2C() constructor's
                # expected contract.
                msg = I2C(x_start, x_stop, bytes(mbits), bytes(mbytes))
                results.append(msg)
    return results
class TargetingGeoLocationElectoralDistrict(AbstractObject):
    """Ad-targeting geo location: electoral district (generated API model)."""

    def __init__(self, api=None):
        super(TargetingGeoLocationElectoralDistrict, self).__init__()
        self._isTargetingGeoLocationElectoralDistrict = True
        self._api = api

    class Field(AbstractObject.Field):
        # API field names for this object type.
        country = 'country'
        deprecation_code = 'deprecation_code'
        electoral_district = 'electoral_district'
        key = 'key'
        name = 'name'

    _field_types = {'country': 'string', 'deprecation_code': 'string', 'electoral_district': 'string', 'key': 'string', 'name': 'string'}

    @classmethod
    def _get_field_enum_info(cls):
        # Fix: the method takes `cls` but was missing @classmethod, so the
        # SDK's class-level access pattern would have passed an instance as
        # `cls` (or failed on instance-less calls).
        field_enum_info = {}
        return field_enum_info
def extractEarthviewloungeWordpressCom(item):
    """Build a release message for a post, or None/False when not applicable.

    Returns None for previews / posts without chapter or volume info,
    False when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip preview posts and posts carrying neither a volume nor a chapter.
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    known_releases = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, series_name, tl_type in known_releases:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_get_complete_object(backend_db, common_db):
    """Summaries of descendants are merged into the requested object."""
    (fw, parent_fo, child_fo) = create_fw_with_parent_and_child()
    fw.processed_analysis['test_plugin'] = generate_analysis_entry(summary=['entry0'])
    parent_fo.processed_analysis['test_plugin'] = generate_analysis_entry(summary=['entry1', 'entry2'])
    child_fo.processed_analysis['test_plugin'] = generate_analysis_entry(summary=['entry2', 'entry3'])
    backend_db.insert_multiple_objects(fw, parent_fo, child_fo)
    # Root firmware: summary collects entries from the whole tree,
    # mapping each entry to the uids that produced it.
    result = common_db.get_complete_object_including_all_summaries(fw.uid)
    assert isinstance(result, Firmware)
    assert (result.uid == fw.uid)
    expected_summary = {'entry0': [fw.uid], 'entry1': [parent_fo.uid], 'entry2': [parent_fo.uid, child_fo.uid], 'entry3': [child_fo.uid]}
    _summary_is_equal(expected_summary, result.processed_analysis['test_plugin']['summary'])
    # Intermediate object: only its own and its child's entries appear.
    result = common_db.get_complete_object_including_all_summaries(parent_fo.uid)
    assert isinstance(result, FileObject)
    expected_summary = {'entry1': [parent_fo.uid], 'entry2': [parent_fo.uid, child_fo.uid], 'entry3': [child_fo.uid]}
    _summary_is_equal(expected_summary, result.processed_analysis['test_plugin']['summary'])
class InlineQueryResultArticle(InlineQueryResultBase):
    """Inline query result of type 'article' (link to an article or web page)."""

    def __init__(self, id, title, input_message_content, reply_markup=None, url=None, hide_url=None, description=None, thumbnail_url=None, thumbnail_width=None, thumbnail_height=None):
        super().__init__('article', id, title=title, input_message_content=input_message_content, reply_markup=reply_markup)
        self.url = url
        self.hide_url = hide_url
        self.description = description
        self.thumbnail_url = thumbnail_url
        self.thumbnail_width = thumbnail_width
        self.thumbnail_height = thumbnail_height

    # Fix: the deprecated thumb_* accessors were plain methods, so existing
    # code reading `result.thumb_url` got a bound method instead of the
    # value. Restored @property to keep attribute-style access working.
    @property
    def thumb_url(self):
        """Deprecated alias for thumbnail_url."""
        logger.warning('The parameter "thumb_url" is deprecated, use "thumbnail_url" instead')
        return self.thumbnail_url

    @property
    def thumb_width(self):
        """Deprecated alias for thumbnail_width."""
        logger.warning('The parameter "thumb_width" is deprecated, use "thumbnail_width" instead')
        return self.thumbnail_width

    @property
    def thumb_height(self):
        """Deprecated alias for thumbnail_height."""
        logger.warning('The parameter "thumb_height" is deprecated, use "thumbnail_height" instead')
        return self.thumbnail_height

    def to_dict(self):
        """Serialize to the API payload, omitting unset optional fields."""
        json_dict = super().to_dict()
        if self.url:
            json_dict['url'] = self.url
        if self.hide_url:
            json_dict['hide_url'] = self.hide_url
        if self.description:
            json_dict['description'] = self.description
        if self.thumbnail_url:
            json_dict['thumbnail_url'] = self.thumbnail_url
        if self.thumbnail_width:
            json_dict['thumbnail_width'] = self.thumbnail_width
        if self.thumbnail_height:
            json_dict['thumbnail_height'] = self.thumbnail_height
        return json_dict
class RCareWorld(RCareWorldBaseEnv):
    """Simulation environment managing robots, objects, humans, cameras etc."""

    def __init__(self, executable_file: str=None, scene_file: str=None, custom_channels: list=None, assets: list=None, **kwargs):
        # Fix: list defaults were shared mutable objects; resolve per call.
        if custom_channels is None:
            custom_channels = []
        if assets is None:
            assets = []
        super().__init__(executable_file=executable_file, scene_file=scene_file, custom_channels=custom_channels, assets=assets, **kwargs)
        # Registries of simulation entities, keyed by integer id.
        self.robot_dict = {}
        self.object_dict = {}
        self.camera_dict = {}
        self.human_dict = {}
        self.lighting_dict = {}
        self.sensor_dict = {}
        self._step()

    def create_robot(self, id: int, gripper_list: list=None, robot_name: str=None, urdf_path: str=None, base_pos: list=None, base_orn=None):
        """Register a robot (built-in by name, or loaded from a URDF).

        Fix: the mutable list defaults are now created per call, and the
        wrong '-> None' annotation is dropped (a Robot is returned).
        """
        if base_pos is None:
            base_pos = [0, 0, 0]
        if base_orn is None:
            base_orn = [(- 0.707107), (- 0.707107), (- 0.707107), 0.707107]
        if (urdf_path is None):
            self.robot_dict[id] = Robot(self, id=id, gripper_id=gripper_list, robot_name=robot_name, base_pose=base_pos, base_orientation=base_orn)
        else:
            self.robot_dict[id] = Robot(self, id=id, gripper_id=gripper_list, urdf_path=urdf_path, base_pose=base_pos, base_orientation=base_orn)
        this_robot = self.robot_dict[id]
        return this_robot

    def create_object(self, id: int, name: str, is_in_scene: bool):
        """Register a plain scene object and return its handle."""
        self.object_dict[id] = RCareWorldBaseObject(self, id, name, is_in_scene)
        this_object = self.object_dict[id]
        return this_object

    def create_human(self, id: int, name: str, is_in_scene: bool):
        """Register a human avatar and return its handle."""
        self.human_dict[id] = Human(self, id, name, is_in_scene)
        this_human = self.human_dict[id]
        return this_human

    def create_skin(self, id: int, name: str, is_in_scene: bool):
        """Create the (single) skin sensor and return it."""
        self.skin = Skin(self, id, name, is_in_scene)
        return self.skin

    def create_camera(self, id: int, name: str, intrinsic_matrix: list=None, width: int=480, height: int=480, fov: float=60, is_in_scene: bool=False):
        """Register a camera and return its handle (mutable default fixed)."""
        if intrinsic_matrix is None:
            intrinsic_matrix = [600, 0, 0, 0, 600, 0, 240, 240, 1]
        self.camera_dict[id] = Camera(self, id, name, intrinsic_matrix, width, height, fov, is_in_scene)
        this_camera = self.camera_dict[id]
        return this_camera

    def close(self):
        super().close()

    def ignoreLayerCollision(self, layer1: int, layer2: int, ignore: bool):
        """Enable/disable collision checks between two physics layers."""
        self.asset_channel.set_action('IgnoreLayerCollision', layer1=layer1, layer2=layer2, ignore=ignore)

    def getCurrentCollisionPairs(self):
        """Step the sim and return the current list of colliding pairs."""
        self.asset_channel.set_action('GetCurrentCollisionPairs')
        self._step()
        result = self.asset_channel.data['collision_pairs']
        return result

    def setGravity(self, x: float, y: float, z: float):
        self.asset_channel.set_action('SetGravity', x=x, y=y, z=z)

    def setGroundPhysicMaterial(self, bounciness: float=0, dynamic_friction: float=1, static_friction: float=1, friction_combine: int=0, bounce_combine: int=0):
        self.asset_channel.set_action('SetGroundPhysicMaterial', bounciness=bounciness, dynamic_friction=dynamic_friction, static_friction=static_friction, friction_combine=friction_combine, bounce_combine=bounce_combine)

    def setTimeScale(self, time_scale: float):
        self.asset_channel.set_action('SetTimeScale', time_scale=time_scale)

    def stepSeveralSteps(self, steps: int):
        """Advance the simulation by *steps* ticks."""
        for _ in range(steps):
            self._step()

    def debugObjectPose(self):
        self.debug_channel.set_action('DebugObjectPose')
        self._step()

    def debugObjectID(self):
        self.debug_channel.set_action('DebugObjectID')
        self._step()
class ProgressReporting():
    """Reports task progress via log messages and the OS process title."""

    def __init__(self, process_name: str) -> None:
        self._process_name = process_name
        self._old_proctitle = ''
        self.reset()

    def _setproctitle(self, proctitle: str='') -> None:
        # Compose the full title, then only touch the process title on change.
        if proctitle:
            wanted = '{} [{}]'.format(self._process_name, proctitle)
        else:
            wanted = self._process_name
        if wanted != self._old_proctitle:
            self._old_proctitle = wanted
            setproctitle.setproctitle(wanted)

    def reset(self):
        """Restore the bare process name."""
        self._setproctitle()

    def task(self, task: str):
        """Log *task* and mirror it in the process title."""
        logger.info(task)
        self._setproctitle(task)

    def task_with_version(self, task: str, *, version_uid: str) -> None:
        """Like task(), but tags the process title with the version UID."""
        logger.info(task)
        self._setproctitle('{} - {}'.format(task, version_uid))

    def task_with_blocks(self, task: str, *, version_uid: str, blocks_done: int, blocks_count: int, per_thousand: int=1000) -> None:
        """Report block progress, logging roughly per_thousand/1000 updates."""
        buckets = max(1, int(1000 / per_thousand))
        interval = max(1, blocks_count // buckets)
        at_boundary = blocks_done in (1, blocks_count)
        if per_thousand == 1000 or blocks_done % interval == 0 or at_boundary:
            message = '{} {}/{} blocks ({:.1f}%)'.format(task, blocks_done, blocks_count, blocks_done / blocks_count * 100)
            logger.info(message)
            self._setproctitle('{} - {}'.format(message, version_uid))
@given(x_input=INTEGER_ST)
@settings(deadline=DEADLINE, max_examples=5)
def test_eager_workflow_with_dynamic_exception(x_input: int):
    """Eager workflows must reject dynamic workflows with EagerException.

    NOTE(review): the two decorator lines had been mangled into bare
    parenthesized keyword expressions (a SyntaxError); restored the
    hypothesis @given/@settings pair implied by INTEGER_ST and DEADLINE.
    """

    async def eager_wf(x: int) -> typing.List[int]:
        return (await dynamic_wf(x=x))

    with pytest.raises(EagerException, match='Eager workflows currently do not work with dynamic workflows'):
        asyncio.run(eager_wf(x=x_input))
def test_order_availability_normalize_int():
    """Decorator stacking must be order-independent.

    Fix: `g` was initialized once *outside* the permutation loop, so each
    permutation decorated the already-decorated function from the previous
    iteration instead of the pristine one; reset it per permutation.
    """
    decorators = [availability(C2), normalize('step', type=int, multiple=True), normalize('param', type=str, multiple=True), normalize('level', type=int, multiple=False)]
    for order in itertools.permutations(decorators):
        print(order)
        g = level_param_step_no_default
        for decorator in order:
            g = decorator(g)
        print('---', g('1000', 'a', '24'))
        # Regardless of decorator order, types are normalized identically.
        assert (g('1000', 'a', '24') == (1000, ['a'], [24]))
def maskView(viewOrLayer, color, alpha):
    """Overlay a translucent colored mask on a view/layer (lldb helper).

    The mask is a plain UIView added to the key window, tagged with the
    target's address so unmaskView can find and remove it later.
    """
    # Remove any previous mask for this target first.
    unmaskView(viewOrLayer)
    window = fb.evaluateExpression('(UIWindow *)[[UIApplication sharedApplication] keyWindow]')
    # Convert the target's origin into window coordinates.
    origin = convertPoint(0, 0, viewOrLayer, window)
    size = fb.evaluateExpressionValue(('(CGSize)((CGRect)[(id)%s frame]).size' % viewOrLayer))
    rectExpr = ('(CGRect){{%s, %s}, {%s, %s}}' % (origin.GetChildMemberWithName('x').GetValue(), origin.GetChildMemberWithName('y').GetValue(), size.GetChildMemberWithName('width').GetValue(), size.GetChildMemberWithName('height').GetValue()))
    mask = fb.evaluateExpression(('(id)[[UIView alloc] initWithFrame:%s]' % rectExpr))
    # The tag stores the masked object's address for later lookup.
    fb.evaluateEffect(('[%s setTag:(NSInteger)%s]' % (mask, viewOrLayer)))
    fb.evaluateEffect(('[%s setBackgroundColor:[UIColor %sColor]]' % (mask, color)))
    fb.evaluateEffect(('[%s setAlpha:(CGFloat)%s]' % (mask, alpha)))
    fb.evaluateEffect(('[%s addSubview:%s]' % (window, mask)))
    # Force Core Animation to commit so the mask shows up immediately.
    flushCoreAnimationTransaction()
@app.route('/<int:key>/', methods=['GET', 'PUT', 'DELETE'])
def notes_detail(key):
    """Retrieve, update or delete a single note.

    NOTE(review): the route decorator line was mangled to a bare keyword
    tuple (a SyntaxError); restored the Flask-style @app.route implied by
    the URL rule and methods list.
    """
    if (request.method == 'PUT'):
        note = str(request.data.get('text', ''))
        notes[key] = note
        return note_repr(key)
    elif (request.method == 'DELETE'):
        # Deletion is idempotent: missing keys are ignored.
        notes.pop(key, None)
        return ('', status.HTTP_204_NO_CONTENT)
    # GET: 404 for unknown keys.
    if (key not in notes):
        raise exceptions.NotFound()
    return note_repr(key)
@functools.lru_cache(maxsize=1024)
def parse_packet_in_pkt(data, max_len, eth_pkt=None, vlan_pkt=None):
    """Parse a packet-in payload into (pkt, eth_pkt, eth_type, vlan_pkt, vlan_vid).

    NOTE(review): the original decorator line was mangled to a bare
    `_cache(maxsize=1024)` call (NameError at import); restored as
    functools.lru_cache -- confirm against upstream, and note that caching
    requires all arguments to be hashable.
    """
    pkt = None
    eth_type = None
    vlan_vid = None
    if max_len:
        data = data[:max_len]
    try:
        if (vlan_pkt is None):
            if (eth_pkt is None):
                # Parse just the Ethernet header first to learn the ethertype.
                pkt = packet.Packet(data[:ETH_HEADER_SIZE])
                eth_pkt = parse_eth_pkt(pkt)
            eth_type = eth_pkt.ethertype
            if (eth_type == valve_of.ether.ETH_TYPE_8021Q):
                # Re-parse including the 802.1Q header.
                (pkt, vlan_pkt) = packet.Packet(data[:ETH_VLAN_HEADER_SIZE])
        if vlan_pkt:
            vlan_vid = vlan_pkt.vid
            eth_type = vlan_pkt.ethertype
            if (len(data) > ETH_VLAN_HEADER_SIZE):
                # There is payload beyond the VLAN header: parse everything.
                pkt = packet.Packet(data)
    except (AttributeError, AssertionError, StreamParser.TooSmallException):
        # Truncated/garbled packets: return whatever was parsed so far.
        pass
    return (pkt, eth_pkt, eth_type, vlan_pkt, vlan_vid)
def test_import_objects_expectedValuesFromStandardDataSet(testdata_ldapsearchbof_beacon_257_objects):
    """Importing the standard 257-object LDAP dump yields known counts."""
    adds = ADDS()
    adds.import_objects(testdata_ldapsearchbof_beacon_257_objects)
    # Lookup maps built during import.
    assert (len(adds.SID_MAP) == 68)
    assert (len(adds.DN_MAP) == 68)
    assert (len(adds.DOMAIN_MAP) == 1)
    # Per-category object counts from the fixture data.
    assert (len(adds.users) == 5)
    assert (len(adds.computers) == 4)
    assert (len(adds.groups) == 53)
    assert (len(adds.domains) == 1)
    assert (len(adds.schemas) == 0)
    assert (len(adds.trustaccounts) == 0)
    assert (len(adds.ous) == 1)
    assert (len(adds.gpos) == 4)
    # Everything unclassified lands in unknown_objects.
    assert (len(adds.unknown_objects) == 189)
def extract_instances(data: List[List[Tuple[(str, str)]]], label_map, feature_map, training=False):
    """Turn tagged sentences into (feature-id array, label-id) pairs.

    Features per token: its own form (f0), the previous/next forms (f1/f2)
    and the previous tag (f3). In training mode unseen labels/features are
    assigned fresh ids (feature ids start at 1; 0 is the bias feature);
    otherwise unknown labels skip the token and unknown features are dropped.
    """
    instances = []
    for sentence in data:
        sent_len = len(sentence)
        for idx, cur in enumerate(sentence):
            prev_tok = sentence[idx - 1] if idx - 1 >= 0 else None
            next_tok = sentence[idx + 1] if idx + 1 < sent_len else None
            if training:
                y = label_map.setdefault(cur[1], len(label_map))
            else:
                y = label_map.get(cur[1], (- 1))
            if y < 0:
                # Unknown label outside training: skip this token.
                continue
            names = ['f0' + cur[0]]
            if prev_tok:
                names.append('f1' + prev_tok[0])
            if next_tok:
                names.append('f2' + next_tok[0])
            if prev_tok:
                names.append('f3' + prev_tok[1])
            if training:
                ids = [feature_map.setdefault(name, len(feature_map) + 1) for name in names]
            else:
                ids = [feature_map[name] for name in names if name in feature_map]
            # Bias feature 0 is always present.
            ids.append(0)
            instances.append((np.array(sorted(ids)), y))
    return instances
class MockStrategyDestroyNotSupported(Strategy, Generic[models.UP]):
    """Test double: an auth strategy whose destroy operation is unsupported."""

    async def read_token(self, token: Optional[str], user_manager: BaseUserManager[(models.UP, models.ID)]) -> Optional[models.UP]:
        # Never resolves a user, regardless of the token.
        return None

    async def write_token(self, user: models.UP) -> str:
        # Constant token is sufficient for the tests using this mock.
        return 'TOKEN'

    async def destroy_token(self, token: str, user: models.UP) -> None:
        raise StrategyDestroyNotSupportedError
@pytest.mark.xfail(raises=ImageComparisonFailure, reason='Matplotlib plots for reasons a different image size.')
@pytest.mark.skipif((LOW_MEMORY > memory), reason='Travis has too less memory to run it.')
def test_hicPlotMatrix_region_start_end_pca1_colormap_bigwig():
    """Plot a Pearson matrix region with a PCA bigwig track and compare images.

    NOTE(review): the two mark decorators were mangled to leading-dot
    expressions (a SyntaxError); restored the @pytest.mark.* forms implied
    by the xfail/skipif arguments.
    """
    outfile = NamedTemporaryFile(suffix='.png', prefix='hicexplorer_test', delete=False)
    args = '--matrix {0}/hicTransform/pearson_perChromosome.h5 --region chr2L:- --outFileName {1} --bigwig {2} --colorMap hot'.format(ROOT, outfile.name, (ROOT + 'hicPCA/pca1.bw')).split()
    compute(hicexplorer.hicPlotMatrix.main, args, 5)
    res = compare_images(((ROOT + 'hicPlotMatrix') + '/small_test_50kb_pearson_pca1_plot_region__colormap_hot_chr2L_15mb-20mb_bw.png'), outfile.name, tol=tolerance)
    assert (res is None), res
    if REMOVE_OUTPUT:
        os.remove(outfile.name)
def html_meta_to_nodes(data: dict[(str, Any)], document: nodes.document, line: int, reporter: Reporter) -> list[(nodes.pending | nodes.system_message)]:
    """Convert a mapping of HTML meta tags into pending docutils nodes.

    Each ``key -> value`` entry becomes a meta node whose content is
    ``str(value)``.  The key may be a space-separated list of parts: a bare
    first part becomes the ``name`` attribute, and every other part must be
    an ``attr=value`` pair.  Malformed keys or empty content produce a
    reporter error (a system_message) instead of a pending node.
    """
    if not data:
        return []
    # docutils >= 0.18 exposes ``nodes.meta`` directly; older releases keep
    # the class on the html writer's MetaBody.
    try:
        meta_cls = nodes.meta
    except AttributeError:
        from docutils.parsers.rst.directives.html import MetaBody
        meta_cls = MetaBody.meta
    result = []
    for key, value in data.items():
        text = str(value or '')
        node = meta_cls(text)
        node.source = document['source']
        node.line = line
        node['content'] = text
        try:
            if not text:
                raise ValueError('No content')
            for part_index, part in enumerate(key.split()):
                if '=' not in part:
                    if part_index == 0:
                        node['name'] = part
                        continue
                    raise ValueError(f"no '=' in {part}")
                attr_name, _, attr_val = part.partition('=')
                if not (attr_name and attr_val):
                    raise ValueError(f'malformed {part}')
                node[attr_name.lower()] = attr_val
        except ValueError as error:
            result.append(reporter.error(f'Error parsing meta tag attribute "{key}": {error}.'))
            continue
        # Defer insertion to the html writer via a pending Filter transform.
        pending = nodes.pending(Filter, {'component': 'writer', 'format': 'html', 'nodes': [node]})
        document.note_pending(pending)
        result.append(pending)
    return result
class JMSCheckin(AnswerBotCheckin):
    """Check-in bot for 'jmsembybot': answers 4-character idiom captchas by
    fuzzy-matching OCR output against a lazily downloaded idiom list."""
    ocr = ''
    # Shared idiom list, loaded once per class in start().
    idioms = None
    # Guards the one-time idiom download across concurrent instances.
    lock = asyncio.Lock()
    name = ''
    bot_username = 'jmsembybot'
    max_retries = 2
    async def start(self):
        # Lazily fetch the idiom file and cache the parsed list on the class
        # so subsequent instances skip the download.
        async with self.lock:
            if (self.idioms is None):
                file = (await get_data(self.basedir, '.txt', proxy=self.proxy, caller=self.name))
                if (not file):
                    raise FileNotFoundError('')
                with open(file, encoding='utf-8') as f:
                    # Keep only 4-character entries (idiom length).
                    self.__class__.idioms = [i for i in f.read().splitlines() if (len(i) == 4)]
        return (await super().start())
    def to_idiom(self, captcha: str):
        """Return the closest known idiom when the fuzzy match is confident
        (score > 70) or the OCR text is shorter than 4 characters; otherwise
        keep the raw OCR text."""
        (phrase, score) = process.extractOne(captcha, self.idioms)
        if ((score > 70) or (len(captcha) < 4)):
            result = phrase
            # NOTE(review): the log texts below appear to have had non-ASCII
            # content stripped during extraction.
            self.log.debug(f'[gray50] "{captcha}" -> "{result}".[/]')
        else:
            result = captcha
            self.log.debug(f'[gray50] "{captcha}" , .[/]')
        return result
    async def on_captcha(self, message: Message, captcha: str):
        """Answer a captcha by clicking each character of the resolved idiom
        on the message keyboard, with human-like random delays."""
        captcha = self.to_idiom(captcha)
        async with self.operable:
            if (not self.message):
                (await self.operable.wait())
            (await asyncio.sleep(random.uniform(3, 5)))
            for l in captcha:
                try:
                    (await self.message.click(l))
                    (await asyncio.sleep(random.uniform(3, 5)))
                except ValueError:
                    # Button for this character was not found; restart the
                    # whole captcha attempt.
                    self.log.info(f' "{l}" , .')
                    (await self.retry())
                    break
class InvocationsFetcher(FetcherClient):
    """Fetches dbt invocation metadata through elementary_cli macros."""

    def get_test_last_invocation(self, macro_args: Optional[dict]=None) -> DbtInvocationSchema:
        """Return the last test invocation matching *macro_args*.

        Falls back to an empty ``DbtInvocationSchema`` (and logs a warning)
        when no invocation is found.
        """
        response = self.dbt_runner.run_operation(macro_name='elementary_cli.get_test_last_invocation', macro_args=macro_args)
        parsed = None
        if response:
            parsed = json.loads(response[0])
        if not parsed:
            logger.warning(f'Could not find invocation by filter: {macro_args}')
            return DbtInvocationSchema()
        return DbtInvocationSchema(**parsed[0])

    def get_models_latest_invocations_data(self) -> List[DbtInvocationSchema]:
        """Return one schema object per latest model invocation."""
        response = self.dbt_runner.run_operation(macro_name='elementary_cli.get_models_latest_invocations_data')
        raw_results = json.loads(response[0]) if response else []
        return [DbtInvocationSchema(**raw_result) for raw_result in raw_results]

    def get_models_latest_invocation(self) -> Dict[(str, str)]:
        """Return a mapping of model unique_id -> latest invocation_id."""
        response = self.dbt_runner.run_operation(macro_name='elementary_cli.get_models_latest_invocation')
        rows = json.loads(response[0]) if response else []
        return {row['unique_id']: row['invocation_id'] for row in rows}
class TestPCAttestationService(unittest.TestCase):
    """Unit tests for PCAttestationService binary-match attestation.

    NOTE(review): the bare string/tuple lines above each method read like
    '@patch(...)' decorators whose '@patch' prefix was lost in extraction —
    confirm against the original source; as written they are no-op
    expressions and the mock arguments would not be injected.
    """
    ('onedocker.gateway.repository_service.RepositoryServiceGateway')
    def setUp(self, MockRepositoryServiceGateway) -> None:
        self.pc_attestation_svc = PCAttestationService()
    ('onedocker.gateway.repository_service.RepositoryServiceGateway.get_measurements')
    def test_binary_match_return_true(self, mock_get_measurements) -> None:
        """Measurements contained in the repository's set -> match is True."""
        mock_get_measurements.return_value = TEST_MEASUREMENTS
        result = self.pc_attestation_svc.binary_match(package_name=TEST_PACKAGE, version=TEST_VERSION, measurements=TEST_MEASUREMENT1)
        mock_get_measurements.assert_called_once_with(TEST_PACKAGE, TEST_VERSION)
        self.assertEqual(result, True)
    ('onedocker.gateway.repository_service.RepositoryServiceGateway.get_measurements')
    def test_binary_match_false_value(self, mock_get_measurements) -> None:
        """Same key but mismatching value -> match is False."""
        mock_get_measurements.return_value = TEST_MEASUREMENT1
        result = self.pc_attestation_svc.binary_match(package_name=TEST_PACKAGE, version=TEST_VERSION, measurements={TEST_MEASUREMENT_KEY1: 'false-value'})
        mock_get_measurements.assert_called_once_with(TEST_PACKAGE, TEST_VERSION)
        self.assertEqual(result, False)
    ('onedocker.gateway.repository_service.RepositoryServiceGateway.get_measurements')
    def test_binary_match_not_found_key(self, mock_get_measurements) -> None:
        """Key absent from repository measurements -> match is False."""
        mock_get_measurements.return_value = TEST_MEASUREMENT1
        result = self.pc_attestation_svc.binary_match(package_name=TEST_PACKAGE, version=TEST_VERSION, measurements={'not-found-key': TEST_MEASUREMENT_VALUE1})
        mock_get_measurements.assert_called_once_with(TEST_PACKAGE, TEST_VERSION)
        self.assertEqual(result, False)
    ('onedocker.service.attestation_pc.PCAttestationService.binary_match')
    def test_validate_binary_match_policy(self, mock_binary_match) -> None:
        """validate() delegates a BINARY_MATCH policy to binary_match()."""
        test_policy = AttestationPolicy(policy_name=PolicyName.BINARY_MATCH, params=PolicyParams(package_name=TEST_PACKAGE, version=TEST_VERSION))
        test_measurements = TEST_MEASUREMENT1
        mock_binary_match.return_value = True
        result = self.pc_attestation_svc.validate(test_policy, test_measurements)
        mock_binary_match.assert_called_once_with(TEST_PACKAGE, TEST_VERSION, test_measurements)
        self.assertEqual(result, True)
    def test_validate_invalid_params(self) -> None:
        """Missing package/version params -> InvalidParameterError."""
        test_params = PolicyParams(package_name=None, version=None)
        test_policy = AttestationPolicy(policy_name=PolicyName.BINARY_MATCH, params=test_params)
        with self.assertRaises(InvalidParameterError):
            self.pc_attestation_svc.validate(test_policy, TEST_MEASUREMENT1)
def get_all_board_names() -> List[str]:
    """Return every board name, sorted by name.

    Lookup order: in-process cache, then the shared cache, then the
    database.  A database hit repopulates the shared cache; every call
    refreshes the in-process cache.
    """
    names = local_cache.get('all_board_names')
    if names:
        return names
    names = cache.get(cache_key('all_board_names'))
    if names is None:
        with session() as s:
            query = s.query(BoardOrmModel).options(load_only(BoardOrmModel.name)).order_by(BoardOrmModel.name)
            names = [board.name for board in query.all()]
            s.commit()
        cache.set(cache_key('all_board_names'), names)
    local_cache.set('all_board_names', names)
    return names
class Hotel(AbstractCrudObject):
    """Auto-generated-style CRUD object for a catalog Hotel node.

    Provides the node's field names, enum value sets, and request builders
    for create/read/update/delete plus its edges.  Every request builder
    follows the same pattern: build a FacebookRequest, then either add it to
    a batch, return it pending, or execute it immediately.

    NOTE(review): ``get_endpoint`` and ``_get_field_enum_info`` take ``cls``
    but carry no ``@classmethod`` decorator — likely stripped in extraction;
    confirm against the original source.
    """
    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isHotel = True
        super(Hotel, self).__init__(fbid, parent_id, api)
    class Field(AbstractObject.Field):
        # API field-name constants for the Hotel node.
        address = 'address'
        applinks = 'applinks'
        brand = 'brand'
        category = 'category'
        category_specific_fields = 'category_specific_fields'
        currency = 'currency'
        description = 'description'
        guest_ratings = 'guest_ratings'
        hotel_id = 'hotel_id'
        id = 'id'
        image_fetch_status = 'image_fetch_status'
        images = 'images'
        lowest_base_price = 'lowest_base_price'
        loyalty_program = 'loyalty_program'
        margin_level = 'margin_level'
        name = 'name'
        phone = 'phone'
        sale_price = 'sale_price'
        sanitized_images = 'sanitized_images'
        star_rating = 'star_rating'
        unit_price = 'unit_price'
        url = 'url'
        visibility = 'visibility'
        base_price = 'base_price'
    class ImageFetchStatus():
        # Allowed values for the 'image_fetch_status' field.
        direct_upload = 'DIRECT_UPLOAD'
        fetched = 'FETCHED'
        fetch_failed = 'FETCH_FAILED'
        no_status = 'NO_STATUS'
        outdated = 'OUTDATED'
        partial_fetch = 'PARTIAL_FETCH'
    class Visibility():
        # Allowed values for the 'visibility' field.
        published = 'PUBLISHED'
        staging = 'STAGING'
    def get_endpoint(cls):
        # Edge name under the parent product catalog.
        return 'hotels'
    def api_create(self, parent_id, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        # Creation is delegated to the parent ProductCatalog's edge.
        from facebook_business.adobjects.productcatalog import ProductCatalog
        return ProductCatalog(api=self._api, fbid=parent_id).create_hotel(fields, params, batch, success, failure, pending)
    def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Build (and by default execute) a DELETE request on this node."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()
    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Build (and by default execute) a GET request for this node."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Hotel, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()
    def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """Build (and by default execute) a POST update for this node."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        # Declared parameter types for update-call validation.
        param_types = {'address': 'Object', 'applinks': 'Object', 'base_price': 'unsigned int', 'brand': 'string', 'currency': 'string', 'description': 'string', 'guest_ratings': 'list<Object>', 'images': 'list<Object>', 'name': 'string', 'phone': 'string', 'star_rating': 'float', 'url': 'string'}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Hotel, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()
    def get_augmented_realities_metadata(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """GET the /augmented_realities_metadata edge."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/augmented_realities_metadata', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()
    def get_channels_to_integrity_status(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """GET the /channels_to_integrity_status edge."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.catalogitemchannelstointegritystatus import CatalogItemChannelsToIntegrityStatus
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/channels_to_integrity_status', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=CatalogItemChannelsToIntegrityStatus, api_type='EDGE', response_parser=ObjectParser(target_class=CatalogItemChannelsToIntegrityStatus, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()
    def get_hotel_rooms(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """GET the /hotel_rooms edge."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.hotelroom import HotelRoom
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/hotel_rooms', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=HotelRoom, api_type='EDGE', response_parser=ObjectParser(target_class=HotelRoom, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()
    def get_videos_metadata(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """GET the /videos_metadata edge."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.dynamicvideometadata import DynamicVideoMetadata
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/videos_metadata', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=DynamicVideoMetadata, api_type='EDGE', response_parser=ObjectParser(target_class=DynamicVideoMetadata, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()
    # Declared API types for each field, used by response parsing.
    _field_types = {'address': 'string', 'applinks': 'CatalogItemAppLinks', 'brand': 'string', 'category': 'string', 'category_specific_fields': 'CatalogSubVerticalList', 'currency': 'string', 'description': 'string', 'guest_ratings': 'string', 'hotel_id': 'string', 'id': 'string', 'image_fetch_status': 'ImageFetchStatus', 'images': 'list<string>', 'lowest_base_price': 'string', 'loyalty_program': 'string', 'margin_level': 'unsigned int', 'name': 'string', 'phone': 'string', 'sale_price': 'string', 'sanitized_images': 'list<string>', 'star_rating': 'float', 'unit_price': 'Object', 'url': 'string', 'visibility': 'Visibility', 'base_price': 'unsigned int'}
    def _get_field_enum_info(cls):
        # Maps enum type names to their allowed values.
        field_enum_info = {}
        field_enum_info['ImageFetchStatus'] = Hotel.ImageFetchStatus.__dict__.values()
        field_enum_info['Visibility'] = Hotel.Visibility.__dict__.values()
        return field_enum_info
class Select(Options):
    """Options wrapper for a 'select' configuration block.

    NOTE(review): each option below appears twice — a zero-argument getter
    and a one-argument setter with the same name.  This mirrors the
    property/setter pattern used elsewhere in this file, whose '@property'
    and '@<name>.setter' decorators appear to have been stripped in
    extraction; as written, the second def shadows the first.  Confirm
    against the original source.
    """
    def activate(self):
        # Convenience preset: row selection with the info summary disabled;
        # returns self for fluent chaining.
        self.info = False
        self.items = 'row'
        return self
    def info(self):
        return self._config_get()
    def info(self, val):
        self._config(val)
    def blurable(self):
        return self._config_get()
    def blurable(self, val):
        self._config(val)
    def items(self):
        return self._config_get()
    def items(self, val):
        self._config(val)
    def style(self):
        return self._config_get()
    def style(self, val):
        self._config(val)
    def toggleable(self):
        return self._config_get()
    def toggleable(self, val):
        self._config(val)
def get_formatted_value_for_table_field(items, df):
    """Render a child-table field's rows as an HTML table.

    Only columns flagged ``in_list_view`` in the child doctype's meta are
    rendered.  The header row is collected during the first data row's pass
    over the columns; empty cells are emitted for missing values.
    """
    child_meta = frappe.get_meta(df.options)
    head_cells = ''
    body_rows = ''
    first_row = True
    for item in items:
        body_rows += '<tr>'
        for column in child_meta.fields:
            if column.in_list_view:
                if first_row:
                    head_cells += '<td>' + column.label + '</td>'
                value = item.get(column.fieldname)
                if value:
                    body_rows += '<td>' + str(value) + '</td>'
                else:
                    body_rows += '<td></td>'
        first_row = False
        body_rows += '</tr>'
    return "<table class='table table-condensed table-bordered'>" + head_cells + body_rows + '</table>'
def test_build_post_request(server):
    """Build a POST request manually, attach a custom header, send it, and
    verify the server echoes the headers back."""
    url = server.url.copy_with(path='/echo_headers')
    headers = {'Custom-header': 'value'}
    # NOTE(review): 'with as client:' is missing its context-manager
    # expression — likely a client constructor lost in extraction.  This
    # line is a syntax error as written; restore from the original source.
    with as client:
        request = client.build_request('POST', url)
        request.headers.update(headers)
        response = client.send(request)
        assert (response.status_code == 200)
        assert (response.url == url)
        # No body was attached, so Content-Length echoes back as '0'.
        assert (response.json()['Content-length'] == '0')
        assert (response.json()['Custom-header'] == 'value')
def test_mixed_vector_copy():
    """Copying a mixed-function vector yields an independent Vector:
    mutating the copy must not touch the source."""
    interval = UnitIntervalMesh(2)
    cg1 = FunctionSpace(interval, 'CG', 1)
    mixed_space = (cg1 * cg1)
    func = Function(mixed_space)
    func.assign(1)
    vec = func.vector()
    assert np.allclose(vec.array(), 1.0)
    duplicate = vec.copy()
    assert isinstance(duplicate, Vector)
    assert np.allclose(duplicate.array(), 1.0)
    # Overwrite the copy's local values through get_local/set_local.
    values = duplicate.get_local()
    values[:] = 10.0
    duplicate.set_local(values)
    assert np.allclose(duplicate.array(), 10.0)
    # The original vector is unchanged.
    assert np.allclose(vec.array(), 1.0)
class TypeInvitation(ModelSimple):
    """Generated OpenAPI model for a single string value fixed to
    'invitation'.

    NOTE(review): the bare '_property' and '_js_args_to_python_args' lines
    read like decorators ('@cached_property', '@convert_js_args_to_python_args')
    whose '@' prefix and full names were lost in extraction — confirm
    against the original generated source.
    """
    # The only permitted value for this enum-like model.
    allowed_values = {('value',): {'INVITATION': 'invitation'}}
    validations = {}
    additional_properties_type = None
    _nullable = False
    _property
    def openapi_types():
        # Maps attribute name to its accepted (tuple of) types.
        return {'value': (str,)}
    _property
    def discriminator():
        return None
    attribute_map = {}
    read_only_vars = set()
    _composed_schemas = None
    # Instance attributes that bypass the managed _data_store.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])
    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Accept the value positionally or as 'value='; defaults to
        'invitation'.  Raises ApiTypeError on leftover args/kwargs."""
        _path_to_item = kwargs.pop('_path_to_item', ())
        if ('value' in kwargs):
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            value = 'invitation'
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.value = value
        if kwargs:
            raise ApiTypeError(('Invalid named arguments=%s passed to %s. Remove those invalid named arguments.' % (kwargs, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used when deserializing API responses;
        mirrors __init__ but builds the instance via __new__."""
        _path_to_item = kwargs.pop('_path_to_item', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if ('value' in kwargs):
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            value = 'invitation'
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.value = value
        if kwargs:
            raise ApiTypeError(('Invalid named arguments=%s passed to %s. Remove those invalid named arguments.' % (kwargs, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        return self
# NOTE(review): '.django_db' reads like '@pytest.mark.django_db' with the
# '@pytest.mark' prefix lost in extraction — confirm against the original.
.django_db
def test_spending_by_award_recipient_zip_filter(client, monkeypatch, elasticsearch_award_index, test_data):
    """Filtering by recipient zip returns only awards whose recipient matches;
    duplicate or unmatched zips do not add results."""
    setup_elasticsearch_test(monkeypatch, elasticsearch_award_index)
    # Single matching zip -> exactly award 1.
    resp = client.post('/api/v2/search/spending_by_award/', content_type='application/json', data=json.dumps({'fields': ['Place of Performance Zip5'], 'filters': {'award_type_codes': ['A', 'B', 'C', 'D'], 'recipient_locations': [{'country': 'USA', 'zip': '00501'}]}}))
    assert (len(resp.data['results']) == 1)
    assert (resp.data['results'][0] == {'internal_id': 1, 'Place of Performance Zip5': '00001', 'generated_internal_id': 'CONT_AWD_1'})
    # A second, non-matching zip does not change the result set.
    resp = client.post('/api/v2/search/spending_by_award/', content_type='application/json', data=json.dumps({'fields': ['Place of Performance Zip5'], 'filters': {'award_type_codes': ['A', 'B', 'C', 'D'], 'recipient_locations': [{'country': 'USA', 'zip': '00501'}, {'country': 'USA', 'zip': '10000'}]}}))
    assert (len(resp.data['results']) == 1)
    assert (resp.data['results'][0] == {'internal_id': 1, 'Place of Performance Zip5': '00001', 'generated_internal_id': 'CONT_AWD_1'})
    # Two matching zips -> both awards, in either order.
    resp = client.post('/api/v2/search/spending_by_award/', content_type='application/json', data=json.dumps({'fields': ['Place of Performance Zip5'], 'filters': {'award_type_codes': ['A', 'B', 'C', 'D'], 'recipient_locations': [{'country': 'USA', 'zip': '00501'}, {'country': 'USA', 'zip': '00502'}]}}))
    possible_results = ({'internal_id': 1, 'Place of Performance Zip5': '00001', 'generated_internal_id': 'CONT_AWD_1'}, {'internal_id': 2, 'Place of Performance Zip5': '00002', 'generated_internal_id': 'CONT_AWD_2'})
    assert (len(resp.data['results']) == 2)
    assert (resp.data['results'][0] in possible_results)
    assert (resp.data['results'][1] in possible_results)
    assert (resp.data['results'][0] != resp.data['results'][1])
def construct_latest_index(session: Session) -> dict[(date, str)]:
    """Scrape the LATEST_URL listing and map each document's date to its
    data-doc href.

    Only anchors whose href starts with DATA_DOC_PREFIX are considered; the
    date is parsed from the anchor's _DT_CLASS div.  Later entries for the
    same date overwrite earlier ones.
    """
    page = session.get(LATEST_URL)
    soup = BeautifulSoup(page.text, 'html.parser')
    index: dict = {}
    for anchor in soup.find_all('a', href=True):
        href = anchor['href']
        if not href.startswith(DATA_DOC_PREFIX):
            continue
        stamp = pd.to_datetime(anchor.find('div', {'class': _DT_CLASS}).text)
        index[stamp.date()] = href
    return index
def migrate_case(storage: StorageAccessor, path: Path) -> None:
    """Migrate one legacy case directory at *path* into *storage*.

    Reads the legacy time/state maps and the per-realization parameter and
    response data files, creates a new experiment + ensemble, then migrates
    each data kind (field, gen_kw, surface, summary, gen_data) in turn.
    No-op when the guessed ensemble size is 0.
    """
    logger.info(f"Migrating case '{path.name}'")
    time_map = _load_timestamps((path / 'files/time-map'))
    state_map = _load_states((path / 'files/state-map'))
    # Parameters and responses live in separate per-realization data files.
    parameter_files = [DataFile(x) for x in path.glob('Ensemble/mod_*/PARAMETER.data_0')]
    response_files = [DataFile(x) for x in path.glob('Ensemble/mod_*/FORECAST.data_0')]
    ensemble_size = _guess_ensemble_size(*parameter_files, *response_files)
    ert_config = local_storage_get_ert_config()
    ens_config = ert_config.ensemble_config
    if (ensemble_size == 0):
        # Nothing to migrate.
        return
    # First pass: collect configuration (schemas) from the data files.
    parameter_configs: List[ParameterConfig] = []
    for data_file in parameter_files:
        parameter_configs.extend(_migrate_field_info(data_file, ens_config))
        parameter_configs.extend(_migrate_gen_kw_info(data_file, ens_config))
        parameter_configs.extend(_migrate_surface_info(data_file, ens_config))
    response_configs: List[ResponseConfig] = []
    for data_file in response_files:
        response_configs.extend(_migrate_summary_info(data_file, ens_config))
        response_configs.extend(_migrate_gen_data_info(data_file, ens_config))
    # Iteration number is encoded as a trailing '_<n>' in the case name.
    iteration = 0
    if ((match := re.search('_(\\d+)$', path.name)) is not None):
        iteration = int(match[1])
    experiment = storage.create_experiment(parameters=parameter_configs, responses=response_configs)
    ensemble = experiment.create_ensemble(name=path.name, ensemble_size=ensemble_size, iteration=iteration)
    _copy_state_map(ensemble, state_map)
    # Second pass: migrate the actual data into the new ensemble.
    for data_file in parameter_files:
        _migrate_field(ensemble, data_file, ens_config)
        _migrate_gen_kw(ensemble, data_file, ens_config)
        _migrate_surface(ensemble, data_file, ens_config)
    for data_file in response_files:
        _migrate_summary(ensemble, data_file, time_map)
        _migrate_gen_data(ensemble, data_file)
class TestUpdateUserPassword():
    """API tests for the user reset-password and force-reset-password routes.

    NOTE(review): the bare "(scope='function')" line below reads like a
    '@pytest.fixture(scope=...)' decorator whose '@pytest.fixture' prefix
    was lost in extraction — confirm against the original source.
    """
    (scope='function')
    def url_no_id(self) -> str:
        # Base users URL without a trailing user id.
        return (V1_URL_PREFIX + USERS)
    def test_update_different_user_password(self, api_client, db, url_no_id, user, application_user) -> None:
        """A user may not reset another user's password via /reset-password."""
        OLD_PASSWORD = 'oldpassword'
        NEW_PASSWORD = 'newpassword'
        application_user.update_password(db=db, new_password=OLD_PASSWORD)
        auth_header = generate_auth_header_for_user(user=application_user, scopes=[])
        resp = api_client.post(f'{url_no_id}/{user.id}/reset-password', headers=auth_header, json={'old_password': str_to_b64_str(OLD_PASSWORD), 'new_password': str_to_b64_str(NEW_PASSWORD)})
        assert (resp.status_code == HTTP_401_UNAUTHORIZED)
        assert (resp.json()['detail'] == 'You are only authorised to update your own user data.')
        # Reload from the DB to confirm the password was not changed.
        db.expunge(application_user)
        application_user = application_user.refresh_from_db(db=db)
        assert application_user.credentials_valid(password=OLD_PASSWORD)
    def test_update_user_password_invalid(self, api_client, db, url_no_id, application_user) -> None:
        """A mismatching old password is rejected with 401."""
        OLD_PASSWORD = 'oldpassword'
        NEW_PASSWORD = 'newpassword'
        application_user.update_password(db=db, new_password=OLD_PASSWORD)
        auth_header = generate_auth_header_for_user(user=application_user, scopes=[])
        resp = api_client.post(f'{url_no_id}/{application_user.id}/reset-password', headers=auth_header, json={'old_password': str_to_b64_str('mismatching password'), 'new_password': str_to_b64_str(NEW_PASSWORD)})
        assert (resp.status_code == HTTP_401_UNAUTHORIZED)
        assert (resp.json()['detail'] == 'Incorrect password.')
        db.expunge(application_user)
        application_user = application_user.refresh_from_db(db=db)
        assert application_user.credentials_valid(password=OLD_PASSWORD)
    def test_update_user_password(self, api_client, db, url_no_id, application_user) -> None:
        """A user can reset their own password with the correct old password."""
        OLD_PASSWORD = 'oldpassword'
        NEW_PASSWORD = 'newpassword'
        application_user.update_password(db=db, new_password=OLD_PASSWORD)
        auth_header = generate_auth_header_for_user(user=application_user, scopes=[])
        resp = api_client.post(f'{url_no_id}/{application_user.id}/reset-password', headers=auth_header, json={'old_password': str_to_b64_str(OLD_PASSWORD), 'new_password': str_to_b64_str(NEW_PASSWORD)})
        assert (resp.status_code == HTTP_200_OK)
        db.expunge(application_user)
        application_user = application_user.refresh_from_db(db=db)
        assert application_user.credentials_valid(password=NEW_PASSWORD)
    def test_force_update_different_user_password_without_scope(self, api_client, db, url_no_id, user, application_user) -> None:
        """force-reset-password requires the USER_PASSWORD_RESET scope."""
        NEW_PASSWORD = 'newpassword'
        old_hashed_password = user.hashed_password
        auth_header = generate_auth_header_for_user(user=application_user, scopes=[])
        resp = api_client.post(f'{url_no_id}/{user.id}/force-reset-password', headers=auth_header, json={'new_password': str_to_b64_str(NEW_PASSWORD)})
        assert (resp.status_code == HTTP_403_FORBIDDEN)
        db.expunge(user)
        user = user.refresh_from_db(db=db)
        assert (user.hashed_password == old_hashed_password), 'Password changed on the user'
    def test_force_update_different_user_password(self, api_client, db, url_no_id, user, application_user) -> None:
        """With USER_PASSWORD_RESET scope, another user's password can be forced."""
        NEW_PASSWORD = 'newpassword'
        auth_header = generate_auth_header_for_user(user=application_user, scopes=[USER_PASSWORD_RESET])
        resp = api_client.post(f'{url_no_id}/{user.id}/force-reset-password', headers=auth_header, json={'new_password': str_to_b64_str(NEW_PASSWORD)})
        assert (resp.status_code == HTTP_200_OK)
        db.expunge(user)
        user = user.refresh_from_db(db=db)
        assert user.credentials_valid(password=NEW_PASSWORD)
    def test_force_update_non_existent_user(self, api_client, url_no_id, application_user) -> None:
        """Forcing a reset on an unknown user id returns 404."""
        NEW_PASSWORD = 'newpassword'
        auth_header = generate_auth_header_for_user(user=application_user, scopes=[USER_PASSWORD_RESET])
        user_id = 'fake_user_id'
        resp = api_client.post(f'{url_no_id}/{user_id}/force-reset-password', headers=auth_header, json={'new_password': str_to_b64_str(NEW_PASSWORD)})
        assert (resp.status_code == HTTP_404_NOT_FOUND)
class PrimeField():
def __init__(self, modulus):
assert (pow(2, modulus, modulus) == 2)
self.modulus = modulus
def add(self, x, y):
return ((x + y) % self.modulus)
def sub(self, x, y):
return ((x - y) % self.modulus)
def mul(self, x, y):
return ((x * y) % self.modulus)
def exp(self, x, p):
return pow(x, p, self.modulus)
def inv(self, a):
if (a == 0):
return 0
(lm, hm) = (1, 0)
(low, high) = ((a % self.modulus), self.modulus)
while (low > 1):
r = (high // low)
(nm, new) = ((hm - (lm * r)), (high - (low * r)))
(lm, low, hm, high) = (nm, new, lm, low)
return (lm % self.modulus)
def multi_inv(self, values):
partials = [1]
for i in range(len(values)):
partials.append(self.mul(partials[(- 1)], (values[i] or 1)))
inv = self.inv(partials[(- 1)])
outputs = ([0] * len(values))
for i in range(len(values), 0, (- 1)):
outputs[(i - 1)] = (self.mul(partials[(i - 1)], inv) if values[(i - 1)] else 0)
inv = self.mul(inv, (values[(i - 1)] or 1))
return outputs
def div(self, x, y):
return self.mul(x, self.inv(y))
def eval_poly_at(self, p, x):
y = 0
power_of_x = 1
for (i, p_coeff) in enumerate(p):
y += (power_of_x * p_coeff)
power_of_x = ((power_of_x * x) % self.modulus)
return (y % self.modulus)
def add_polys(self, a, b):
return [(((a[i] if (i < len(a)) else 0) + (b[i] if (i < len(b)) else 0)) % self.modulus) for i in range(max(len(a), len(b)))]
def sub_polys(self, a, b):
return [(((a[i] if (i < len(a)) else 0) - (b[i] if (i < len(b)) else 0)) % self.modulus) for i in range(max(len(a), len(b)))]
def mul_by_const(self, a, c):
return [((x * c) % self.modulus) for x in a]
def mul_polys(self, a, b):
o = ([0] * ((len(a) + len(b)) - 1))
for (i, aval) in enumerate(a):
for (j, bval) in enumerate(b):
o[(i + j)] += (a[i] * b[j])
return [(x % self.modulus) for x in o]
def div_polys(self, a, b):
assert (len(a) >= len(b))
a = [x for x in a]
o = []
apos = (len(a) - 1)
bpos = (len(b) - 1)
diff = (apos - bpos)
while (diff >= 0):
quot = self.div(a[apos], b[bpos])
o.insert(0, quot)
for i in range(bpos, (- 1), (- 1)):
a[(diff + i)] -= (b[i] * quot)
apos -= 1
diff -= 1
return [(x % self.modulus) for x in o]
def mod_polys(self, a, b):
return self.sub_polys(a, self.mul_polys(b, self.div_polys(a, b)))[:(len(b) - 1)]
def sparse(self, coeff_dict):
o = ([0] * (max(coeff_dict.keys()) + 1))
for (k, v) in coeff_dict.items():
o[k] = (v % self.modulus)
return o
def zpoly(self, xs):
root = [1]
for x in xs:
root.insert(0, 0)
for j in range((len(root) - 1)):
root[j] -= (root[(j + 1)] * x)
return [(x % self.modulus) for x in root]
def lagrange_interp(self, xs, ys):
root = self.zpoly(xs)
assert (len(root) == (len(ys) + 1))
nums = [self.div_polys(root, [(- x), 1]) for x in xs]
denoms = [self.eval_poly_at(nums[i], xs[i]) for i in range(len(xs))]
invdenoms = self.multi_inv(denoms)
b = [0 for y in ys]
for i in range(len(xs)):
yslice = self.mul(ys[i], invdenoms[i])
for j in range(len(ys)):
if (nums[i][j] and ys[i]):
b[j] += (nums[i][j] * yslice)
return [(x % self.modulus) for x in b]
def eval_quartic(self, p, x):
xsq = ((x * x) % self.modulus)
xcb = (xsq * x)
return ((((p[0] + (p[1] * x)) + (p[2] * xsq)) + (p[3] * xcb)) % self.modulus)
def lagrange_interp_4(self, xs, ys):
(x01, x02, x03, x12, x13, x23) = ((xs[0] * xs[1]), (xs[0] * xs[2]), (xs[0] * xs[3]), (xs[1] * xs[2]), (xs[1] * xs[3]), (xs[2] * xs[3]))
m = self.modulus
eq0 = [(((- x12) * xs[3]) % m), ((x12 + x13) + x23), (((- xs[1]) - xs[2]) - xs[3]), 1]
eq1 = [(((- x02) * xs[3]) % m), ((x02 + x03) + x23), (((- xs[0]) - xs[2]) - xs[3]), 1]
eq2 = [(((- x01) * xs[3]) % m), ((x01 + x03) + x13), (((- xs[0]) - xs[1]) - xs[3]), 1]
eq3 = [(((- x01) * xs[2]) % m), ((x01 + x02) + x12), (((- xs[0]) - xs[1]) - xs[2]), 1]
e0 = self.eval_poly_at(eq0, xs[0])
e1 = self.eval_poly_at(eq1, xs[1])
e2 = self.eval_poly_at(eq2, xs[2])
e3 = self.eval_poly_at(eq3, xs[3])
e01 = (e0 * e1)
e23 = (e2 * e3)
invall = self.inv((e01 * e23))
inv_y0 = ((((ys[0] * invall) * e1) * e23) % m)
inv_y1 = ((((ys[1] * invall) * e0) * e23) % m)
inv_y2 = ((((ys[2] * invall) * e01) * e3) % m)
inv_y3 = ((((ys[3] * invall) * e01) * e2) % m)
return [(((((eq0[i] * inv_y0) + (eq1[i] * inv_y1)) + (eq2[i] * inv_y2)) + (eq3[i] * inv_y3)) % m) for i in range(4)]
def lagrange_interp_2(self, xs, ys):
    """Interpolate the line through two points.

    Returns [c0, c1] (constant, slope) reduced mod self.modulus, using a
    single field inversion for both denominators.
    """
    mod = self.modulus
    basis0 = [(-xs[1]) % mod, 1]  # vanishes at xs[1]
    basis1 = [(-xs[0]) % mod, 1]  # vanishes at xs[0]
    d0 = self.eval_poly_at(basis0, xs[0])
    d1 = self.eval_poly_at(basis1, xs[1])
    inv_both = self.inv(d0 * d1)
    w0 = ys[0] * inv_both * d1
    w1 = ys[1] * inv_both * d0
    return [(basis0[k] * w0 + basis1[k] * w1) % mod for k in range(2)]
def multi_interp_4(self, xsets, ysets):
    """Interpolate many 4-point cubics at once, batching all inversions.

    Phase 1 builds the basis polynomials and denominators for every set;
    phase 2 inverts all denominators with one multi_inv call and assembles
    the results. Returns one 4-coefficient list per input set.
    NOTE(review): assumes xsets is non-empty — `m` is bound inside the
    first loop and read in the second.
    """
    data = []
    invtargets = []
    for (xs, ys) in zip(xsets, ysets):
        (x01, x02, x03, x12, x13, x23) = ((xs[0] * xs[1]), (xs[0] * xs[2]), (xs[0] * xs[3]), (xs[1] * xs[2]), (xs[1] * xs[3]), (xs[2] * xs[3]))
        m = self.modulus
        # eqK = prod(X - xs[j] for j != K), as in lagrange_interp_4.
        eq0 = [(((- x12) * xs[3]) % m), ((x12 + x13) + x23), (((- xs[1]) - xs[2]) - xs[3]), 1]
        eq1 = [(((- x02) * xs[3]) % m), ((x02 + x03) + x23), (((- xs[0]) - xs[2]) - xs[3]), 1]
        eq2 = [(((- x01) * xs[3]) % m), ((x01 + x03) + x13), (((- xs[0]) - xs[1]) - xs[3]), 1]
        eq3 = [(((- x01) * xs[2]) % m), ((x01 + x02) + x12), (((- xs[0]) - xs[1]) - xs[2]), 1]
        e0 = self.eval_quartic(eq0, xs[0])
        e1 = self.eval_quartic(eq1, xs[1])
        e2 = self.eval_quartic(eq2, xs[2])
        e3 = self.eval_quartic(eq3, xs[3])
        data.append([ys, eq0, eq1, eq2, eq3])
        invtargets.extend([e0, e1, e2, e3])
    # One batched inversion for every denominator of every set.
    invalls = self.multi_inv(invtargets)
    o = []
    for (i, (ys, eq0, eq1, eq2, eq3)) in enumerate(data):
        invallz = invalls[(i * 4):((i * 4) + 4)]
        inv_y0 = ((ys[0] * invallz[0]) % m)
        inv_y1 = ((ys[1] * invallz[1]) % m)
        inv_y2 = ((ys[2] * invallz[2]) % m)
        inv_y3 = ((ys[3] * invallz[3]) % m)
        # The comprehension's `i` is local to it (Py3 scoping) and does not
        # clobber the enumerate index above.
        o.append([(((((eq0[i] * inv_y0) + (eq1[i] * inv_y1)) + (eq2[i] * inv_y2)) + (eq3[i] * inv_y3)) % m) for i in range(4)])
    return o
def run():
    """Emit a Verilog top module for fuzzing: one IBUF per site, plus a
    PULLUP on the fuzz-selected subset, then record the per-tile params."""
    sites = list(gen_sites())
    print('\n`define N_DI {}\n\nmodule top(input wire [`N_DI-1:0] di);\n wire [`N_DI-1:0] di_buf;\n '.format(len(sites)))
    params = {}
    # Dummy LUT keeps the design non-trivial for the toolchain.
    print('\n (* KEEP, DONT_TOUCH *)\n LUT6 dummy_lut();')
    for (idx, ((tile_name, site_name), isone)) in enumerate(zip(sites, util.gen_fuzz_states(len(sites)))):
        params[tile_name] = (site_name, isone, ('di[%u]' % idx))
        print('\n (* KEEP, DONT_TOUCH *)\n IBUF #(\n ) ibuf_{site_name} (\n .I(di[{idx}]),\n .O(di_buf[{idx}])\n );'.format(site_name=site_name, idx=idx))
        # Only fuzz-enabled sites get a pullup on their input.
        if isone:
            print('\n (* KEEP, DONT_TOUCH *)\n PULLUP #(\n ) pullup_{site_name} (\n .O(di[{idx}])\n );'.format(site_name=site_name, idx=idx))
    print('endmodule')
    write_params(params)
class Max(DCCBase):
    """DCC adapter for Autodesk 3ds Max, built on the MaxPlus/pymxs APIs.

    NOTE(review): several methods below take ``cls`` as the first parameter
    without a visible ``@classmethod`` decorator — decorators appear to have
    been stripped from this snippet; confirm against VCS history.
    """

    name = ('3dsMax%s' % get_max_version())
    extensions = ['.max']

    def get_current_version(self):
        """Return the Version matching the currently open file, or None."""
        version = None
        full_path = MaxPlus.FileManager.GetFileNameAndPath()
        if (full_path != ''):
            version = self.get_version_from_full_path(full_path)
        return version

    def open(self, version, force=False, representation=None, reference_depth=0, skip_update_check=False):
        """Open the version's file after setting units, gamma and project.

        Returns a reference resolution; an empty one when the referenced
        version check is skipped.
        """
        self.set_system_units()
        self.set_gamma_settings()
        self.set_project(version)
        MaxPlus.FileManager.Open(version.absolute_full_path, True)
        if (not skip_update_check):
            return self.check_referenced_versions()
        else:
            from anima.dcc import empty_reference_resolution
            return empty_reference_resolution()

    def save_as(self, version, run_pre_publishers=True):
        """Save the scene as the given version, syncing fps, resolution and
        (for the first version of shot tasks) the frame range."""
        current_version = self.get_current_version()
        version.update_paths()
        version.extension = self.extensions[0]
        version.created_with = self.name
        project = version.task.project
        self.set_project(version)
        # Shot-related tasks take fps/resolution from the shot itself.
        is_shot_related_task = False
        shot = None
        from stalker import Shot
        for task in version.task.parents:
            if isinstance(task, Shot):
                is_shot_related_task = True
                shot = task
                break
        if is_shot_related_task:
            self.set_fps(shot.fps)
            self.set_resolution(shot.image_format.width, shot.image_format.height, shot.image_format.pixel_aspect)
            if (version.version_number == 1):
                self.set_frame_range(shot.cut_in, shot.cut_out)
        else:
            if (version.version_number == 1):
                self.set_resolution(project.image_format.width, project.image_format.height, project.image_format.pixel_aspect)
            self.set_fps(project.fps)
        self.set_render_filename(version)
        try:
            import os
            os.makedirs(version.absolute_path)
        except OSError:
            # Path already exists.
            pass
        MaxPlus.FileManager.Save(version.absolute_full_path)
        # Record version ancestry when saving over a different version.
        if (version != current_version):
            version.parent = current_version
        from stalker.db.session import DBSession
        DBSession.add(version)
        self.append_to_recent_files(version.absolute_full_path)
        DBSession.commit()
        self.create_local_copy(version)
        return True

    def export_as(self, version):
        """Export the current selection into the given version's file."""
        import MaxPlus
        if (MaxPlus.SelectionManager.GetCount() < 1):
            raise RuntimeError('There is nothing selected to export')
        if (version.is_published and (not self.allow_publish_on_export)):
            raise RuntimeError('It is not allowed to Publish while export!!!<br><br>Export it normally. Then open the file and publish it.')
        version.update_paths()
        version.extension = self.extensions[0]
        version.created_with = self.name
        import os
        try:
            os.makedirs(version.absolute_path)
        except OSError:
            # Path already exists.
            pass
        workspace_path = version.absolute_path  # NOTE(review): unused — confirm before removing
        MaxPlus.FileManager.SaveSelected(version.absolute_full_path)
        from stalker.db.session import DBSession
        DBSession.add(version)
        DBSession.commit()
        self.create_local_copy(version)
        return True

    def import_(self, version, use_namespace=True):
        """Merge the version's scene content into the current scene."""
        from pymxs import runtime as rt
        rt.mergeMAXFile(version.absolute_full_path)
        return True

    def reference(self, version, use_namespace=True):
        """XRef all objects of the version's file into the current scene."""
        import os
        from anima.representation import Representation
        import pymxs
        rt = pymxs.runtime
        file_full_path = version.absolute_full_path
        # Base nice name without any representation suffix.
        # NOTE(review): namespace is computed but not applied below — confirm.
        namespace = os.path.basename(version.nice_name)
        namespace = namespace.split(Representation.repr_separator)[0]
        xref_objects = rt.getMAXFileObjectNames(file_full_path)
        xref = rt.xrefs.addNewXRefObject(file_full_path, xref_objects)
        # Track the new dependency on the current version.
        current_version = self.get_current_version()
        if current_version:
            current_version.inputs.append(version)
            from stalker.db.session import DBSession
            DBSession.commit()
        self.append_to_recent_files(file_full_path)
        return xref

    def deep_version_inputs_update(self):
        """Update inputs of the current version (no nested traversal here)."""
        self.update_version_inputs()

    def get_referenced_versions(self, parent_ref=None):
        """Return the Versions referenced by the scene (or by parent_ref)."""
        from pymxs import runtime as rt
        xref_file_names = []
        versions = []
        record_count = rt.objXRefMgr.recordCount
        references = []  # NOTE(review): unused — confirm before removing
        if (not parent_ref):
            # Top-level records only; nested records belong to their parents.
            for i in range(record_count):
                record = rt.objXRefMgr.GetRecord((i + 1))
                if (not record.nested):
                    file_name = record.srcFileName
                    if (file_name not in xref_file_names):
                        xref_file_names.append(file_name)
        else:
            for record in parent_ref.GetChildRecords():
                file_name = record.srcFileName
                if (file_name not in xref_file_names):
                    xref_file_names.append(file_name)
        # Map unique file paths back to Versions.
        for path in xref_file_names:
            version = self.get_version_from_full_path(path)
            if (version and (version not in versions)):
                versions.append(version)
        return versions

    def update_versions(self, reference_resolution):
        """Repoint XRef records listed in reference_resolution['update'] to
        their latest published version. Always returns an empty list."""
        from pymxs import runtime as rt
        record_count = rt.objXRefMgr.recordCount
        references = []
        for i in range(record_count):
            record = rt.objXRefMgr.GetRecord((i + 1))
            if (not record.nested):
                references.append(record)
        # Cache the last lookup: records for the same file resolve the same.
        previous_ref_path = None
        previous_full_path = None
        for reference in references:
            path = reference.srcFileName
            if (path == previous_ref_path):
                full_path = previous_full_path
            else:
                version = self.get_version_from_full_path(path)
                if (version in reference_resolution['update']):
                    latest_published_version = version.latest_published_version
                    full_path = latest_published_version.absolute_full_path
                else:
                    full_path = None
            if full_path:
                reference.srcFileName = full_path
        return []

    def remove_empty_records(cls):
        """Remove XRef records that no longer contain any objects."""
        from pymxs import runtime as rt
        record_count = rt.objXRefMgr.recordCount
        print(('record count: %s' % record_count))
        # Collect first: removing while iterating would shift record indices.
        records = []
        for i in range(record_count):
            records.append(rt.objXRefMgr.GetRecord((i + 1)))
        for record in records:
            if record.empty:
                rt.objXRefMgr.RemoveRecordFromScene(record)

    def set_resolution(cls, width, height, pixel_aspect=1.0):
        """Set render resolution and pixel aspect, refreshing the dialog."""
        rs = MaxPlus.RenderSettings
        rs.SetWidth(width)
        rs.SetHeight(height)
        rs.SetPixelAspectRatio(pixel_aspect)
        rs.UpdateDialogParameters()

    def set_render_filename(self, version):
        """Point render output (and every render element) at the version's
        Outputs folder, creating the folder if needed."""
        import os
        render_output_folder = os.path.join(version.absolute_path, 'Outputs', 'v{:03d}'.format(version.version_number), 'renders').replace('\\', '/')
        version_sig_name = self.get_significant_name(version)
        render_file_full_path = ('%(render_output_folder)s/masterLayer/%(version_sig_name)s.0000.exr' % {'render_output_folder': render_output_folder, 'version_sig_name': version_sig_name})
        rs = MaxPlus.RenderSettings
        rs.SetTimeType(2)  # NOTE(review): presumably the 'range' time type — confirm
        rs.SetSaveFile(True)
        rs.SetOutputFile(render_file_full_path)
        from pymxs import runtime as rt
        rem = rt.maxOps.GetCurRenderElementMgr()
        if rem:
            # Keep every render element pointing at the same output path.
            num_res = rem.NumRenderElements()
            for i in range(num_res):
                rem.SetRenderElementFilename(i, render_file_full_path)
        rs.UpdateDialogParameters()
        import os
        try:
            os.makedirs(os.path.dirname(render_file_full_path))
        except OSError:
            # Path already exists.
            pass

    def set_frame_range(self, start_frame=0, end_frame=100, adjust_frame_range=False):
        """Set both the animation range and the render range."""
        anim = MaxPlus.Animation
        ticks_per_frame = anim.GetTicksPerFrame()
        anim.SetStartTime((start_frame * ticks_per_frame))
        anim.SetEndTime((end_frame * ticks_per_frame))
        rs = MaxPlus.RenderSettings
        rs.SetTimeType(2)
        rs.SetStart(start_frame)
        rs.SetEnd(end_frame)

    def get_fps(self):
        """Return the current scene frame rate."""
        anim = MaxPlus.Animation
        return anim.GetFrameRate()

    def set_fps(cls, fps=25.0):
        """Set the scene frame rate (truncated to an integer)."""
        anim = MaxPlus.Animation
        anim.SetFrameRate(int(fps))

    def set_project(self, version):
        """Set the 3ds Max project folder to the version's path."""
        project_dir = version.absolute_path
        pm = MaxPlus.PathManager
        pm.SetProjectFolderDir(project_dir)
        # NOTE(review): this mapping is built but never applied — confirm
        # whether the per-folder setup was lost or intentionally disabled.
        project_structure = {'Animation': 'Outputs/sceneassets/animations', 'Archives': 'Outputs/archives', 'Autoback': 'Outputs/autoback', 'CFD': 'Outputs/sceneassets/CFD', 'Download': 'Outputs/downloads', 'Export': 'Outputs/export', 'Expression': 'Outputs/express', 'Image': 'Outputs/sceneassets/images', 'Import': 'Outputs/import', 'Matlib': 'Outputs/materiallibraries', 'Photometric': 'Outputs/sceneassets/photometric', 'Preview': 'Outputs/previews', 'ProjectFolder': '', 'Proxies': 'Outputs/proxies', 'RenderAssets': 'Outputs/sceneassets/renderassets', 'RenderOutput': 'Outputs/renderoutput', 'RenderPresets': 'Outputs/renderpresets', 'Scene': '', 'Sound': 'Outputs/sceneassets/sounds', 'UserStartupTemplates': 'Outputs/startuptemplates', 'Vpost': 'Outputs/vpost'}

    def set_system_units(self):
        """Use metric system units with centimeter display."""
        import pymxs
        rt = pymxs.runtime
        metric = rt.name('metric')
        rt.units.SystemType = metric
        rt.units.DisplayType = metric
        rt.units.MetricType = rt.name('centimeters')

    def set_gamma_settings(self, in_=2.2, out=1.0):
        """Enable gamma correction with the given file in/out gammas."""
        gamma_mgr = MaxPlus.GammaMgr
        gamma_mgr.SetFileInGamma(in_)
        gamma_mgr.SetFileOutGamma(out)
        gamma_mgr.SetDisplayGamma(2.2)
        gamma_mgr.Enable(True)
class OptionPlotoptionsDependencywheelSonificationDefaultinstrumentoptionsMappingPan(Options):
    """Generated accessor class for Highcharts sonification pan-mapping options.

    NOTE(review): each option appears as a getter/setter pair with the same
    name; the '@property' / '@<name>.setter' decorators appear to have been
    stripped from this generated code — as written, each later (setter)
    definition shadows its getter. Verify against the generator's output.
    """

    def mapFunction(self):
        # Getter: the configured mapping function (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store the raw value in the underlying config.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: the data point property mapped to pan.
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter: store the property name.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        # Setter: store the upper bound.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        # Setter: store the lower bound.
        self._config(num, js_type=False)

    def within(self):
        # Getter: the value-range context used for the mapping.
        return self._config_get(None)

    def within(self, value: Any):
        # Setter: store the range context.
        self._config(value, js_type=False)
class FileLoaderTest(ForsetiTestCase):
    """Tests for file_loader parsing, GCS access and local-file helpers.

    Fix: the mock.patch decorators had been reduced to bare expressions
    ('.object(...)' / "('os.path.isfile')"), which is invalid syntax; they
    are restored here based on each test's injected-mock parameter.
    """

    def test_get_filetype_parser_works(self):
        """Both yaml and json get a parser, in 'string' and 'file' modes."""
        self.assertIsNotNone(file_loader._get_filetype_parser('file.yaml', 'string'))
        self.assertIsNotNone(file_loader._get_filetype_parser('file.yaml', 'file'))
        self.assertIsNotNone(file_loader._get_filetype_parser('file.json', 'string'))
        self.assertIsNotNone(file_loader._get_filetype_parser('file.json', 'file'))

    def test_get_filetype_parser_raises_errors_for_invalid_ext(self):
        """Unknown extensions are rejected."""
        with self.assertRaises(errors.InvalidFileExtensionError):
            file_loader._get_filetype_parser('invalid/path', 'string')

    def test_get_filetype_parser_raises_errors_for_invalid_parser(self):
        """Unknown parser types are rejected."""
        with self.assertRaises(errors.InvalidParserTypeError):
            file_loader._get_filetype_parser('path/to/file.yaml', 'asdf')

    @mock.patch.object(google.auth, 'default', return_value=(mock.Mock(spec_set=credentials.Credentials), 'test-project'))
    def test_read_file_from_gcs_json(self, mock_default_credential):
        """JSON files read from GCS are parsed into dicts."""
        # NOTE(review): mock_responses is unused — the HTTP-mock wiring that
        # consumed it appears lost in this snippet; verify against VCS.
        mock_responses = [({'status': '200', 'content-range': '0-10/11'}, b'{"test": 1}')]
        expected_dict = {'test': 1}
        return_dict = file_loader._read_file_from_gcs('gs://fake/file.json')
        self.assertEqual(expected_dict, return_dict)

    @mock.patch.object(google.auth, 'default', return_value=(mock.Mock(spec_set=credentials.Credentials), 'test-project'))
    def test_read_file_from_gcs_yaml(self, mock_default_credential):
        """YAML files read from GCS are parsed into dicts."""
        mock_responses = [({'status': '200', 'content-range': '0-6/7'}, b'test: 1')]
        expected_dict = {'test': 1}
        return_dict = file_loader._read_file_from_gcs('gs://fake/file.yaml')
        self.assertEqual(expected_dict, return_dict)

    def test_raise_on_json_string_error(self):
        """Empty input is not valid JSON."""
        with self.assertRaises(ValueError):
            file_loader._parse_json_string('')

    @mock.patch.object(google.auth, 'default', return_value=(mock.Mock(spec_set=credentials.Credentials), 'test-project'))
    def test_copy_file_from_gcs(self, mock_default_credentials):
        """Files copied from GCS land in a readable local temp file."""
        mock_responses = [({'status': '200', 'content-range': '0-10/11'}, b'{"test": 1}')]
        try:
            file_path = file_loader.copy_file_from_gcs('gs://fake/file.json')
            with open(file_path, 'rb') as f:
                self.assertEqual(b'{"test": 1}', f.read())
        finally:
            os.unlink(file_path)

    @mock.patch.object(google.auth, 'default', return_value=(mock.Mock(spec_set=credentials.Credentials), 'test-project'))
    def test_isfile_in_gcs(self, mock_default_credentials):
        """A 200 response means the GCS object exists."""
        mock_responses = [({'status': '200', 'content-range': '0-10/11'}, b'{"test": 1}')]
        self.assertTrue(file_loader.isfile('gs://fake/file.json'))

    @mock.patch.object(google.auth, 'default', return_value=(mock.Mock(spec_set=credentials.Credentials), 'test-project'))
    def test_isfile_not_in_gcs(self, mock_default_credentials):
        """A 404 response means the GCS object does not exist."""
        mock_responses = [({'status': '404', 'content-range': '0-10/11'}, b'{"test": 1}')]
        self.assertFalse(file_loader.isfile('gs://fake/file.json'))

    @mock.patch.object(google.auth, 'default', return_value=(mock.Mock(spec_set=credentials.Credentials), 'test-project'))
    def test_file_accessible_in_gcs(self, mock_default_credentials):
        """A 200 response means the GCS object is accessible."""
        mock_responses = [({'status': '200', 'content-range': '0-10/11'}, b'{"test": 1}')]
        self.assertTrue(file_loader.access('gs://fake/file.json'))

    @mock.patch.object(google.auth, 'default', return_value=(mock.Mock(spec_set=credentials.Credentials), 'test-project'))
    def test_file_not_accessible_in_gcs(self, mock_default_credentials):
        """A 403 response means the GCS object is not accessible."""
        mock_responses = [({'status': '403', 'content-range': '0-10/11'}, b'{"test": 1}')]
        self.assertFalse(file_loader.access('gs://fake/file.json'))

    @mock.patch('os.path.isfile')
    def test_isfile_in_local(self, mock_isfile):
        mock_isfile.return_value = True
        self.assertTrue(file_loader.isfile('test_file.yaml'))

    @mock.patch('os.path.isfile')
    def test_isfile_not_in_local(self, mock_isfile):
        mock_isfile.return_value = False
        self.assertFalse(file_loader.isfile('test_file.yaml'))

    @mock.patch('os.access')
    def test_local_file_is_accessilble(self, mock_isfile):
        mock_isfile.return_value = True
        self.assertTrue(file_loader.access('test_file.yaml'))

    @mock.patch('os.access')
    def test_local_file_is_not_accessilble(self, mock_isfile):
        mock_isfile.return_value = False
        self.assertFalse(file_loader.access('test_file.yaml'))
def basic_types():
    """Return the lazily-built list of (python type, node class) pairs.

    The list is cached in the module-level ``_basic_types`` global; the
    numpy array entry is appended only when numpy is importable.
    """
    global _basic_types
    if _basic_types is None:
        # Fix: the original listed (str, StringNode) twice — presumably a
        # leftover from a Python 2 (unicode, StringNode) entry. The
        # redundant duplicate is dropped.
        _basic_types = [
            (type(None), NoneNode),
            (str, StringNode),
            (bool, BoolNode),
            (int, IntNode),
            (float, FloatNode),
            (complex, ComplexNode),
            (tuple, TupleNode),
            (list, ListNode),
            (set, SetNode),
            (dict, DictNode),
            (FunctionType, FunctionNode),
            (MethodType, MethodNode),
            (HasTraits, TraitsNode),
        ]
        try:
            from numpy import array
            _basic_types.append((type(array([1])), ArrayNode))
        except ImportError:
            # numpy is optional; skip the array node mapping.
            pass
    return _basic_types
def extractDarkness7913WordpressCom(item):
    """Parse a release from darkness7913.wordpress.com feed items.

    Returns None for previews/non-chapter posts, a release message for a
    recognized tag or title, or False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Untagged posts: fall back to matching known series by title substring.
    if item['tags'] == ['Uncategorized']:
        known_titles = (
            ('TIDSBW', 'Transmigrating into a demon spirit to blow up the entire world', 'translated'),
            ('A crowd of', 'A crowd of evil spirit lines up to confess to me', 'translated'),
            ('Master of Dungeon', 'Master of Dungeon', 'oel'),
        )
        title_lower = item['title'].lower()
        for titlecomponent, name, tl_type in known_titles:
            if titlecomponent.lower() in title_lower:
                return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_not_transonified():
    """for_test_init must import and run without its transonic backend output."""
    path_for_test = (Path(__file__).parent.parent / '_transonic_testing/for_test_init.py')
    path_output = (path_for_test.parent / f'__{backend_default}__')
    # Remove any previously generated backend directory (rank 0 only).
    if (path_output.exists() and (mpi.rank == 0)):
        rmtree(path_output)
    # Keep all MPI ranks in step before re-importing.
    mpi.barrier()
    from _transonic_testing import for_test_init
    # Reload so the module is re-executed without the compiled backend.
    importlib.reload(for_test_init)
    from _transonic_testing.for_test_init import func, func1, check_class
    # Smoke-run the pure-Python fallbacks.
    func(1, 3.14)
    func1(1.1, 2.2)
    check_class()
class MDRenderer:
    """Markdown renderer: converts a markdown-it token stream back to text.

    Instances are stateless; all rendering state travels in ``env``.
    """

    __output__ = 'md'

    def __init__(self, parser: Any = None):
        """Accept (and ignore) a parser, as the renderer protocol requires."""
        # Fix: the original def had no body, which is a syntax error.

    def render(self, tokens: Sequence[Token], options: Mapping[str, Any], env: MutableMapping, *, finalize: bool = True) -> str:
        """Render a flat token sequence by first building a syntax tree."""
        tree = RenderTreeNode(tokens)
        return self.render_tree(tree, options, env, finalize=finalize)

    def render_tree(self, tree: RenderTreeNode, options: Mapping[str, Any], env: MutableMapping, *, finalize: bool = True) -> str:
        """Render a syntax tree, honouring plugin renderers/postprocessors.

        When ``finalize`` is True, collected link references are appended
        and the output is newline-terminated.
        """
        self._prepare_env(env)
        # Merge plugin renderers and postprocessors over the defaults.
        updated_renderers = {}
        postprocessors: dict[str, tuple[Postprocess, ...]] = {}
        for plugin in options.get('parser_extension', []):
            for syntax_name, renderer_func in plugin.RENDERERS.items():
                if syntax_name in updated_renderers:
                    LOGGER.warning(f'Plugin conflict. More than one plugin defined a renderer for "{syntax_name}" syntax.')
                else:
                    updated_renderers[syntax_name] = renderer_func
            for syntax_name, pp in getattr(plugin, 'POSTPROCESSORS', {}).items():
                if syntax_name not in postprocessors:
                    postprocessors[syntax_name] = (pp,)
                else:
                    postprocessors[syntax_name] += (pp,)
        renderer_map = MappingProxyType({**DEFAULT_RENDERERS, **updated_renderers})
        postprocessor_map = MappingProxyType(postprocessors)
        render_context = RenderContext(renderer_map, postprocessor_map, options, env)
        text = tree.render(render_context)
        if finalize:
            if env['used_refs']:
                text += '\n\n'
                text += self._write_references(env)
            if text:
                text += '\n'
        assert ('\x00' not in text), 'null bytes should be removed by now'
        return text

    @staticmethod
    def _write_references(env: MutableMapping) -> str:
        """Return link reference definitions, one per line, sorted by label.

        Fix: this is invoked as ``self._write_references(env)`` but takes
        ``env`` as its first parameter — the ``@staticmethod`` decorator was
        missing, so ``env`` would have been bound as ``self``.
        """
        def label_sort_key(label: str) -> str:
            assert label, 'link label cannot be empty string'
            if all((c in string.digits) for c in label):
                # Zero-pad all-numeric labels so they sort numerically.
                label_max_len = 999
                return label.rjust(label_max_len, '0')
            return label

        ref_list = []
        for label in sorted(env['used_refs'], key=label_sort_key):
            ref = env['references'][label]
            destination = ref['href'] if ref['href'] else '<>'
            item = f'[{label.lower()}]: {destination}'
            title = ref['title']
            if title:
                title = title.replace('"', '\\"')
                item += f' "{title}"'
            ref_list.append(item)
        return '\n'.join(ref_list)

    def _prepare_env(self, env: MutableMapping) -> None:
        """Initialize mutable rendering state shared across the tree walk."""
        env['indent_width'] = 0
        env['used_refs'] = set()
def extractBooksnailTumblrCom(item):
    """Parse a release from booksnail.tumblr.com feed items.

    Returns None for previews/non-chapter posts, a release message for a
    recognized tag, or False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def upload_presentations():
    """Seed the hscic 'presentation' BigQuery table with a small fixture set."""
    table = Client('hscic').get_table('presentation')
    # (BNF code, name) fixture pairs.
    # NOTE(review): 'AAAAAA' looks deliberately malformed — confirm intent.
    presentations = [('0703021Q0AAAAAA', 'Desogestrel_Tab 75mcg'), ('0703021Q0BBAAAA', 'Cerazette_Tab 75mcg'), ('AAAAAA', 'Etynodiol Diacet_Tab 500mcg'), ('0407010Q0AAAAAA', 'Co-Proxamol_Tab 32.5mg/325mg'), ('0904010AUBBAAAA', "Mrs Crimble's_G/F W/F Cheese Bites Orig")]
    with tempfile.NamedTemporaryFile('wt') as f:
        writer = csv.writer(f)
        for presentation in presentations:
            # Pad each row with two empty trailing columns for the schema.
            row = (presentation + (None, None))
            writer.writerow(row)
        # seek(0) flushes the write buffer before the loader re-opens the
        # file by name below.
        f.seek(0)
        table.insert_rows_from_csv(f.name, schemas.PRESENTATION_SCHEMA)
class SendMixin(object):
    """Mixin adding a 'send' operation for QBO objects that support it."""

    def send(self, qb=None, send_to=None):
        """POST a send request for this object.

        qb: QuickBooks session (a fresh one is created when falsy).
        send_to: optional recipient e-mail, URL-encoded into the query.
        """
        session = qb if qb else QuickBooks()
        endpoint = '{0}/{1}/send'.format(self.qbo_object_name.lower(), self.Id)
        if send_to:
            endpoint = '{0}?sendTo={1}'.format(endpoint, quote(send_to, safe=''))
        return session.misc_operation(endpoint, None, 'application/octet-stream')
def interpret_path(path, pwd='.'):
    """Resolve a ROS-style resource path to a filesystem path.

    Handles '$(dirname)', '$(find pkg)/', 'pkg://pkg/', 'package://pkg/'
    and 'file://' forms; everything else is joined onto *pwd*.
    """
    result = path.strip().replace('$(dirname)', pwd)
    pkg_pattern = re.compile('\\$\\(find (.*?)\\)/|pkg:\\/\\/(.*?)/|package:\\/\\/(.*?)/')
    for groups in pkg_pattern.finditer(path):
        # Each alternative of the pattern has its own capture group; only
        # the one that actually matched is non-empty.
        for index in range(groups.lastindex):
            pkg_name = groups.groups()[index]
            if pkg_name:
                pkg = get_pkg_path(pkg_name)
                rospy.logdebug(("rospkg.RosPack.get_path for '%s': %s" % (pkg_name, pkg)))
                # Remainder of the path after the package prefix.
                path_suffix = path[groups.end():].rstrip("'")
                if path_suffix.startswith('/'):
                    # Absolute suffix: try roslib resource lookup first.
                    paths = roslib.packages._find_resource(pkg, path_suffix.strip(os.path.sep))
                    rospy.logdebug((" search for resource with roslib.packages._find_resource, suffix '%s': %s" % (path_suffix.strip(os.path.sep), paths)))
                    if (len(paths) > 0):
                        return paths[0]
                full_path = os.path.normpath(os.path.join(pkg, path_suffix))
                if path_suffix:
                    if (not os.path.exists(full_path)):
                        # Fall back to searching install/devel workspaces.
                        try:
                            from catkin.find_in_workspaces import find_in_workspaces
                            global SOURCE_PATH_TO_PACKAGES
                            paths = find_in_workspaces(['share'], project=pkg_name, path=path_suffix.strip(os.path.sep), first_matching_workspace_only=True, first_match_only=True, source_path_to_packages=SOURCE_PATH_TO_PACKAGES)
                            if paths:
                                return paths[0]
                        except Exception:
                            import traceback
                            rospy.logwarn(('search in install/devel space failed: %s' % traceback.format_exc()))
                    return full_path
                else:
                    # Bare package reference: return the package dir itself.
                    return ('%s%s' % (os.path.normpath(pkg), os.path.sep))
    if path.startswith('file://'):
        result = path[7:]
    return os.path.normpath(os.path.join(pwd, result))
class UNet(torch.nn.Module):
    """Small U-Net: 4-level encoder, 3-level decoder with skip
    connections, and a 1x1 conv producing a single-channel output."""

    def __init__(self):
        super().__init__()
        # Encoder blocks (attribute names preserved: they are the
        # state-dict keys callers/checkpoints depend on).
        self.layer1 = ConvBlock(1, 64)
        self.layer2 = ConvBlock(64, 128)
        self.layer3 = ConvBlock(128, 256)
        self.layer4 = ConvBlock(256, 512)
        # Decoder blocks: input channels = skip channels + upsampled channels.
        self.layer5 = ConvBlock(256 + 512, 256)
        self.layer6 = ConvBlock(128 + 256, 128)
        self.layer7 = ConvBlock(64 + 128, 64)
        self.layer8 = torch.nn.Conv2d(in_channels=64, out_channels=1, kernel_size=1, padding=0, stride=1)
        self.maxpool = torch.nn.MaxPool2d(kernel_size=2)
        self.upsample = torch.nn.Upsample(scale_factor=2, mode='bilinear')
        # Kept for interface parity; not applied in forward().
        self.sigmoid = torch.nn.Sigmoid()

    def forward(self, x):
        # Contracting path, keeping pre-pool activations for the skips.
        enc1 = self.layer1(x)
        enc2 = self.layer2(self.maxpool(enc1))
        enc3 = self.layer3(self.maxpool(enc2))
        bottleneck = self.layer4(self.maxpool(enc3))
        # Expanding path: upsample, concatenate the skip, convolve.
        dec = self.layer5(torch.cat([self.upsample(bottleneck), enc3], dim=1))
        dec = self.layer6(torch.cat([self.upsample(dec), enc2], dim=1))
        dec = self.layer7(torch.cat([self.upsample(dec), enc1], dim=1))
        # Raw logits; no sigmoid is applied here.
        return self.layer8(dec)
_event  # NOTE(review): looks like a stripped class decorator — restore from VCS
class PlanResponded(ThreadEvent):
    """Event: somebody responded to a plan in a thread."""

    # NOTE(review): attr.ib fields without a visible @attr.s decorator, and
    # _parse taking `cls`, suggest decorators were stripped from this snippet.
    plan = attr.ib(type='_models.PlanData')
    take_part = attr.ib(type=bool)
    at = attr.ib(type=datetime.datetime)

    def _parse(cls, session, data):
        """Build a PlanResponded from raw pull data (expects classmethod binding)."""
        (author, thread, at) = cls._parse_metadata(session, data)
        plan = _models.PlanData._from_pull(session, data['untypedData'])
        # 'GOING' means the author takes part in the plan.
        take_part = (data['untypedData']['guest_status'] == 'GOING')
        return cls(author=author, thread=thread, plan=plan, take_part=take_part, at=at)
_latency_scorer('LAAL')  # NOTE(review): likely a stripped decorator/registration — restore from VCS
class LAALScorer(ALScorer):
    """Length-Adaptive Average Lagging (LAAL) latency scorer."""

    def compute(self, ins: Instance):
        """Return the LAAL latency for one instance.

        Like AL, but the rate adapts to whichever of hypothesis length or
        reference length is larger.
        """
        delays, src_len, tgt_len = self.get_delays_lengths(ins)
        # Degenerate case: first output already past the source end.
        if delays[0] > src_len:
            return delays[0]
        rate = max(len(delays), tgt_len) / src_len
        total = 0
        steps = 0
        for idx, delay in enumerate(delays):
            total += delay - idx / rate
            steps = idx + 1
            # Stop accumulating once the source is fully consumed.
            if delay >= src_len:
                break
        return total / steps
def get_addr_for_intercepts(intercepts, firmware):
    """Attach resolved addresses to intercepts found in the firmware.

    Intercepts whose function has no known address are dropped with a
    warning. Note: matching intercept dicts are mutated in place (an
    'addr' key is added).
    """
    addr_by_function = get_functions_and_addresses(firmware)
    resolved = []
    for intercept in intercepts:
        name = intercept['function']
        if name in addr_by_function:
            intercept['addr'] = addr_by_function[name]
            resolved.append(intercept)
        else:
            print('WARNING: No address found for: ', name)
    return resolved
class NetCDFField(Field):
    """One 2-D field: a NetCDF variable at fixed slice indices."""

    def __init__(self, owner, ds, variable, slices, non_dim_coords):
        """Capture bbox, shape, title and time for *variable* under *slices*."""
        data_array = ds[variable]
        (self.north, self.west, self.south, self.east) = ds.bbox(variable)
        self.owner = owner
        self.variable = variable
        self.slices = slices
        self.non_dim_coords = non_dim_coords
        # The last two dimensions are taken as the spatial (y, x) grid.
        self.shape = (data_array.shape[(- 2)], data_array.shape[(- 1)])
        self.name = self.variable
        self._cache = {}
        self.title = getattr(data_array, 'long_name', getattr(data_array, 'standard_name', self.variable))
        self.time = non_dim_coords.get('valid_time', non_dim_coords.get('time'))
        for s in self.slices:
            # A time slice overrides the coordinate-derived time.
            if isinstance(s, TimeSlice):
                self.time = s.value
            if s.is_info:
                # Surface slice info in the title, e.g. " (level=500)".
                self.title += ((((' (' + s.name) + '=') + str(s.value)) + ')')

    def grid_points(self):
        """Return the grid point coordinates of this variable."""
        return DataSet(self.owner.dataset).grid_points(self.variable)

    def to_numpy(self, *args, **kwargs):
        # NOTE(review): unimplemented — raises with identifying context.
        raise Exception(self.owner.path, self.variable, self.slices)

    def plot_map(self, backend):
        """Plot this field via the backend, selecting slices by index."""
        dimensions = dict(((s.name, s.index) for s in self.slices))
        backend.bounding_box(north=self.north, south=self.south, west=self.west, east=self.east)
        backend.plot_netcdf(self.owner.path, self.variable, dimensions)

    def __repr__(self):
        return ('NetCDFField[%r,%r]' % (self.variable, self.slices))

    def to_bounding_box(self):
        """Return this field's geographic bounding box."""
        return BoundingBox(north=self.north, south=self.south, east=self.east, west=self.west)

    def metadata(self, name):
        """Return flavour metadata for *name*, memoized per attribute."""
        if (name not in self._cache):
            self._cache[name] = self.owner.flavour.metadata(self, name)
        return self._cache[name]

    def resolution(self):
        # Resolution is not derivable for generic NetCDF input.
        return 'unknown'
class TestWait(object):
    """Tests for the copr `wait` helper.

    Fix: the mock.patch decorators had been reduced to bare "('...')"
    expressions (invalid/no-op); they are restored here based on each
    test's injected-mock parameters.
    """

    @mock.patch('copr.v3.proxies.build.BuildProxy.get')
    def test_wait(self, mock_get):
        """wait() succeeds on 'succeeded' and raises on unknown states."""
        build = MunchMock(id=1, state='importing')
        mock_get.return_value = MunchMock(id=1, state='succeeded')
        assert wait(build)
        mock_get.return_value = MunchMock(id=1, state='unknown')
        with pytest.raises(CoprException) as ex:
            wait(build)
        assert ('Unknown status' in str(ex))

    @mock.patch('copr.v3.proxies.build.BuildProxy.get')
    def test_wait_list(self, mock_get):
        """wait() accepts a plain list of builds."""
        builds = [MunchMock(id=1, state='succeeded'), MunchMock(id=2, state='failed')]
        mock_get.side_effect = (lambda id: builds[(id - 1)])
        assert wait(builds)

    @mock.patch('copr.v3.proxies.build.BuildProxy.get')
    def test_wait_custom_list(self, mock_get):
        """wait() accepts a proxy-backed List of Munch objects."""
        # Fix: the copr_url value had been mangled into invalid syntax;
        # restored with a placeholder URL (exact original value unknown).
        proxy = BuildProxy({'copr_url': 'http://copr', 'login': 'test', 'token': 'test'})
        builds = List([Munch(id=1, state='succeeded'), Munch(id=2, state='failed')], proxy=proxy)
        mock_get.side_effect = (lambda id: builds[(id - 1)])
        assert wait(builds)

    @mock.patch('time.time')
    @mock.patch('copr.v3.proxies.build.BuildProxy.get')
    def test_wait_timeout(self, mock_get, mock_time):
        """A negative timeout trips immediately for a still-running build."""
        build = MunchMock(id=1, state='importing')
        mock_get.return_value = MunchMock(id=1, state='running')
        mock_time.return_value = 0
        with pytest.raises(CoprException) as ex:
            wait(build, interval=0, timeout=(- 10))
        assert ('Timeouted' in str(ex))

    @mock.patch('copr.v3.proxies.build.BuildProxy.get')
    def test_wait_callback(self, mock_get):
        """The callback is invoked while waiting."""
        build = MunchMock(id=1, state='importing')
        callback = mock.Mock()
        mock_get.return_value = MunchMock(id=1, state='failed')
        wait(build, interval=0, callback=callback)
        assert callback.called
class BatchItemResponseTests(unittest.TestCase):
    """Tests for BatchItemResponse payload storage and retrieval."""

    def test_set_object(self):
        """set_object stashes the payload and mirrors it on .Error."""
        fault = FaultError()
        response = BatchItemResponse()
        response.set_object(fault)
        self.assertEqual(response._original_object, fault)
        self.assertEqual(response.Error, fault)

    def test_get_object(self):
        """get_object hands back exactly what was stored."""
        fault = Fault()
        response = BatchItemResponse()
        response.set_object(fault)
        self.assertEqual(response.get_object(), fault)
.django_db
# NOTE(review): the line above looks like a stripped "@pytest.mark.django_db"
# decorator — as written the file does not parse; restore it from VCS.
def test_agency_failure(client):
    """Malformed spending requests must return 4xx client errors."""
    # Empty payload is missing required fields -> 422.
    resp = client.post('/api/v2/search/spending_over_time/', content_type='application/json', data=json.dumps({}))
    assert (resp.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY)
    # Rejected agency-spending filter payload -> 400.
    resp = client.post('/api/v2/spending/', content_type='application/json', data=json.dumps({'type': 'agency', 'filters': {'fy': '23', 'quarter': '3'}}))
    assert (resp.status_code == status.HTTP_400_BAD_REQUEST)
class OptionSeriesSunburstSonificationDefaultspeechoptionsMappingVolume(Options):
    """Generated accessor class for Highcharts sonification volume-mapping options.

    NOTE(review): each option appears as a getter/setter pair with the same
    name; the '@property' / '@<name>.setter' decorators appear to have been
    stripped from this generated code — as written, each later (setter)
    definition shadows its getter. Verify against the generator's output.
    """

    def mapFunction(self):
        # Getter: the configured mapping function (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store the raw value in the underlying config.
        self._config(value, js_type=False)

    def mapTo(self):
        # Getter: the data point property mapped to volume.
        return self._config_get(None)

    def mapTo(self, text: str):
        # Setter: store the property name.
        self._config(text, js_type=False)

    def max(self):
        # Getter: upper bound of the mapped range.
        return self._config_get(None)

    def max(self, num: float):
        # Setter: store the upper bound.
        self._config(num, js_type=False)

    def min(self):
        # Getter: lower bound of the mapped range.
        return self._config_get(None)

    def min(self, num: float):
        # Setter: store the lower bound.
        self._config(num, js_type=False)

    def within(self):
        # Getter: the value-range context used for the mapping.
        return self._config_get(None)

    def within(self, value: Any):
        # Setter: store the range context.
        self._config(value, js_type=False)
class Console2(Boxes):
ui_group = 'Box'
description = "\nThis box is designed as a housing for electronic projects. It has hatches that can be re-opened with simple tools. It intentionally cannot be opened with bare hands - if build with thin enough material.\n\n#### Caution\nThere is a chance that the latches of the back wall or the back wall itself interfere with the front panel or it's mounting frame/lips. The generator does not check for this. So depending on the variant chosen you might need to make the box deeper (increase y parameter) or the panel angle steeper (increase angle parameter) until there is enough room.\n\nIt's also possible that the frame of the panel interferes with the floor if the hi parameter is too small.\n\n#### Assembly instructions\nThe main body is easy to assemble by starting with the floor and then adding the four walls and (if present) the top piece.\n\nIf the back wall is removable you need to add the lips and latches. The U-shaped clamps holding the latches in place need to be clued in place without also gluing the latches themselves. Make sure the springs on the latches point inwards and the angled ends point to the side walls as shown here:\n\n\n\nIf the panel is removable you need to add the springs with the tabs to the side lips. This photo shows the variant which has the panel glued to the frame:\n\n\n\nIf space is tight you may consider not gluing the cross pieces in place and remove them after the glue-up. This may prevent the latches of the back wall and the panel from interfering with each other.\n\nThe variant using finger joints only has the two side lips without the cross bars.\n\n#### Re-Opening\n\nThe latches at the back wall lock in place when closed. To open them they need to be pressed in and can then be moved aside.\n\nTo remove the panel you have to press in the four tabs at the side. It is easiest to push them in and then pull the panel up a little bit so the tabs stay in.\n"
def __init__(self) -> None:
    """Register edge settings and the generator's command line arguments."""
    Boxes.__init__(self)
    self.addSettingsArgs(edges.FingerJointSettings, surroundingspaces=0.5)
    self.addSettingsArgs(edges.StackableSettings)
    self.buildArgParser(x=100, y=100, h=100, bottom_edge='s', outside=False)
    self.argparser.add_argument('--front_height', action='store', type=float, default=30, help='height of the front below the panel (in mm)')
    self.argparser.add_argument('--angle', action='store', type=float, default=50, help='angle of the front panel (90=upright)')
    self.argparser.add_argument('--removable_backwall', action='store', type=boolarg, default=True, help='have latches at the backwall')
    self.argparser.add_argument('--removable_panel', action='store', type=boolarg, default=True, help='The panel is held by tabs and can be removed')
    self.argparser.add_argument('--glued_panel', action='store', type=boolarg, default=True, help='the panel is glued and not held by finger joints')
def borders(self):
    """Return the side wall outline as alternating lengths and angles.

    Segments: floor (y), front (fh), sloped panel, optionally a horizontal
    top strip, and the back wall (recomputed h).
    """
    (x, y, h, fh) = (self.x, self.y, self.h, self.front_height)
    t = self.thickness
    # Panel length is limited by both remaining height and box depth.
    panel = min(((h - fh) / math.cos(math.radians((90 - self.angle)))), (y / math.cos(math.radians(self.angle))))
    top = (y - (panel * math.cos(math.radians(self.angle))))
    h = (fh + (panel * math.sin(math.radians(self.angle))))
    if (top > (0.1 * t)):
        borders = [y, 90, fh, (90 - self.angle), panel, self.angle, top, 90, h, 90]
    else:
        # Top strip too narrow to cut: the panel meets the back directly.
        borders = [y, 90, fh, (90 - self.angle), panel, (self.angle + 90), h, 90]
    return borders
def latch(self, move=None):
    """Draw one sliding latch part for the removable back wall.

    move: standard Boxes placement string (None draws in place).
    """
    t = self.thickness
    s = (0.1 * t)  # clearance; NOTE(review): unused in this method — confirm intent
    (tw, th) = ((8 * t), (3 * t))  # bounding width/height of the part
    if self.move(tw, th, move, True):  # first pass only reserves space
        return
    self.moveTo(0, (1.2 * t))
    # One closed outline including the spring arm; the segment sequence
    # defines the geometry — do not reorder.
    self.polyline(t, (- 90), (0.2 * t), 90, (2 * t), (- 90), t, 90, t, 90, t, (- 90), (3 * t), 90, t, (- 90), t, 90, t, 90, (2 * t), 90, (0.5 * t), (- 94), (4.9 * t), 94, (0.5 * t), 86, (4.9 * t), (- 176), (5 * t), (- 90), (1.0 * t), 90, t, 90, (1.8 * t), 90)
    self.move(tw, th, move)
def latch_clamp(self, move=None):
    """Draw a U-shaped clamp that holds a latch against the back wall."""
    t = self.thickness
    s = (0.1 * t)  # clearance folded into the slot widths below
    (tw, th) = ((4 * t), (4 * t))  # bounding width/height of the part
    if self.move(tw, th, move, True):  # first pass only reserves space
        return
    self.moveTo((0.5 * t))
    # Closed outline with two mounting tabs and a 45°-chamfered bridge.
    self.polyline((t - (0.5 * s)), 90, ((2.5 * t) + (0.5 * s)), (- 90), (t + s), (- 90), ((2.5 * t) + (0.5 * s)), 90, (t - (0.5 * s)), 90, t, (- 90), (0.5 * t), 90, (2 * t), 45, ((2 ** 0.5) * t), 45, (2 * t), 45, ((2 ** 0.5) * t), 45, (2 * t), 90, (0.5 * t), (- 90), t, 90)
    self.move(tw, th, move)
def latch_hole(self, posx):
    """Cut the opening for one latch at horizontal offset *posx*."""
    t = self.thickness
    s = (0.1 * t)  # clearance around the latch tongue
    self.moveTo(posx, (2 * t), 180)
    # Build a quarter of the symmetric outline, then mirror it twice to
    # obtain the full closed hole contour.
    path = [(1.5 * t), (- 90), t, (- 90), (t - (0.5 * s)), 90]
    path = ((path + [(2 * t)]) + list(reversed(path)))
    path = ((path[:(- 1)] + [(3 * t)]) + list(reversed(path[:(- 1)])))
    self.polyline(*path)
def panel_side(self, l, move=None):
    """Draw one side lip of length *l* that carries the front panel.

    Cuts slots for the two locking tabs and the center cross beam; the
    outer contour depends on whether the panel is glued or finger-jointed.
    """
    t = self.thickness
    s = (0.1 * t)  # NOTE(review): unused in this method — confirm intent
    (tw, th) = (l, (3 * t))
    if (not self.glued_panel):
        th += t  # extra height for the finger-joint edge
    if self.move(tw, th, move, True):  # first pass only reserves space
        return
    # Slots: two tab slots near the ends, one cross-beam slot in the middle.
    self.rectangularHole((3 * t), (1.5 * t), (3 * t), (1.05 * t))
    self.rectangularHole((l - (3 * t)), (1.5 * t), (3 * t), (1.05 * t))
    self.rectangularHole((l / 2), (1.5 * t), (2 * t), t)
    if self.glued_panel:
        # Symmetric outline: the same half repeated twice closes the shape.
        self.polyline(*([l, 90, t, 90, t, (- 90), t, (- 90), t, 90, t, 90] * 2))
    else:
        self.polyline(l, 90, (3 * t), 90)
        self.edges['f'](l)
        self.polyline(0, 90, (3 * t), 90)
    self.move(tw, th, move)
def panel_lock(self, l, move=None):
    """Draw the spring strip with tabs that locks a removable panel."""
    t = self.thickness
    l -= (4 * t)  # usable length between the mounting slots
    (tw, th) = (l, (2.5 * t))
    if self.move(tw, th, move, True):  # first pass only reserves space
        return
    # Half outline of one tab end; mirrored via reversed() below.
    # Tuples like (90, 0.5*t) are corners with a rounding radius.
    end = [((l / 2) - (3 * t)), (- 90), (1.5 * t), (90, (0.5 * t)), t, (90, (0.5 * t)), t, 90, (0.5 * t), (- 90), (0.5 * t), (- 90), 0, (90, (0.5 * t)), 0, 90]
    self.moveTo(((l / 2) - t), (2 * t), (- 90))
    self.polyline(*((([t, 90, (2 * t), 90, t, (- 90)] + end) + [l]) + list(reversed(end))))
    self.move(tw, th, move)
def panel_cross_beam(self, l, move=None):
    """Draw a cross beam of length *l* bracing the two panel side lips."""
    t = self.thickness
    (tw, th) = ((l + (2 * t)), (3 * t))
    if self.move(tw, th, move, True):  # first pass only reserves space
        return
    self.moveTo(t, 0)
    # Symmetric outline: the same half repeated twice closes the shape.
    self.polyline(*([l, 90, t, (- 90), t, 90, t, 90, t, (- 90), t, 90] * 2))
    self.move(tw, th, move)
def side(self, borders, bottom='s', move=None, label=''):
    """Draw one side wall following the outline produced by borders().

    borders: alternating lengths/angles list from self.borders().
    bottom: bottom edge type, either a key into self.edges or an edge.
    """
    t = self.thickness
    bottom = self.edges.get(bottom, bottom)  # accept a key or an edge object
    tw = (borders[0] + (2 * self.edges['f'].spacing()))
    th = ((borders[(- 2)] + bottom.spacing()) + self.edges['f'].spacing())
    if self.move(tw, th, move, True):  # first pass only reserves space
        return
    # Projections of the material thickness onto the slanted panel edge.
    d1 = (t * math.cos(math.radians(self.angle)))
    d2 = (t * math.sin(math.radians(self.angle)))
    self.moveTo(t, 0)
    bottom(borders[0])
    self.corner(90)
    self.edges['f'](((borders[2] + bottom.endwidth()) - d1))
    self.edge(d1)
    self.corner(borders[3])
    if self.removable_panel:
        # Slot for the front tab of the panel lock.
        self.rectangularHole((3 * t), (1.5 * t), (2.5 * t), (1.05 * t))
    if ((not self.removable_panel) and (not self.glued_panel)):
        self.edges['f'](borders[4])
    else:
        self.edge(borders[4])
    if self.removable_panel:
        # Slot for the rear tab of the panel lock (measured from the end).
        self.rectangularHole(((- 3) * t), (1.5 * t), (2.5 * t), (1.05 * t))
    if (len(borders) == 10):
        # 10-element outline: the box has a horizontal top piece.
        self.corner(borders[5])
        self.edge(d2)
        self.edges['f']((borders[6] - d2))
    self.corner(borders[(- 3)])
    if self.removable_backwall:
        # Opening for the back-wall latch plus a shortened back edge.
        self.rectangularHole(self.latchpos, (1.55 * t), (1.1 * t), (1.1 * t))
        self.edge((borders[(- 2)] - t))
        self.edges['f']((t + bottom.startwidth()))
    else:
        self.edges['f']((borders[(- 2)] + bottom.startwidth()))
    self.corner(borders[(- 1)])
    self.move(tw, th, move, label=label)
def render(self):
    """Generate all parts of the box, laid out left to right."""
    (x, y, h) = (self.x, self.y, self.h)
    t = self.thickness
    bottom = self.edges.get(self.bottom_edge)
    if self.outside:
        # Convert outside measurements to inside ones.
        self.x = x = self.adjustSize(x)
        self.y = y = self.adjustSize(y)
        self.h = h = self.adjustSize(h, bottom)
    # Thickness projections along the slanted panel.
    d1 = (t * math.cos(math.radians(self.angle)))
    d2 = (t * math.sin(math.radians(self.angle)))
    self.latchpos = latchpos = (6 * t)  # latch position, also used by side()
    borders = self.borders()
    self.side(borders, bottom, move='right', label='Left Side')
    self.side(borders, bottom, move='right', label='Right Side')
    self.rectangularWall(borders[0], x, 'ffff', move='right', label='Floor')
    self.rectangularWall((borders[2] - d1), x, ('F', 'e', 'F', bottom), ignore_widths=[7, 4], move='right', label='Front')
    # Panel edge types depend on how the panel is mounted.
    if self.glued_panel:
        self.rectangularWall(borders[4], x, 'EEEE', move='right', label='Panel')
    elif self.removable_panel:
        self.rectangularWall(borders[4], (x - (2 * t)), 'hEhE', move='right', label='Panel')
    else:
        self.rectangularWall(borders[4], x, 'FEFE', move='right', label='Panel')
    if (len(borders) == 10):
        # Only present when borders() produced a horizontal top span.
        self.rectangularWall((borders[6] - d2), x, 'FEFe', move='right', label='Top')
    if self.removable_backwall:
        # Loose back wall with latch holes and finger-hole rows for the
        # two guide strips, plus a fixed strip at the bottom.
        self.rectangularWall((borders[(- 2)] - (1.05 * t)), x, 'EeEe', callback=[(lambda : self.latch_hole(latchpos)), (lambda : self.fingerHolesAt((0.5 * t), 0, ((borders[(- 2)] - (4.05 * t)) - latchpos))), (lambda : self.latch_hole(((borders[(- 2)] - (1.2 * t)) - latchpos))), (lambda : self.fingerHolesAt((0.5 * t), ((3.05 * t) + latchpos), ((borders[(- 2)] - (4.05 * t)) - latchpos)))], move='right', label='Back Wall')
        self.rectangularWall((2 * t), ((borders[(- 2)] - (4.05 * t)) - latchpos), 'EeEf', move='right', label='Guide')
        self.rectangularWall((2 * t), ((borders[(- 2)] - (4.05 * t)) - latchpos), 'EeEf', move='right', label='Guide')
        self.rectangularWall(t, x, ('F', bottom, 'F', 'e'), ignore_widths=[0, 3], move='right', label='Bottom Back')
    else:
        self.rectangularWall(borders[(- 2)], x, ('F', bottom, 'F', 'e'), ignore_widths=[0, 3], move='right', label='Back Wall')
    if self.removable_panel:
        if self.glued_panel:
            # Cross beams are only needed for the glued-panel variant.
            self.panel_cross_beam((x - (2.05 * t)), 'rotated right')
            self.panel_cross_beam((x - (2.05 * t)), 'rotated right')
        self.panel_lock(borders[4], 'up')
        self.panel_lock(borders[4], 'up')
        self.panel_side(borders[4], 'up')
        self.panel_side(borders[4], 'up')
    if self.removable_backwall:
        self.latch(move='up')
        self.latch(move='up')
        # Two clamps per latch, laid out as a 4x2 grid.
        self.partsMatrix(4, 2, 'up', self.latch_clamp)
def sft_set():
    """Build the SFT train/test splits.

    Dumps the HH-RLHF and rm-static datasets to JSON, then keeps only the
    HH-RLHF samples that do not appear in the corresponding rm-static
    split, writing the results to sft_train.json / sft_test.json.
    """
    # Dump both source datasets, split by split.
    for dataset_name, dataset_cls in (('hhrlhf', AnthropicHHRLHFDataset), ('rmstatic', DahoasRMStaticDataset)):
        for split in ('train', 'test'):
            with open('dataset_%s_%s.json' % (dataset_name, split), 'w') as fp:
                dataset_cls.save(split, fp)
    # Samples already used by the reward-model dataset must be excluded.
    with open('dataset_rmstatic_train.json') as fp:
        rm_train = set(json.load(fp))
    with open('dataset_rmstatic_test.json') as fp:
        rm_test = set(json.load(fp))
    with open('dataset_hhrlhf_train.json') as fp:
        sft_train = [sample for sample in json.load(fp) if sample not in rm_train]
    with open('dataset_hhrlhf_test.json') as fp:
        sft_test = [sample for sample in json.load(fp) if sample not in rm_test]
    for out_path, samples in (('sft_train.json', sft_train), ('sft_test.json', sft_test)):
        with open(out_path, 'w') as fp:
            json.dump(samples, fp)
        print(len(samples))
        print(samples[(- 1)])
def test_dev_deployment_map_clear_on_remove(testproject, BrownieTester, config, accounts):
    """Removing the only dev deployment clears both artifacts and the map."""
    config.settings['dev_deployment_artifacts'] = True
    BrownieTester.deploy(True, {'from': accounts[0]})
    # Remove the deployment that was just created.
    BrownieTester.remove(BrownieTester[(- 1)])
    assert (len(get_dev_artifacts(testproject)) == 0)
    content = get_map(testproject)
    assert (not content)
class TradingMethod():
    """Parameter bundle describing a single trading strategy run.

    Exactly one of *weight_btc* or *weight* must be supplied. When
    *weight_btc* is given it is mirrored into *weight* and the unit is
    forced to 'BTC'.

    Raises:
        ValueError: if neither or both of weight_btc/weight are given.
    """

    def __init__(self, symbols, freq, lookback, strategy, variables, weight_btc=None, weight=None, weight_unit=None, filters=None, name='', execution_price='close'):
        self.symbols = symbols
        self.freq = freq
        self.lookback = lookback
        self.strategy = strategy
        self.variables = variables
        self.weight_btc = weight_btc
        self.weight = weight
        self.weight_unit = weight_unit
        self.filters = filters
        self.name = name
        self.execution_price = execution_price
        # ValueError is a subclass of Exception, so existing
        # `except Exception` callers keep working.
        if (self.weight_btc is None) and (self.weight is None):
            raise ValueError('weight_btc or weight is missing.')
        if (self.weight_btc is not None) and (self.weight is not None):
            raise ValueError('weight_btc and weight should not be assigned at the same time')
        # Bug fix: compare against None instead of truthiness so a
        # legitimate weight_btc of 0 is still mirrored into weight.
        if self.weight_btc is not None:
            self.weight = self.weight_btc
            self.weight_unit = 'BTC'
def test_migrating_simple_asset_1(migration_test_data):
    """AssetMigrationTool moves an asset under a new parent with the
    requested name/code and carries the selected version takes along."""
    data = migration_test_data
    # Recipe: rename/re-code asset2 under assets_task2 and migrate only
    # the 'Main' take (version 3) of its model task.
    migration_recipe = {data['asset2'].id: {'new_name': 'Asset 2A', 'new_code': 'asset2a', 'new_parent_id': data['assets_task2'].id}, data['asset2_model'].id: {'new_parent_id': data['asset2'].id, 'takes': {'Main': {'new_name': 'Main', 'versions': [data['asset2_model_main_v003'].version_number]}}}}
    assert (data['assets_task2'].children == [])
    amt = AssetMigrationTool()
    amt.migration_recipe = migration_recipe
    amt.migrate()
    # The target task now owns the migrated asset.
    assert (data['assets_task2'].children != [])
    new_asset = data['assets_task2'].children[0]
    assert isinstance(new_asset, Asset)
    assert (new_asset.name == 'Asset 2A')
    assert (new_asset.code == 'asset2a')
    assert (new_asset.type == data['asset2'].type)
    assert (new_asset.children != [])
    model_task = new_asset.children[0]
    assert isinstance(model_task, Task)
    assert (model_task.name == 'Model')
    # Only the single selected version should have been migrated.
    assert (model_task.versions != [])
    assert (len(model_task.versions) == 1)
    version = model_task.versions[0]
    assert (version.take_name == 'Main')
class PHYSink(Module):
    """Simulation-side PHY sink capturing 32-bit words from a stream.

    receive() and generator() are simulation coroutines (migen style):
    generator() drives sink.ready and latches incoming data into
    self.dword, while receive() waits until a word has been captured.
    """
    def __init__(self):
        self.sink = stream.Endpoint(phy_description(32))
        self.dword = PHYDword()
    def receive(self):
        # Wait (in simulation time) until generator() captures a word.
        self.dword.done = 0
        while (self.dword.done == 0):
            (yield)
    def generator(self):
        # Always assert ready; copy the data word whenever valid is high.
        while True:
            self.dword.done = 0
            (yield self.sink.ready.eq(1))
            if (yield self.sink.valid):
                self.dword.done = 1
                self.dword.dat = (yield self.sink.data)
            (yield)
class StartUpChecker():
    """Single-instance guard built on a QLocalSocket/QLocalServer pair.

    If another instance already listens on the well-known local server
    name, this process forwards its sys.argv to it and exits; otherwise
    it becomes the server so later instances can hand their arguments over.
    """
    def __init__(self, app):
        A = QObject()
        serverName = 'jamtoolsserver'
        self.ssocket = QLocalSocket(A)
        self.ssocket.connectToServer(serverName)
        if self.ssocket.waitForConnected(500):
            # A primary instance exists: forward our argv and quit.
            print('connected server')
            self.ssocket.write(str(sys.argv).encode('utf-8'))
            self.ssocket.waitForBytesWritten()
            print('another instance is existed! ')
            time.sleep(0.5)
            app.quit()
            sys.exit()
        else:
            if (self.ssocket.error() == 0):
                # error() == 0 here indicates a stale server socket left
                # behind by a crashed instance — remove it before listening.
                QLocalServer.removeServer(serverName)
                print(self.ssocket.errorString(), ',Remove it')
            print('no server')
            # Become the primary instance and accept hand-overs.
            self.localServer = QLocalServer()
            self.localServer.listen(serverName)
            self.localServer.newConnection.connect(self.connection_callback)
    def connection_callback(self):
        # A secondary instance connected; read its argv once data arrives.
        self.client = self.localServer.nextPendingConnection()
        self.client.readyRead.connect(self.read_)
        print('read server', self.client.readAll().data())
    def read_(self):
        data = self.client.readAll().data().decode('utf-8')
        # Crude parse of the repr() of sys.argv back into a list.
        # NOTE(review): this breaks on paths containing commas or spaces —
        # consider sending a structured payload (e.g. JSON) on both ends.
        data = data.replace('[', '').replace(']', '').replace("'", '').replace('"', '').replace(' ', '').split(',')
        print(data, len(data))
        if (len(data) >= 2):
            print(sys.argv)
            if (os.path.splitext(data[1].lower())[(- 1)] == '.jam'):
                # Launched with a .jam file: run it without showing the UI.
                jamtools.start_action_run(data[1])
                print('start')
                jamtools.hide()
            else:
                QSettings('Fandes', 'jamtools').setValue('S_SIMPLE_MODE', False)
                # Toggle stay-on-top around show() to reliably raise the window.
                jamtools.setWindowFlag(Qt.WindowStaysOnTopHint, True)
                jamtools.show()
                jamtools.setWindowFlag(Qt.WindowStaysOnTopHint, False)
                jamtools.show()
                jamtools.activateWindow()
def test_custom_virtual_machines():
    """vm_configuration entries select the VM class per block number."""
    if not is_supported_pyevm_version_available():
        pytest.skip('PyEVM is not available')
    backend = PyEVMBackend(vm_configuration=((0, FrontierVM), (3, ParisVM)))
    vm_before_fork = backend.chain.get_vm_class_for_block_number(2)
    vm_at_fork = backend.chain.get_vm_class_for_block_number(3)
    assert FrontierVM.__name__ == 'FrontierVM'
    assert vm_before_fork.__name__ == FrontierVM.__name__
    assert ParisVM.__name__ == 'ParisVM'
    assert vm_at_fork.__name__ == ParisVM.__name__
    # The configured backend must also be accepted by the tester facade.
    EthereumTester(backend=backend)
def test_proj_monitors():
    """A Simulation accepts field-projection monitors alongside near-field
    surface monitors (construction/validation only; nothing is run)."""
    dipole_center = [0, 0, 0]
    domain_size = (5 * WAVELENGTH)
    buffer_mon = (1 * WAVELENGTH)
    grid_spec = td.GridSpec.auto(min_steps_per_wvl=20)
    boundary_spec = td.BoundarySpec.all_sides(boundary=td.PML())
    sim_size = (domain_size, domain_size, domain_size)
    fwidth = (F0 / 10.0)
    offset = 4.0
    gaussian = td.GaussianPulse(freq0=F0, fwidth=fwidth, offset=offset)
    source = td.PointDipole(center=dipole_center, source_time=gaussian, polarization='Ez')
    run_time = (40 / fwidth)
    freqs = [(0.9 * F0), F0, (1.1 * F0)]  # below/at/above the carrier
    mon_size = ([buffer_mon] * 3)
    proj_monitors = make_proj_monitors(dipole_center, mon_size, freqs)
    near_monitors = td.FieldMonitor.surfaces(center=dipole_center, size=mon_size, freqs=freqs, name='near')
    all_monitors = (near_monitors + list(proj_monitors))
    # Validation happens in the constructor; the instance is discarded.
    _ = td.Simulation(size=sim_size, grid_spec=grid_spec, structures=[], sources=[source], monitors=all_monitors, run_time=run_time, boundary_spec=boundary_spec, medium=MEDIUM)
def get_detected_external_identifier_type_and_value_for_text(text: str) -> Tuple[Optional[str], str]:
    """Detect the external identifier type contained in *text*.

    Whitespace is stripped before matching. Patterns are tried in
    priority order (DOI, PMCID, arXiv, PMID, PII); the first hit wins.
    Returns (identifier_type, value); the type is None when nothing
    matched, in which case the stripped text is returned unchanged.
    """
    value = re.sub('\\s', '', text)
    # (type, matcher, pattern, value-extractor) in priority order.
    # PMID deliberately uses re.match (anchored) unlike the others.
    detectors = (
        (SemanticExternalIdentifierTypes.DOI, re.search, DOI_PATTERN, lambda m: m.group(1)),
        (SemanticExternalIdentifierTypes.PMCID, re.search, PMCID_PATTERN, lambda m: 'PMC' + m.group(1)),
        (SemanticExternalIdentifierTypes.ARXIV, re.search, ARXIV_PATTERN, lambda m: m.group(1) or m.group(2)),
        (SemanticExternalIdentifierTypes.PMID, re.match, PMID_PATTERN, lambda m: m.group(1)),
        (SemanticExternalIdentifierTypes.PII, re.search, PII_PATTERN, lambda m: m.group(1)),
    )
    for identifier_type, find, pattern, extract in detectors:
        m = find(pattern, value)
        if m:
            return (identifier_type, extract(m))
    return (None, value)
class _InteractiveConsole(code.InteractiveInterpreter):
    """Interactive interpreter backing the in-browser debugger console.

    stdout is captured through ThreadedStream so each runsource() call
    can return the prompt, the echoed source and any produced output as
    one string for the frontend to display.
    """
    def __init__(self, globals, locals):
        code.InteractiveInterpreter.__init__(self, locals)
        self.globals = dict(globals)
        self.globals['dump'] = dump
        self.globals['help'] = helper
        self.globals['__loader__'] = self.loader = _ConsoleLoader()
        self.more = False  # True while a multi-line statement is still open
        self.buffer = []   # pending source lines of that open statement
        _wrap_compiler(self)
    def runsource(self, source):
        # Feed one console line; returns the text to echo back to the UI.
        source = (source.rstrip() + '\n')
        ThreadedStream.push()
        prompt = ((self.more and '... ') or '>>> ')
        try:
            source_to_eval = ''.join((self.buffer + [source]))
            if code.InteractiveInterpreter.runsource(self, source_to_eval, '<debugger>', 'single'):
                # Statement incomplete: keep buffering until it closes.
                self.more = True
                self.buffer.append(source)
            else:
                self.more = False
                del self.buffer[:]
        finally:
            # Always collect whatever the executed code printed.
            output = ThreadedStream.fetch()
        return ((prompt + source) + output)
    def runcode(self, code):
        # `code` is a compiled code object here, so eval() executes it.
        try:
            eval(code, self.globals, self.locals)
        except Exception:
            self.showtraceback()
    def showtraceback(self):
        from werkzeug.debug.tbtools import get_current_traceback
        tb = get_current_traceback(skip=1)
        sys.stdout._write(tb.render_summary())
    def showsyntaxerror(self, filename=None):
        from werkzeug.debug.tbtools import get_current_traceback
        tb = get_current_traceback(skip=4)  # skip interpreter-internal frames
        sys.stdout._write(tb.render_summary())
    def write(self, data):
        sys.stdout.write(data)
class MiningHeader(rlp.Serializable, MiningHeaderAPI):
    """Block header without the seal fields (no mix_hash/nonce entries
    in the sedes list). The field order defines the RLP wire encoding."""
    fields = [('parent_hash', hash32), ('uncles_hash', hash32), ('coinbase', address), ('state_root', trie_root), ('transaction_root', trie_root), ('receipt_root', trie_root), ('bloom', uint256), ('difficulty', big_endian_int), ('block_number', big_endian_int), ('gas_limit', big_endian_int), ('gas_used', big_endian_int), ('timestamp', big_endian_int), ('extra_data', binary)]
def _parse_commandline():
parser = argparse.ArgumentParser()
parser.add_argument('wallet_address', help='main wallet address')
parser.add_argument('task_wallet_address', help='wallet address that will perform transactions')
parser.add_argument('top_up_amount', type=int, nargs='?', default=, help='top-up amount from wallet address to task_wallet address')
parser.add_argument('minimum_balance', type=int, nargs='?', default=, help='minimum task_wallet address balance that will trigger top-up')
parser.add_argument('interval_time', type=int, nargs='?', default=5, help="interval time in seconds to query task_wallet's balance")
return parser.parse_args() |
class Date(fields.Str):
    """String field that must parse as a date in ISO (YYYY-MM-DD) or
    US (MM/DD/YYYY) format; otherwise a 422 ApiError is raised."""

    _ACCEPTED_FORMATS = ('%Y-%m-%d', '%m/%d/%Y')

    def _validate(self, value):
        stripped = value.strip()
        super()._validate(stripped)
        for fmt in self._ACCEPTED_FORMATS:
            try:
                datetime.datetime.strptime(stripped, fmt)
            except (TypeError, ValueError):
                continue
            return
        raise exceptions.ApiError(exceptions.DATE_ERROR, status_code=422)
# Bug fix: the decorator line began with a bare ".parametrize(...)" — the
# "@pytest.mark" prefix was evidently lost, leaving a syntax error.
@pytest.mark.parametrize('input_features', [None, variables_arr, np.array(variables_arr), variables_str, np.array(variables_str), variables_user])
def test_with_pipeline_and_array(df_vartypes, input_features):
    """get_feature_names_out works for a Pipeline fitted on a bare ndarray."""
    pipe = Pipeline([('transformer', MockTransformer())])
    pipe.fit(df_vartypes.to_numpy())
    if (input_features is None):
        # No explicit input features: fall back to the generated names.
        assert (pipe.get_feature_names_out(input_features=input_features) == variables_arr)
    else:
        assert (pipe.get_feature_names_out(input_features=input_features) == list(input_features))
class ContainerRunningChecker(BaseHealthChecker):
    """Health checker reporting whether a model instance's container runs."""

    def check(self, model_instance: dm.ModelInstance) -> Status:
        """Query the cloud client and wrap its answer in a Status."""
        probe = self.cloud_client.is_instance_running(model_instance)
        return Status(model_instance, probe.output, reason=probe.reason)
class ZipkinDataBuilder():
    """Helpers that build Zipkin thrift objects (spans, annotations,
    endpoints).

    NOTE(review): no method takes self/cls and they are invoked through
    the class (see ZipkinDataBuilder._ipv4_to_int below) — these look
    like @staticmethods whose decorators were stripped; confirm against
    the original source. The `unicode` check also implies Python 2.
    """
    def build_span(name, trace_id, span_id, parent_id, annotations, bannotations):
        return ttypes.Span(name=name, trace_id=trace_id, id=span_id, parent_id=parent_id, annotations=annotations, binary_annotations=bannotations)
    def build_annotation(value, endpoint=None):
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        # Timestamp is expressed in microseconds since the epoch.
        return ttypes.Annotation(((time.time() * 1000) * 1000), str(value), endpoint)
    def build_binary_annotation(key, value, endpoint=None):
        annotation_type = ttypes.AnnotationType.STRING
        return ttypes.BinaryAnnotation(key, value, annotation_type, endpoint)
    def build_endpoint(ipv4=None, port=None, service_name=None):
        if (ipv4 is not None):
            ipv4 = ZipkinDataBuilder._ipv4_to_int(ipv4)
        if (service_name is None):
            # Default the service name to the running script's basename.
            service_name = ZipkinDataBuilder._get_script_name()
        return ttypes.Endpoint(ipv4=ipv4, port=port, service_name=service_name)
    def _ipv4_to_int(ipv4):
        # Zipkin encodes IPv4 addresses as a signed 32-bit int ('!i').
        return struct.unpack('!i', socket.inet_aton(ipv4))[0]
    def _get_script_name():
        return os.path.basename(sys.argv[0])
class table_features_failed_error_msg(error_msg):
    """OpenFlow OFPET_TABLE_FEATURES_FAILED error message (wire version 5).

    NOTE(review): this is loxi-style generated code; unpack() is normally
    a @staticmethod in the generator output — confirm the decorator was
    not lost. `data` being handled as '' / str implies Python 2 bytes.
    """
    version = 5    # OpenFlow wire protocol version
    type = 1       # OFPT_ERROR
    err_type = 13  # OFPET_TABLE_FEATURES_FAILED
    def __init__(self, xid=None, code=None, data=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (code != None):
            self.code = code
        else:
            self.code = 0
        if (data != None):
            self.data = data
        else:
            self.data = ''
        return
    def pack(self):
        # Serialize header + payload; the 16-bit length at index 2 is a
        # placeholder patched once the total size is known.
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.err_type))
        packed.append(struct.pack('!H', self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)
    def unpack(reader):
        # Parse one message from `reader`, asserting the fixed fields.
        obj = table_features_failed_error_msg()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 1)
        _length = reader.read('!H')[0]
        # Restrict further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _err_type = reader.read('!H')[0]
        assert (_err_type == 13)
        obj.code = reader.read('!H')[0]
        obj.data = str(reader.read_all())
        return obj
    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.code != other.code):
            return False
        if (self.data != other.data):
            return False
        return True
    def pretty_print(self, q):
        # Render a human-readable dump via the pretty-printer `q`.
        q.text('table_features_failed_error_msg {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('code = ')
                value_name_map = {0: 'OFPTFFC_BAD_TABLE', 1: 'OFPTFFC_BAD_METADATA', 5: 'OFPTFFC_EPERM'}
                if (self.code in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.code], self.code)))
                else:
                    q.text(('%#x' % self.code))
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            q.breakable()
        q.text('}')
def main():
    """Compute multi-head attention two ways and print the output shapes.

    First with explicit matmul/transpose, then with the equivalent
    einsum formulation; both produce (N, L, H) tensors.
    """
    N = 10     # batch size
    L = 30     # sequence length
    heads = 8  # number of attention heads
    H = 64     # total model dimension (split across heads)
    q = torch.rand(size=(N, L, H))
    k = torch.rand(size=(N, L, H))
    v = torch.rand(size=(N, L, H))
    # Split the model dimension into heads: (N, L, heads, H // heads).
    q = q.view(N, L, heads, (H // heads))
    k = k.view(N, L, heads, (H // heads))
    v = v.view(N, L, heads, (H // heads))
    # Move heads before the sequence axis: (N, heads, L, H // heads).
    q = q.transpose(1, 2)
    k = k.transpose(1, 2)
    v = v.transpose(1, 2)
    # Bug fix: the two products below were missing the '@' operator
    # ("q k.transpose(...)" / "attentions v" are syntax errors).
    sims = q @ k.transpose(2, 3)
    attentions = torch.softmax(sims, dim=(- 1))
    alignments = attentions @ v
    concats = alignments.transpose(1, 2).contiguous().view(N, L, H)
    print(concats.shape)
    # Same computation expressed with einsum.
    sims = torch.einsum('...qh,...kh->...qk', q, k)
    attentions = torch.softmax(sims, dim=(- 1))
    alignments = torch.einsum('...qk,...kh->...qh', attentions, v)
    concats = alignments.transpose(1, 2).contiguous().view(N, L, H)
    print(concats.shape)
# Bug fix: this line appeared as a bare statement "_component_ids.register(AgentConfig)"
# directly above the def — the '@' of the dispatch-register decorator was
# evidently lost; it is restored here.
@_component_ids.register(AgentConfig)
def _(arg: AgentConfig, replacements: Dict[ComponentType, Dict[PublicId, PublicId]]) -> None:
    """Apply public-id *replacements* to every reference inside *arg*, in place."""
    # Top-level component id lists (protocols, connections, contracts, skills).
    _replace_component_id(arg, {ComponentType.PROTOCOL, ComponentType.CONNECTION, ComponentType.CONTRACT, ComponentType.SKILL}, replacements)
    protocol_replacements = replacements.get(ComponentType.PROTOCOL, {})
    connection_replacements = replacements.get(ComponentType.CONNECTION, {})
    # Rewrite both the protocol keys and connection values of default_routing.
    for (protocol_id, connection_id) in list(arg.default_routing.items()):
        new_protocol_id = protocol_replacements.get(protocol_id, protocol_id)
        old_value = arg.default_routing.pop(protocol_id)
        arg.default_routing[new_protocol_id] = old_value
        protocol_id = new_protocol_id
        new_connection_id = connection_replacements.get(connection_id, connection_id)
        arg.default_routing[protocol_id] = new_connection_id
    if (arg.default_connection is not None):
        default_connection_public_id = arg.default_connection
        new_default_connection_public_id = replacements.get(ComponentType.CONNECTION, {}).get(default_connection_public_id, default_connection_public_id)
        arg.default_connection = new_default_connection_public_id
    # Re-key any per-component configuration overrides.
    for component_id in set(arg.component_configurations.keys()):
        replacements_by_type = replacements.get(component_id.component_type, {})
        if (component_id.public_id in replacements_by_type):
            new_component_id = ComponentId(component_id.component_type, replacements_by_type[component_id.public_id])
            old_config = arg.component_configurations.pop(component_id)
            arg.component_configurations[new_component_id] = old_config
def upgrade():
    """Alembic upgrade: rebuild the connectiontype enum with new members,
    add the awaiting-consent-email columns/values."""
    # Recreate the enum type so new members can be inserted mid-list.
    op.execute('alter type connectiontype rename to connectiontype_old')
    # Bug fix: the enum literal contained a stray \"' '\" token
    # (\"'mysql', ' 'snowflake'\"), which is invalid SQL — removed here.
    op.execute("create type connectiontype as enum('postgres', 'mongodb', 'mysql', 'snowflake', 'redshift', 'mssql', 'mariadb', 'bigquery', 'saas', 'manual', 'email', 'manual_webhook', 'timescale', 'fides', 'sovrn')")
    op.execute('alter table connectionconfig alter column connection_type type connectiontype using connection_type::text::connectiontype')
    op.execute('drop type connectiontype_old')
    op.add_column('privacyrequest', sa.Column('awaiting_consent_email_send_at', sa.DateTime(timezone=True), nullable=True))
    op.execute("ALTER TYPE providedidentitytype ADD VALUE 'ljt_readerID'")
    op.execute("alter type privacyrequeststatus add value 'awaiting_consent_email_send'")
class Houdini(DCCBase):
    """Stalker/anima DCC wrapper around SideFX Houdini.

    Handles saving/opening Version files, environment variables ($JOB,
    $HIP, $HIPNAME), shot-node bookkeeping and render output paths.
    """
    name = 'Houdini'
    # License category -> hip file extension. NOTE(review): the meaning of
    # the literal key 0 (duplicating the Indie extension) is not evident
    # from this code — confirm which licenseCategoryType it stands for.
    extensions = {0: '.hiplc', hou.licenseCategoryType.Commercial: '.hip', hou.licenseCategoryType.Apprentice: '.hipnc', hou.licenseCategoryType.Indie: '.hiplc'}
    def __init__(self, name='', version=None):
        super(Houdini, self).__init__(name, version)
        self.set_environment_variables()
        # Append the running Houdini major.minor version to the DCC name.
        self.name = ('%s%s.%s' % (self.name, hou.applicationVersion()[0], hou.applicationVersion()[1]))
    def save_as(self, version, run_pre_publishers=True):
        """Save the current scene as *version*, updating paths, env vars,
        the shot node and render filenames along the way."""
        if (not version):
            return
        from stalker import Version
        assert isinstance(version, Version)
        current_version = self.get_current_version()
        version.update_paths()
        # Extension depends on the license (e.g. .hipnc for Apprentice).
        version.extension = self.extensions[hou.licenseCategory()]
        version.created_with = self.name
        try:
            os.makedirs(os.path.dirname(version.absolute_full_path))
        except OSError:
            # Directory already exists.
            pass
        # Always save from the root take, not whatever take is current.
        if (hou.takes.currentTake() != hou.takes.rootTake()):
            root_take = hou.takes.rootTake()
            hou.takes.setCurrentTake(root_take)
        self.set_environment_variables(version)
        self.update_shot_node(version)
        self.set_render_filename(version)
        # Pull fps/frame range from the Shot if this version belongs to one.
        from stalker import Shot
        shot = version.task.parent
        if (version and isinstance(shot, Shot)):
            self.set_fps(shot.fps)
            self.set_frame_range(shot.cut_in, shot.cut_out)
        else:
            self.set_fps(version.task.project.fps)
        self.update_flipbook_settings()
        hou.hipFile.save(file_name=str(version.absolute_full_path))
        # Re-apply env vars after the save changed $HIPNAME etc.
        self.set_environment_variables(version)
        self.append_to_recent_files(version.absolute_full_path)
        if current_version:
            version.parent = current_version
        from stalker.db.session import DBSession
        DBSession.commit()
        self.create_local_copy(version)
        return True
    def open(self, version, force=False, representation=None, reference_depth=0, skip_update_check=False):
        """Open *version*; raises RuntimeError on unsaved changes unless
        *force* is given. Returns an empty reference resolution."""
        if (not version):
            return
        if (hou.hipFile.hasUnsavedChanges() and (not force)):
            raise RuntimeError
        try:
            hou.hipFile.load(file_name=str(version.absolute_full_path), suppress_save_prompt=True)
        except hou.LoadWarning:
            # Non-fatal load warnings are ignored.
            pass
        self.set_environment_variables(version)
        self.append_to_recent_files(version.absolute_full_path)
        self.update_flipbook_settings()
        from anima.dcc import empty_reference_resolution
        return empty_reference_resolution()
    def import_(self, version, use_namespace=True):
        """Merge *version*'s file into the current scene."""
        hou.hipFile.merge(str(version.absolute_full_path))
        return True
    def get_current_version(self):
        """Return the Version matching the open hip file, or None."""
        version = None
        full_path = hou.hipFile.name()
        if (full_path != 'untitled.hip'):
            version = self.get_version_from_full_path(full_path)
        return version
    def get_last_version(self):
        """Return the current version, falling back to the recent files."""
        version = self.get_current_version()
        if (version is None):
            version = self.get_version_from_recent_files()
        return version
    def set_environment_variables(self, version=None):
        """Export repository env vars and, when *version* is given, the
        Houdini $JOB/$HIP/$HIPNAME variables derived from its paths."""
        from stalker import Repository
        for repo in Repository.query.all():
            env_var_name = repo.env_var
            value = repo.path
            self.set_environment_variable(env_var_name, value)
        if (not version):
            return
        logger.debug(('version: %s' % version))
        logger.debug(('version.path: %s' % version.absolute_path))
        logger.debug(('version.filename: %s' % version.filename))
        logger.debug(('version.full_path: %s' % version.absolute_full_path))
        logger.debug(('version.full_path (calculated): %s' % os.path.join(version.absolute_full_path, version.filename).replace('\\', '/')))
        job = str(version.absolute_path)
        hip = job
        hip_name = os.path.splitext(os.path.basename(str(version.absolute_full_path)))[0]
        logger.debug(('job : %s' % job))
        logger.debug(('hip : %s' % hip))
        logger.debug(('hipName : %s' % hip_name))
        self.set_environment_variable('JOB', job)
        self.set_environment_variable('HIP', hip)
        self.set_environment_variable('HIPNAME', hip_name)
    def set_environment_variable(cls, var, value):
        """Set *var* both in os.environ and inside Houdini (hscript).

        NOTE(review): takes `cls` but has no decorator — looks like a
        @classmethod whose decorator was stripped; confirm upstream.
        """
        logger.debug('setting {}={}'.format(var, value))
        os.environ[var] = value
        try:
            hou.allowEnvironmentVariableToOverwriteVariable(var, True)
        except AttributeError:
            # Older Houdini versions use the alternative API name.
            hou.allowEnvironmentToOverwriteVariable(var, True)
        hscript_command = ("set -g %s = '%s'" % (var, value))
        hou.hscript(str(hscript_command))
    def update_flipbook_settings(cls):
        """Point the flipbook output to $HIP/Outputs/playblast.

        NOTE(review): takes `cls` but has no decorator — looks like a
        @classmethod whose decorator was stripped; confirm upstream.
        """
        from anima.dcc.houdini import auxiliary
        scene_viewer = auxiliary.get_scene_viewer()
        if (not scene_viewer):
            return
        fs = scene_viewer.flipbookSettings()
        flipbook_path = '$HIP/Outputs/playblast'
        fs.output(('%s/$HIPNAME.$F4.jpg' % flipbook_path))
        import os
        try:
            os.makedirs(os.path.expandvars(flipbook_path))
        except OSError:
            # Directory already exists.
            pass
    def get_recent_file_list(self):
        """Return Houdini's recent HIP files."""
        file_history = FileHistory()
        return file_history.get_recent_files('HIP')
    def get_frame_range(self):
        """Parse the playback range from hscript `tset` output and return
        it as (start_frame, end_frame) in frames."""
        time_info = hou.hscript('tset')[0].split('\n')
        pattern = '[-0-9\\.]+'
        import re
        start_frame = int(hou.timeToFrame(float(re.search(pattern, time_info[2]).group(0))))
        duration = int(re.search(pattern, time_info[0]).group(0))
        end_frame = ((start_frame + duration) - 1)
        return (start_frame, end_frame)
    def set_frame_range(self, start_frame=1, end_frame=100, adjust_frame_range=False):
        """Set the playback range, clamping the current frame into it."""
        current_frame = hou.frame()
        if (current_frame < start_frame):
            hou.setFrame(start_frame)
        elif (current_frame > end_frame):
            hou.setFrame(end_frame)
        # tset works in seconds, hence the /$FPS conversions.
        hou.hscript((((('tset `(' + str(start_frame)) + '-1)/$FPS` `') + str(end_frame)) + '/$FPS`'))
    def get_output_nodes(cls):
        """Return /out nodes, excluding wedge and fetch drivers.

        NOTE(review): takes `cls` but has no decorator — looks like a
        @classmethod whose decorator was stripped; confirm upstream.
        """
        rop_context = hou.node('/out')
        out_nodes = rop_context.children()
        exclude_node_types = [hou.nodeType(hou.nodeTypeCategories()['Driver'], 'wedge'), hou.nodeType(hou.nodeTypeCategories()['Driver'], 'fetch')]
        new_out_nodes = [node for node in out_nodes if (node.type() not in exclude_node_types)]
        return new_out_nodes
    def get_fps(self):
        """Return the scene fps as an int."""
        return int(hou.fps())
    def get_shot_node(self):
        """Return the qLib shot node in /obj, creating one if missing.
        Returns None when creation fails (e.g. qLib not installed)."""
        ql_shot_node_type = 'qLib::shot_ql::1'
        obj_context = hou.node('/obj')
        for child in obj_context.children():
            if (child.type().name() == ql_shot_node_type):
                return child
        try:
            shot_node = obj_context.createNode(ql_shot_node_type)
        except hou.OperationFailed:
            return
        else:
            return shot_node
    def update_shot_node(self, version):
        """Fill the shot node parms (project, shot, range, resolution)
        from *version*'s task/project/shot data."""
        task = version.task
        project = task.project
        shot_node = self.get_shot_node()
        if (not shot_node):
            return
        shot_node.parm('proj').set(project.name)
        shot_node.parm('projs').set(project.code)
        from stalker import Shot
        image_format = project.image_format
        if (task.parent and isinstance(task.parent, Shot)):
            shot = task.parent
            shot_node.parm('frangex').set(shot.cut_in)
            shot_node.parm('frangey').set(shot.cut_out)
            # Shot-level image format overrides the project's.
            image_format = shot.image_format
        shot_node.parm('shot').set(version.nice_name)
        shot_node.setName('shotData')
        # Resolution parms may be locked; ignore permission errors.
        try:
            shot_node.parm('cam_resx').set(image_format.width)
        except hou.PermissionError:
            pass
        try:
            shot_node.parm('cam_resy').set(image_format.height)
        except hou.PermissionError:
            pass
    def set_render_filename(self, version):
        """Set render output paths on all Mantra/Redshift ROPs from
        *version*, and pre-create the output directories."""
        # Work on the root take; restore the user's take afterwards.
        current_take = hou.takes.currentTake()
        hou.takes.setCurrentTake(hou.takes.rootTake())
        import os
        output_filename = '$HIP/Outputs/renders/{}/v{:03d}/$OS/{}_$OS.$F4.exr'.format(version.take_name, version.version_number, os.path.splitext(version.filename)[0])
        shot_node = self.get_shot_node()
        output_nodes = self.get_output_nodes()
        for output_node in output_nodes:
            if (output_node.type().name() == 'ifd'):
                # Mantra ROP.
                try:
                    output_node.setParms({'vm_picture': str(output_filename)})
                except hou.PermissionError:
                    pass
                output_node.setParms({'vm_image_exr_compression': 'zips'})
                output_file_full_path = output_node.evalParm('vm_picture')
                output_file_path = os.path.dirname(output_file_full_path)
                # Expand nested $VARS until the path is literal.
                flat_output_file_path = output_file_path
                while ('$' in flat_output_file_path):
                    flat_output_file_path = os.path.expandvars(flat_output_file_path)
            elif (output_node.type().name() == 'Redshift_ROP'):
                # Redshift ROP.
                try:
                    output_node.setParms({'RS_outputFileNamePrefix': str(output_filename)})
                except hou.PermissionError:
                    pass
                try:
                    output_node.parm('RS_overrideCameraRes').set(True)
                except hou.PermissionError:
                    pass
                try:
                    output_node.parm('RS_overrideResScale').set(7)
                except hou.PermissionError:
                    pass
                if shot_node:
                    # Drive camera and resolution from the shot node.
                    try:
                        output_node.parm('RS_renderCamera').setExpression(('chsop("%s/shotcam")' % shot_node.path()))
                    except hou.PermissionError:
                        pass
                    try:
                        output_node.parm('RS_overrideRes1').setExpression(('ch("%s/cam_resx")' % shot_node.path()))
                        output_node.parm('RS_overrideRes2').setExpression(('ch("%s/cam_resy")' % shot_node.path()))
                    except hou.PermissionError:
                        pass
                else:
                    # No shot node: fall back to shot/project image format.
                    from stalker import Shot
                    shot = version.task.parent
                    project = version.task.project
                    imf = project.image_format
                    if (version and isinstance(shot, Shot)):
                        imf = shot.image_format
                    try:
                        output_node.parm('RS_overrideRes1').set(imf.width)
                        output_node.parm('RS_overrideRes2').set(imf.height)
                    except hou.PermissionError:
                        pass
                # Name AOV outputs after the main prefix + AOV suffix.
                aov_count = output_node.evalParm('RS_aov')
                if aov_count:
                    for i in range(aov_count):
                        aov_index = (i + 1)
                        aov_custom_prefix_parm = ('RS_aovCustomPrefix_%s' % aov_index)
                        aov_custom_suffix_parm = ('RS_aovSuffix_%s' % aov_index)
                        try:
                            output_node.parm(aov_custom_prefix_parm).set(('`strreplace(chs("RS_outputFileNamePrefix"), ".$F4.exr", "_" + chs("%s") + ".$F4.exr")`' % aov_custom_suffix_parm))
                        except hou.PermissionError:
                            pass
                output_file_full_path = output_node.evalParm('RS_outputFileNamePrefix')
                output_file_path = os.path.dirname(output_file_full_path)
                flat_output_file_path = output_file_path
                while ('$' in flat_output_file_path):
                    flat_output_file_path = os.path.expandvars(flat_output_file_path)
                output_node.parm('RS_outputSkipRendered').set(1)
            # NOTE(review): flat_output_file_path is only assigned for the
            # ifd/Redshift branches — other node types would raise here if
            # not bypassed; confirm this matches the intended node set.
            if (not output_node.isBypassed()):
                try:
                    os.makedirs(flat_output_file_path)
                except OSError:
                    pass
        hou.takes.setCurrentTake(current_take)
    def set_fps(self, fps=25):
        """Set the scene fps, preserving the current frame range."""
        if (fps <= 0):
            return
        (start_frame, end_frame) = self.get_frame_range()
        hou.setFps(fps)
        # Changing fps shifts the time range; restore it in frames.
        self.set_frame_range(start_frame, end_frame)
    def replace_paths(self):
        # Intentionally a no-op for Houdini.
        pass
    def get_aovs(cls, output_node):
        """Return the AOV list for *output_node*.

        NOTE(review): currently always returns an empty list; takes `cls`
        without a decorator — confirm a stripped @classmethod upstream.
        """
        aovs = []
        return aovs
class OptionPlotoptionsHistogramSonificationTracksMappingTremolo(Options):
    """Generated option wrapper for
    plotOptions.histogram.sonification.tracks.mapping.tremolo.

    NOTE(review): comparable wrappers expose these accessors as
    @property getters — the decorators appear stripped here; confirm.
    """
    def depth(self) -> 'OptionPlotoptionsHistogramSonificationTracksMappingTremoloDepth':
        # Lazily create/return the nested `depth` option group.
        return self._config_sub_data('depth', OptionPlotoptionsHistogramSonificationTracksMappingTremoloDepth)
    def speed(self) -> 'OptionPlotoptionsHistogramSonificationTracksMappingTremoloSpeed':
        # Lazily create/return the nested `speed` option group.
        return self._config_sub_data('speed', OptionPlotoptionsHistogramSonificationTracksMappingTremoloSpeed)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.