code
stringlengths
281
23.7M
def test_broken_plugin(testbot):
    """Packaging a deliberately broken plugin must surface the whole failure chain in chat."""
    borken_plugin_dir = path.join(path.dirname(path.realpath(__file__)), 'borken_plugin')
    try:
        tempd = mkdtemp()
        tarball = os.path.join(tempd, 'borken.tar.gz')
        with tarfile.open(tarball, 'w:gz') as tar:
            tar.add(borken_plugin_dir, arcname='borken')
        # Install the archive, then drain the expected error messages in order.
        assert 'Installing' in testbot.exec_command('!repos install file://' + tarball, timeout=120)
        expected_fragments = (
            'import borken # fails',
            'as it did not load correctly.',
            "Error: Broken failed to activate: 'NoneType' object has no attribute 'is_activated'",
            'Plugins reloaded.',
        )
        for fragment in expected_fragments:
            assert fragment in testbot.pop_message()
    finally:
        rmtree(tempd)
def get_version_from_git():
    """Derive a Version from ``git describe``, returning None when git is unusable.

    Tries ``--first-parent`` first and falls back to plain describe; appends a
    'dirty' label when the worktree has uncommitted changes, or 'confused' when
    the dirtiness check itself cannot run.
    """
    import subprocess
    proc = None
    for extra_opts in (['--first-parent'], []):
        try:
            proc = subprocess.Popen(['git', 'describe', '--long', '--always'] + extra_opts,
                                    cwd=package_root,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
        except OSError:
            return
        if proc.wait() == 0:
            break
    else:
        # Neither invocation succeeded.
        return
    parts = proc.communicate()[0].decode().strip('v').rstrip('\n').rsplit('-', 2)
    try:
        release, dev, git = parts
    except ValueError:
        # Shallow clone / no tags: describe gave fewer fields.
        git = 'g{}'.format(*parts)
        release = 'unknown'
        dev = None
    labels = []
    if dev == '0':
        dev = None
    else:
        labels.append(git)
    try:
        dirty_check = subprocess.Popen(['git', 'diff', '--quiet'], cwd=package_root)
    except OSError:
        labels.append('confused')
    else:
        if dirty_check.wait() == 1:
            labels.append('dirty')
    return Version(release, dev, labels)
class FlinkRowWriter():
    """Serialises rows to CSV and streams them to Flink via a Java-side file handle."""

    def __init__(self, context: Context):
        self.java_file = JavaFile(context.from_java(), context.to_java())

    def write(self, row: Row):
        """Write one row as a little-endian int32 length prefix followed by the CSV payload."""
        payload = row_to_csv(row)
        size = len(payload)
        # Length prefix first, then the data; either write failing is fatal.
        for chunk, nbytes in ((struct.pack('<i', size), 4), (payload, size)):
            if not self.java_file.write(chunk, nbytes):
                raise IOError('Fail to write to Flink')
def main():
    """Train a ClassificationTree on the iris dataset, report accuracy, and plot predictions."""
    print('-- Classification Tree --')
    data = datasets.load_iris()
    features, labels = data.data, data.target
    X_train, X_test, y_train, y_test = train_test_split(features, labels, test_size=0.4)
    model = ClassificationTree()
    model.fit(X_train, y_train)
    y_pred = model.predict(X_test)
    accuracy = accuracy_score(y_test, y_pred)
    print('Accuracy:', accuracy)
    Plot().plot_in_2d(X_test, y_pred, title='Decision Tree',
                      accuracy=accuracy, legend_labels=data.target_names)
# NOTE(review): the decorator heads were missing in the source under review
# (bare tuples `('delete', cls=FandoghCommand)` are not valid Python);
# restored the standard click command/option decorators.
@click.command('delete', cls=FandoghCommand)
@click.option('--name', '-n', 'secret_name', help='name of the secret to delete', prompt='Name for the secret')
def delete(secret_name):
    """Delete a named secret after interactive confirmation."""
    if click.confirm("You are about to delete a secret named '{}', you cannot undo this action. Are sure?".format(secret_name)):
        result = delete_secret(secret_name)
        click.echo(result['message'])
def firewall_access_proxy_ssh_client_cert(data, fos):
    """Apply the desired state for a firewall access-proxy-ssh-client-cert object.

    Creates/updates the object when state is 'present' (or True), deletes it by
    name when 'absent', and fails the Ansible module for any other state.
    """
    vdom = data['vdom']
    state = data['state']
    payload = underscore_to_hyphen(
        filter_firewall_access_proxy_ssh_client_cert_data(data['firewall_access_proxy_ssh_client_cert']))
    if state == 'present' or state is True:
        return fos.set('firewall', 'access-proxy-ssh-client-cert', data=payload, vdom=vdom)
    if state == 'absent':
        return fos.delete('firewall', 'access-proxy-ssh-client-cert', mkey=payload['name'], vdom=vdom)
    fos._module.fail_json(msg='state must be present or absent!')
class LoggingCommonResponseAllOf1(ModelNormal):
    """Generated OpenAPI model carrying the `format_version` field of a logging response.

    NOTE(review): in the source under review the decorators had been mangled to
    bare names (`_property`, `_js_args_to_python_args`) which is not valid
    Python; restored the standard generated-model decorators
    (`@cached_property`, `@classmethod`, `@convert_js_args_to_python_args`).
    """

    allowed_values = {('format_version',): {'v1': '1', 'v2': '2'}}
    validations = {}

    @cached_property
    def additional_properties_type():
        # Any simple type is accepted as an additional property.
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        # Attribute name -> (type,) tuple, as required by the generated runtime.
        return {'format_version': (str,)}

    @cached_property
    def discriminator():
        return None

    attribute_map = {'format_version': 'format_version'}
    read_only_vars = {}
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate the model from deserialized server data (keyword args only)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                'Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.'
                % (args, self.__class__.__name__),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if (var_name not in self.attribute_map
                    and self._configuration is not None
                    and self._configuration.discard_unknown_keys
                    and self.additional_properties_type is None):
                # Configured to silently drop unknown keys.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming',
                               '_path_to_item', '_configuration', '_visited_composed_classes'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Keyword-only constructor mirroring _from_openapi_data, rejecting read-only attrs."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                'Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.'
                % (args, self.__class__.__name__),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if (var_name not in self.attribute_map
                    and self._configuration is not None
                    and self._configuration.discard_unknown_keys
                    and self.additional_properties_type is None):
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def test_custom_field_definition_duplicate_name_different_resource_type_accepted(db):
    """The same definition name may be reused across different resource types."""
    common = {'name': 'test1', 'description': 'test', 'field_type': 'string',
              'field_definition': 'string'}
    definition1 = CustomFieldDefinition.create(db=db, data={**common, 'resource_type': 'system'})
    definition2 = CustomFieldDefinition.create(db=db, data={**common, 'resource_type': 'privacy_declaration'})
    assert len(CustomFieldDefinition.all(db)) == 2
    assert definition1.id != definition2.id
def test_require5(evmtester, branch_results):
    """Exercise requireBranches(5, ...) and verify which source offsets hit which branch."""
    def assert_branches(results, truthy, falsy=()):
        # Each entry in branch_results maps a taken/not-taken flag to [start, end] offsets.
        for offset in truthy:
            assert [offset, offset + 1] in results[True]
        for offset in falsy:
            assert [offset, offset + 1] in results[False]

    evmtester.requireBranches(5, True, True, True, False)
    assert_branches(branch_results(), [1626, 1660, 1703, 1708, 1713, 1737, 1742, 1747])

    with pytest.raises(VirtualMachineError):
        evmtester.requireBranches(5, False, True, True, False)
    assert_branches(branch_results(), [1631, 1665], [1626, 1660, 1703])

    with pytest.raises(VirtualMachineError):
        evmtester.requireBranches(5, True, False, True, False)
    assert_branches(branch_results(), [1626, 1660, 1703], [1708])

    with pytest.raises(VirtualMachineError):
        evmtester.requireBranches(5, True, True, False, False)
    assert_branches(branch_results(), [1626, 1660, 1703, 1708], [1713])
def test_stalecheck_adds_block_to_cache(request_middleware, allowable_delay):
    """First request caches the freshly fetched block; subsequent requests reuse it."""
    with patch('web3.middleware.stalecheck._is_fresh', side_effect=[False, True, True]) as freshness_spy:
        latest_block = object()
        request_middleware.web3.eth.get_block.return_value = latest_block
        request_middleware('', [])
        # Cache miss (None) triggers a live fetch; both are freshness-checked.
        assert freshness_spy.call_count == 2
        cache_check, live_check = freshness_spy.call_args_list
        assert cache_check == ((None, allowable_delay),)
        assert live_check == ((latest_block, allowable_delay),)
        request_middleware('', [])
        # Second request only checks the now-cached block.
        assert freshness_spy.call_count == 3
        assert freshness_spy.call_args == ((latest_block, allowable_delay),)
class DarkStyle(QtWidgets.QProxyStyle):
    """Proxy style that recolors the standard palette into a dark theme."""

    def __init__(self, *args, **kwargs):
        super(DarkStyle, self).__init__(*args, **kwargs)

    def standardPalette(self):
        """Return the base palette with dark-theme colors applied."""
        palette = super(DarkStyle, self).standardPalette()
        print('DarkStyle.standardPalette is working 1')
        # Each spec is either (role, color) or (group, role, color).
        color_specs = (
            (QtGui.QPalette.Window, QtGui.QColor(53, 53, 53)),
            (QtGui.QPalette.WindowText, QtCore.Qt.white),
            (QtGui.QPalette.Base, QtGui.QColor(25, 25, 25)),
            (QtGui.QPalette.AlternateBase, QtGui.QColor(53, 53, 53)),
            (QtGui.QPalette.ToolTipBase, QtCore.Qt.white),
            (QtGui.QPalette.ToolTipText, QtCore.Qt.white),
            (QtGui.QPalette.Text, QtCore.Qt.white),
            (QtGui.QPalette.Disabled, QtGui.QPalette.Text, QtGui.QColor(QtCore.Qt.darkGray)),
            (QtGui.QPalette.Button, QtGui.QColor(53, 53, 53)),
            (QtGui.QPalette.ButtonText, QtCore.Qt.white),
            (QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, QtGui.QColor(QtCore.Qt.darkGray)),
            (QtGui.QPalette.BrightText, QtCore.Qt.red),
            (QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, QtGui.QColor(255, 128, 128)),
            (QtGui.QPalette.Link, QtGui.QColor(42, 130, 218)),
            (QtGui.QPalette.Highlight, QtGui.QColor(42, 130, 218)),
            (QtGui.QPalette.HighlightedText, QtCore.Qt.black),
        )
        for spec in color_specs:
            palette.setColor(*spec)
        return palette
class AbstractAccount():
    """Lightweight attribute bag: exposes entries of an attributes dict as object attributes.

    Subclasses set ``_str_template`` to a format string rendered by ``__str__``
    from the attribute dict.
    """

    _str_template = None
    __slots__ = ('_attrs',)

    def __init__(self, attributes):
        self._attrs = attributes

    def __getattr__(self, name):
        # Only called when normal lookup fails; falsy stored values (0, '', None)
        # are still returned as long as the key exists.
        value = self._attrs.get(name)
        if not value and name not in self._attrs:
            raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'")
        return value

    def __eq__(self, other):
        # Bug fix: comparing against a non-account used to raise AttributeError
        # (missing `_attrs`); return NotImplemented so Python falls back to its
        # default handling (== yields False for unrelated types).
        if not isinstance(other, AbstractAccount):
            return NotImplemented
        return self._attrs == other._attrs

    def __repr__(self):
        pairs = (f'{k}={repr(v)}' for k, v in self._attrs.items())
        return f"Account({', '.join(pairs)})"

    def __str__(self):
        if not self._str_template:
            raise NotImplementedError(f"'{type(self).__name__}' class has no variable 'str_template'")
        return self._str_template.format(**self._attrs)
class BlacklistRulesEngine(bre.BaseRulesEngine):
    """Rules engine that checks instance network interfaces against blacklist rules."""

    def __init__(self, rules_file_path, snapshot_timestamp=None):
        super(BlacklistRulesEngine, self).__init__(rules_file_path=rules_file_path)
        self.rule_book = None

    def build_rule_book(self, global_configs=None):
        """(Re)build the rule book from the rule definition file."""
        self.rule_book = BlacklistRuleBook(self._load_rule_definitions())

    def find_violations(self, instance_network_interface, force_rebuild=False):
        """Return an iterator of violations found by every resource rule."""
        if self.rule_book is None or force_rebuild:
            self.build_rule_book()
        violations = itertools.chain()
        for resource_rule in self.rule_book.get_resource_rules():
            violations = itertools.chain(
                violations, resource_rule.find_violations(instance_network_interface))
        return violations

    def add_rules(self, rules):
        """Forward extra rules to the rule book, if one has been built."""
        if self.rule_book is not None:
            self.rule_book.add_rules(rules)
def random_transform_as_dual_quaternion(enforce_positive_non_dual_scalar_sign=True):
    """Sample a random rigid transform and return it as a dual quaternion copy."""
    dq = DualQuaternion.from_transformation_matrix(rand_transform())
    # Optionally flip sign so the non-dual (rotation) scalar part is non-negative.
    if enforce_positive_non_dual_scalar_sign and dq.q_rot.w < 0:
        dq.dq = -dq.dq.copy()
    return dq.copy()
def mark(editor: sublime.View, tracker: AbbreviationTracker):
    """Highlight the tracked abbreviation region; add an inline phantom for forced trackers."""
    scope = get_settings('marker_scope', 'region.accent')
    editor.erase_regions(ABBR_REGION_ID)
    if tracker.valid_candidate:
        flags = sublime.DRAW_SOLID_UNDERLINE | sublime.DRAW_NO_FILL | sublime.DRAW_NO_OUTLINE
        editor.add_regions(ABBR_REGION_ID, [tracker.region], scope, '', flags)
    if isinstance(tracker, AbbreviationTrackerValid) and tracker.forced:
        phantoms = [sublime.Phantom(tracker.region, forced_indicator('>'), sublime.LAYOUT_INLINE)]
        view_key = editor.id()
        # Lazily create one PhantomSet per view; never construct it when cached.
        if view_key not in _forced_indicator:
            _forced_indicator[view_key] = sublime.PhantomSet(editor, ABBR_REGION_ID)
        _forced_indicator[view_key].update(phantoms)
def test_get_connection_types():
    """Listing contains DB, SaaS, and email connection types but excludes meta identifiers."""
    data = get_connection_types()
    # Four identifiers are excluded from the listing (hence the "- 4" below).
    assert len(data) == len(ConnectionType) + len(ConnectorRegistry.connector_types()) - 4
    assert {'identifier': ConnectionType.postgres.value, 'type': SystemType.database.value, 'human_readable': 'PostgreSQL', 'encoded_icon': None, 'authorization_required': False, 'user_guide': None, 'supported_actions': [ActionType.access.value, ActionType.erasure.value]} in data
    first_saas_type = ConnectorRegistry.connector_types().pop()
    first_saas_template = ConnectorRegistry.get_connector_template(first_saas_type)
    assert {'identifier': first_saas_type, 'type': SystemType.saas.value, 'human_readable': first_saas_template.human_readable, 'encoded_icon': first_saas_template.icon, 'authorization_required': first_saas_template.authorization_required, 'user_guide': first_saas_template.user_guide, 'supported_actions': [action.value for action in first_saas_template.supported_actions]} in data
    assert 'saas' not in [item.identifier for item in data]
    # NOTE(review): this literal was garbled (unterminated string) in the source
    # under review; restored as 'https' to match the 4 excluded identifiers
    # counted above (saas, https, custom, manual) -- TODO confirm.
    assert 'https' not in [item.identifier for item in data]
    assert 'custom' not in [item.identifier for item in data]
    assert 'manual' not in [item.identifier for item in data]
    assert {'identifier': ConnectionType.sovrn.value, 'type': SystemType.email.value, 'human_readable': 'Sovrn', 'encoded_icon': None, 'authorization_required': False, 'user_guide': None, 'supported_actions': [ActionType.consent.value]} in data
class FaucetSingleStackAclControlTest(FaucetMultiDPTestBase):
    """ACL-controlled forwarding across a 3-DP stack with 3 untagged hosts per DP."""

    NUM_DPS = 3
    NUM_HOSTS = 3

    def acls(self):
        """Per-DP ACL rule sets used by this topology."""
        return {
            1: [
                {'rule': {'dl_type': IPV4_ETH, 'nw_dst': '10.1.0.2', 'actions': {'output': {'port': self.host_port_maps[1][0][0]}}}},
                {'rule': {'dl_type': IPV4_ETH, 'dl_dst': 'ff:ff:ff:ff:ff:ff', 'actions': {'output': {'ports': [self.host_port_maps[1][0][0], self.link_port_maps[(0, 1)][0]]}}}},
                {'rule': {'dl_type': IPV4_ETH, 'actions': {'output': {'port': self.link_port_maps[(0, 1)][0]}}}},
                {'rule': {'actions': {'allow': 1}}},
            ],
            2: [
                {'rule': {'dl_type': IPV4_ETH, 'actions': {'output': {'port': self.link_port_maps[(1, 2)][0]}}}},
                {'rule': {'actions': {'allow': 1}}},
            ],
            3: [
                {'rule': {'dl_type': IPV4_ETH, 'nw_dst': '10.1.0.7', 'actions': {'output': {'port': self.host_port_maps[6][2][0]}}}},
                {'rule': {'dl_type': IPV4_ETH, 'dl_dst': 'ff:ff:ff:ff:ff:ff', 'actions': {'output': {'ports': [self.host_port_maps[6][2][0]]}}}},
                {'rule': {'dl_type': IPV4_ETH, 'actions': {'allow': 0}}},
                {'rule': {'actions': {'allow': 1}}},
            ],
        }

    def link_acls(self):
        """Map DP index / stack-link tuples to the ACL ids applied on them."""
        return {0: [1], (1, 0): [2], (2, 1): [3]}

    def setUp(self):
        super().set_up(stack=True, n_dps=self.NUM_DPS, n_untagged=self.NUM_HOSTS)

    def _hosts(self, ids):
        # Fetch the mininet host objects for the given topology host ids.
        return {i: self.net.get(self.topo.hosts_by_id[i]) for i in ids}

    def test_unicast(self):
        """ACLs allow/block the expected unicast paths across the stack."""
        hosts = self._hosts((0, 1, 3, 6, 7))
        self.verify_stack_up()
        self.verify_tp_dst_notblocked(5000, hosts[0], hosts[1], table_id=None)
        self.verify_tp_dst_blocked(5000, hosts[0], hosts[3], table_id=None)
        self.verify_tp_dst_notblocked(5000, hosts[0], hosts[6], table_id=None)
        self.verify_tp_dst_blocked(5000, hosts[0], hosts[7], table_id=None)
        self.verify_no_cable_errors()

    def test_broadcast(self):
        """ACLs allow/block the expected broadcast paths across the stack."""
        hosts = self._hosts((0, 1, 3, 6, 7))
        self.verify_stack_up()
        self.verify_bcast_dst_notblocked(5000, hosts[0], hosts[1])
        self.verify_bcast_dst_blocked(5000, hosts[0], hosts[3])
        self.verify_bcast_dst_notblocked(5000, hosts[0], hosts[6])
        self.verify_bcast_dst_blocked(5000, hosts[0], hosts[7])
        self.verify_no_cable_errors()
def get_class_definition(name, base='Object', docstring=''):
    """Emit JavaScript source lines defining class ``name`` inheriting from ``base``."""
    lines = ['%s = function () {' % name]
    # Docstring lines become // comments inside the constructor.
    lines.extend('  // ' + doc_line for doc_line in docstring.splitlines())
    lines.append('  %sop_instantiate(this, arguments);' % stdlib.FUNCTION_PREFIX)
    lines.append('}')
    if base != 'Object':
        lines.append('%s.prototype = Object.create(%s);' % (name, base))
        lines.append('%s.prototype._base_class = %s;' % (name, base))
    lines.append('%s.prototype.__name__ = %s;' % (name, reprs(name.split('.')[-1])))
    lines.append('')
    return lines
def broadcast_fnc(input_size: Size, target_size: Size) -> typing.Optional[Callable]:
    """Return a mapping from a flat target index to the corresponding flat input index,
    or None when ``input_size`` cannot be broadcast to ``target_size``."""
    if input_size == target_size:
        return identity_fnc
    projected = _normalize_size(input_size, target_size)
    assert len(projected) == len(target_size)
    # Broadcasting requires every input dim to be 1 or equal to the target dim.
    for dim, target_dim in zip(projected, target_size):
        if dim != 1 and target_dim != dim:
            return None

    def _suffix_products(size):
        # products[k] = product of all dims after position len-1-k (row-major strides, reversed).
        products = []
        acc = 1
        for d in reversed(size):
            products.append(acc)
            acc *= d
        return products

    target_index_to_composite = _create_target_index_to_composite(target_size, _suffix_products(target_size))
    input_list_from_target_list = _create_input_list_from_target_list(_suffix_products(projected), projected)
    return lambda target_index: input_list_from_target_list(target_index_to_composite(target_index))
class StatsD(object):
    # StatsD connection settings, all read from the legacy config's SECTION.
    # NOTE(review): the section name 'secrets' looks unusual for StatsD
    # settings -- confirm against the actual config file layout.
    SECTION = 'secrets'
    HOST = ConfigEntry(LegacyConfigEntry(SECTION, 'host'))
    PORT = ConfigEntry(LegacyConfigEntry(SECTION, 'port', int))
    DISABLED = ConfigEntry(LegacyConfigEntry(SECTION, 'disabled', bool))
    DISABLE_TAGS = ConfigEntry(LegacyConfigEntry(SECTION, 'disable_tags', bool))
def extractNoobtransWordpressCom(item):
    """Parse a feed item into a release message.

    Returns None for previews or items with no chapter/volume info, a release
    message for known tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    for tag_name, series_name, tl_type in (('PRC', 'PRC', 'translated'),
                                           ('Loiterous', 'Loiterous', 'oel')):
        if tag_name in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp,
                                               frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def classify_identity_type_for_privacy_center_consent_reporting(db: Session, provided_identity: ProvidedIdentity, browser_identity: Identity) -> Tuple[(Optional[ProvidedIdentity], Optional[ProvidedIdentity])]:
    """Split an identity into (provided_identity, fides_user_device_identity) for reporting."""
    if not provided_identity.hashed_value:
        raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail='Provided identity missing')
    if provided_identity.field_name == ProvidedIdentityType.fides_user_device_id:
        # The supplied identity already *is* a device id: report it only as such.
        return None, provided_identity
    try:
        device_identity = get_or_create_fides_user_device_id_provided_identity(
            db=db, identity_data=browser_identity)
    except HTTPException:
        # Best-effort: a missing/uncreatable device identity is not fatal here.
        device_identity = None
    return provided_identity, device_identity
def extractConvallariaslibraryCom(item):
    """Parse a feed item into a release message.

    Returns None for previews or items with no chapter/volume info, a release
    message for known tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    for tag_name, series_name, tl_type in (('PRC', 'PRC', 'translated'),
                                           ('Loiterous', 'Loiterous', 'oel')):
        if tag_name in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp,
                                               frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def action_to_fdeep(argc, argv):
    """CLI action: convert a trained project's weights to fdeep format."""
    args = parse_args(argv)
    prj = Project(args.path)
    err = prj.load()
    if err is not None:
        log.error('error while loading project: %s', err)
        quit()
    if not prj.is_trained():
        log.error('no trained model found for this project')
        quit()
    convert(prj.weights_path, prj.fdeep_path, args.no_tests, args.metadata)
class Model(object):
    """Attention-based OCR model (TF1 compat): a CNN feature extractor feeding an
    attention seq2seq decoder that transcribes text from image bytes.

    NOTE(review): the source under review arrived with all newlines/indentation
    stripped; the block structure below is a reconstruction -- confirm against
    the original upstream layout.
    """

    def __init__(self, phase, visualize, output_dir, batch_size, initial_learning_rate,
                 steps_per_checkpoint, model_dir, target_embedding_size, attn_num_hidden,
                 attn_num_layers, clip_gradients, max_gradient_norm, session, load_model,
                 gpu_id, use_gru, use_distance=True, max_image_width=160, max_image_height=60,
                 max_prediction_length=8, channels=1, reg_val=0):
        self.use_distance = use_distance
        # Width of an input image after resizing to the fixed DataGen height.
        max_resized_width = (((1.0 * max_image_width) / max_image_height) * DataGen.IMAGE_HEIGHT)
        self.max_original_width = max_image_width
        self.max_width = int(math.ceil(max_resized_width))
        # Encoder steps: one per 4 horizontal pixels of the (resized) image.
        self.encoder_size = int(math.ceil(((1.0 * self.max_width) / 4)))
        # +2 for GO/EOS framing around the prediction.
        self.decoder_size = (max_prediction_length + 2)
        self.buckets = [(self.encoder_size, self.decoder_size)]
        if (gpu_id >= 0):
            device_id = ('/gpu:' + str(gpu_id))
        else:
            device_id = '/cpu:0'
        self.device_id = device_id
        if (not os.path.exists(model_dir)):
            os.makedirs(model_dir)
        if (phase == 'test'):
            batch_size = 1
        logging.info('phase: %s', phase)
        logging.info('model_dir: %s', model_dir)
        logging.info('load_model: %s', load_model)
        logging.info('output_dir: %s', output_dir)
        logging.info('steps_per_checkpoint: %d', steps_per_checkpoint)
        logging.info('batch_size: %d', batch_size)
        logging.info('learning_rate: %f', initial_learning_rate)
        logging.info('reg_val: %d', reg_val)
        logging.info('max_gradient_norm: %f', max_gradient_norm)
        logging.info('clip_gradients: %s', clip_gradients)
        logging.info('max_image_width %f', max_image_width)
        logging.info('max_prediction_length %f', max_prediction_length)
        logging.info('channels: %d', channels)
        logging.info('target_embedding_size: %f', target_embedding_size)
        logging.info('attn_num_hidden: %d', attn_num_hidden)
        logging.info('attn_num_layers: %d', attn_num_layers)
        logging.info('visualize: %s', visualize)
        if use_gru:
            logging.info('using GRU in the decoder.')
        self.reg_val = reg_val
        self.sess = session
        self.steps_per_checkpoint = steps_per_checkpoint
        self.model_dir = model_dir
        self.output_dir = output_dir
        self.batch_size = batch_size
        self.global_step = tf.Variable(0, trainable=False)
        self.phase = phase
        self.visualize = visualize
        self.learning_rate = initial_learning_rate
        self.clip_gradients = clip_gradients
        self.channels = channels
        if (phase == 'train'):
            self.forward_only = False
        else:
            self.forward_only = True
        with tf.device(device_id):
            self.height = tf.constant(DataGen.IMAGE_HEIGHT, dtype=tf.int32)
            self.height_float = tf.constant(DataGen.IMAGE_HEIGHT, dtype=tf.float64)
            # Raw PNG bytes in; a scalar input is promoted to a 1-element batch.
            self.img_pl = tf.compat.v1.placeholder(tf.string, name='input_image_as_bytes')
            self.img_data = tf.cond(tf.less(tf.rank(self.img_pl), 1), (lambda : tf.expand_dims(self.img_pl, 0)), (lambda : self.img_pl))
            self.img_data = tf.map_fn(self._prepare_image, self.img_data, dtype=tf.float32)
            num_images = tf.shape(self.img_data)[0]
            # All encoder positions are unmasked (weight 1.0).
            self.encoder_masks = []
            for i in xrange((self.encoder_size + 1)):
                self.encoder_masks.append(tf.tile([[1.0]], [num_images, 1]))
            self.decoder_inputs = []
            self.target_weights = []
            for i in xrange((self.decoder_size + 1)):
                self.decoder_inputs.append(tf.tile([1], [num_images]))
                if (i < self.decoder_size):
                    self.target_weights.append(tf.tile([1.0], [num_images]))
                else:
                    # Final position carries zero weight.
                    self.target_weights.append(tf.tile([0.0], [num_images]))
            cnn_model = CNN(self.img_data, (not self.forward_only))
            self.conv_output = cnn_model.tf_output()
            # Time-major layout for the seq2seq encoder.
            self.perm_conv_output = tf.transpose(self.conv_output, perm=[1, 0, 2])
            self.attention_decoder_model = Seq2SeqModel(encoder_masks=self.encoder_masks, encoder_inputs_tensor=self.perm_conv_output, decoder_inputs=self.decoder_inputs, target_weights=self.target_weights, target_vocab_size=len(DataGen.CHARMAP), buckets=self.buckets, target_embedding_size=target_embedding_size, attn_num_layers=attn_num_layers, attn_num_hidden=attn_num_hidden, forward_only=self.forward_only, use_gru=use_gru)
            # Id -> character lookup table for decoding predictions to strings.
            table = tf.lookup.experimental.MutableHashTable(key_dtype=tf.int64, value_dtype=tf.string, default_value='', checkpoint=True)
            insert = table.insert(tf.constant(list(range(len(DataGen.CHARMAP))), dtype=tf.int64), tf.constant(DataGen.CHARMAP))
            with tf.control_dependencies([insert]):
                num_feed = []
                prb_feed = []
                for line in xrange(len(self.attention_decoder_model.output)):
                    guess = tf.argmax(self.attention_decoder_model.output[line], axis=1)
                    proba = tf.reduce_max(tf.nn.softmax(self.attention_decoder_model.output[line]), axis=1)
                    num_feed.append(guess)
                    prb_feed.append(proba)
                # Fold character ids right-to-left into strings, stopping at EOS.
                trans_output = tf.transpose(num_feed)
                trans_output = tf.map_fn((lambda m: tf.foldr((lambda a, x: tf.cond(tf.equal(x, DataGen.EOS_ID), (lambda : ''), (lambda : (table.lookup(x) + a)))), m, initializer='')), trans_output, dtype=tf.string)
                # Sequence probability = product of per-step max probabilities.
                trans_outprb = tf.transpose(prb_feed)
                trans_outprb = tf.gather(trans_outprb, tf.range(tf.size(trans_output)))
                trans_outprb = tf.map_fn((lambda m: tf.foldr((lambda a, x: tf.multiply(tf.cast(x, tf.float64), a)), m, initializer=tf.cast(1, tf.float64))), trans_outprb, dtype=tf.float64)
                # Single-image batches are unwrapped to scalars.
                self.prediction = tf.cond(tf.equal(tf.shape(trans_output)[0], 1), (lambda : trans_output[0]), (lambda : trans_output))
                self.probability = tf.cond(tf.equal(tf.shape(trans_outprb)[0], 1), (lambda : trans_outprb[0]), (lambda : trans_outprb))
                self.prediction = tf.identity(self.prediction, name='prediction')
                self.probability = tf.identity(self.probability, name='probability')
                if (not self.forward_only):
                    # Training: build loss (optionally regularized), gradients, and updates.
                    self.updates = []
                    self.summaries_by_bucket = []
                    params = tf.compat.v1.trainable_variables()
                    opt = tf.compat.v1.train.AdadeltaOptimizer(learning_rate=initial_learning_rate)
                    loss_op = self.attention_decoder_model.loss
                    if (self.reg_val > 0):
                        reg_losses = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.REGULARIZATION_LOSSES)
                        logging.info('Adding %s regularization losses', len(reg_losses))
                        logging.debug('REGULARIZATION_LOSSES: %s', reg_losses)
                        loss_op = ((self.reg_val * tf.reduce_sum(reg_losses)) + loss_op)
                    (gradients, params) = list(zip(*opt.compute_gradients(loss_op, params)))
                    if self.clip_gradients:
                        (gradients, _) = tf.clip_by_global_norm(gradients, max_gradient_norm)
                    summaries = [tf.compat.v1.summary.scalar('loss', loss_op), tf.compat.v1.summary.scalar('total_gradient_norm', tf.linalg.global_norm(gradients))]
                    all_summaries = tf.compat.v1.summary.merge(summaries)
                    self.summaries_by_bucket.append(all_summaries)
                    # Make sure batch-norm style UPDATE_OPS run with each train step.
                    update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.UPDATE_OPS)
                    with tf.control_dependencies(update_ops):
                        self.updates.append(opt.apply_gradients(list(zip(gradients, params)), global_step=self.global_step))
        self.saver_all = tf.compat.v1.train.Saver(tf.compat.v1.all_variables())
        self.checkpoint_path = os.path.join(self.model_dir, 'model.ckpt')
        ckpt = tf.train.get_checkpoint_state(model_dir)
        if (ckpt and load_model):
            logging.info('Reading model parameters from %s', ckpt.model_checkpoint_path)
            self.saver_all.restore(self.sess, ckpt.model_checkpoint_path)
        else:
            logging.info('Created model with fresh parameters.')
            self.sess.run(tf.compat.v1.initialize_all_variables())

    def predict(self, image_file_data):
        # Run one forward pass on raw image bytes; returns (text, probability).
        input_feed = {}
        input_feed[self.img_pl.name] = image_file_data
        output_feed = [self.prediction, self.probability]
        outputs = self.sess.run(output_feed, input_feed)
        text = outputs[0]
        probability = outputs[1]
        if (sys.version_info >= (3,)):
            text = text.decode('iso-8859-1')
        return (text, probability)

    def test(self, data_path):
        # Evaluate the model over a dataset, logging per-step accuracy.
        current_step = 0
        num_correct = 0.0
        num_total = 0.0
        s_gen = DataGen(data_path, self.buckets, epochs=1, max_width=self.max_original_width)
        for batch in s_gen.gen(1):
            current_step += 1
            start_time = time.time()
            result = self.step(batch, self.forward_only)
            curr_step_time = (time.time() - start_time)
            num_total += 1
            output = result['prediction']
            ground = batch['labels'][0]
            comment = batch['comments'][0]
            if (sys.version_info >= (3,)):
                output = output.decode('iso-8859-1')
                ground = ground.decode('iso-8859-1')
                comment = comment.decode('iso-8859-1')
            probability = result['probability']
            if self.use_distance:
                # Normalized edit distance in [0, 1]; empty ground truth handled specially.
                incorrect = distance.levenshtein(output, ground)
                if (not ground):
                    if (not output):
                        incorrect = 0
                    else:
                        incorrect = 1
                else:
                    incorrect = (float(incorrect) / len(ground))
                incorrect = min(1, incorrect)
            else:
                incorrect = (0 if (output == ground) else 1)
            num_correct += (1.0 - incorrect)
            if self.visualize:
                threshold = 0.5
                normalize = True
                binarize = True
                attns_list = [[a.tolist() for a in step_attn] for step_attn in result['attentions']]
                attns = np.array(attns_list).transpose([1, 0, 2])
                visualize_attention(batch['data'], 'out', attns, output, self.max_width, DataGen.IMAGE_HEIGHT, threshold=threshold, normalize=normalize, binarize=binarize, ground=ground, flag=None)
            step_accuracy = '{:>4.0%}'.format((1.0 - incorrect))
            if incorrect:
                correctness = (step_accuracy + ' ({} vs {}) {}'.format(output, ground, comment))
            else:
                correctness = (((step_accuracy + ' (') + ground) + ')')
            logging.info('Step {:.0f} ({:.3f}s). Accuracy: {:6.2%}, loss: {:f}, perplexity: {:0<7.6}, probability: {:6.2%} {}'.format(current_step, curr_step_time, (num_correct / num_total), result['loss'], (math.exp(result['loss']) if (result['loss'] < 300) else float('inf')), probability, correctness))

    def train(self, data_path, num_epoch):
        # Training loop with periodic checkpointing; failed batches are skipped.
        logging.info('num_epoch: %d', num_epoch)
        s_gen = DataGen(data_path, self.buckets, epochs=num_epoch, max_width=self.max_original_width)
        step_time = 0.0
        loss = 0.0
        current_step = 0
        skipped_counter = 0
        writer = tf.compat.v1.summary.FileWriter(self.model_dir, self.sess.graph)
        logging.info('Starting the training process.')
        for batch in s_gen.gen(self.batch_size):
            current_step += 1
            start_time = time.time()
            result = None
            try:
                result = self.step(batch, self.forward_only)
            except Exception as e:
                skipped_counter += 1
                # NOTE(review): the closing paren is misplaced -- .format() is applied
                # only to the second string with both args, so this logs
                # 'Total skipped: <current_step>' instead of the skipped count.
                logging.info(('Step {} failed, batch skipped.' + ' Total skipped: {}'.format(current_step, skipped_counter)))
                logging.error('Step {} failed. Exception details: {}'.format(current_step, str(e)))
                continue
            loss += (result['loss'] / self.steps_per_checkpoint)
            curr_step_time = (time.time() - start_time)
            step_time += (curr_step_time / self.steps_per_checkpoint)
            writer.add_summary(result['summaries'], current_step)
            step_perplexity = (math.exp(result['loss']) if (result['loss'] < 300) else float('inf'))
            logging.info('Step %i: %.3fs, loss: %f, perplexity: %f.', current_step, curr_step_time, result['loss'], step_perplexity)
            if ((current_step % self.steps_per_checkpoint) == 0):
                perplexity = (math.exp(loss) if (loss < 300) else float('inf'))
                logging.info('Global step %d. Time: %.3f, loss: %f, perplexity: %.2f.', self.sess.run(self.global_step), step_time, loss, perplexity)
                logging.info('Saving the model at step %d.', current_step)
                self.saver_all.save(self.sess, self.checkpoint_path, global_step=self.global_step)
                (step_time, loss) = (0.0, 0.0)
        perplexity = (math.exp(loss) if (loss < 300) else float('inf'))
        logging.info('Global step %d. Time: %.3f, loss: %f, perplexity: %.2f.', self.sess.run(self.global_step), step_time, loss, perplexity)
        if skipped_counter:
            logging.info('Skipped {} batches due to errors.'.format(skipped_counter))
        logging.info('Finishing the training and saving the model at step %d.', current_step)
        self.saver_all.save(self.sess, self.checkpoint_path, global_step=self.global_step)

    def step(self, batch, forward_only):
        # One session run: loss (+summaries/updates when training,
        # prediction/probability/attentions when testing).
        img_data = batch['data']
        decoder_inputs = batch['decoder_inputs']
        target_weights = batch['target_weights']
        input_feed = {}
        input_feed[self.img_pl.name] = img_data
        for idx in xrange(self.decoder_size):
            input_feed[self.decoder_inputs[idx].name] = decoder_inputs[idx]
            input_feed[self.target_weights[idx].name] = target_weights[idx]
        last_target = self.decoder_inputs[self.decoder_size].name
        input_feed[last_target] = np.zeros([self.batch_size], dtype=np.int32)
        output_feed = [self.attention_decoder_model.loss]
        if (not forward_only):
            output_feed += [self.summaries_by_bucket[0], self.updates[0]]
        else:
            output_feed += [self.prediction]
            output_feed += [self.probability]
            if self.visualize:
                output_feed += self.attention_decoder_model.attentions
        outputs = self.sess.run(output_feed, input_feed)
        res = {'loss': outputs[0]}
        if (not forward_only):
            res['summaries'] = outputs[1]
        else:
            res['prediction'] = outputs[1]
            res['probability'] = outputs[2]
            if self.visualize:
                res['attentions'] = outputs[3:]
        return res

    def _prepare_image(self, image):
        # Decode PNG bytes, resize preserving aspect ratio to the model height,
        # then pad to the fixed max width.
        img = tf.image.decode_png(image, channels=self.channels)
        dims = tf.shape(img)
        width = self.max_width
        max_width = tf.cast(tf.math.ceil((tf.truediv(dims[1], dims[0]) * self.height_float)), dtype=tf.int32)
        max_height = tf.cast(tf.math.ceil((tf.truediv(width, max_width) * self.height_float)), dtype=tf.int32)
        resized = tf.cond(tf.greater_equal(width, max_width), (lambda : tf.cond(tf.less_equal(dims[0], self.height), (lambda : tf.cast(img, dtype=tf.float32)), (lambda : tf.image.resize(img, [self.height, max_width], method=tf.image.ResizeMethod.BICUBIC)))), (lambda : tf.image.resize(img, [max_height, width], method=tf.image.ResizeMethod.BICUBIC)))
        padded = tf.image.pad_to_bounding_box(resized, 0, 0, self.height, width)
        return padded
def test_nan_default_value():
    """A float field whose schema default is the string 'nan' must round-trip to float NaN.

    NOTE(review): the original had a stray "'" before `schema` — the residue of a
    truncated docstring; replaced with this proper docstring (only code change).
    """
    schema = {
        'namespace': 'namespace',
        'name': 'name',
        'type': 'record',
        'fields': [{'name': 'some_field', 'type': 'float', 'default': 'nan'}],
    }
    # Record omits the field entirely, so the schema default must be applied.
    test_record = {}
    result_value = roundtrip(schema, [test_record])[0]['some_field']
    # NaN != NaN, so math.isnan() is the only correct check here.
    assert math.isnan(result_value)
class CatalogDrawer(Catalog.CatalogGroup):
    """Catalog group exposing the CSS style classes used by drawer components.

    Each accessor registers the corresponding CSS class via ``_set_class`` and
    returns the created style object.
    """

    def drawer(self) -> CssStylesDivDrawers.CssDrawer:
        # Style for the drawer container itself.
        return self._set_class(CssStylesDivDrawers.CssDrawer)

    def nav(self) -> CssStylesDivDrawers.CssDrawerNav:
        # Style for the drawer navigation bar.
        return self._set_class(CssStylesDivDrawers.CssDrawerNav)

    def handle(self) -> CssStylesDivDrawers.CssDrawerHandle:
        # Style for the clickable handle that opens/closes the drawer.
        return self._set_class(CssStylesDivDrawers.CssDrawerHandle)

    def content(self) -> CssStylesDivDrawers.CssDrawerContent:
        # Style for the drawer content pane.
        return self._set_class(CssStylesDivDrawers.CssDrawerContent)
def test_perf_clip(repeat=10):
    """Benchmark utils.clip against numpy.clip on identical random inputs.

    Prints mean/std execution time (ms) over *repeat* repetitions for each.
    """
    timings = np.zeros(repeat)
    for rep in range(repeat):
        started = time.time()
        utils.clip(np.random.uniform(low=(-10), high=10, size=[1000, 2]),
                   val_min=(-5), val_max=5)
        timings[rep] = time.time() - started
    print('\nutils.clip execution time over {} repetitions'.format(repeat))
    print('\tmean = {:.3f} ms\n\tstd = {:.3f} ms'.format(
        np.mean(timings) * 1000.0, np.std(timings) * 1000.0))

    timings = np.zeros(repeat)
    for rep in range(repeat):
        started = time.time()
        np.clip(np.random.uniform(low=(-10), high=10, size=[1000, 2]),
                a_min=(-5), a_max=5)
        timings[rep] = time.time() - started
    print('\nnumpy.clip execution time over {} repetitions'.format(repeat))
    print('\tmean = {:.3f} ms\n\tstd = {:.3f} ms'.format(
        np.mean(timings) * 1000.0, np.std(timings) * 1000.0))
def test_instance_location_mix(map_doc):
    """Design and user coordinates on one instance combine into full locations."""
    inst = InstanceDescriptor(
        designLocation={'Weight': (60, 61)},
        userLocation={'Width': 180},
    )
    expected_user = {'Weight': 600, 'Width': 180, 'Custom': 1.5}
    assert inst.getFullUserLocation(map_doc) == expected_user, \
        'instance location is a mix of design and user locations'
    expected_design = {'Weight': (60, 61), 'Width': 18000, 'Custom': 1.5}
    assert inst.getFullDesignLocation(map_doc) == expected_design, \
        'instance location is a mix of design and user location'
@mock.patch('flytekit.clients.friendly.SynchronousFlyteClient')
@mock.patch('click.get_current_context')
def test_get_client(click_current_ctx, mock_flyte_client):
    """_get_client should assemble a PlatformConfig from the click context object
    plus the host/insecure arguments and hand it to SynchronousFlyteClient.

    NOTE(review): the two ``mock.patch`` decorators above had lost their
    '@mock.patch' prefix in the original rendering (only the target strings
    remained); restoring them is the only code change.
    """

    class FlexiMock(mock.MagicMock):
        """MagicMock that also supports dict-style access, like a click ctx.obj."""

        def __init__(self, *args, **kwargs):
            super(FlexiMock, self).__init__(*args, **kwargs)
            self.__getitem__ = (lambda obj, item: getattr(obj, item))

        def get(self, x, default=None):
            return getattr(self, x, default)

    # Assigning .obj on the MagicMock *class* makes every MagicMock instance —
    # including the one the patched click.get_current_context() returns —
    # expose this context object.
    click_current_ctx = mock.MagicMock
    obj_mock = FlexiMock(config=PlatformConfig(auth_mode=AuthType.EXTERNAL_PROCESS), cacert=None)
    click_current_ctx.obj = obj_mock

    _ = _get_client(host='some-host:12345', insecure=False)
    expected_platform_config = PlatformConfig(endpoint='some-host:12345', insecure=False, auth_mode=AuthType.EXTERNAL_PROCESS)
    mock_flyte_client.assert_called_with(expected_platform_config)
class ConvTranspose2dBias(ConvTranspose2dBiasAct):
    """Transposed 2D convolution with bias and no activation.

    Thin wrapper that fixes the op name to 'transposed_conv2d_bias' and forwards
    all geometry/dtype arguments to ConvTranspose2dBiasAct unchanged.
    """

    def __init__(self, in_channels, out_channels, kernel_size, stride,
                 padding=0, dilation=1, groups=1, dtype='float16'):
        super().__init__('transposed_conv2d_bias', in_channels, out_channels,
                         kernel_size, stride, padding, dilation, groups, dtype)
def filter_log_syslogd4_override_setting_data(json):
    """Project *json* onto the known log.syslogd4 override-setting options.

    Strips invalid fields first, then keeps only whitelisted keys whose value
    is present and not None.
    """
    option_list = ['certificate', 'custom_field_name', 'enc_algorithm',
                   'facility', 'format', 'interface', 'interface_select_method',
                   'max_log_rate', 'mode', 'override', 'port', 'priority',
                   'server', 'source_ip', 'ssl_min_proto_version', 'status',
                   'syslog_type']
    json = remove_invalid_fields(json)
    return {
        attribute: json[attribute]
        for attribute in option_list
        if attribute in json and json[attribute] is not None
    }
class TestPatternsPass(unittest.TestCase):
    """Dynamically generates one test method per .sol file found under this directory."""

    # NOTE(review): dirname(...) + '**/' concatenates without a path separator,
    # yielding e.g. '/path/to/tests**/' as the glob prefix; glob with
    # recursive=True appears to tolerate this — confirm the intended pattern
    # before changing it.
    base_path = (os.path.dirname(os.path.abspath(__file__)) + '**/')
    frame = inspect.currentframe()
    # Register each discovered .sol file as a test_<dotted.relative.path> method
    # on this class via the class-body frame locals.
    for filename in glob.iglob(f'{base_path}**/*.sol', recursive=True):
        path = Path(filename)
        test_name = str(path.relative_to(Path(os.path.abspath(__file__)).parent)).replace('.sol', '').replace('\\', '.').replace('/', '.')
        frame.f_locals[f'test_{test_name}'] = make_test_case(str(path))
class OptionPlotoptionsTilemapSonificationTracksMappingTremolo(Options):
    """Tremolo mapping options for tilemap sonification tracks."""

    def depth(self) -> CssStylesDivDrawers if False else 'OptionPlotoptionsTilemapSonificationTracksMappingTremoloDepth':
        # Sub-options controlling tremolo depth.
        return self._config_sub_data('depth', OptionPlotoptionsTilemapSonificationTracksMappingTremoloDepth)

    def speed(self) -> 'OptionPlotoptionsTilemapSonificationTracksMappingTremoloSpeed':
        # Sub-options controlling tremolo speed.
        return self._config_sub_data('speed', OptionPlotoptionsTilemapSonificationTracksMappingTremoloSpeed)
def _config_from_hf(hf_config: Mapping[(str, Any)]) -> FalconConfig: model_type = hf_config['model_type'] if ('RefinedWeb' in model_type): return _config_from_hf_refined_web_model(hf_config) elif (model_type == 'falcon'): return _config_from_hf_falcon(hf_config) else: raise ValueError(f'Unknown type of Falcon model: {model_type}')
def run_async_bwd(folder_name: str, path_dir: str, callback_url: str, verbose: bool, num_workers: int, res: tuple, batch_data_vjp: Tuple[(JaxSimulationData, ...)]) -> Tuple[Dict[(str, JaxSimulation)]]:
    """Backward pass of the async adjoint run.

    Builds one adjoint simulation per entry of ``batch_data_vjp`` (each parented
    to its forward task via ``res[0].fwd_task_ids``), runs the adjoint batch on
    the server, and grafts the gradient-carrying input structures back onto the
    original simulations.  Returns a 1-tuple to match the VJP convention.
    """
    fwd_task_ids = res[0].fwd_task_ids
    sims_adj = []
    jax_infos_adj = []
    parent_tasks_adj = []
    for (sim_data_vjp, fwd_task_id) in zip(batch_data_vjp, fwd_task_ids):
        # Each adjoint task is parented to its corresponding forward task.
        parent_tasks_adj.append([str(fwd_task_id)])
        # Per-simulation overrides for the adjoint source bandwidth / run time.
        fwidth_adj = sim_data_vjp.simulation._fwidth_adjoint
        run_time_adj = sim_data_vjp.simulation._run_time_adjoint
        jax_sim_adj = sim_data_vjp.make_adjoint_simulation(fwidth=fwidth_adj, run_time=run_time_adj)
        (sim_adj, jax_info_adj) = jax_sim_adj.to_simulation()
        sims_adj.append(sim_adj)
        jax_infos_adj.append(jax_info_adj)
    sims_vjp = webapi_run_async_adjoint_bwd(simulations=sims_adj, jax_infos=jax_infos_adj, folder_name=folder_name, path_dir=path_dir, callback_url=callback_url, verbose=verbose, parent_tasks=parent_tasks_adj)
    sims_vjp_updated = []
    for (sim_vjp, sim_data_vjp) in zip(sims_vjp, batch_data_vjp):
        # Keep the original simulation but swap in the gradient structures.
        sim_vjp_orig = sim_data_vjp.simulation
        sim_vjp_updated = sim_vjp_orig.updated_copy(input_structures=sim_vjp.input_structures)
        sims_vjp_updated.append(sim_vjp_updated)
    return (sims_vjp_updated,)
# NOTE(review): the line below looks like the tail of a truncated decorator —
# its '@<name>' prefix is missing from this rendering; restore the original
# decorator name before relying on it.
.provider(fields.Dictionary({}, description='The default coroutine middleware has no constructor arguments'))
class DefaultCoroutineMiddleware(CoroutineMiddleware):
    """Coroutine middleware that awaits the wrapped coroutine and logs failures."""

    def coroutine(self, coroutine: MiddlewareCoroutine) -> MiddlewareCoroutine:
        """Wrap *coroutine* so any exception is logged with traceback, then re-raised."""

        async def handler():
            try:
                return (await coroutine)
            except Exception:
                # Log with full traceback; re-raise so callers still observe the failure.
                _logger.exception('Error occurred while awaiting coroutine in request.run_coroutine')
                raise

        return handler()
def find_item_locally(ctx: Context, item_type: str, item_public_id: PublicId) -> Tuple[Path, ComponentConfiguration]:
    """Locate *item_public_id* of *item_type* in the local registry.

    Returns the package directory and its loaded configuration.

    :raises click.ClickException: when the registry path is unavailable, the
        package or its configuration file is missing or invalid, or the found
        package's author/version does not match the request.
    """
    plural = item_type + 's'
    try:
        registry_path = ctx.registry_path
    except ValueError as err:
        raise click.ClickException(str(err))
    package_path = Path(registry_path, item_public_id.author, plural, item_public_id.name)
    config_filename = _get_default_configuration_file_name_from_type(item_type)
    config_path = package_path / config_filename
    if not config_path.exists():
        raise click.ClickException("Cannot find {}: '{}'.".format(item_type, item_public_id))
    try:
        loader = ConfigLoader.from_configuration_type(PackageType(item_type))
        with open_file(config_path) as fp:
            item_configuration = loader.load(fp)
    except ValidationError as err:
        raise click.ClickException('{} configuration file not valid: {}'.format(item_type.capitalize(), str(err)))
    # Author must always match; version only matters when a concrete (non-latest)
    # version was requested.
    wrong_author = item_public_id.author != item_configuration.author
    wrong_version = (not item_public_id.package_version.is_latest) and (item_public_id.version != item_configuration.version)
    if wrong_author or wrong_version:
        raise click.ClickException('Cannot find {} with author and version specified.'.format(item_type))
    return (package_path, item_configuration)
@_util.copy_func_kwargs(DatabaseOptions)
def on_value_written(**kwargs) -> _typing.Callable[[_C1], _C1]:
    """Decorator factory for Realtime Database 'value written' triggers.

    Accepts DatabaseOptions keyword arguments and returns a decorator that wraps
    a handler so incoming CloudEvents are routed through ``_db_endpoint_handler``
    with the configured reference/instance path patterns.

    NOTE(review): the original rendering had lost the '@' on both decorators
    (``_util.copy_func_kwargs(...)`` and ``_functools.wraps(func)``); restoring
    them is the only code change.
    """
    options = DatabaseOptions(**kwargs)

    def on_value_written_inner_decorator(func: _C1):
        ref_pattern = _path_pattern.PathPattern(options.reference)
        # No explicit instance means "match any instance".
        instance_pattern = _path_pattern.PathPattern(
            options.instance if options.instance is not None else '*')

        @_functools.wraps(func)
        def on_value_written_wrapped(raw: _ce.CloudEvent):
            return _db_endpoint_handler(func, _event_type_written, ref_pattern, instance_pattern, raw)

        # Attach the endpoint descriptor used at deploy time.
        _util.set_func_endpoint_attr(
            on_value_written_wrapped,
            options._endpoint(event_type=_event_type_written,
                              func_name=func.__name__,
                              instance_pattern=instance_pattern))
        return on_value_written_wrapped

    return on_value_written_inner_decorator
class BaseSoC(SoCSDRAM):
    """Minimal Arty DDR3 SoC for Project X-Ray tests (no CPU, bridge UART)."""

    def __init__(self):
        platform = arty.Platform()
        # BUG FIX: the original read `sys_clk_freq = int(.0)` (i.e. 0 Hz), which
        # is not a usable system clock; the Arty DDR3 design with a 1:4 A7DDRPHY
        # runs at 100 MHz.  TODO(review): confirm against the original target.
        sys_clk_freq = int(100e6)
        SoCSDRAM.__init__(self, platform, clk_freq=sys_clk_freq,
                          ident='Minimal Arty DDR3 Design for tests with Project X-Ray',
                          ident_version=True, cpu_type=None, l2_size=16,
                          uart_name='bridge')
        self.submodules.crg = _CRG(platform, sys_clk_freq)
        if not self.integrated_main_ram_size:
            # DDR3 PHY (4-phase) driving an MT41K128M16 module at 1:4 ratio.
            self.submodules.ddrphy = s7ddrphy.A7DDRPHY(platform.request('ddram'),
                                                       memtype='DDR3', nphases=4,
                                                       sys_clk_freq=sys_clk_freq)
            self.add_csr('ddrphy')
            sdram_module = MT41K128M16(sys_clk_freq, '1:4')
            self.register_sdram(self.ddrphy,
                                geom_settings=sdram_module.geom_settings,
                                timing_settings=sdram_module.timing_settings)

    def generate_sdram_phy_py_header(self):
        """Write the SDRAM PHY initialization sequence to sdram_init.py."""
        # Context manager ensures the file is closed even if rendering fails
        # (original used open/write/close).
        with open('sdram_init.py', 'w') as f:
            f.write(get_sdram_phy_py_header(self.sdram.controller.settings.phy,
                                            self.sdram.controller.settings.timing))
class EmbeddingOpenAI(Embedding):
    """Version-agnostic facade over the OpenAI embedding helpers.

    Delegates every call to ``EmbeddingOpenAI_0x`` (openai < 1.0) or
    ``EmbeddingOpenAI_1x`` (openai >= 1.0).

    BUG FIX: the original selected the backend with a lexicographic string
    comparison (``openai.__version__ < '1.0.0'``), which misclassifies versions
    such as '10.0.0'; the major version is now compared numerically.
    """

    def __init__(self, model_name='openai'):
        super().__init__(model_name)
        major_version = int(openai.__version__.split('.')[0])
        self.instance = EmbeddingOpenAI_0x() if major_version < 1 else EmbeddingOpenAI_1x()
        print(f'Initialized EmbeddingOpenAI Interface: openai version {openai.__version__}')

    def dim(self):
        """Dimensionality of the embedding vectors."""
        return self.instance.dim()

    def getname(self, start_date, prefix='news'):
        """Delegate index/collection name construction for the given date and prefix."""
        return self.instance.getname(start_date=start_date, prefix=prefix)

    def create(self, text: str, model_name='text-embedding-ada-002', num_retries=3):
        """Create an embedding for *text*, retrying up to *num_retries* times."""
        return self.instance.create(text=text, model_name=model_name, num_retries=num_retries)

    def get_or_create(self, text: str, source='', page_id='', db_client=None, key_ttl=(86400 * 30)):
        """Fetch a cached embedding or create and cache it (default TTL: 30 days)."""
        return self.instance.get_or_create(text=text, source=source, page_id=page_id, db_client=db_client, key_ttl=key_ttl)
def cache_secret(masking_secret_cache: MaskingSecretCache, request_id: str) -> None:
    """Store a masking secret in Redis under a request-scoped key with auto-expiry."""
    cache: FidesopsRedis = get_cache()
    cache_key = get_masking_secret_cache_key(
        request_id,
        masking_strategy=masking_secret_cache.masking_strategy,
        secret_type=masking_secret_cache.secret_type,
    )
    encoded_secret = FidesopsRedis.encode_obj(masking_secret_cache.secret)
    cache.set_with_autoexpire(cache_key, encoded_secret)
@pytest.mark.parametrize('state', [AccountDB(MemoryDB())])
def test_balance(state):
    """get/set_balance round-trip; invalid addresses and non-int amounts are rejected.

    NOTE(review): restored the '@pytest.mark.' prefix lost from the parametrize
    decorator in the original rendering; no other change.
    """
    assert (state.get_balance(ADDRESS) == 0)
    state.set_balance(ADDRESS, 1)
    assert (state.get_balance(ADDRESS) == 1)
    # Balances are tracked per account.
    assert (state.get_balance(OTHER_ADDRESS) == 0)
    with pytest.raises(ValidationError):
        state.get_balance(INVALID_ADDRESS)
    with pytest.raises(ValidationError):
        state.set_balance(INVALID_ADDRESS, 1)
    # Balances must be integers, not floats.
    with pytest.raises(ValidationError):
        state.set_balance(ADDRESS, 1.0)
class OptionPlotoptionsAreasplineSonificationTracksMappingTime(Options):
    """Time-mapping options for areaspline sonification tracks.

    NOTE(review): the original rendering contained duplicate method definitions
    with their ``@property`` / ``@<name>.setter`` decorators stripped, so each
    setter silently shadowed its getter; the decorators are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def block_detail(block_name, config, color=True):
    """Print the configuration of *block_name*, following block references.

    If *block_name* is a genetree_meta_workflow entry, all blocks it references
    are printed; otherwise just the named block and anything it references.

    NOTE(review): the original rendering had ``lstrip('')`` and
    ``startswith('')`` — the '@' inside those literals had been stripped, which
    made ``startswith('')`` vacuously true and broke block-reference
    resolution.  Restored to '@', the reference prefix implied by the ``[1:]``
    slicing; confirm against the original config convention.
    """
    blocks_to_show = {}
    iterable_types = set([set, list, tuple, frozenset])
    if block_name not in config:
        try:
            next_block = [x.lstrip('@') for x in config.get('genetree_meta_workflow', {})[block_name]]
            metaworkflow = True
        except Exception as e:
            print(e)
            raise ValueError('block name not found [%s]' % block_name)
    else:
        metaworkflow = False
        next_block = [block_name]
    # Breadth of discovery: walk '@'-references out of each block's values.
    pos = 0
    while next_block:
        block = next_block.pop()
        blocks_to_show[block] = pos
        for k1, v1 in config[block].items():
            if type(v1) in iterable_types:
                for v2 in v1:
                    if isinstance(v2, str) and v2.startswith('@'):
                        next_block.append(v2[1:])
            elif isinstance(v1, str) and v1.startswith('@'):
                next_block.append(v1[1:])
        pos += 1
    if metaworkflow and color:
        print(colorify('[genetree_meta_workflow]', 'yellow'))
        print('%s = %s' % (block_name, ', '.join(config['genetree_meta_workflow'][block_name])))
        print()
    elif metaworkflow:
        print('[genetree_meta_workflow]')
        print('%s = %s' % (block_name, ', '.join(config['genetree_meta_workflow'][block_name])))
        print()
    # Print blocks in discovery order, skipping internals.
    for b, pos in sorted(list(blocks_to_show.items()), key=(lambda x: x[1])):
        if b == 'builtin_apps':
            continue
        if color:
            print(colorify('[%s]' % b, 'green'))
        else:
            print('[%s]' % b)
        for k, v in sorted(config[b].items()):
            if k == '_inherits':
                continue
            if type(v) in iterable_types:
                v = (', '.join(map(str, v)) + ',')
            if color:
                if k == '_app':
                    print(colorify('% 35s = %s' % (k, v), 'lblue'))
                else:
                    print('%s = %s' % (colorify('% 35s' % k, 'orange'), v))
            else:
                print('% 40s = %s' % (k, v))
        print()
@registry.reg('rocm.gemm_rcr_bias_add_add.gen_function')
def gen_function(func_attrs, exec_cond_template, dim_info_dict):
    """Generate ROCm source for the gemm_rcr_bias_add_add op.

    NOTE(review): restored the ``@registry.reg`` decorator whose name prefix was
    lost in the original rendering (only the registration-key string remained).
    """
    return common.gen_function(
        func_attrs,
        exec_cond_template,
        dim_info_dict,
        'bias_add_add',
        extra_code=EXTRA_CODE.render(),
        # Input/output addresses honor the tensor accessors of each operand.
        input_addr_calculator=common.INPUT_ADDR_CALCULATOR.render(
            accessor_a=func_attrs['input_accessors'][0],
            accessor_b=func_attrs['input_accessors'][1]),
        output_addr_calculator=common.OUTPUT_ADDR_CALCULATOR.render(
            output_accessor=func_attrs['output_accessors'][0]))
class OptionPanelSliding(Options):
    """Options for the sliding-panel component (icons, alignment, click target).

    NOTE(review): the ``@property`` decorators and the property-name prefixes of
    the ``.setter`` decorators were missing in the original rendering (e.g.
    ``_expanded.setter``); restored as ``@icon_expanded.setter`` etc.
    """
    component_properties = ('title_align',)

    @property
    def expanded(self):
        # Panel starts expanded by default.
        return self.get(True)

    @expanded.setter
    def expanded(self, flag: bool):
        self.set(flag)

    @property
    def icon_expanded(self):
        # Icon class depends on which icon framework the component pulls in.
        if 'material-design-icons' in self.component.requirements:
            return self.get('material-icons')
        if 'office-ui-fabric-core' in self.component.requirements:
            return self.get('ms-Icon ms-Icon--CaretSolidDown')
        return self.get('fas fa-caret-down')

    @icon_expanded.setter
    def icon_expanded(self, icon: str):
        self.set(icon)

    @property
    def icon_closed(self):
        if 'material-design-icons' in self.component.requirements:
            return self.get('material-icons')
        if 'office-ui-fabric-core' in self.component.requirements:
            return self.get('ms-Icon ms-Icon--CaretSolidUp')
        return self.get('fas fa-caret-up')

    @icon_closed.setter
    def icon_closed(self, icon: str):
        self.set(icon)

    @property
    def icon_position(self):
        return self.get('left')

    @icon_position.setter
    def icon_position(self, value: str):
        self.set(value)

    @property
    def title_align(self):
        return self.get('left')

    @title_align.setter
    def title_align(self, value: str):
        self.set(value)

    @property
    def click_type(self):
        # Which part of the panel toggles it (e.g. the title).
        return self.get('title')

    @click_type.setter
    def click_type(self, value: str):
        self.set(value)
def test_fixedlength():
    """BigEndianInt(4) serializes in-range ints to exactly 4 bytes; rejects the rest."""
    codec = BigEndianInt(4)
    for value in (0, 1, 255, 256, 256 ** 3, (256 ** 4) - 1):
        encoded = codec.serialize(value)
        assert len(encoded) == 4
        assert codec.deserialize(encoded) == value
    # Out-of-range ints, negatives, and non-ints all fail to serialize.
    for bad in (256 ** 4, (256 ** 4) + 1, 256 ** 5, -1, -256, 'asdf'):
        with pytest.raises(SerializationError):
            codec.serialize(bad)
# NOTE(review): the bare string below is the argument of a truncated decorator —
# the '@<app>.event' prefix was lost in this rendering; restore it before use.
('app_mention')
def handle_mention(client, event, say):
    """Handle a Slack app_mention event.

    Routes the mention text through a per-channel Fixie session and posts each
    agent response into the mention's thread.
    """
    logging.info(f'handle_mention called: {event}')
    try:
        channel = event['channel']
        # One Fixie session per channel, created lazily on first mention.
        session = sessions.get(channel, None)
        if (not session):
            session = Session(fixie_client)
            sessions[channel] = session
        # Acknowledge quickly with a canned "thinking" response in the thread.
        client.chat_postMessage(channel=event['channel'], user=event['user'], thread_ts=event.get('thread_ts', event['ts']), text=random.choice(_THINKING_RESPONSES), reply_broadcast=False)
        for message in session.run(event['text']):
            logging.info(f'Got Fixie message: {message}')
            text = message['text']
            # Prefix each reply with the sending agent's handle, when available.
            if (('sentBy' in message) and ('handle' in message['sentBy'])):
                sentBy = (message['sentBy']['handle'] + ': ')
            else:
                sentBy = ''
            client.chat_postMessage(channel=event['channel'], thread_ts=event.get('thread_ts', event['ts']), text=f'{sentBy}{text}', reply_broadcast=False)
    except Exception as e:
        say(f'Sorry, I got an exception handling your query: {e}')
        # NOTE(review): passing the exception object itself to say() looks
        # unintended (say expects text) — confirm whether str(e) was meant.
        say(e)
def test_repr():
    """Pin the textual repr of unary/binary/list operations over int operands."""
    # Unary negate wrapping binary add / unsigned div / signed div, and nesting.
    assert (repr(UnaryOperation(neg, [BinaryOperation(add, [a, b])])) == 'negate [plus [a#0 (type: int aliased: False),b#1 (type: int aliased: False)] int] int')
    assert (repr(UnaryOperation(neg, [BinaryOperation(udiv, [a, b])])) == 'negate [divide_us [a#0 (type: int aliased: False),b#1 (type: int aliased: False)] int] int')
    assert (repr(UnaryOperation(neg, [BinaryOperation(div, [a, b])])) == 'negate [divide [a#0 (type: int aliased: False),b#1 (type: int aliased: False)] int] int')
    assert (repr(UnaryOperation(neg, [UnaryOperation(neg, [a])])) == 'negate [negate [a#0 (type: int aliased: False)] int] int')
    # Binary operations with nested operands and constants.
    assert (repr(BinaryOperation(add, [a, BinaryOperation(add, [a, b])])) == 'plus [a#0 (type: int aliased: False),plus [a#0 (type: int aliased: False),b#1 (type: int aliased: False)] int] int')
    assert (repr(BinaryOperation(add, [Constant(2, Integer.int32_t()), BinaryOperation(add, [a, b])])) == 'plus [2 type: int,plus [a#0 (type: int aliased: False),b#1 (type: int aliased: False)] int] int')
    # List operations: mixed operands, empty list, nested expression.
    assert (repr(ListOperation([a, Constant(2, Integer.int32_t())])) == 'list_op [a#0 (type: int aliased: False),2 type: int] int')
    assert (repr(ListOperation([])) == 'list_op [] unknown type')
    assert (repr(ListOperation([a, BinaryOperation(add, [a, b])])) == 'list_op [a#0 (type: int aliased: False),plus [a#0 (type: int aliased: False),b#1 (type: int aliased: False)] int] int')
    # Casts, with and without contraction.
    assert (repr(UnaryOperation(cast, [a])) == 'cast [a#0 (type: int aliased: False)] int')
    assert (repr(UnaryOperation(cast, [a], contraction=True)) == 'cast [a#0 (type: int aliased: False)] int contract')
    # Dereference, optionally annotated with array access info (confidence flag varies).
    assert (repr(UnaryOperation(OperationType.dereference, [BinaryOperation(add, [a, b])])) == 'dereference [plus [a#0 (type: int aliased: False),b#1 (type: int aliased: False)] int] int')
    assert (repr(UnaryOperation(OperationType.dereference, [BinaryOperation(add, [a, b])], array_info=ArrayInfo(a, b))) == 'dereference [plus [a#0 (type: int aliased: False),b#1 (type: int aliased: False)] int] int ArrayInfo(base=a#0 (type: int aliased: False), index=b#1 (type: int aliased: False), confidence=False)')
    assert (repr(UnaryOperation(OperationType.dereference, [BinaryOperation(add, [a, b])], array_info=ArrayInfo(a, b, True))) == 'dereference [plus [a#0 (type: int aliased: False),b#1 (type: int aliased: False)] int] int ArrayInfo(base=a#0 (type: int aliased: False), index=b#1 (type: int aliased: False), confidence=True)')
class SmartSymbolsPattern(HtmlInlineProcessor):
    """Inline processor that replaces a matched pattern with an HTML-stashed symbol."""

    def __init__(self, pattern, replace, md):
        """Store *replace* — either a literal string or a callable taking the match."""
        super(SmartSymbolsPattern, self).__init__(pattern, md)
        self.replace = replace

    def handleMatch(self, m, data):
        """Stash the (possibly computed) replacement and return it with match bounds."""
        replacement = self.replace(m) if callable(self.replace) else self.replace
        stashed = self.md.htmlStash.store(m.expand(replacement))
        return stashed, m.start(0), m.end(0)
(tags=['audit'], description=docs.NAME_SEARCH) class AuditCommitteeNameSearch(utils.Resource): filter_fulltext_fields = [('q', models.AuditCommitteeSearch.fulltxt)] _kwargs(args.names) _with(schemas.AuditCommitteeSearchListSchema()) def get(self, **kwargs): query = filters.filter_fulltext(models.AuditCommitteeSearch.query, kwargs, self.filter_fulltext_fields) query = query.order_by(sa.desc(models.AuditCommitteeSearch.id)).limit(20) return {'results': query.all()}
def main():
    """Render a handful of example bounding boxes and save the scene."""
    visualizer = viz.Visualizer()
    # Plain axis-aligned box.
    visualizer.add_bounding_box('Box_1',
                                position=np.array([0.0, 0.0, 1.0]),
                                size=np.array([1, 1, 2]))
    # Rotated, semi-transparent blue slab with visible edges.
    visualizer.add_bounding_box('Box_2',
                                position=np.array([1, 0, 0.05]),
                                size=np.array([2, 1, 0.1]),
                                orientation=np.array([(math.pi / 6.0), 0.0, 0.0, 1.0]),
                                color=np.array([0, 0, 255]),
                                alpha=0.5,
                                edge_width=0.01)
    # Rotated green box.
    visualizer.add_bounding_box('Box_3',
                                position=np.array([(- 1), 1, 0]),
                                size=np.array([1, 2, 1]),
                                orientation=np.array([(math.pi / 3.0), 0.0, 0.0, 1.0]),
                                color=np.array([30, 255, 50]),
                                edge_width=0.01)
    # A fan of boxes rotated in pi/4 steps using quaternion orientations.
    for i in range(4):
        visualizer.add_bounding_box(f'Box;_{i}',
                                    position=np.array([1, 1, 0]),
                                    size=np.array([1, 2, 1]),
                                    orientation=q.as_float_array(
                                        q.from_euler_angles(0.0, 0.0, ((i * math.pi) / 4.0))).tolist(),
                                    color=np.array([130, 155, 50]),
                                    edge_width=0.02)
    visualizer.save('example_bounding_boxes')
def test_comp_import_host_association():
    """Completion of a host-associated imported symbol resolves to its TYPE entry."""
    request = write_rpc_request(1, 'initialize', {'rootPath': str(test_dir)})
    file_path = test_dir / 'test_import.f90'
    request += comp_request(file_path, 15, 20)
    errcode, results = run_request(request, ['--use_signature_help', '-n1'])
    assert errcode == 0
    expected = ([1, 'mytype', 'TYPE'],)
    # results[0] is the initialize response; completions follow.
    assert len(expected) == len(results) - 1
    for idx, ref in enumerate(expected):
        validate_comp(results[idx + 1], ref)
# NOTE(review): the three tokens below look like the tails of truncated import
# statements (module names used later suggest e.g.
# `from <pkg>.MeshTools import InterpolatedBathymetryMesh`,
# `from <pkg>.Archiver import XdmfArchive`,
# `from <pkg>.Domain import InterpolatedBathymetryDomain`);
# restore the full imports before relying on this file.
.MeshTools
.Archiver
.Domain
class TestInterpolatedBathy(object):
    """End-to-end tests for InterpolatedBathymetryMesh on a step+Gaussian bathymetry.

    Every test builds the same domain (setupStepGauss) and meshes it with one
    combination of bathymetry source ('points' vs 'grid'), assignment scheme
    ('localAveraging' vs 'interpolation') and error norm ('L1'/'L2'/'Linfty'),
    then writes the finest mesh to an Xdmf archive.
    """

    def setup_class(cls):
        pass

    def teardown_class(cls):
        pass

    def setup_method(self, method):
        # Archive base names written by the test; removed in teardown_method.
        self.aux_names = []

    def teardown_method(self, method):
        # Best-effort cleanup of everything a test may have written to disk.
        filenames = []
        for aux_name in self.aux_names:
            filenames.extend([((aux_name + '.') + post) for post in ['xmf', 'h5', '2dm']])
            filenames.extend([((aux_name + '0.') + post) for post in ['xmf', 'h5', '2dm']])
            filenames.extend([(('tetgen' + '.') + post) for post in ['ele', 'node', 'face']])
            filenames.extend(['proteus_default.log', 'interpolatedBathySimpleTest.poly'])
        for f in filenames:
            if os.path.exists(f):
                try:
                    os.remove(f)
                except OSError as e:
                    print(('Error: %s - %s.' % (e.filename, e.strerror)))
            else:
                pass

    def setupStepGauss(self):
        """Build the shared test domain: a unit step along y=x plus a Gaussian
        bump, sampled on a 21x21 grid over [-0.5, 1.5]^2; writes the .poly file
        and returns the domain."""
        import numpy as np
        from math import sin, cos, pi, sqrt, exp
        nPoints_x = nPoints_y = 21
        delta_x = old_div(2.0, float((nPoints_x - 1)))
        delta_y = old_div(2.0, float((nPoints_y - 1)))
        # Flat array of (x, y, z) bathymetry samples.
        bathy = np.zeros(((nPoints_x * nPoints_y), 3), 'd')
        for i in range(nPoints_y):
            for j in range(nPoints_x):
                x = ((- 0.5) + (j * delta_x))
                y = ((- 0.5) + (i * delta_y))
                if (y > x):
                    # Above the diagonal: the step plateau.
                    z = 1.0
                elif (y < (x - 0.25)):
                    # Below the band: Gaussian bump centered near (0.8, 0.25).
                    r = sqrt((((y - 0.25) ** 2) + ((x - 0.8) ** 2)))
                    z = exp(((- 50.0) * (r ** 2)))
                else:
                    z = 0.0
                bathy[(((i * nPoints_x) + j), 0)] = x
                bathy[(((i * nPoints_x) + j), 1)] = y
                bathy[(((i * nPoints_x) + j), 2)] = z
        domain = InterpolatedBathymetryDomain(
            vertices=[[0.0, 0.0], [0.0, 1.0], [0.5, 1.5], [1.0, 1.0], [1.5, (- 0.5)]],
            vertexFlags=[1, 2, 3, 2, 1],
            segments=[[0, 1], [1, 2], [2, 3], [3, 4], [4, 0]],
            segmentFlags=[1, 2, 3, 3, 1],
            regions=[(0.5, 0.5)],
            regionFlags=[1],
            name='interpolatedBathySimpleTest',
            units='m',
            bathy=bathy,
            bathyGridDim=(nPoints_y, nPoints_x))
        domain.writePoly(domain.name)
        return domain

    def test_L1(self):
        # points / localAveraging / L1
        domain = self.setupStepGauss()
        mesh = InterpolatedBathymetryMesh(domain, triangleOptions=('gVApq30Dena%8.8f' % ((0.5 ** 3),)), atol=0.1, rtol=0.1, maxLevels=25, maxNodes=50000, bathyType='points', bathyAssignmentScheme='localAveraging', errorNormType='L1')
        outfile = 'interpolatedBathySimpleTest_L1_'
        archive = XdmfArchive(dataDir='.', filename=outfile, global_sync=False)
        archive.domain = ElementTree.SubElement(archive.tree.getroot(), 'Domain')
        mesh.meshList[(- 1)].writeMeshXdmf(ar=archive, init=True)
        archive.sync()
        archive.close()
        self.aux_names.append(outfile)

    def test_L2(self):
        # points / localAveraging / L2
        domain = self.setupStepGauss()
        mesh = InterpolatedBathymetryMesh(domain, triangleOptions=('gVApq30Dena%8.8f' % ((0.5 ** 3),)), atol=0.1, rtol=0.1, maxLevels=25, maxNodes=50000, bathyType='points', bathyAssignmentScheme='localAveraging', errorNormType='L2')
        outfile = 'interpolatedBathySimpleTest_L2_'
        archive = XdmfArchive(dataDir='.', filename=outfile, global_sync=False)
        archive.domain = ElementTree.SubElement(archive.tree.getroot(), 'Domain')
        mesh.meshList[(- 1)].writeMeshXdmf(ar=archive, init=True)
        archive.sync()
        archive.close()
        self.aux_names.append(outfile)

    def test_Linfty(self):
        # points / localAveraging / Linfty
        domain = self.setupStepGauss()
        mesh = InterpolatedBathymetryMesh(domain, triangleOptions=('gVApq30Dena%8.8f' % ((0.5 ** 3),)), atol=0.1, rtol=0.1, maxLevels=25, maxNodes=50000, bathyType='points', bathyAssignmentScheme='localAveraging', errorNormType='Linfty')
        outfile = 'interpolatedBathySimpleTest_Linfty_'
        archive = XdmfArchive(dataDir='.', filename=outfile, global_sync=False)
        archive.domain = ElementTree.SubElement(archive.tree.getroot(), 'Domain')
        mesh.meshList[(- 1)].writeMeshXdmf(ar=archive, init=True)
        archive.sync()
        archive.close()
        self.aux_names.append(outfile)

    def test_L1_interp(self):
        # points / interpolation / L1
        domain = self.setupStepGauss()
        mesh = InterpolatedBathymetryMesh(domain, triangleOptions=('gVApq30Dena%8.8f' % ((0.5 ** 3),)), atol=0.1, rtol=0.1, maxLevels=25, maxNodes=50000, bathyType='points', bathyAssignmentScheme='interpolation', errorNormType='L1')
        outfile = 'interpolatedBathySimpleTest_L1_interp_'
        archive = XdmfArchive(dataDir='.', filename=outfile, global_sync=False)
        archive.domain = ElementTree.SubElement(archive.tree.getroot(), 'Domain')
        mesh.meshList[(- 1)].writeMeshXdmf(ar=archive, init=True)
        archive.sync()
        archive.close()
        self.aux_names.append(outfile)

    def test_L2_interp(self):
        # points / interpolation / L2
        domain = self.setupStepGauss()
        mesh = InterpolatedBathymetryMesh(domain, triangleOptions=('gVApq30Dena%8.8f' % ((0.5 ** 3),)), atol=0.1, rtol=0.1, maxLevels=25, maxNodes=50000, bathyType='points', bathyAssignmentScheme='interpolation', errorNormType='L2')
        outfile = 'interpolatedBathySimpleTest_L2_interp_'
        archive = XdmfArchive(dataDir='.', filename=outfile, global_sync=False)
        archive.domain = ElementTree.SubElement(archive.tree.getroot(), 'Domain')
        mesh.meshList[(- 1)].writeMeshXdmf(ar=archive, init=True)
        archive.sync()
        archive.close()
        self.aux_names.append(outfile)

    def test_Linfty_interp(self):
        # points / interpolation / Linfty
        domain = self.setupStepGauss()
        mesh = InterpolatedBathymetryMesh(domain, triangleOptions=('gVApq30Dena%8.8f' % ((0.5 ** 3),)), atol=0.1, rtol=0.1, maxLevels=25, maxNodes=50000, bathyType='points', bathyAssignmentScheme='interpolation', errorNormType='Linfty')
        outfile = 'interpolatedBathySimpleTest_Linfty_interp_'
        archive = XdmfArchive(dataDir='.', filename=outfile, global_sync=False)
        archive.domain = ElementTree.SubElement(archive.tree.getroot(), 'Domain')
        mesh.meshList[(- 1)].writeMeshXdmf(ar=archive, init=True)
        archive.sync()
        archive.close()
        self.aux_names.append(outfile)

    def test_L1_grid(self):
        # grid / localAveraging / L1
        domain = self.setupStepGauss()
        mesh = InterpolatedBathymetryMesh(domain, triangleOptions=('gVApq30Dena%8.8f' % ((0.5 ** 3),)), atol=0.1, rtol=0.1, maxLevels=25, maxNodes=50000, bathyType='grid', bathyAssignmentScheme='localAveraging', errorNormType='L1')
        outfile = 'interpolatedBathySimpleTest_grid_L1_'
        archive = XdmfArchive(dataDir='.', filename=outfile, global_sync=False)
        archive.domain = ElementTree.SubElement(archive.tree.getroot(), 'Domain')
        mesh.meshList[(- 1)].writeMeshXdmf(ar=archive, init=True)
        archive.sync()
        archive.close()
        self.aux_names.append(outfile)

    def test_L2_grid(self):
        # grid / localAveraging / L2
        domain = self.setupStepGauss()
        mesh = InterpolatedBathymetryMesh(domain, triangleOptions=('gVApq30Dena%8.8f' % ((0.5 ** 3),)), atol=0.1, rtol=0.1, maxLevels=25, maxNodes=50000, bathyType='grid', bathyAssignmentScheme='localAveraging', errorNormType='L2')
        outfile = 'interpolatedBathySimpleTest_grid_L2_'
        archive = XdmfArchive(dataDir='.', filename=outfile, global_sync=False)
        archive.domain = ElementTree.SubElement(archive.tree.getroot(), 'Domain')
        mesh.meshList[(- 1)].writeMeshXdmf(ar=archive, init=True)
        archive.sync()
        archive.close()
        self.aux_names.append(outfile)

    def test_Linfty_grid(self):
        # grid / localAveraging / Linfty
        domain = self.setupStepGauss()
        mesh = InterpolatedBathymetryMesh(domain, triangleOptions=('gVApq30Dena%8.8f' % ((0.5 ** 3),)), atol=0.1, rtol=0.1, maxLevels=25, maxNodes=50000, bathyType='grid', bathyAssignmentScheme='localAveraging', errorNormType='Linfty')
        outfile = 'interpolatedBathySimpleTest_grid_Linfty_'
        archive = XdmfArchive(dataDir='.', filename=outfile, global_sync=False)
        archive.domain = ElementTree.SubElement(archive.tree.getroot(), 'Domain')
        mesh.meshList[(- 1)].writeMeshXdmf(ar=archive, init=True)
        archive.sync()
        archive.close()
        self.aux_names.append(outfile)

    def test_L1_interp_grid(self):
        # grid / interpolation / L1
        domain = self.setupStepGauss()
        mesh = InterpolatedBathymetryMesh(domain, triangleOptions=('gVApq30Dena%8.8f' % ((0.5 ** 3),)), atol=0.1, rtol=0.1, maxLevels=25, maxNodes=50000, bathyType='grid', bathyAssignmentScheme='interpolation', errorNormType='L1')
        outfile = 'interpolatedBathySimpleTest_grid_L1_interp_'
        archive = XdmfArchive(dataDir='.', filename=outfile, global_sync=False)
        archive.domain = ElementTree.SubElement(archive.tree.getroot(), 'Domain')
        mesh.meshList[(- 1)].writeMeshXdmf(ar=archive, init=True)
        archive.sync()
        archive.close()
        self.aux_names.append(outfile)

    def test_L2_interp_grid(self):
        # grid / interpolation / L2
        domain = self.setupStepGauss()
        mesh = InterpolatedBathymetryMesh(domain, triangleOptions=('gVApq30Dena%8.8f' % ((0.5 ** 3),)), atol=0.1, rtol=0.1, maxLevels=25, maxNodes=50000, bathyType='grid', bathyAssignmentScheme='interpolation', errorNormType='L2')
        outfile = 'interpolatedBathySimpleTest_grid_L2_interp_'
        archive = XdmfArchive(dataDir='.', filename=outfile, global_sync=False)
        archive.domain = ElementTree.SubElement(archive.tree.getroot(), 'Domain')
        mesh.meshList[(- 1)].writeMeshXdmf(ar=archive, init=True)
        archive.sync()
        archive.close()
        self.aux_names.append(outfile)

    def test_Linfty_interp_grid(self):
        # grid / interpolation / Linfty; additionally exercises the ADH writer.
        domain = self.setupStepGauss()
        mesh = InterpolatedBathymetryMesh(domain, triangleOptions=('gVApq30Dena%8.8f' % ((0.5 ** 3),)), atol=0.1, rtol=0.1, maxLevels=25, maxNodes=50000, bathyType='grid', bathyAssignmentScheme='interpolation', errorNormType='Linfty')
        outfile = 'interpolatedBathySimpleTest_grid_Linfty_interp_'
        archive = XdmfArchive(dataDir='.', filename=outfile, global_sync=False)
        archive.domain = ElementTree.SubElement(archive.tree.getroot(), 'Domain')
        mesh.meshList[(- 1)].writeMeshXdmf(ar=archive, init=True)
        archive.sync()
        archive.close()
        mesh.meshList[(- 1)].writeMeshADH('interpolatedBathySimpleTest_grid_Linfty_interp_')
        self.aux_names.append(outfile)
class OptionPlotoptionsColumnSonificationDefaultinstrumentoptionsMappingNoteduration(Options):
    """Note-duration mapping options for column sonification default instruments.

    NOTE(review): the original rendering contained duplicate method definitions
    with their ``@property`` / ``@<name>.setter`` decorators stripped, so each
    setter silently shadowed its getter; the decorators are restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_align_concatenate_fasta_to_phylip(o_dir, e_dir, request):
    """phyluce_align_concatenate_alignments reproduces the expected phylip output."""
    program = 'bin/align/phyluce_align_concatenate_alignments'
    output = os.path.join(o_dir, 'mafft-gblocks-clean-fasta-concat')
    cmd = [
        os.path.join(request.config.rootdir, program),
        '--alignments', os.path.join(e_dir, 'mafft-gblocks-clean-fasta'),
        '--output', output,
        '--input-format', 'fasta',
        '--phylip',
    ]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    assert proc.returncode == 0, print('{}'.format(stderr.decode('utf-8')))
    produced_files = glob.glob(os.path.join(output, '*'))
    assert produced_files, 'There are no output files'
    # Every produced file must byte-match its expected counterpart.
    for produced_file in produced_files:
        name = os.path.basename(produced_file)
        expected_file = os.path.join(e_dir, 'mafft-gblocks-clean-fasta-concat', name)
        observed = open(produced_file).read()
        expected = open(expected_file).read()
        assert observed == expected
class ConfirmationAW2TestCase(BaseSkillTestCase):
    """Base test case for the confirmation_aw2 skill with a stubbed AW1 AEA address."""

    path_to_skill = Path(ROOT_DIR, 'packages', 'fetchai', 'skills', 'confirmation_aw2')

    @classmethod
    def setup(cls):
        """Set up the skill test case, overriding strategy.aw1_aea.

        NOTE(review): ``setup`` takes ``cls`` and calls ``super().setup(...)``,
        so it was clearly a classmethod whose decorator was lost in the
        original rendering; restored here (only code change).
        """
        cls.aw1_aea = 'some_aw1_aea'
        config_overrides = {'models': {'strategy': {'args': {'aw1_aea': cls.aw1_aea}}}}
        super().setup(config_overrides=config_overrides)
class MessageFrame(QFrame):
    """Banner frame that presents queued questions/infos to the user.

    Questions are queued (MessageQueue) and shown one at a time; the user's
    accept/cancel decision is published via accept_signal/cancel_signal and
    can be remembered per question type ("don't ask again").
    """

    # Emitted with (question type, MessageData payload) on user decision.
    accept_signal = Signal(int, MessageData)
    cancel_signal = Signal(int, MessageData)

    # Question/message type identifiers; also the keys of self.IMAGES.
    TYPE_INVALID = 0
    TYPE_EMPTY = 1
    TYPE_QUESTION = 2
    TYPE_LAUNCH_FILE = 3
    TYPE_DEFAULT_CFG = 4
    TYPE_NODELET = 5
    TYPE_TRANSFER = 6
    TYPE_BINARY = 7
    TYPE_NOSCREEN = 8
    TYPE_NMD = 9
    TYPE_NMD_RESTART = 10
    TYPE_NODE_CFG = 11
    # Edge length in pixels used when scaling the per-type icons.
    ICON_SIZE = 32

    def __init__(self, parent=None, info=False):
        """Build the frame from its .ui file.

        :param info: when True the frame acts as a one-button info banner:
                     different background color, cancel button hidden, and
                     wording uses 'show' instead of 'ask'.
        """
        QFrame.__init__(self, parent=parent)
        self.setObjectName('MessageFrame')
        self.questionid = self.TYPE_INVALID  # type of the currently shown question
        self.text = ''  # text of the currently shown question
        self.data = MessageData(None)  # payload of the currently shown question
        # Icon per message type, keyed by TYPE_* value (1 = no icon).
        self.IMAGES = {1: QPixmap(),
                       2: nm.settings().pixmap('question.png').scaled(self.ICON_SIZE, self.ICON_SIZE, Qt.IgnoreAspectRatio, Qt.SmoothTransformation),
                       3: nm.settings().pixmap('crystal_clear_launch_file.png').scaled(self.ICON_SIZE, self.ICON_SIZE, Qt.IgnoreAspectRatio, Qt.SmoothTransformation),
                       4: nm.settings().pixmap('default_cfg.png').scaled(self.ICON_SIZE, self.ICON_SIZE, Qt.IgnoreAspectRatio, Qt.SmoothTransformation),
                       5: nm.settings().pixmap('crystal_clear_nodelet_q.png').scaled(self.ICON_SIZE, self.ICON_SIZE, Qt.IgnoreAspectRatio, Qt.SmoothTransformation),
                       6: nm.settings().pixmap('crystal_clear_launch_file_transfer.png').scaled(self.ICON_SIZE, self.ICON_SIZE, Qt.IgnoreAspectRatio, Qt.SmoothTransformation),
                       7: nm.settings().pixmap('crystal_clear_binary.png').scaled(self.ICON_SIZE, self.ICON_SIZE, Qt.IgnoreAspectRatio, Qt.SmoothTransformation),
                       8: nm.settings().pixmap('crystal_clear_no_io.png').scaled(self.ICON_SIZE, self.ICON_SIZE, Qt.IgnoreAspectRatio, Qt.SmoothTransformation),
                       9: nm.settings().pixmap('crystal_clear_run_zeroconf.png').scaled(self.ICON_SIZE, self.ICON_SIZE, Qt.IgnoreAspectRatio, Qt.SmoothTransformation),
                       10: nm.settings().pixmap('crystal_clear_run_zeroconf.png').scaled(self.ICON_SIZE, self.ICON_SIZE, Qt.IgnoreAspectRatio, Qt.SmoothTransformation),
                       11: nm.settings().pixmap('sekkyumu_restart.png').scaled(self.ICON_SIZE, self.ICON_SIZE, Qt.IgnoreAspectRatio, Qt.SmoothTransformation)}
        self._new_request = False  # True while an unanswered question is displayed
        self._in_resp_process = False  # guards hide_question() during ok/cancel handling
        self.ui = QFrame()
        ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ui', 'MessageFrame.ui')
        loadUi(ui_file, self.ui)
        color = QColor(255, 207, 121)  # default (question) background color
        self.ui.questionOkButton.setIcon(nm.settings().icon('crystal_clear_button_apply.png'))
        self.ui.questionCancelButton.setIcon(nm.settings().icon('crystal_clear_button_close.png'))
        self.ui.listLabel.setTextInteractionFlags(Qt.TextSelectableByMouse)
        self.ui.questionLabel.setTextInteractionFlags(Qt.TextSelectableByMouse)
        self.ui.setVisible(False)
        self.ui.listLabel.setVisible(False)
        self.ui.questionOkButton.clicked.connect(self._on_question_ok)
        self.ui.questionCancelButton.clicked.connect(self._on_question_cancel)
        self.ui.checkBox_dnaa.stateChanged.connect(self._on_checkbox_state_changed)
        self._ask = 'ask'  # verb used in the "don't ... again" checkbox label
        if info:
            color = QColor(232, 104, 80)  # info banner background color
            self.ui.questionCancelButton.setVisible(False)
            self._ask = 'show'
        # NOTE(review): selector targets 'questionFame' — presumably the frame
        # name inside MessageFrame.ui; confirm the spelling matches the .ui file.
        bg_style = ('QFrame#questionFame { background-color: %s;}' % color.name())
        self.ui.setStyleSheet(('%s' % bg_style))
        self._queue = MessageQueue()  # pending questions not yet displayed
        # questionid -> 1 (always accept) or 0 (always cancel), filled from
        # the "don't ask again" checkbox.
        self._do_not_ask = {}

    def show_question(self, questionid, text, data=MessageData(None), color=None):
        """Queue a question (or merge into the one currently displayed).

        If the type was previously answered with "don't ask again", the stored
        decision is re-emitted immediately instead of showing the frame.

        NOTE(review): the `data` default is evaluated once at definition time
        and therefore shared across calls; self.data.data_list is appended to
        below, which could mutate that shared default — confirm upstream.
        """
        if (questionid == 0):
            return  # TYPE_INVALID: nothing to show
        try:
            # Auto-accept launch-file reloads when configured.
            if ((questionid == self.TYPE_LAUNCH_FILE) and nm.settings().autoreload_launch):
                self.accept_signal.emit(questionid, data)
                return
            # Replay a remembered "don't ask again" decision.
            # A KeyError here (no stored decision) falls through to the queue.
            if (self._do_not_ask[questionid] == 1):
                self.accept_signal.emit(questionid, data)
            elif (self._do_not_ask[questionid] == 0):
                self.cancel_signal.emit(questionid, data)
            return
        except Exception:
            pass
        if ((self.questionid != questionid) or (self.text != text)):
            # Different question: enqueue it for later display.
            self._queue.add(questionid, text, data)
        elif data.data_list:
            # Same question already shown: merge new payload items into it.
            for dt in data.data_list:
                if (dt not in self.data.data_list):
                    self.data.data_list.append(dt)
            self._update_list_label(self.data.data_list)
        if (self.questionid == self.TYPE_INVALID):
            # Nothing displayed yet: pop the next queued question.
            self._new_request = self._read_next_item()
            self._frameui_4_request(self._new_request)
        # Nodelet/no-screen questions are remembered permanently, others per session.
        if (self.questionid in [self.TYPE_NODELET, self.TYPE_NOSCREEN]):
            self.ui.checkBox_dnaa.setText(("don't %s again, never!" % self._ask))
        else:
            self.ui.checkBox_dnaa.setText(("don't %s again, for session" % self._ask))

    def show_info(self, infoid, text, data=MessageData(None), color=None):
        """Convenience wrapper: an info message goes through the same queue."""
        self.show_question(infoid, text=text, data=data, color=color)

    def is_do_not_ask(self, questionid):
        """Return True if a "don't ask again" decision is stored for this type."""
        try:
            return (self._do_not_ask[questionid] in [0, 1])
        except Exception:
            return False

    def hide_question(self, questionids, data=None):
        """Remove the given question types from the queue and, if one of them
        is currently displayed (and `data` matches or is None), cancel it and
        advance to the next queued question."""
        if self._in_resp_process:
            # A button handler is already advancing the queue; avoid re-entry.
            return
        for qid in questionids:
            self._queue.remove(qid, data)
        if ((data is None) or (data == self.data)):
            if (self.questionid in questionids):
                self._new_request = False
                self.ui.setVisible(False)
                self.cancel_signal.emit(self.questionid, self.data)
                self.questionid = 0  # back to TYPE_INVALID
                self._update_list_label([])
                self._new_request = self._read_next_item()
                self._frameui_4_request(self._new_request)

    def _update_list_label(self, items=[]):
        """Render `items` (strings or objects with .name) as a comma-separated
        HTML list label; hide the label when empty.

        NOTE(review): mutable default argument — only read here, but fragile.
        """
        if items:
            self.ui.listLabel.setText('')
            for item in items:
                ltext = self.ui.listLabel.text()
                item_str = item
                if (not isinstance(item, str)):
                    if hasattr(item, 'name'):
                        item_str = item.name
                if ltext:
                    self.ui.listLabel.setText(('%s, %s' % (ltext, HTMLDelegate.toHTML(item_str))))
                else:
                    self.ui.listLabel.setText(('%s' % HTMLDelegate.toHTML(item_str)))
            self.ui.listLabel.setVisible(True)
        else:
            self.ui.listLabel.setText('')
            self.ui.listLabel.setVisible(False)

    def _frameui_4_request(self, request):
        """Show the frame for a pending request, or hide and reset it."""
        if request:
            self.ui.checkBox_dnaa.setChecked(False)
            self.ui.setVisible(True)
            self.ui.listLabel.setVisible(True)
        else:
            self.questionid = 0  # TYPE_INVALID
            self.ui.setVisible(False)
            self.ui.listLabel.setVisible(False)

    def _on_question_ok(self):
        """Accept button handler: remember "don't ask again" (as accept),
        emit accept_signal and advance to the next queued question."""
        self._in_resp_process = True
        self._new_request = False
        self.ui.setVisible(False)
        try:
            if self.ui.checkBox_dnaa.isChecked():
                self._do_not_ask[self.questionid] = 1  # 1 == always accept
        except Exception:
            pass
        self.accept_signal.emit(self.questionid, self.data)
        self.questionid = 0
        self._update_list_label([])
        self._new_request = self._read_next_item()
        self._frameui_4_request(self._new_request)
        self._in_resp_process = False

    def _on_question_cancel(self):
        """Cancel button handler: remember "don't ask again" (as cancel),
        emit cancel_signal and advance to the next queued question."""
        self._in_resp_process = True
        self._new_request = False
        self.ui.setVisible(False)
        try:
            if self.ui.checkBox_dnaa.isChecked():
                self._do_not_ask[self.questionid] = 0  # 0 == always cancel
        except Exception:
            pass
        self.cancel_signal.emit(self.questionid, self.data)
        self.questionid = 0
        self._update_list_label([])
        self._new_request = self._read_next_item()
        self._frameui_4_request(self._new_request)
        self._in_resp_process = False

    def _is_launch_data_in_queue(self, newdata):
        # NOTE(review): self._queue_launchfile is never assigned in this class
        # (only self._queue exists) — this method would raise AttributeError if
        # called; it looks like dead code from an older queue layout. Confirm.
        for (_, data, _) in self._queue_launchfile:
            if (data == newdata):
                return True
        return False

    def _is_transfer_data_in_queue(self, newdata):
        # NOTE(review): self._queue_transfer_files is never assigned here —
        # apparently dead code; see _is_launch_data_in_queue.
        for (_, data, _) in self._queue_transfer_files:
            if (data == newdata):
                return True
        return False

    def _is_other_data_in_queue(self, questionid, text, data):
        # NOTE(review): self._queue_other is never assigned here — apparently
        # dead code; see _is_launch_data_in_queue.
        for (cqid, ctxt, cd, _) in self._queue_other:
            if ((cqid == questionid) and (cd == data) and (ctxt == text)):
                return True
        return False

    def _read_next_item(self):
        """Pop the next question from the queue into the frame widgets.

        :return: True when a valid question was loaded, False when the queue
                 was empty (TYPE_INVALID).
        """
        (qid, text, data) = self._queue.get()
        if (qid != self.TYPE_INVALID):
            self.questionid = qid
            self.text = text
            self.data = data
            self.ui.questionIcon.setPixmap(self.IMAGES[qid])
            self.ui.questionLabel.setText(text)
        self._update_list_label(self.data.data_list)
        return (qid != self.TYPE_INVALID)

    def _on_checkbox_state_changed(self, state):
        """Mirror the "don't ask again" checkbox into global settings for the
        question types that have a persistent setting."""
        if (self.questionid == self.TYPE_NODELET):
            self.ui.questionOkButton.setVisible((not state))
            nm.settings().check_for_nodelets_at_start = (not state)
        elif (self.questionid == self.TYPE_NOSCREEN):
            self.ui.questionCancelButton.setVisible((not state))
            nm.settings().show_noscreen_error = (not state)

    def _clear_scroll_area(self):
        """Detach and drop all widgets currently held by the scroll area layout."""
        child = self.ui.scrollAreaLayout.takeAt(0)
        while child:
            child.widget().setParent(None)
            del child
            child = self.ui.scrollAreaLayout.takeAt(0)
class AggregateTest(unittest.TestCase):
    """Behavioral checks for Event's streaming aggregation combinators."""

    def test_min(self):
        # The running minimum of an increasing sequence stays at the first value.
        result = Event.sequence(array).min().run()
        self.assertEqual(result, ([0] * 10))

    def test_max(self):
        # The running maximum of an increasing sequence tracks the input itself.
        result = Event.sequence(array).max().run()
        self.assertEqual(result, array)

    def test_sum(self):
        # Prefix sums of 0..9.
        result = Event.sequence(array).sum().run()
        self.assertEqual(result, [0, 1, 3, 6, 10, 15, 21, 28, 36, 45])

    def test_product(self):
        # Prefix products of 1..9 (the leading 0 is excluded).
        result = Event.sequence(array[1:]).product().run()
        self.assertEqual(result, [1, 2, 6, 24, 120, 720, 5040, 40320, 362880])

    def test_any(self):
        # Latches to True at the first truthy element.
        result = Event.sequence(array).any().run()
        self.assertEqual(result, ([False] + ([True] * 9)))

    def test_all(self):
        # Stays True until the first falsy element, then latches to False.
        flags = (([True] * 10) + ([False] * 10))
        self.assertEqual(Event.sequence(flags).all().run(), flags)

    def test_pairwaise(self):
        # Sliding pairs over the sequence.
        expected = [pair for pair in zip(array, array[1:])]
        self.assertEqual(Event.sequence(array).pairwise().run(), expected)

    def test_chunk(self):
        # Fixed-size chunks; the final chunk holds the remainder.
        result = Event.sequence(array).chunk(3).run()
        self.assertEqual(result, [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]])

    def test_chunkwith(self):
        # Group a 10 ms stream by a 29 ms timer: three full chunks plus the tail.
        clock = Event.timer(0.029, 10)
        result = Event.sequence(array, 0.01).chunkwith(clock).run()
        self.assertEqual(result, [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]])

    def test_array(self):
        # A rolling window of 5 ends holding the last five elements.
        window = Event.sequence(array).array(5).last().run()[0]
        self.assertEqual(list(window), array[(- 5):])
class OptionPlotoptionsWaterfallSonificationContexttracksMappingGapbetweennotes(Options):
    # Generated option wrapper for the Highcharts setting
    # plotOptions.waterfall.sonification.contextTracks.mapping.gapBetweenNotes.
    # NOTE(review): each accessor below is defined twice (getter form, then
    # setter form) with no @property/@<name>.setter decorators visible, so the
    # second def shadows the first at class-creation time. The decorators were
    # likely lost — confirm against the generator/original source.

    def mapFunction(self):
        """Getter: the configured mapping function (None when unset)."""
        return self._config_get(None)

    def mapFunction(self, value: Any):
        """Setter: install a custom mapping function."""
        self._config(value, js_type=False)

    def mapTo(self):
        """Getter: the data property this mapping reads from."""
        return self._config_get(None)

    def mapTo(self, text: str):
        """Setter: name of the data property to map from."""
        self._config(text, js_type=False)

    def max(self):
        """Getter: upper bound of the mapped value range."""
        return self._config_get(None)

    def max(self, num: float):
        """Setter: upper bound of the mapped value range."""
        self._config(num, js_type=False)

    def min(self):
        """Getter: lower bound of the mapped value range."""
        return self._config_get(None)

    def min(self, num: float):
        """Setter: lower bound of the mapped value range."""
        self._config(num, js_type=False)

    def within(self):
        """Getter: the data bounds the mapping is computed within."""
        return self._config_get(None)

    def within(self, value: Any):
        """Setter: the data bounds the mapping is computed within."""
        self._config(value, js_type=False)
def test_partitioned_analyses_init_initialize_accumulators():
    """After one update, accumulator shapes must match
    (trace length, data words, partition count) and partitions must be 0..8."""
    distinguisher = DumbPartDistinguisher()
    traces = np.random.randint(0, 255, (500, 200), dtype='int16')
    data = np.random.randint(0, 9, (500, 4096), dtype='uint8')
    distinguisher.update(traces=traces, data=data)
    assert np.array_equal(distinguisher.partitions, np.arange(9))
    for accumulator in (distinguisher.sum, distinguisher.sum_square):
        assert (accumulator.shape == (200, 4096, 9))
    assert (distinguisher.counters.shape == (4096, 9))
class table__h_h_e_a(DefaultTable.DefaultTable):
    """The 'hhea' (horizontal header) table.

    Holds horizontal layout metrics: ascent/descent, the maximum advance
    width and the side-bearing extremes, which recalc() derives from the
    'hmtx' table plus glyph bounds ('glyf' or CFF/CFF2 charstrings).
    """

    # Tables whose data feed the recalculated metrics in recalc().
    dependencies = ['hmtx', 'glyf', 'CFF ', 'CFF2']

    def ascender(self):
        # NOTE(review): 'ascender'/'descender' are each defined twice (getter,
        # then setter) with no @property/@x.setter decorators visible; as
        # written the setter form shadows the getter. Likely stripped
        # decorators — confirm against upstream.
        return self.ascent

    def ascender(self, value):
        # Alias setter: 'ascender' writes through to the 'ascent' field.
        self.ascent = value

    def descender(self):
        return self.descent

    def descender(self, value):
        # Alias setter: 'descender' writes through to the 'descent' field.
        self.descent = value

    def decompile(self, data, ttFont):
        """Unpack the binary table data into attributes per hheaFormat."""
        sstruct.unpack(hheaFormat, data, self)

    def compile(self, ttFont):
        """Pack the table back to binary, recalculating metrics first when
        the font asks for bounding-box recalculation and outline data is loaded."""
        if (ttFont.recalcBBoxes and (ttFont.isLoaded('glyf') or ttFont.isLoaded('CFF ') or ttFont.isLoaded('CFF2'))):
            self.recalc(ttFont)
        self.tableVersion = fi2ve(self.tableVersion)
        return sstruct.pack(hheaFormat, self)

    def recalc(self, ttFont):
        """Recompute advanceWidthMax, the side-bearing minima and xMaxExtent
        from 'hmtx' advances and per-glyph horizontal bounds."""
        if ('hmtx' not in ttFont):
            return  # no metrics available; leave fields untouched
        hmtxTable = ttFont['hmtx']
        self.advanceWidthMax = max((adv for (adv, _) in hmtxTable.metrics.values()))
        # glyph name -> width of the glyph's bounding box (xMax - xMin);
        # empty glyphs contribute nothing.
        boundsWidthDict = {}
        if ('glyf' in ttFont):
            glyfTable = ttFont['glyf']
            for name in ttFont.getGlyphOrder():
                g = glyfTable[name]
                if (g.numberOfContours == 0):
                    continue  # empty glyph: no bounds
                # Composite glyphs (< 0 contours) may lack cached bounds.
                if ((g.numberOfContours < 0) and (not hasattr(g, 'xMax'))):
                    g.recalcBounds(glyfTable)
                boundsWidthDict[name] = (g.xMax - g.xMin)
        elif (('CFF ' in ttFont) or ('CFF2' in ttFont)):
            if ('CFF ' in ttFont):
                topDict = ttFont['CFF '].cff.topDictIndex[0]
            else:
                topDict = ttFont['CFF2'].cff.topDictIndex[0]
            charStrings = topDict.CharStrings
            for name in ttFont.getGlyphOrder():
                cs = charStrings[name]
                bounds = cs.calcBounds(charStrings)
                if (bounds is not None):
                    # Round outward to integer widths (floor of xMin, ceil of xMax).
                    boundsWidthDict[name] = int((math.ceil(bounds[2]) - math.floor(bounds[0])))
        if boundsWidthDict:
            minLeftSideBearing = float('inf')
            minRightSideBearing = float('inf')
            xMaxExtent = (- float('inf'))
            for (name, boundsWidth) in boundsWidthDict.items():
                (advanceWidth, lsb) = hmtxTable[name]
                rsb = ((advanceWidth - lsb) - boundsWidth)
                extent = (lsb + boundsWidth)
                minLeftSideBearing = min(minLeftSideBearing, lsb)
                minRightSideBearing = min(minRightSideBearing, rsb)
                xMaxExtent = max(xMaxExtent, extent)
            self.minLeftSideBearing = minLeftSideBearing
            self.minRightSideBearing = minRightSideBearing
            self.xMaxExtent = xMaxExtent
        else:
            # No glyph had outlines: zero out the derived metrics.
            self.minLeftSideBearing = 0
            self.minRightSideBearing = 0
            self.xMaxExtent = 0

    def toXML(self, writer, ttFont):
        """Write each field as a simple XML tag; tableVersion is rendered
        as a 32-bit hex value."""
        (formatstring, names, fixes) = sstruct.getformat(hheaFormat)
        for name in names:
            value = getattr(self, name)
            if (name == 'tableVersion'):
                value = fi2ve(value)
                value = ('0x%08x' % value)
            writer.simpletag(name, value=value)
            writer.newline()

    def fromXML(self, name, attrs, content, ttFont):
        """Restore a field from its XML tag; tableVersion round-trips through
        ve2fi, everything else through safeEval."""
        if (name == 'tableVersion'):
            setattr(self, name, ve2fi(attrs['value']))
            return
        setattr(self, name, safeEval(attrs['value']))
# NOTE(review): the bare string expression below looks like the argument of a
# stripped registration decorator (e.g. @registry.reg(...)); as written it is
# a no-op statement — confirm against the original source.
('rocm.gemm_rcr_bias_permute_m2n3.gen_profiler')
def gemm_gen_profiler(func_attrs, workdir, dim_info_dict):
    """Generate profiler sources for the ROCm gemm_rcr_bias_permute_m2n3 op.

    Delegates to the common GEMM profiler generator with the
    bias_permute_m2n3 flag and the M2N3 extra-shape template.

    :param func_attrs: attributes of the GEMM function being profiled
    :param workdir: directory the generated profiler sources are written to
    :param dim_info_dict: dimension info used to render shape code
    """
    return common.gen_profiler(func_attrs=func_attrs, workdir=workdir, dim_info_dict=dim_info_dict, args_parse=ARGS_PARSER_TEMPLATE.render(), gemm_flag='bias_permute_m2n3', extra_shape_template=permute_common.EXTRA_SHAPE_TEMPLATE_M2N3)
def make_stalecheck_middleware(allowable_delay: int, skip_stalecheck_for_methods: Collection[str]=SKIP_STALECHECK_FOR_METHODS) -> Middleware:
    """Build a middleware that refuses RPC calls when the chain head is stale.

    Before forwarding any method not in `skip_stalecheck_for_methods`, the
    middleware checks that the cached latest block is younger than
    `allowable_delay` seconds; otherwise it refetches the head and raises
    StaleBlockchain if even the fresh head is too old.
    """
    if (allowable_delay <= 0):
        raise ValueError('You must set a positive allowable_delay in seconds for this middleware')

    def stalecheck_middleware(make_request: Callable[[RPCEndpoint, Any], Any], w3: 'Web3') -> Callable[[RPCEndpoint, Any], RPCResponse]:
        # Per-provider cache of the most recently seen fresh head block.
        cached: Dict[str, Optional[BlockData]] = {'latest': None}

        def middleware(method: RPCEndpoint, params: Any) -> RPCResponse:
            # Exempt methods pass straight through without a freshness check.
            if (method in skip_stalecheck_for_methods):
                return make_request(method, params)
            if (not _is_fresh(cached['latest'], allowable_delay)):
                head = w3.eth.get_block('latest')
                if (not _is_fresh(head, allowable_delay)):
                    raise StaleBlockchain(head, allowable_delay)
                cached['latest'] = head
            return make_request(method, params)
        return middleware
    return stalecheck_middleware
class TestSaveOverride(BodhiClientTestCase):
    """Tests for BodhiClient.save_override: payload construction, expiration
    handling (duration vs explicit date) and the edit/expired flags."""

    def test_save_override(self, mocker):
        """A duration in days is turned into an expiration_date ~N days out."""
        client = bindings.BodhiClient()
        client.send_request = mocker.MagicMock(return_value='return_value')
        client.csrf_token = 'a token'
        now = datetime.utcnow()
        response = client.save_override(nvr='python-pyramid-1.5.6-3.el7', duration=2, notes='This is needed to build bodhi-2.4.0.')
        assert (response == 'return_value')
        # Pull the expiration the client actually computed out of the mock call.
        actual_expiration = client.send_request.mock_calls[0][2]['data']['expiration_date']
        client.send_request.assert_called_once_with('overrides/', verb='POST', auth=True, data={'nvr': 'python-pyramid-1.5.6-3.el7', 'expiration_date': actual_expiration, 'csrf_token': 'a token', 'notes': 'This is needed to build bodhi-2.4.0.'})
        # Allow a few minutes of slack between `now` and the client's clock.
        expected_expiration = (now + timedelta(days=2))
        assert ((actual_expiration - expected_expiration) < timedelta(minutes=5))

    def test_save_override_expiration_date(self, mocker):
        """An explicit expiration_date is passed through unchanged."""
        client = bindings.BodhiClient()
        client.send_request = mocker.MagicMock(return_value='return_value')
        client.csrf_token = 'a token'
        now = datetime.utcnow()
        response = client.save_override(nvr='python-pyramid-1.5.6-3.el7', expiration_date=now, notes='This is needed to build bodhi-2.4.0.')
        assert (response == 'return_value')
        client.send_request.assert_called_once_with('overrides/', verb='POST', auth=True, data={'nvr': 'python-pyramid-1.5.6-3.el7', 'expiration_date': now, 'csrf_token': 'a token', 'notes': 'This is needed to build bodhi-2.4.0.'})

    def test_save_override_no_expiration(self, mocker):
        """Omitting both duration and expiration_date is a TypeError."""
        client = bindings.BodhiClient()
        client.send_request = mocker.MagicMock(return_value='return_value')
        client.csrf_token = 'a token'
        with pytest.raises(TypeError):
            client.save_override(nvr='python-pyramid-1.5.6-3.el7', notes='This is needed to build bodhi-2.4.0.')

    def test_save_override_both_expirations(self, mocker):
        """Passing duration AND expiration_date together is a TypeError."""
        client = bindings.BodhiClient()
        client.send_request = mocker.MagicMock(return_value='return_value')
        client.csrf_token = 'a token'
        now = datetime.utcnow()
        with pytest.raises(TypeError):
            client.save_override(nvr='python-pyramid-1.5.6-3.el7', notes='This is needed to build bodhi-2.4.0.', duration=1, expiration_date=now)

    def test_save_override_edit(self, mocker):
        """edit=True adds the 'edited' key carrying the NVR being edited."""
        client = bindings.BodhiClient()
        client.send_request = mocker.MagicMock(return_value='return_value')
        client.csrf_token = 'a token'
        now = datetime.utcnow()
        response = client.save_override(nvr='python-pyramid-1.5.6-3.el7', duration=2, notes='This is needed to build bodhi-2.4.0.', edit=True)
        assert (response == 'return_value')
        actual_expiration = client.send_request.mock_calls[0][2]['data']['expiration_date']
        client.send_request.assert_called_once_with('overrides/', verb='POST', auth=True, data={'nvr': 'python-pyramid-1.5.6-3.el7', 'expiration_date': actual_expiration, 'csrf_token': 'a token', 'notes': 'This is needed to build bodhi-2.4.0.', 'edited': 'python-pyramid-1.5.6-3.el7'})
        expected_expiration = (now + timedelta(days=2))
        assert ((actual_expiration - expected_expiration) < timedelta(minutes=5))

    def test_save_override_expired(self, mocker):
        """expired=True is forwarded in the request payload."""
        client = bindings.BodhiClient()
        client.send_request = mocker.MagicMock(return_value='return_value')
        client.csrf_token = 'a token'
        now = datetime.utcnow()
        response = client.save_override(nvr='python-pyramid-1.5.6-3.el7', duration=2, notes='This is needed to build bodhi-2.4.0.', expired=True)
        assert (response == 'return_value')
        actual_expiration = client.send_request.mock_calls[0][2]['data']['expiration_date']
        client.send_request.assert_called_once_with('overrides/', verb='POST', auth=True, data={'nvr': 'python-pyramid-1.5.6-3.el7', 'expiration_date': actual_expiration, 'csrf_token': 'a token', 'notes': 'This is needed to build bodhi-2.4.0.', 'expired': True})
        expected_expiration = (now + timedelta(days=2))
        assert ((actual_expiration - expected_expiration) < timedelta(minutes=5))
class Type_Inference():
    """Tracks the set of still-possible types for a value, narrowed by
    positive/negative assertions against precomputed type masks."""

    # Shared mask table, built once when the class is created.
    MASKS = build_masks()

    def __init__(self):
        # Start from a private copy of the full leaf-type candidate set.
        self.options = copy(Type_Inference.MASKS['leafs'])

    def is_resolved(self):
        """True once exactly one candidate type remains."""
        return (len(self.options) == 1)

    def is_conflicted(self):
        """True when the assertions ruled out every candidate."""
        return (len(self.options) == 0)

    def assert_positive(self, choice):
        """Keep only candidates compatible with `choice` (set intersection)."""
        assert issubclass(choice, m_types.Type)
        mask = Type_Inference.MASKS['mask'][choice.__name__]
        self.options &= mask

    def assert_negative(self, choice):
        """Drop every candidate compatible with `choice` (set difference)."""
        assert issubclass(choice, m_types.Type)
        mask = Type_Inference.MASKS['mask'][choice.__name__]
        self.options -= mask

    def dump(self):
        """Print the current inference state for debugging."""
        print('Type inference')
        print(f' resolved: {self.is_resolved()}')
        print(f' conflict: {self.is_conflicted()}')
        print(f" options: {', '.join(sorted(self.options))}")
def get_read(delta_days: int) -> Dict[int, Tuple[int, str]]:
    """Return per-note read counts for the day `delta_days` days ago.

    :param delta_days: non-negative number of days back from today
    :return: mapping of note id -> (read count, note title)
    """
    assert (delta_days >= 0)
    stamp = utility.date.date_x_days_ago_stamp(abs(delta_days))
    conn = _get_connection()
    # stamp is produced internally from an integer day offset, so the
    # f-string interpolation below does not take user-controlled text.
    query = (
        "select counts.c, counts.nid, notes.title from notes "
        "join (select count(*) as c, nid from read where page > -1 "
        f"and created like '{stamp}%' group by nid) as counts on notes.id = counts.nid"
    )
    rows = conn.execute(query).fetchall()
    conn.close()
    return {nid: (count, title) for (count, nid, title) in rows}
class EnumerationValidator(object):
    """Validates a value against a closed set of allowed strings.

    Matching is case-insensitive by default; in either mode the canonical
    spelling (as originally supplied) is returned on success.
    """

    def __init__(self, valid_values, case_sensitive=False) -> None:
        self.case_sensitive = case_sensitive
        # Map lookup key -> canonical value; keys are lowercased unless the
        # validator is case sensitive.
        normalize = (lambda s: s) if case_sensitive else (lambda s: s.lower())
        self.valid_values = {normalize(s): s for s in valid_values}

    def __call__(self, value, field_name):
        """Return the canonical spelling of `value`, or raise ConfigurationError."""
        lookup = value if self.case_sensitive else value.lower()
        match = self.valid_values.get(lookup)
        if (match is None):
            raise ConfigurationError('{} is not in the list of valid values: {}'.format(value, list(self.valid_values.values())), field_name)
        return match
def _pop_frame():
    """Pop the innermost frame and restore the previous frame's metadata.

    The restored db_versions keep only the databases known to the frame that
    is now on top of the stack, with the popped frame's version values.
    """
    prev_frame = _current_frame()
    _stack.get().pop()
    if (not is_active()):
        return
    current_frame = _current_frame()
    restored_versions = {db: prev_frame.db_versions[db] for db in current_frame.db_versions.keys()}
    _update_frame(
        user=prev_frame.user,
        comment=prev_frame.comment,
        date_created=prev_frame.date_created,
        db_versions=restored_versions,
        meta=prev_frame.meta,
    )
def test_full_drop_table_volume_anomalies(test_id: str, dbt_project: DbtProject):
    """The volume anomaly test must FAIL when the most recent day of data
    (yesterday onward) is entirely missing from the table."""
    utc_today = datetime.utcnow().date()
    cutoff = (utc_today - timedelta(days=1))
    data = []
    for cur_date in generate_dates(base_date=utc_today):
        # Keep only rows strictly older than yesterday, simulating a dropped day.
        if (cur_date < cutoff):
            data.append({TIMESTAMP_COLUMN: cur_date.strftime(DATE_FORMAT)})
    test_result = dbt_project.test(test_id, DBT_TEST_NAME, DBT_TEST_ARGS, data=data)
    assert (test_result['status'] == 'fail')
class TestAITModule(unittest.TestCase):
    """End-to-end tests: trace a torch module with acc_tracer, lower it to an
    AIT engine via AITInterpreter, and compare AITModule output with eager
    PyTorch (optionally through jit serialization or CUDA graphs)."""

    def setUpClass(cls) -> None:
        # NOTE(review): first parameter is `cls` but no @classmethod decorator
        # is visible — likely stripped; confirm against upstream.
        torch.manual_seed(0)  # deterministic weights across the test class

    def _test_fx2ait_impl(self, test_serialization=False, test_cuda_graph=False):
        """Shared body for the fx2ait tests.

        :param test_serialization: round-trip the AITModule through
            torch.jit.trace/save/load (and exercise constant swapping).
        :param test_cuda_graph: run the engine with CUDA graphs enabled.
        """
        mod = torch.nn.Sequential(torch.nn.Linear(3, 4), torch.nn.ReLU(), torch.nn.ReLU(), torch.nn.ReLU(), torch.nn.ReLU(), torch.nn.ReLU(), torch.nn.ReLU(), torch.nn.ReLU(), torch.nn.ReLU(), torch.nn.ReLU(), torch.nn.ReLU()).half().cuda()
        inputs = [torch.randn(5, 3).half().cuda()]
        ref_output = mod(*inputs)  # eager reference result
        traced = acc_tracer.trace(mod, inputs)
        ait_dump_dir = tempfile.mkdtemp(prefix='test_fx2ait_', dir='/tmp')
        interp = AITInterpreter(traced, inputs, ait_dump_dir, 'test')
        interp_result = interp.run()
        ait_mod = AITModule(AIT_MODEL_CLASS(interp_result.engine.lib_path, interp_result.input_names, interp_result.output_names, torch.float16, torch.float16, 1))
        ait_mod.engine.use_cuda_graph = test_cuda_graph
        if test_serialization:
            buf = io.BytesIO()
            ait_mod = torch.jit.trace(ait_mod, inputs)
            script_output = ait_mod(*inputs)
            torch.testing.assert_close(script_output, ref_output, atol=0.1, rtol=0.1)
            torch.jit.save(ait_mod, buf)
            buf.seek(0)
            # The deserialized module resolves its engine library by basename.
            AIT_MODEL_CLASS.register_library_name_to_path_map({os.path.basename(interp_result.engine.lib_path): interp_result.engine.lib_path})
            ait_mod = torch.jit.load(buf)
        ait_output = ait_mod(*inputs)
        torch.testing.assert_close(ait_output, ref_output, atol=0.1, rtol=0.1)
        if (not is_oss_ait_model()):
            # Exercise in-place constant updates and double-buffer swapping:
            # after swapping in different weights the output must diverge.
            weights = {'_0_weight': torch.ones(3, 4).cuda().half(), '_0_bias': torch.randn(4).cuda().half()}
            ait_mod.engine.update_constants_with_weights(weights)
            ait_output = ait_mod(*inputs)
            torch.testing.assert_close(ait_output, ref_output, atol=0.01, rtol=0.01)
            ait_mod.engine.swap_constants()
            ait_output = ait_mod(*inputs)
            self.assertFalse(torch.allclose(ait_output, ref_output, atol=0.01, rtol=0.01))

    def test_fx2ait(self):
        """Plain lowering path: no serialization, no CUDA graphs."""
        self._test_fx2ait_impl(test_serialization=False)

    def test_fx2ait_module_serialization(self):
        """Lowering plus jit trace/save/load round trip."""
        self._test_fx2ait_impl(test_serialization=True)

    def test_fx2ait_cuda_graph(self):
        """Lowering with CUDA-graph execution enabled."""
        self._test_fx2ait_impl(test_cuda_graph=True)

    def test_fx2ait_args(self):
        """AITModule must accept positional, keyword and nested-list args the
        same way the original module does."""
        class TestModule(torch.nn.Module):
            def forward(self, a, b, c, d):
                # Sums a scalar tensor, two 2-element lists of tensors, and d.
                temp = (((((a + b[0]) + b[1]) + c[0]) + c[1]) + d)
                return temp
        mod = TestModule().half().cuda()
        a = torch.randn(5, 3).half().cuda()
        b = [torch.randn(5, 3).half().cuda(), torch.randn(5, 3).half().cuda()]
        c = [torch.randn(5, 3).half().cuda(), torch.randn(5, 3).half().cuda()]
        d = torch.randn(5, 3).half().cuda()
        ref_output = mod(a, b, c, d)
        traced = acc_tracer.trace(mod, [a, b, c, d])
        ait_dump_dir = tempfile.mkdtemp(prefix='test_fx2ait_', dir='/tmp')
        interp = AITInterpreter(traced, [a, b, c, d], ait_dump_dir, 'test')
        interp_result = interp.run()
        ait_mod = AITModule(AIT_MODEL_CLASS(interp_result.engine.lib_path, interp_result.input_names, interp_result.output_names, torch.float16, torch.float16, 1), interp_result)
        # Progressively convert positional args to keywords; results must match.
        ait_output = ait_mod(a, b, c, d)
        torch.testing.assert_close(ait_output, ref_output, atol=0.1, rtol=0.1)
        ait_output = ait_mod(a, b, c, d=d)
        torch.testing.assert_close(ait_output, ref_output, atol=0.1, rtol=0.1)
        ait_output = ait_mod(a, b, c=c, d=d)
        torch.testing.assert_close(ait_output, ref_output, atol=0.1, rtol=0.1)
        ait_output = ait_mod(a, b=b, c=c, d=d)
        torch.testing.assert_close(ait_output, ref_output, atol=0.1, rtol=0.1)
class TestUnpackerBase():
    """pytest-style base class: builds a configured Unpacker over temp dirs
    and provides helpers for checking plugin selection and the standard
    three-file unpack fixture."""

    def setup_method(self):
        # Fresh config and temp dirs per test; data_folder points at ds_tmp_dir.
        self.config = ConfigParser()
        self.ds_tmp_dir = TemporaryDirectory(prefix='fact_tests_')
        self.tmp_dir = TemporaryDirectory(prefix='fact_tests_')
        self.config.add_section('unpack')
        self.config.set('unpack', 'data_folder', self.ds_tmp_dir.name)
        # MIME types that are never unpacked.
        self.config.set('unpack', 'blacklist', 'text/plain, image/png')
        self.config.add_section('ExpertSettings')
        self.config.set('ExpertSettings', 'header_overhead', '256')
        self.config.set('ExpertSettings', 'unpack_threshold', '0.8')
        self.unpacker = Unpacker(config=self.config)
        os.makedirs(str(self.unpacker._report_folder), exist_ok=True)
        os.makedirs(str(self.unpacker._file_folder), exist_ok=True)
        self.test_file_path = Path(get_test_data_dir(), 'get_files_test/testfile1')

    def teardown_method(self):
        self.ds_tmp_dir.cleanup()
        self.tmp_dir.cleanup()
        # Force collection so lingering file handles don't block dir removal.
        gc.collect()

    def get_unpacker_meta(self):
        """Load and return the unpacker's meta.json report as a dict."""
        return json.loads(Path(self.unpacker._report_folder, 'meta.json').read_text())

    def check_unpacker_selection(self, mime_type, plugin_name):
        """Assert that `mime_type` resolves to the expected unpacker plugin."""
        name = self.unpacker.get_unpacker(mime_type)[1]
        assert (name == plugin_name), 'wrong unpacker plugin selected'

    def check_unpacking_of_standard_unpack_set(self, in_file: (Path | str), additional_prefix_folder: str='', output: bool=True, ignore: (set[str] | None)=None):
        """Unpack `in_file` and assert the standard fixture set was extracted.

        :param additional_prefix_folder: path segment the archive nests its
            contents under, if any
        :param output: whether the plugin is expected to report an 'output' key
        :param ignore: substrings of extracted paths to exclude before counting
        :return: the meta data returned by the unpacker
        """
        (files, meta_data) = self.unpacker.extract_files_from_file(str(in_file), self.tmp_dir.name)
        files = {f for f in files if (not any(((rule in f) for rule in (ignore or set()))))}
        assert (len(files) == 3), f'file number incorrect: {meta_data}'
        assert (files == {os.path.join(self.tmp_dir.name, additional_prefix_folder, 'testfile1'), os.path.join(self.tmp_dir.name, additional_prefix_folder, 'testfile2'), os.path.join(self.tmp_dir.name, additional_prefix_folder, 'generic folder/test file 3_.txt')}), f'not all files found: {meta_data}'
        if output:
            assert ('output' in meta_data)
        return meta_data
# NOTE(review): the bare attribute access below looks like the remnant of a
# stripped decorator (presumably @pytest.mark.django_db); as written it is not
# valid on its own — confirm against the original source.
.django_db
def test_tas_multiple_program_activity_belonging_one_object_class(client, monkeypatch, tas_mulitple_pas_per_oc, helpers):
    """Object-class endpoint: a TAS whose single object class maps to multiple
    program activities must aggregate both under one 'Other' child each."""
    helpers.mock_current_fiscal_year(monkeypatch)
    tas = '001-X-0000-000'
    resp = client.get(url.format(tas=tas, query_params=''))
    expected_result = {'fiscal_year': helpers.get_mocked_current_fiscal_year(), 'treasury_account_symbol': tas, 'messages': [], 'page_metadata': {'hasNext': False, 'hasPrevious': False, 'limit': 10, 'next': None, 'page': 1, 'previous': None, 'total': 2}, 'results': [{'name': 'NAME 2', 'gross_outlay_amount': 1000000.0, 'obligated_amount': 10.0, 'children': [{'gross_outlay_amount': 1000000.0, 'name': 'Other', 'obligated_amount': 10.0}]}, {'name': 'NAME 1', 'gross_outlay_amount': .0, 'obligated_amount': 1.0, 'children': [{'gross_outlay_amount': .0, 'name': 'Other', 'obligated_amount': 1.0}]}]}
    assert (resp.status_code == status.HTTP_200_OK)
    assert (resp.json() == expected_result)
class JsFileData():
    """Client-side accessor over a JavaScript records variable.

    `varName` holds the JS expression naming an array of record objects on
    the page; every method returns a JS fragment (not Python data) that is
    evaluated in the browser.
    """

    def __init__(self, js_code: str):
        # The JS variable/expression this wrapper reads from.
        self.varName = js_code

    def raw(self):
        """Return the underlying JS object untouched."""
        return JsObjects.JsObjects.get(self.varName)

    def headers(self) -> JsObjects.JsArray.JsArray:
        """Return the column names, taken from the keys of the first record."""
        return JsObjects.JsArray.JsArray.get(self.varName)[0].keys()

    def records(self) -> JsObjects.JsArray.JsArray:
        """Return the full array of records."""
        return JsObjects.JsArray.JsArray.get(self.varName)

    def vector(self, name: str) -> JsObjects.JsArray.JsArray:
        """Return one column as a flat JS array.

        :param name: the record key to extract
        """
        name = JsUtils.jsConvertData(name, None)
        return JsObjects.JsArray.JsArray.get(('\n(function(records, col){\n var vector = []; records.forEach(function(rec){vector.push(rec[col])}); return vector\n})(%s, %s) ' % (self.varName, name)))

    def values(self, name: Union[(str, primitives.JsDataModel)], with_count: bool=False):
        """Return the distinct values of a column.

        :param name: the record key to inspect
        :param with_count: when True return a {value: occurrence_count} object
            instead of the plain list of distinct values
        """
        name = JsUtils.jsConvertData(name, None)
        with_count = JsUtils.jsConvertData(with_count, None)
        return JsObjects.JsObjects.get(('\n(function(records, col){\n var vector = {}; records.forEach(function(rec){\n if(vector[rec[col]] == undefined){vector[rec[col]] = 0};\n vector[rec[col]]++});\n if(%s){return vector}\n else{return Object.keys(vector)}\n})(%s, %s) ' % (with_count, self.varName, name)))

    def series(self, names: Union[(str, primitives.JsDataModel)]) -> JsObjects.JsArray.JsArray:
        """Return several columns as an array of rows (one row per record).

        :param names: the record keys to extract, in row order
        """
        names = JsUtils.jsConvertData(names, None)
        return JsObjects.JsArray.JsArray.get(('\n(function(records, cols){\n var vector = []; records.forEach(function(rec){\n var row = []; cols.forEach(function(r){row.push(rec[r])}); vector.push(row)});\n return vector\n})(%s, %s) ' % (self.varName, names)))
# NOTE(review): the bare parenthesized expression below looks like the argument
# of a stripped decorator (e.g. a swagger/doc tagging decorator); confirm
# against the original source.
(tags=['filings'], description=docs.OPERATIONS_LOG)
class OperationsLogView(ApiResource):
    """API resource exposing the operations log with multi-value and
    date-range filters."""

    model = models.OperationsLog
    schema = schemas.OperationsLogSchema
    page_schema = schemas.OperationsLogPageSchema
    # Query params that accept multiple values, mapped to their columns.
    filter_multi_fields = [('candidate_committee_id', models.OperationsLog.candidate_committee_id), ('beginning_image_number', models.OperationsLog.beginning_image_number), ('report_type', models.OperationsLog.report_type), ('report_year', models.OperationsLog.report_year), ('form_type', models.OperationsLog.form_type), ('amendment_indicator', models.OperationsLog.amendment_indicator), ('status_num', models.OperationsLog.status_num)]
    # min/max query-param pairs mapped to their date columns.
    filter_range_fields = [(('min_receipt_date', 'max_receipt_date'), models.OperationsLog.receipt_date), (('min_coverage_end_date', 'max_coverage_end_date'), models.OperationsLog.coverage_end_date), (('min_transaction_data_complete_date', 'max_transaction_data_complete_date'), models.OperationsLog.transaction_data_complete_date)]

    def args(self):
        # NOTE(review): inside the body, `args` resolves to the module-level
        # args helper, not this method; upstream this is likely a @property.
        default_sort = ['-report_year']
        return utils.extend(args.paging, args.operations_log, args.make_multi_sort_args(default=default_sort))

    def build_query(self, *args, **kwargs):
        # Currently a pass-through to the base query; kept as an extension point.
        query = super().build_query(*args, **kwargs)
        return query
class intArray(_object):
    """SWIG-generated proxy for a fixed-size C int array.

    NOTE: auto-generated binding code — do not hand-edit the logic; regenerate
    from the SWIG interface instead.
    """

    __swig_setmethods__ = {}
    __setattr__ = (lambda self, name, value: _swig_setattr(self, intArray, name, value))
    __swig_getmethods__ = {}
    __getattr__ = (lambda self, name: _swig_getattr(self, intArray, name))
    __repr__ = _swig_repr

    def __init__(self, nelements):
        # Allocate a C array of `nelements` ints and bind it to this proxy.
        this = _optcc.new_intArray(nelements)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    __swig_destroy__ = _optcc.delete_intArray
    __del__ = (lambda self: None)

    def __getitem__(self, index):
        """Read the int at `index` from the underlying C array."""
        return _optcc.intArray___getitem__(self, index)

    def __setitem__(self, index, value):
        """Write `value` into the underlying C array at `index`."""
        return _optcc.intArray___setitem__(self, index, value)

    def cast(self):
        """Return the array as a plain int* pointer."""
        return _optcc.intArray_cast(self)
    # Wrap an existing int* pointer as an intArray (static in C terms).
    if _newclass:
        frompointer = staticmethod(_optcc.intArray_frompointer)
    else:
        frompointer = _optcc.intArray_frompointer
class OptionPlotoptionsOrganizationSonificationContexttracksMappingRate(Options):
    # Generated option wrapper for the Highcharts setting
    # plotOptions.organization.sonification.contextTracks.mapping.rate.
    # NOTE(review): each accessor below is defined twice (getter form, then
    # setter form) with no @property/@<name>.setter decorators visible, so the
    # second def shadows the first at class-creation time. The decorators were
    # likely lost — confirm against the generator/original source.

    def mapFunction(self):
        """Getter: the configured mapping function (None when unset)."""
        return self._config_get(None)

    def mapFunction(self, value: Any):
        """Setter: install a custom mapping function."""
        self._config(value, js_type=False)

    def mapTo(self):
        """Getter: the data property this mapping reads from."""
        return self._config_get(None)

    def mapTo(self, text: str):
        """Setter: name of the data property to map from."""
        self._config(text, js_type=False)

    def max(self):
        """Getter: upper bound of the mapped value range."""
        return self._config_get(None)

    def max(self, num: float):
        """Setter: upper bound of the mapped value range."""
        self._config(num, js_type=False)

    def min(self):
        """Getter: lower bound of the mapped value range."""
        return self._config_get(None)

    def min(self, num: float):
        """Setter: lower bound of the mapped value range."""
        self._config(num, js_type=False)

    def within(self):
        """Getter: the data bounds the mapping is computed within."""
        return self._config_get(None)

    def within(self, value: Any):
        """Setter: the data bounds the mapping is computed within."""
        self._config(value, js_type=False)
class RouteFormatterMixin(object):
    """Mixin that renders BGP route tables as fixed-width text."""

    # Column layout: status, network, labels, next hop, reason, metric,
    # local preference, AS path.
    fmtstr = ' {0:<3s} {1:<32s} {2:<8s} {3:<20s} {4:<15s} {5:<6s} {6:<6s} {7:<}\n'

    def _format_family_header(cls):
        # NOTE(review): first parameter is `cls` but no @classmethod decorator
        # is visible — likely stripped; confirm against the original source.
        """Return the legend plus column header lines for a route table."""
        ret = ''
        ret += 'Status codes: * valid, > best\n'
        ret += 'Origin codes: i - IGP, e - EGP, ? - incomplete\n'
        ret += cls.fmtstr.format('', 'Network', 'Labels', 'Next Hop', 'Reason', 'Metric', 'LocPrf', 'Path')
        return ret

    def _format_family(cls, dest_list):
        # NOTE(review): `cls` first parameter without visible @classmethod —
        # see _format_family_header.
        """Render one line per path for every destination in `dest_list`.

        The prefix is printed only on the first path of each destination;
        best paths are flagged with '>' after the '*' validity marker.
        """
        msg = six.StringIO()

        def _append_path_info(buff, path, is_best, show_prefix):
            aspath = path.get('aspath')
            origin = path.get('origin')
            if origin:
                aspath.append(origin)  # origin code is shown at the end of the AS path
            bpr = path.get('bpr')
            next_hop = path.get('nexthop')
            med = path.get('metric')
            labels = path.get('labels')
            localpref = path.get('localpref')
            path_status = '*'
            if is_best:
                path_status += '>'
            prefix = ''
            if show_prefix:
                prefix = path.get('prefix')
            buff.write(cls.fmtstr.format(path_status, prefix, str(labels), str(next_hop), bpr, str(med), str(localpref), ' '.join(map(str, aspath))))
        for dist in dest_list:
            for (idx, path) in enumerate(dist.get('paths')):
                _append_path_info(msg, path, path['best'], (idx == 0))
        ret = msg.getvalue()
        msg.close()
        return ret
# NOTE(review): the three bare expressions below look like stripped CLI
# decorators (e.g. @<group>.command(), @_migration_options,
# @<cli>.argument('revision', required=True)); confirm against the original.
()
_migration_options
('revision', required=True)
def show(alembic_ini_path: str, script_location: str, revision: str):
    """Locate and print the Alembic migration script for one revision.

    :param alembic_ini_path: path to the alembic.ini config
    :param script_location: directory containing the migration scripts
    :param revision: the revision id whose script should be shown
    """
    from alembic.script import ScriptDirectory
    (alembic_cfg, db_manager) = _get_migration_config(alembic_ini_path, script_location)
    script = ScriptDirectory.from_config(alembic_cfg)
    rev = script.get_revision(revision)
    if (rev is None):
        print(f'Revision {revision} not found.')
        return
    # Migration files are named '<revision>_<slug>.py'; match on the prefix.
    script_files = os.listdir(os.path.join(script.dir, 'versions'))
    script_file = next((f for f in script_files if f.startswith(revision)), None)
    if (script_file is None):
        print(f'Migration script for revision {revision} not found.')
        return
    script_file_path = os.path.join(script.dir, 'versions', script_file)
    print(f'Migration script for revision {revision}: {script_file_path}')
    try:
        with open(script_file_path, 'r') as file:
            print(file.read())
    except FileNotFoundError:
        # The file was listed a moment ago but may have been removed since.
        print(f'Migration script {script_file_path} not found.')
def process_tagscript(content: str, seed_variables: Dict[(str, tse.Adapter)]={}) -> Dict[(str, Any)]:
    """Run *content* through the TagScript engine and build message kwargs.

    Returns a dict that may contain ``content`` (the rendered body, truncated
    to Discord's 2000-character limit) and/or ``embed`` (from the embed block).
    """
    response: tse.Response = tagscript_engine.process(content, seed_variables)
    kwargs: Dict[(str, Any)] = {}
    body = response.body
    if body:
        # Discord caps message content at 2000 characters.
        kwargs['content'] = body[:2000]
    embed = response.actions.get('embed')
    if embed:
        kwargs['embed'] = embed
    return kwargs
class ModifyL2Dst(base_tests.SimpleDataPlane):
    """Verify the OFPAT_SET_DL_DST action rewrites the Ethernet destination."""

    def runTest(self):
        logging.info('Running Modify_L2_Dst test')
        # Fixed: dict.keys() is a view in Python 3 and has no .sort();
        # sorted() works on both Python 2 and 3.
        of_ports = sorted(config['port_map'].keys())
        self.assertTrue(len(of_ports) > 1, 'Not enough ports for test')
        delete_all_flows(self.controller)
        logging.info('Verify if switch supports the action -- modify_l2_dst, if not skip the test')
        logging.info('Insert a flow with action -- set etherent dst address ')
        logging.info('Send packet matching the flow, verify recieved packet dst address rewritten ')
        # Skip rather than fail when the switch does not advertise the action.
        sup_acts = sw_supported_actions(self)
        if not (sup_acts & (1 << ofp.OFPAT_SET_DL_DST)):
            skip_message_emit(self, 'modify_l2_dst test skipped')
            return
        (pkt, exp_pkt, acts) = pkt_action_setup(self, mod_fields=['eth_dst'],
                                                check_test_params=True)
        flow_match_test(self, config['port_map'], pkt=pkt, exp_pkt=exp_pkt,
                        action_list=acts, max_test=2)
class MocapViewerOffline(animation.FuncAnimation):
    """Offscreen motion-capture viewer.

    Renders every pose of *motion* through pyrender's OffscreenRenderer and
    exposes the result as a matplotlib FuncAnimation (one rasterised image per
    frame).  Sphere and capsule scene nodes are pooled and reused across
    frames instead of being recreated.
    """

    def __init__(self, motion, cam_pos, v_up_str, play_speed=1.0, scale=1.0,
                 thickness=1.0, hide_origin=False):
        animation.FuncAnimation.__init__(
            self,
            fig=plt.figure(figsize=(5, 5)),
            func=self.animate,
            frames=len(motion.poses),
            interval=50,
            blit=False,
        )
        self.motion = motion
        self.play_speed = play_speed
        self.hide_origin = hide_origin
        self.file_idx = 0
        self.cur_time = 0.0
        self.scale = scale
        self.thickness = thickness
        self.cam_p = np.array(cam_pos)
        self.up_axis = utils.str_to_axis(v_up_str)
        self.ground_node = None
        self.init_pyrender()
        # Node pools, grown on demand by render_point()/render_capsule().
        self.pt_pool = []
        self.cap_pool = []
        plt.axis('off')
        self.ims = None
        self.progress = tqdm.tqdm(total=len(motion.poses))

    def render_point(self, at_index, p, scale=1.0, radius=1.0, color=(1.0, 0.0, 0.0)):
        """Place (or move) the pooled sphere node *at_index* at position *p*.

        Fixed: default *color* was a mutable list literal; a tuple avoids the
        shared-mutable-default pitfall (callers passing lists are unaffected).
        """
        if at_index >= len(self.pt_pool):
            # Grow the pool with uniformly coloured icospheres.
            for i in range(len(self.pt_pool), at_index + 1):
                sphere_trimesh = trimesh.creation.icosphere(radius=radius, subdivisions=1)
                sphere_face_colors = np.zeros(sphere_trimesh.faces.shape)
                sphere_face_colors[:] = np.array(color)
                sphere_trimesh.visual.face_colors = sphere_face_colors
                sphere_mesh = Mesh.from_trimesh(sphere_trimesh, smooth=False)
                sphere_node = Node(mesh=sphere_mesh, name='sphere_' + str(i))
                self.scene.add_node(sphere_node)
                self.pt_pool.append(sphere_node)
        self.pt_pool[at_index].scale = [scale] * 3
        self.pt_pool[at_index].translation = p

    def render_capsule(self, at_index, p, Q, length, scale=1.0, color=(1.0, 0.0, 0.0)):
        """Place (or move) the pooled capsule node *at_index* (pos *p*, rot *Q*)."""
        if at_index >= len(self.cap_pool):
            for i in range(len(self.cap_pool), at_index + 1):
                sphere_trimesh = trimesh.creation.capsule(height=1.0, radius=1.0, count=[8, 8])
                sphere_face_colors = np.zeros(sphere_trimesh.faces.shape)
                sphere_face_colors[:] = np.array(color)
                sphere_trimesh.visual.face_colors = sphere_face_colors
                sphere_mesh = Mesh.from_trimesh(sphere_trimesh, smooth=False)
                sphere_node = Node(mesh=sphere_mesh, name='capsule_' + str(i))
                self.scene.add_node(sphere_node)
                self.cap_pool.append(sphere_node)
        # NOTE(review): x/y scale is hard-coded to 0.1 and the *scale*
        # argument is unused here — preserved as found.
        self.cap_pool[at_index].scale = [0.1, 0.1, length]
        self.cap_pool[at_index].translation = p
        self.cap_pool[at_index].rotation = Q

    def _render_pose(self, pose, color):
        """Draw one pose: a sphere per joint plus a capsule per bone."""
        capnum = 0
        for ipt, j in enumerate(pose.skel.joints):
            T = pose.get_transform(j, local=False)
            pos = 0.4 * conversions.T2p(T)
            self.render_point(ipt, pos, radius=0.03 * self.scale, color=color)
            if j.parent_joint is not None:
                # NOTE(review): the parent position uses a 0.5 factor while the
                # joint position uses 0.4 — looks inconsistent; preserved as found.
                pos_parent = 0.5 * conversions.T2p(pose.get_transform(j.parent_joint, local=False))
                p = 0.4 * (pos_parent + pos)
                l = np.linalg.norm(pos_parent - pos)
                R = math.R_from_vectors(np.array([0, 0, 1]), pos_parent - pos)
                self.render_capsule(capnum, p, R2Q(R), l / 2.0, 0.1, color=color)
                capnum += 1

    def _render_characters(self, colors, frame):
        """Render the single character of ``self.motion`` at *frame*."""
        pose = self.motion.get_pose_by_frame(frame)
        # Only one character is rendered, so only colors[0] is used.
        self._render_pose(pose, colors[0])

    def render_ground(self, size=(20.0, 20.0), dsize=(1.0, 1.0),
                      color=(0.0, 0.0, 0.0, 1.0), line_width=1.0, axis='y',
                      origin=True, use_arrow=False, lighting=False):
        """Create the grid ground plane on first call and add it to the scene.

        *line_width*, *origin*, *use_arrow* and *lighting* are currently unused
        but kept for interface compatibility.  Fixed: mutable list defaults
        replaced by tuples; string comparisons used ``is`` (identity), which is
        implementation-dependent — replaced with ``==``.
        """
        if self.ground_node is not None:
            return
        lx, lz = size[0], size[1]
        dx, dz = dsize[0], dsize[1]
        nx = int(lx / dx) + 1
        nz = int(lz / dz) + 1
        # Two endpoints per grid line, nx + nz lines in total.
        grid_pts = np.zeros((2 * nx + 2 * nz, 3))
        colors = np.zeros((2 * nx + 2 * nz, 4))
        colors[:] = np.array(color)
        linei = 0
        if axis == 'x':
            for i in np.linspace(-0.5 * lx, 0.5 * lx, nx):
                grid_pts[2 * linei] = [0, i, -0.5 * lz]
                grid_pts[2 * linei + 1] = [0, i, 0.5 * lz]
                linei += 1
            for i in np.linspace(-0.5 * lz, 0.5 * lz, nz):
                grid_pts[2 * linei] = [0, -0.5 * lx, i]
                grid_pts[2 * linei + 1] = [0, 0.5 * lx, i]
                linei += 1
        elif axis == 'y':
            for i in np.linspace(-0.5 * lx, 0.5 * lx, nx):
                grid_pts[2 * linei] = [i, 0, -0.5 * lz]
                grid_pts[2 * linei + 1] = [i, 0, 0.5 * lz]
                linei += 1
            for i in np.linspace(-0.5 * lz, 0.5 * lz, nz):
                grid_pts[2 * linei] = [-0.5 * lx, 0, i]
                grid_pts[2 * linei + 1] = [0.5 * lx, 0, i]
                linei += 1
        elif axis == 'z':
            for i in np.linspace(-0.5 * lx, 0.5 * lx, nx):
                grid_pts[2 * linei] = [i, -0.5 * lz, 0.0]
                grid_pts[2 * linei + 1] = [i, 0.5 * lz, 0.0]
                linei += 1
            for i in np.linspace(-0.5 * lz, 0.5 * lz, nz):
                grid_pts[2 * linei] = [-0.5 * lx, i, 0.0]
                grid_pts[2 * linei + 1] = [0.5 * lx, i, 0.0]
                linei += 1
        # mode=1 renders the primitive as GL_LINES.
        grid = pyrender.Primitive(grid_pts, color_0=colors, mode=1)
        grid = pyrender.Mesh([grid])
        self.ground_node = Node(mesh=grid, name='ground_plane')
        self.scene.add_node(self.ground_node)

    def render_callback(self, frame_num):
        """Render one frame: ensure ground, draw the character, rasterise."""
        self.render_ground(size=[100, 100], color=[0.8, 0.8, 0.8, 1.0], axis='y',
                           origin=(not self.hide_origin), use_arrow=True)
        colors = [
            np.array([123, 174, 85]) / 255.0,
            np.array([255, 255, 0]) / 255.0,
            np.array([85, 160, 173]) / 255.0,
        ]
        self._render_characters(colors, frame_num)
        (color, depth) = self.r.render(self.scene)
        return color

    def animate(self, frame_num):
        """FuncAnimation callback: render *frame_num* into the imshow artist."""
        self.progress.update(frame_num)
        color = self.render_callback(frame_num)
        if self.ims is None:
            self.ims = plt.imshow(color, animated=True)
        else:
            self.ims.set_array(color)
        return (self.ims,)

    def idle_callback(self):
        # Advance the play-head by wall-clock time scaled by play_speed.
        # NOTE(review): self.time_checker is never initialised in this class —
        # calling this would raise AttributeError; preserved as found.
        time_elapsed = self.time_checker.get_time(restart=False)
        self.cur_time += self.play_speed * time_elapsed
        self.time_checker.begin()

    def init_pyrender(self):
        """Create the pyrender scene: spotlight + perspective camera + renderer."""
        self.spot_l = pyrender.SpotLight(color=np.ones(3), intensity=3.0,
                                         innerConeAngle=np.pi / 16.0,
                                         outerConeAngle=np.pi / 6.0)
        cam = PerspectiveCamera(yfov=np.pi / 2.0)
        # Light and camera share the same pose, looking at the origin.
        R = _get_cam_rotation(self.cam_p, np.zeros(3), self.up_axis)
        self.cam_pose = conversions.Rp2T(R, self.cam_p)
        self.scene = Scene(ambient_light=np.array([0.1, 0.1, 0.1, 1.0]))
        self.spot_l_node = self.scene.add(self.spot_l, pose=self.cam_pose, name='spot_light')
        self.cam_node = self.scene.add(cam, pose=self.cam_pose, name='camera')
        self.r = OffscreenRenderer(viewport_width=320, viewport_height=240)
def make_all(layout, subdir):
    """Write every export format of *layout* into *subdir*.

    Emits ``.klc`` (Windows, UTF-16LE/CRLF), ``.keylayout`` (macOS),
    ``.xkb`` / ``.xkb_custom`` (Linux) and ``.json``, all named after
    ``layout.meta['fileName']``, printing each path as it is written.
    """

    def out_path(ext=''):
        return os.path.join(subdir, layout.meta['fileName'] + ext)

    # Fixed: exists()+makedirs() is racy; exist_ok makes it atomic and idempotent.
    os.makedirs(subdir, exist_ok=True)

    # Windows keyboard layouts must be UTF-16LE with CRLF line endings.
    klc_path = out_path('.klc')
    with open(klc_path, 'w', encoding='utf-16le', newline='\r\n') as file:
        file.write(layout.klc)
    print('... ' + klc_path)

    osx_path = out_path('.keylayout')
    with open(osx_path, 'w', encoding='utf-8', newline='\n') as file:
        file.write(layout.keylayout)
    print('... ' + osx_path)

    xkb_path = out_path('.xkb')
    with open(xkb_path, 'w', encoding='utf-8', newline='\n') as file:
        file.write(layout.xkb)
    print('... ' + xkb_path)

    # The ".xkb_custom" file carries the patch variant of the xkb layout.
    xkb_custom_path = out_path('.xkb_custom')
    with open(xkb_custom_path, 'w', encoding='utf-8', newline='\n') as file:
        file.write(layout.xkb_patch)
    print('... ' + xkb_custom_path)

    json_path = out_path('.json')
    pretty_json(layout, json_path)
    print('... ' + json_path)
# NOTE(review): the line below is decorator residue mangled in this copy —
# only the argument "('cuda.gemm_rrr.config')" survives; presumably a
# registry-registration decorator whose name and "@" were lost.  Restore it
# from upstream before use.
('cuda.gemm_rrr.config')
def gemm_rrr_config(func_attrs, dtype='float16'):
    """Populate ``func_attrs['op_instance']`` with RRR (row/row/row) GEMM ops.

    Delegates op enumeration to ``common.make_fproc`` with CUTLASS 3.x ops
    included, then marks the C operand of every 3.x (Universal3x) kernel as
    void, i.e. unused.
    """
    common.make_fproc(func_attrs, RRR, include_cutlass_3x_ops=True)
    # Imported lazily so cutlass_lib is only needed when this config runs.
    import cutlass_lib
    for op in func_attrs['op_instance'].values():
        if (op.gemm_kind == cutlass_lib.library.GemmKind.Universal3x):
            # CUTLASS 3.x kernels here take no C (source/bias) operand.
            op.C.element = cutlass_lib.library.DataType.void
# NOTE(review): decorator residue — the line below reads "_converter(acc_ops.exp)"
# with its leading "@" lost in this copy; presumably it registers this function
# as the converter for acc_ops.exp.  Restore the "@" before use.
_converter(acc_ops.exp)
def acc_ops_exp(target: Target, args: Tuple[(Argument, ...)], kwargs: Dict[(str, Argument)], name: str) -> ConverterOutput:
    """Convert an ``acc_ops.exp`` node into an AIT elementwise EXP op.

    The input tensor is expected under ``kwargs['input']``; *target*, *args*
    and *name* follow the common converter signature.

    Raises:
        RuntimeError: if the input is not an AITTensor.
    """
    input_val = kwargs['input']
    if (not isinstance(input_val, AITTensor)):
        raise RuntimeError(f'Unexpected input for {name}: {input_val}')
    # Element-wise exponential applied to the whole tensor.
    return elementwise(FuncEnum.EXP)(input_val)
def get_lm_line(data, lm_mapping):
    """Flatten latency-monitor counters into a flat list of four values.

    Values are ordered: Active Bucket Counter (Read, Write), then Active
    Measured Latency (Read, Write); entries not covered by *lm_mapping*
    stay 'na'.  Returns [] when both *data* and *lm_mapping* are empty.
    """
    lm_log = {
        'Active Bucket Counter': {'Read': 'na', 'Write': 'na'},
        'Active Measured Latency': {'Read': 'na', 'Write': 'na'},
    }
    if isinstance(data, dict):
        bucket_map = lm_mapping
    elif data is None and lm_mapping:
        bucket_map = {}
    elif data is None and not lm_mapping:
        return []
    # Pull each mapped bucket's value out of data, keyed "<metric>: <bucket>".
    for metric, io_values in lm_log.items():
        for io_type in io_values:
            for bucket, settings in bucket_map.items():
                if io_type in settings['target']:
                    io_values[io_type] = data['%s: %s' % (metric, bucket)][io_type]
    return [value for io_values in lm_log.values() for value in io_values.values()]
class TestExecuteScriptsCommands(EfuseTestCase):
    """End-to-end tests for the ``espefuse execute_scripts`` command."""

    def setup_class(self):
        # Tests below chdir into script folders; remember where we started.
        self.stored_dir = os.getcwd()

    def teardown_class(self):
        os.chdir(self.stored_dir)

    # Fixed: the bare ".skipif(...)" residues below were pytest markers that
    # lost their "@pytest.mark" prefix (a syntax error as found); restored.
    @pytest.mark.skipif(
        (arg_chip in ['esp32c2', 'esp32p4']),
        reason='These chips do not have eFuses used in this test',
    )
    def test_execute_scripts_with_check_that_only_one_burn(self):
        self.espefuse_py('execute_scripts -h')
        name = (arg_chip if (arg_chip in ['esp32', 'esp32c2']) else 'esp32xx')
        os.chdir(os.path.join(TEST_DIR, 'efuse_scripts', name))
        self.espefuse_py('execute_scripts execute_efuse_script2.py')

    @pytest.mark.skipif(
        (arg_chip in ['esp32c2', 'esp32p4']),
        reason='These chips do not have eFuses used in this test',
    )
    def test_execute_scripts_with_check(self):
        self.espefuse_py('execute_scripts -h')
        name = (arg_chip if (arg_chip in ['esp32', 'esp32c2']) else 'esp32xx')
        os.chdir(os.path.join(TEST_DIR, 'efuse_scripts', name))
        self.espefuse_py('execute_scripts execute_efuse_script.py')

    def test_execute_scripts_with_index_and_config(self):
        # The expected register index differs per chip family.
        os.chdir(TEST_DIR)
        if (arg_chip in ['esp32', 'esp32c2']):
            cmd = f'execute_scripts {EFUSE_S_DIR}/efuse_burn1.py --index 10 --configfiles {EFUSE_S_DIR}/esp32/config1.json'
        else:
            cmd = f'execute_scripts {EFUSE_S_DIR}/efuse_burn1.py --index 10 --configfiles {EFUSE_S_DIR}/esp32xx/config1.json'
        self.espefuse_py(cmd)
        output = self.espefuse_py('summary -d')
        if (arg_chip in ['esp32', 'esp32c2']):
            assert ('[3 ] read_regs: e00007ff ' in output)
        else:
            assert ('[8 ] read_regs: e00007ff ' in output)

    def test_execute_scripts_nesting(self):
        # efuse_burn2.py invokes another script — verifies nested execution.
        os.chdir(TEST_DIR)
        if (arg_chip in ['esp32', 'esp32c2']):
            cmd = f'execute_scripts {EFUSE_S_DIR}/efuse_burn2.py --index 28 --configfiles {EFUSE_S_DIR}/esp32/config2.json'
        else:
            cmd = f'execute_scripts {EFUSE_S_DIR}/efuse_burn2.py --index 28 --configfiles {EFUSE_S_DIR}/esp32xx/config2.json'
        self.espefuse_py(cmd)
        output = self.espefuse_py('summary -d')
        if (arg_chip in ['esp32', 'esp32c2']):
            assert ('[2 ] read_regs: ' in output)
            assert ('[3 ] read_regs: ffffffff ' in output)
        else:
            assert ('[7 ] read_regs: ' in output)
            assert ('[8 ] read_regs: ffffffff ' in output)
def test_rounding():
    """Each Fxp rounding mode maps representative inputs to documented outputs."""
    # 'trunc' is checked symmetrically: truncation toward -inf of the positive
    # value mirrors to the negated expectation.
    x = Fxp(None, True, 8, 2, rounding='trunc')
    for vin, vout in zip([0.0, 1.0, 1.24, 1.25, 1.26, 1.49, 1.5],
                         [0.0, 1.0, 1.0, 1.25, 1.25, 1.25, 1.5]):
        assert (x(vin) == vout)
        assert (x((- vin)) == (- vout))

    signed_inputs = [0.0, 1.0, 1.24, 1.25, 1.26, 1.49, 1.5,
                     (- 1.0), (- 1.24), (- 1.25), (- 1.26), (- 1.49), (- 1.5)]
    expected = {
        'ceil': [0.0, 1.0, 1.25, 1.25, 1.5, 1.5, 1.5,
                 (- 1.0), (- 1.0), (- 1.25), (- 1.25), (- 1.25), (- 1.5)],
        'floor': [0.0, 1.0, 1.0, 1.25, 1.25, 1.25, 1.5,
                  (- 1.0), (- 1.25), (- 1.25), (- 1.5), (- 1.5), (- 1.5)],
        'fix': [0.0, 1.0, 1.0, 1.25, 1.25, 1.25, 1.5,
                (- 1.0), (- 1.0), (- 1.25), (- 1.25), (- 1.25), (- 1.5)],
        'around': [0.0, 1.0, 1.25, 1.25, 1.25, 1.5, 1.5,
                   (- 1.0), (- 1.25), (- 1.25), (- 1.25), (- 1.5), (- 1.5)],
    }
    for mode, outs in expected.items():
        x = Fxp(None, True, 8, 2, rounding=mode)
        for vin, vout in zip(signed_inputs, outs):
            assert (x(vin) == vout)
class CoverPhoto(AbstractCrudObject):
    """Graph-API CoverPhoto node wrapper: field constants and type map."""

    def __init__(self, fbid=None, parent_id=None, api=None):
        # Marker used elsewhere in the SDK to recognise this node type.
        self._isCoverPhoto = True
        super(CoverPhoto, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # Field-name constants mirroring the Graph API response keys.
        cover_id = 'cover_id'
        id = 'id'
        offset_x = 'offset_x'
        offset_y = 'offset_y'
        source = 'source'

    # Graph field name -> wire type, used by the SDK's (de)serialisation.
    _field_types = {
        'cover_id': 'string',
        'id': 'string',
        'offset_x': 'float',
        'offset_y': 'float',
        'source': 'string',
    }

    # Fixed: takes ``cls`` but was missing its @classmethod decorator, so it
    # would have received an instance (or failed) when called on the class.
    @classmethod
    def _get_field_enum_info(cls):
        """Return enum metadata for fields (this node defines none)."""
        field_enum_info = {}
        return field_enum_info
def test_flexx_multiprocessing():
    """Spawn ten daemon processes running multiprocessing_func and join all.

    Passing means every child starts and terminates without raising here.
    Fixed: removed dead timing variables (t0/t1 were computed but never used)
    and the no-op ``assert True``.
    """
    processes = []
    for _ in range(10):
        p = multiprocessing.Process(target=multiprocessing_func)
        # Daemonise so stray children cannot outlive the test run.
        p.daemon = True
        p.start()
        processes.append(p)
    for p in processes:
        p.join()
def retry_stream_api(num_retries: int=10, backoff_base: float=2.0, warn_user: bool=True):
    """Decorator factory: retry the wrapped call on HTTP 502 with backoff.

    Retries up to *num_retries* times, sleeping ``backoff_base ** (attempt+2)``
    seconds between attempts; any other error (or the final attempt) re-raises.
    """
    # NOTE(review): both messages are f-strings without live placeholders and
    # retry_limit_msg / backoff_msg are never used in this copy.
    retry_limit_msg = f'Error: Reached rate limit, passing...'
    backoff_msg = f'Error: API Bad gateway. Waiting {{backoff}} seconds...'

    def _wrapper(func):
        # NOTE(review): mangled line — bare "(func)" is decorator residue,
        # presumably "@functools.wraps(func)" with its name and "@" lost.
        (func)
        def _wrapped(*args, **kwargs):
            # NOTE(review): user_warned is computed but never used afterwards.
            user_warned = (not warn_user)
            num_attempts = (num_retries + 1)
            for attempt in range(1, (num_attempts + 1)):
                try:
                    return func(*args, **kwargs)
                except Exception as e:
                    # NOTE(review): mangled — "e." lost its attribute name
                    # (likely an HTTP status field); syntactically invalid
                    # as found. Restore from upstream before running.
                    if ((e. != 502) or (attempt == num_attempts)):
                        raise
                    # Exponential backoff before the next attempt.
                    backoff = (backoff_base ** (attempt + 2))
                    time.sleep(backoff)
        return _wrapped
    return _wrapper
class OptionSeriesBellcurveSonificationTracksMappingRate(Options):
    """Options proxy for ``series.bellcurve.sonification.tracks.mapping.rate``.

    Fixed: each option was defined as two same-named plain methods (a getter
    returning ``_config_get`` and a setter calling ``_config``) — clearly
    property/setter pairs whose decorators were stripped; the second ``def``
    silently shadowed the first.  Restored as @property/@x.setter.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def protect_options(p):
    """Register the key read/write protection flags on argument parser *p*."""
    write_help = (
        'Disable write-protecting of the key. The key remains writable. '
        '(The keys use the RS coding scheme that does not support post-write '
        'data changes. Forced write can damage RS encoding bits.) The '
        'write-protecting of keypurposes does not depend on the option, '
        'it will be set anyway.'
    )
    read_help = (
        'Disable read-protecting of the key. The key remains readable software.'
    )
    p.add_argument('--no-write-protect', help=write_help, action='store_true')
    p.add_argument('--no-read-protect', help=read_help, action='store_true')
# Fixed: the bare ".parametrize(...)" residue was a pytest marker that lost
# its "@pytest.mark" prefix (a syntax error as found); restored below.
@pytest.mark.parametrize('pathdef, expected', [
    # Absolute line-tos, z closes back to the start point.
    ('M 100 100 L 300 100 L 200 300 z',
     [('moveTo', ((100.0, 100.0),)), ('lineTo', ((300.0, 100.0),)),
      ('lineTo', ((200.0, 300.0),)), ('lineTo', ((100.0, 100.0),)),
      ('closePath', ())]),
    # A second M starts a new sub-path; the open one ends with endPath.
    ('M 0 0 L 50 20 M 100 100 L 300 100 L 200 300 z',
     [('moveTo', ((0.0, 0.0),)), ('lineTo', ((50.0, 20.0),)), ('endPath', ()),
      ('moveTo', ((100.0, 100.0),)), ('lineTo', ((300.0, 100.0),)),
      ('lineTo', ((200.0, 300.0),)), ('lineTo', ((100.0, 100.0),)),
      ('closePath', ())]),
    # S mirrors the previous control point for a smooth cubic.
    ('M100,200 C100,100 250,100 250,200 S400,300 400,200',
     [('moveTo', ((100.0, 200.0),)),
      ('curveTo', ((100.0, 100.0), (250.0, 100.0), (250.0, 200.0))),
      ('curveTo', ((250.0, 300.0), (400.0, 300.0), (400.0, 200.0))),
      ('endPath', ())]),
    ('M100,200 C100,100 400,100 400,200',
     [('moveTo', ((100.0, 200.0),)),
      ('curveTo', ((100.0, 100.0), (400.0, 100.0), (400.0, 200.0))),
      ('endPath', ())]),
    ('M100,500 C25,400 475,400 400,500',
     [('moveTo', ((100.0, 500.0),)),
      ('curveTo', ((25.0, 400.0), (475.0, 400.0), (400.0, 500.0))),
      ('endPath', ())]),
    ('M100,800 C175,700 325,700 400,800',
     [('moveTo', ((100.0, 800.0),)),
      ('curveTo', ((175.0, 700.0), (325.0, 700.0), (400.0, 800.0))),
      ('endPath', ())]),
    ('M600,200 C675,100 975,100 900,200',
     [('moveTo', ((600.0, 200.0),)),
      ('curveTo', ((675.0, 100.0), (975.0, 100.0), (900.0, 200.0))),
      ('endPath', ())]),
    ('M600,500 C600,350 900,650 900,500',
     [('moveTo', ((600.0, 500.0),)),
      ('curveTo', ((600.0, 350.0), (900.0, 650.0), (900.0, 500.0))),
      ('endPath', ())]),
    ('M600,800 C625,700 725,700 750,800 S875,900 900,800',
     [('moveTo', ((600.0, 800.0),)),
      ('curveTo', ((625.0, 700.0), (725.0, 700.0), (750.0, 800.0))),
      ('curveTo', ((775.0, 900.0), (875.0, 900.0), (900.0, 800.0))),
      ('endPath', ())]),
    # T mirrors the previous control point for a smooth quadratic.
    ('M200,300 Q400,50 600,300 T1000,300',
     [('moveTo', ((200.0, 300.0),)),
      ('qCurveTo', ((400.0, 50.0), (600.0, 300.0))),
      ('qCurveTo', ((800.0, 550.0), (1000.0, 300.0))),
      ('endPath', ())]),
    # Relative moveto (m) after a draw command starts a new sub-path.
    ('M 0 0 L 50 20 m 50 80 L 300 100 L 200 300 z',
     [('moveTo', ((0.0, 0.0),)), ('lineTo', ((50.0, 20.0),)), ('endPath', ()),
      ('moveTo', ((100.0, 100.0),)), ('lineTo', ((300.0, 100.0),)),
      ('lineTo', ((200.0, 300.0),)), ('lineTo', ((100.0, 100.0),)),
      ('closePath', ())]),
    # Relative smooth cubic (s) with no preceding curve reuses the current point.
    ('M100,200 s 150,-100 150,0',
     [('moveTo', ((100.0, 200.0),)),
      ('curveTo', ((100.0, 200.0), (250.0, 100.0), (250.0, 200.0))),
      ('endPath', ())]),
    # Relative smooth quadratic (t) with no preceding curve.
    ('M100,200 t 150,0',
     [('moveTo', ((100.0, 200.0),)),
      ('qCurveTo', ((100.0, 200.0), (250.0, 200.0))),
      ('endPath', ())]),
    # Relative lineto (l).
    ('M 100 100 L 300 100 l -100 200 z',
     [('moveTo', ((100.0, 100.0),)), ('lineTo', ((300.0, 100.0),)),
      ('lineTo', ((200.0, 300.0),)), ('lineTo', ((100.0, 100.0),)),
      ('closePath', ())]),
    # Relative quadratic (q).
    ('M200,300 q200,-250 400,0',
     [('moveTo', ((200.0, 300.0),)),
      ('qCurveTo', ((400.0, 50.0), (600.0, 300.0))),
      ('endPath', ())]),
    # Horizontal lineto, absolute (H) and relative (h).
    ('M 100 100 H 300 L 200 300 z',
     [('moveTo', ((100.0, 100.0),)), ('lineTo', ((300.0, 100.0),)),
      ('lineTo', ((200.0, 300.0),)), ('lineTo', ((100.0, 100.0),)),
      ('closePath', ())]),
    ('M 100 100 h 200 L 200 300 z',
     [('moveTo', ((100.0, 100.0),)), ('lineTo', ((300.0, 100.0),)),
      ('lineTo', ((200.0, 300.0),)), ('lineTo', ((100.0, 100.0),)),
      ('closePath', ())]),
    # Vertical lineto, absolute (V) and relative (v).
    ('M 100 100 V 300 L 200 300 z',
     [('moveTo', ((100.0, 100.0),)), ('lineTo', ((100.0, 300.0),)),
      ('lineTo', ((200.0, 300.0),)), ('lineTo', ((100.0, 100.0),)),
      ('closePath', ())]),
    ('M 100 100 v 200 L 200 300 z',
     [('moveTo', ((100.0, 100.0),)), ('lineTo', ((100.0, 300.0),)),
      ('lineTo', ((200.0, 300.0),)), ('lineTo', ((100.0, 100.0),)),
      ('closePath', ())]),
])
def test_parse_path(pathdef, expected):
    """parse_path must emit exactly the expected pen calls for each SVG path."""
    pen = RecordingPen()
    parse_path(pathdef, pen)
    assert (pen.value == expected)
class ForumIndex(MethodView):
    """Forum landing page: the category tree plus board-wide statistics."""

    def get(self):
        categories = Category.get_all(user=real(current_user))
        user_count = User.query.count()
        topic_count = Topic.query.count()
        post_count = Post.query.count()
        newest_user = User.query.order_by(User.id.desc()).first()
        # With Redis available the online counters come from its presence
        # tracking; otherwise fall back to a lastseen query (guests unknown).
        if current_app.config['REDIS_ENABLED']:
            online_users = len(get_online_users())
            online_guests = len(get_online_users(guest=True))
        else:
            online_users = User.query.filter(User.lastseen >= time_diff()).count()
            online_guests = None
        return render_template(
            'forum/index.html',
            categories=categories,
            user_count=user_count,
            topic_count=topic_count,
            post_count=post_count,
            newest_user=newest_user,
            online_users=online_users,
            online_guests=online_guests,
        )
class DistributionRestClientTestCase(TestCase):
    """REST-path and response-decoding tests for DistributionRestClient.

    Fixed: every test was defined without ``self`` (calling it as a bound
    method would raise TypeError) — consistent with this file's pattern of
    stripped decorators, the missing @staticmethod markers are restored.
    """

    @staticmethod
    def test_CommunityPool():
        content = {'pool': [{'denom': 'string', 'amount': '123'}]}
        mock_client = MockRestClient(json_encode(content).encode('utf8'))
        expected_response = ParseDict(content, QueryCommunityPoolResponse())
        distribution = DistributionRestClient(mock_client)
        assert (distribution.CommunityPool() == expected_response)
        assert (mock_client.last_base_url == '/cosmos/distribution/v1beta1/community_pool')

    @staticmethod
    def test_DelegationTotalRewards():
        content = {'rewards': [{'validator_address': 'string', 'reward': [{'denom': 'string', 'amount': '123'}]}], 'total': [{'denom': 'string', 'amount': '123'}]}
        mock_client = MockRestClient(json_encode(content))
        expected_response = ParseDict(content, QueryDelegationTotalRewardsResponse())
        distribution = DistributionRestClient(mock_client)
        assert (distribution.DelegationTotalRewards(QueryDelegationTotalRewardsRequest(delegator_address='delegator_addr')) == expected_response)
        assert (mock_client.last_base_url == '/cosmos/distribution/v1beta1/delegators/delegator_addr/rewards')

    @staticmethod
    def test_DelegationRewards():
        content = {'rewards': [{'denom': 'string', 'amount': '1234'}]}
        mock_client = MockRestClient(json_encode(content))
        expected_response = ParseDict(content, QueryDelegationRewardsResponse())
        distribution = DistributionRestClient(mock_client)
        assert (distribution.DelegationRewards(QueryDelegationRewardsRequest(delegator_address='delegator_addr', validator_address='validator_addr')) == expected_response)
        assert (mock_client.last_base_url == '/cosmos/distribution/v1beta1/delegators/delegator_addr/rewards/validator_addr')

    @staticmethod
    def test_DelegatorValidators():
        content = {'validators': ['string']}
        mock_client = MockRestClient(json_encode(content))
        expected_response = ParseDict(content, QueryDelegatorValidatorsResponse())
        distribution = DistributionRestClient(mock_client)
        assert (distribution.DelegatorValidators(QueryDelegatorValidatorsRequest(delegator_address='delegator_addr')) == expected_response)
        assert (mock_client.last_base_url == '/cosmos/distribution/v1beta1/delegators/delegator_addr/validators')

    @staticmethod
    def test_DelegatorWithdrawAddress():
        content = {'withdraw_address': 'string'}
        mock_client = MockRestClient(json_encode(content))
        expected_response = ParseDict(content, QueryDelegatorWithdrawAddressResponse())
        distribution = DistributionRestClient(mock_client)
        assert (distribution.DelegatorWithdrawAddress(QueryDelegatorWithdrawAddressRequest(delegator_address='delegator_addr')) == expected_response)
        assert (mock_client.last_base_url == '/cosmos/distribution/v1beta1/delegators/delegator_addr/withdraw_address')

    @staticmethod
    def test_Params():
        content = {'params': {'community_tax': '0.1', 'base_proposer_reward': '0.2', 'bonus_proposer_reward': '0.3', 'withdraw_addr_enabled': True}}
        mock_client = MockRestClient(json_encode(content))
        expected_response = ParseDict(content, QueryParamsResponse())
        distribution = DistributionRestClient(mock_client)
        # Sanity-check the proto decoding itself before exercising the client.
        assert (expected_response.params.community_tax == '0.1')
        assert (expected_response.params.base_proposer_reward == '0.2')
        assert (expected_response.params.bonus_proposer_reward == '0.3')
        assert (expected_response.params.withdraw_addr_enabled is True)
        assert (distribution.Params() == expected_response)
        assert (mock_client.last_base_url == '/cosmos/distribution/v1beta1/params')

    @staticmethod
    def test_ValidatorCommission():
        content = {'commission': {'commission': [{'denom': 'string', 'amount': '1234'}]}}
        mock_client = MockRestClient(json_encode(content))
        expected_response = ParseDict(content, QueryValidatorCommissionResponse())
        distribution = DistributionRestClient(mock_client)
        assert (distribution.ValidatorCommission(QueryValidatorCommissionRequest(validator_address='validator_addr')) == expected_response)
        assert (mock_client.last_base_url == '/cosmos/distribution/v1beta1/validators/validator_addr/commission')

    @staticmethod
    def test_ValidatorOutstandingRewards():
        content = {'rewards': {'rewards': [{'denom': 'string', 'amount': '1234'}]}}
        mock_client = MockRestClient(json_encode(content))
        expected_response = ParseDict(content, QueryValidatorOutstandingRewardsResponse())
        distribution = DistributionRestClient(mock_client)
        assert (distribution.ValidatorOutstandingRewards(QueryValidatorOutstandingRewardsRequest(validator_address='validator_addr')) == expected_response)
        assert (mock_client.last_base_url == '/cosmos/distribution/v1beta1/validators/validator_addr/outstanding_rewards')

    @staticmethod
    def test_ValidatorSlashes():
        content = {'slashes': [{'validator_period': '1', 'fraction': '1'}], 'pagination': {'next_key': None, 'total': '1'}}
        mock_client = MockRestClient(json_encode(content))
        expected_response = ParseDict(content, QueryValidatorSlashesResponse())
        distribution = DistributionRestClient(mock_client)
        assert (distribution.ValidatorSlashes(QueryValidatorSlashesRequest(validator_address='validator_addr')) == expected_response)
        assert (mock_client.last_base_url == '/cosmos/distribution/v1beta1/validators/validator_addr/slashes')
def test_custom_config_values():
    """Every chunker honours an explicit config: size 500, overlap 0, len()."""
    for chunker_cls in chunker_common_config:
        splitter = chunker_cls(config=chunker_config).text_splitter
        assert (splitter._chunk_size == 500)
        assert (splitter._chunk_overlap == 0)
        assert (splitter._length_function == len)
# NOTE(review): decorator residue — only the argument list survives below
# (likely a view/validation decorator, e.g. a Pyramid view config with a
# marshmallow body validator); its name and "@" were lost in this copy and
# must be restored before use.  As written this line is not valid Python.
(schema=CartSchema, validators=(marshmallow_body_validator,), content_type='application/json')
def create_sale(request):
    """Create a Cart from the JSON body's ``products`` list and return 201.

    Each entry of ``request.json['products']`` is splatted into ``Product``.
    The session is flushed (not committed) so the cart's generated id is
    available for serialization in the response.
    """
    cart = Cart()
    for product in request.json.get('products'):
        cart.products.append(Product(**product))
    request.dbsession.add(cart)
    # Flush so the cart gets its primary key before dumping.
    request.dbsession.flush()
    schema = CartSchema()
    return HTTPCreated(json=schema.dump(cart).data)
def test_generic_data_model():
    """A valid spec constructs; malformed type/name/flag specs raise."""
    GenericDataModel('test', {'attr1': {'name': 'attr1', 'type': 'str', 'is_required': True}})
    bad_specs = [
        # unknown attribute type
        {'attr1': {'name': 'attr1', 'type': 'bad type', 'is_required': True}},
        # non-string attribute name
        {'attr1': {'name': 1231, 'type': 'str', 'is_required': True}},
        # is_required must be a bool, not a string
        {'attr1': {'name': 'attr1', 'type': 'str', 'is_required': 'True'}},
    ]
    for spec in bad_specs:
        with pytest.raises(AEAEnforceError):
            GenericDataModel('test', spec)