code
stringlengths
281
23.7M
def cluge_package_imports(name, package):
    """Kluge the import machinery so this module can run as a script.

    When executed directly (``name == '__main__'`` with no package), put the
    repository root on ``sys.path`` instead of the script's own directory,
    import the ``pipeline`` package, and rewrite this module's ``__package__``
    so relative imports resolve as if it were run with ``-m``.
    """
    if name != '__main__' or package is not None:
        return
    this_file = Path(__file__).expanduser().resolve()
    script_dir = this_file.parent
    repo_root = this_file.parents[1]
    # Make the repo root importable; drop the script dir to avoid shadowing.
    sys.path.append(str(repo_root))
    try:
        sys.path.remove(str(script_dir))
    except ValueError:
        pass
    import pipeline  # noqa: F401  -- imported for its side effects / registration
    sys.modules[name].__package__ = 'pipeline'
def train(model, train_loader, val_loader, optimizer, init_lr=0.002, checkpoint_dir=None, checkpoint_interval=None, nepochs=None, clip_thresh=1.0):
    """Training loop: decayed-LR optimization with periodic checkpointing.

    Relies on module globals: global_step, global_epoch, use_cuda,
    use_multigpu, logfile_name, plus helpers learning_rate_decay,
    save_checkpoint, data_parallel_workaround and log_value.
    NOTE(review): val_loader is accepted but never used in this body.
    """
    model.train()
    if use_cuda:
        model = model.cuda()
    linear_dim = model.linear_dim  # NOTE(review): unused below
    criterion = nn.L1Loss()  # NOTE(review): unused below; loss comes from model(x)
    global global_step, global_epoch
    while (global_epoch < nepochs):
        h = open(logfile_name, 'a')
        running_loss = 0.0
        for (step, x) in tqdm(enumerate(train_loader)):
            # Per-step learning-rate schedule, applied to every param group.
            current_lr = learning_rate_decay(init_lr, global_step)
            for param_group in optimizer.param_groups:
                param_group['lr'] = current_lr
            optimizer.zero_grad()
            x = Variable(x)
            if use_cuda:
                x = x.cuda()
            if use_multigpu:
                # NOTE(review): `mel` is not defined anywhere in this function,
                # and `loss` is not assigned on this branch — the multi-GPU
                # path would raise NameError as written. Confirm upstream.
                (outputs, r_, o_) = data_parallel_workaround(model, (x, mel))
                (mel_outputs, linear_outputs, attn) = (outputs[0], outputs[1], outputs[2])
            else:
                loss = model(x)
            if ((global_step > 0) and ((global_step % checkpoint_interval) == 0)):
                save_checkpoint(model, optimizer, global_step, checkpoint_dir, global_epoch)
            loss.backward(retain_graph=False)
            grad_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), clip_thresh)
            optimizer.step()
            log_value('loss', float(loss.item()), global_step)
            log_value('gradient norm', grad_norm, global_step)
            log_value('learning rate', current_lr, global_step)
            global_step += 1
            running_loss += loss.item()
        # Epoch-level logging: mean loss over the number of batches.
        averaged_loss = (running_loss / len(train_loader))
        log_value('loss (per epoch)', averaged_loss, global_epoch)
        h.write((((('Loss after epoch ' + str(global_epoch)) + ': ') + format((running_loss / len(train_loader)))) + '\n'))
        h.close()
        global_epoch += 1
def test_substitution(base_message, chat):
    """A well-formed substitution verifies; malformed ones raise AssertionError."""
    # Happy path: a (start, end) span mapped to a chat object.
    base_message.substitutions = Substitutions({(0, 3): chat})
    base_message.verify()
    # Each of these shapes must be rejected by the Substitutions constructor.
    with pytest.raises(AssertionError):
        Substitutions([chat])                       # not a mapping
    with pytest.raises(AssertionError):
        Substitutions({(1, 2, 3): chat})            # key is not a 2-tuple
    with pytest.raises(AssertionError):
        Substitutions({(1, 2, 3): chat.uid})        # bad key and bad value type
    with pytest.raises(AssertionError):
        Substitutions({(2, 1): chat})               # start after end
    with pytest.raises(AssertionError):
        Substitutions({(1, 3): chat, (2, 4): chat})  # overlapping spans
class TransactionBuilderAPI(TransactionDecoderAPI):
    """Interface for building, serializing and deserializing transactions.

    NOTE(review): methods take ``cls`` but carry no visible @classmethod
    decorator here — presumably applied by the implementing class or lost
    in extraction; confirm against the original source.
    """

    def deserialize(cls, encoded: DecodedZeroOrOneLayerRLP) -> 'SignedTransactionAPI':
        """Decode an RLP payload into a signed transaction."""
        ...

    def serialize(cls, obj: 'SignedTransactionAPI') -> DecodedZeroOrOneLayerRLP:
        """Encode a signed transaction into its RLP representation."""
        ...

    def create_unsigned_transaction(cls, *, nonce: int, gas_price: int, gas: int, to: Address, value: int, data: bytes) -> UnsignedTransactionAPI:
        """Build an unsigned transaction from keyword-only fields."""
        ...

    def new_transaction(cls, nonce: int, gas_price: int, gas: int, to: Address, value: int, data: bytes, v: int, r: int, s: int) -> 'SignedTransactionAPI':
        """Build a signed transaction including the v/r/s signature values."""
        ...
class TestAddResource(BaseTestCase):
    """Tests for the add_resource() helper."""

    def test_pkey(self, db_session):
        # An explicitly supplied primary key is honored verbatim.
        resource = add_resource(db_session, 99, 'some random name')
        assert (resource.resource_id == 99)

    def test_nopkey(self, db_session):
        # Passing None lets the database assign the next id
        # (1, since the test session starts with an empty table).
        resource = add_resource(db_session, None, 'some random name')
        assert (resource.resource_id == 1)
def add_hook(dm, setup=None, teardown=None, call_setup=False, call_teardown=False):
    """Register setup/teardown callables on the top of dm's hook stack.

    Raises ValueError when the '__setup_hooks__' stack is empty. When
    call_setup / call_teardown is true, the corresponding callable is also
    invoked immediately after registration.
    """
    hook_stack = dm.getAttr('__setup_hooks__')
    if not hook_stack:
        raise ValueError('Expecting non-empty stack')
    top_hook = hook_stack[-1]
    if setup is not None:
        top_hook.add_setup(setup)
        if call_setup:
            setup()
    if teardown is not None:
        top_hook.add_teardown(teardown)
        if call_teardown:
            teardown()
class TripAvgConsumption(object):
    """Swagger-codegen style model: a trip's average consumption.

    BUG FIX: the original defined each getter/setter pair as two plain
    ``def type(...)`` / ``def value(...)`` methods with no ``@property`` /
    ``@x.setter`` decorators, so the second def silently shadowed the first,
    the validating setter never ran, and reading ``instance.type`` on an
    instance created with ``type=None`` returned a bound method. The
    decorators (standard in swagger-codegen output) are restored here.
    """

    # Maps attribute name -> swagger type, and attribute -> JSON key.
    swagger_types = {'type': 'str', 'value': 'float'}
    attribute_map = {'type': 'type', 'value': 'value'}

    def __init__(self, type=None, value=None):
        """TripAvgConsumption - a model defined in Swagger."""
        self._type = None
        self._value = None
        self.discriminator = None
        if type is not None:
            self.type = type      # runs the validating setter
        if value is not None:
            self.value = value

    @property
    def type(self):
        """Consumption type; one of 'Fuel' or 'Electric'."""
        return self._type

    @type.setter
    def type(self, type):
        """Validate and set the consumption type."""
        allowed_values = ['Fuel', 'Electric']
        if type not in allowed_values:
            raise ValueError('Invalid value for `type` ({0}), must be one of {1}'.format(type, allowed_values))
        self._type = type

    @property
    def value(self):
        """Average consumption value."""
        return self._value

    @value.setter
    def value(self, value):
        """Set the average consumption value (no validation)."""
        self._value = value

    def to_dict(self):
        """Return the model's properties as a dict, recursing into sub-models."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(lambda x: x.to_dict() if hasattr(x, 'to_dict') else x, value))
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item, value.items()))
            else:
                result[attr] = value
        if issubclass(TripAvgConsumption, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True if both objects are TripAvgConsumption with equal state."""
        if not isinstance(other, TripAvgConsumption):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """True if the objects differ."""
        return not (self == other)
class BlockFieldsFactory(factory.Factory):
    """factory_boy factory producing BlockFields test fixtures."""

    class Meta():
        model = BlockFields

    # Sub-factory builds the header; the lazy lambdas defer batch creation
    # to instantiation time and freeze the results as tuples.
    header = factory.SubFactory(BlockHeaderFactory)
    transactions = factory.LazyFunction((lambda : tuple(UninterpretedTransactionFactory.create_batch(2))))
    uncles = factory.LazyFunction((lambda : tuple(BlockHeaderFactory.create_batch(2))))
def load_checkpoint(checkpoint_path, model, optimizer=None, skip_optimizer=False):
    """Load model (and optionally optimizer) state from a checkpoint file.

    Returns (model, optimizer, learning_rate, iteration). Keys missing from
    the checkpoint keep the model's current weights; 'ja_bert_proj' weights
    are zeroed for backward compatibility with older checkpoints.
    """
    assert os.path.isfile(checkpoint_path)
    checkpoint_dict = torch.load(checkpoint_path, map_location='cpu')
    iteration = checkpoint_dict['iteration']
    learning_rate = checkpoint_dict['learning_rate']
    if ((optimizer is not None) and (not skip_optimizer) and (checkpoint_dict['optimizer'] is not None)):
        optimizer.load_state_dict(checkpoint_dict['optimizer'])
    elif ((optimizer is None) and (not skip_optimizer)):
        # NOTE(review): this branch dereferences `optimizer` immediately after
        # checking it is None — it would raise AttributeError if ever taken.
        # The guard looks inverted (perhaps meant `optimizer is not None and
        # skip_optimizer`); confirm against upstream before changing.
        new_opt_dict = optimizer.state_dict()
        new_opt_dict_params = new_opt_dict['param_groups'][0]['params']
        new_opt_dict['param_groups'] = checkpoint_dict['optimizer']['param_groups']
        new_opt_dict['param_groups'][0]['params'] = new_opt_dict_params
        optimizer.load_state_dict(new_opt_dict)
    saved_state_dict = checkpoint_dict['model']
    # Unwrap DataParallel/DistributedDataParallel when present.
    if hasattr(model, 'module'):
        state_dict = model.module.state_dict()
    else:
        state_dict = model.state_dict()
    new_state_dict = {}
    for (k, v) in state_dict.items():
        try:
            new_state_dict[k] = saved_state_dict[k]
            assert (saved_state_dict[k].shape == v.shape), (saved_state_dict[k].shape, v.shape)
        except:
            # Key missing or shape mismatch: keep/zero the current weight.
            # (Bare except is deliberate best-effort here — KeyError and
            # AssertionError are both expected.)
            if ('ja_bert_proj' in k):
                v = torch.zeros_like(v)
                logger.warn(f'Seems you are using the old version of the model, the {k} is automatically set to zero for backward compatibility')
            else:
                logger.error(f'{k} is not in the checkpoint')
            new_state_dict[k] = v
    if hasattr(model, 'module'):
        model.module.load_state_dict(new_state_dict, strict=False)
    else:
        model.load_state_dict(new_state_dict, strict=False)
    logger.info("Loaded checkpoint '{}' (iteration {})".format(checkpoint_path, iteration))
    return (model, optimizer, learning_rate, iteration)
class ModelWrapper(tf.keras.Model):
    """Keras Model wrapper that trains via a Levenberg-Marquardt-style Trainer
    instead of the standard gradient-descent train step."""

    def __init__(self, model):
        super(ModelWrapper, self).__init__()
        self.model = model      # the wrapped inner model
        self.trainer = None     # created in compile()

    def call(self, inputs, training=None, mask=None):
        # Forward pass delegates entirely to the wrapped model.
        return self.model(inputs, training, mask)

    # NOTE(review): the optimizer/loss/damping defaults below are instantiated
    # once at class-definition time and shared across all compile() calls that
    # rely on the defaults — confirm this is intended.
    def compile(self, optimizer=tf.keras.optimizers.SGD(learning_rate=1.0), loss=MeanSquaredError(), damping_algorithm=DampingAlgorithm(), attempts_per_step=10, solve_method='qr', jacobian_max_num_rows=100, experimental_use_pfor=True, metrics=None, loss_weights=None, weighted_metrics=None, **kwargs):
        """Configure metrics via Keras, then build the custom Trainer.

        run_eagerly=True is forced because the custom train step is not
        graph-compiled.
        """
        super(ModelWrapper, self).compile(optimizer=optimizer, loss=loss, metrics=metrics, loss_weights=loss_weights, weighted_metrics=weighted_metrics, run_eagerly=True)
        self.built = self.model.built
        self.trainer = Trainer(model=self, optimizer=optimizer, loss=loss, damping_algorithm=damping_algorithm, attempts_per_step=attempts_per_step, solve_method=solve_method, jacobian_max_num_rows=jacobian_max_num_rows, experimental_use_pfor=experimental_use_pfor)

    def train_step(self, data):
        """Delegate one training step to the Trainer and assemble the logs."""
        (x, y, sample_weight) = data_adapter.unpack_x_y_sample_weight(data)
        (loss, y_pred, attempts, stop_training) = self.trainer.train_step(x, y)
        logs = {'damping_factor': self.trainer.damping_factor, 'attempts': attempts, 'loss': loss}
        self.compiled_metrics.update_state(y, y_pred)
        self._validate_target_and_loss(y, loss)
        metrics = self.compute_metrics(x, y, y_pred, sample_weight)
        # 'loss' is already in logs from the trainer; avoid overwriting it.
        if ('loss' in metrics):
            del metrics['loss']
        logs.update(metrics)
        # The trainer can request early stopping (e.g. damping failure).
        self.stop_training = stop_training
        return logs

    def fit(self, x=None, y=None, batch_size=None, epochs=1, verbose=1, callbacks=None, **kwargs):
        """Standard fit, plus a progress bar showing damping_factor/attempts."""
        if (verbose > 0):
            if (callbacks is None):
                callbacks = []
            callbacks.append(tf.keras.callbacks.ProgbarLogger(count_mode='steps', stateful_metrics=['damping_factor', 'attempts']))
        return super(ModelWrapper, self).fit(x=x, y=y, batch_size=batch_size, epochs=epochs, verbose=verbose, callbacks=callbacks, **kwargs)
def test_generated_css1():
    """The JS sub-objects carry no CSS; CSS lives on the component classes."""
    # No component's JS namespace should expose a CSS attribute.
    for component in (MyPComponent1, MyJComponent1, MyPComponent2, MyJComponent2):
        assert not hasattr(component.JS, 'CSS')
    # The first pair has generated CSS, the second pair has none.
    assert MyPComponent1.CSS == 'xx'
    assert MyJComponent1.CSS == 'xx'
    assert MyPComponent2.CSS == ''
    assert MyJComponent2.CSS == ''
# NOTE(review): the route decorator's attribute was lost in extraction —
# presumably this was @<blueprint>.route('/api/users/<endpoint_id>');
# confirm against the original source.
('/api/users/<endpoint_id>')
def users(endpoint_id):
    """Return a JSON list of {'user', 'hits'} pairs for one monitored endpoint."""
    post_to_back_if_telemetry_enabled(**{'name': f'users/{endpoint_id}'})
    with session_scope() as session:
        users_hits = get_users(session, endpoint_id)
        dicts = []
        for uh in users_hits:
            # get_users yields (user, hit_count) tuples — TODO confirm.
            dicts.append({'user': uh[0], 'hits': uh[1]})
        return jsonify(dicts)
class GcalendarClient():
    """Thin wrapper around the Google Calendar v3 API."""

    def __init__(self, access_token: str):
        """Build an authenticated Calendar API client from an OAuth access token."""
        self.client = googleapiclient.discovery.build('calendar', 'v3', credentials=gcreds.Credentials(token=access_token), static_discovery=False)

    def events(self, start: Optional[datetime.datetime] = None, end: Optional[datetime.datetime] = None) -> List['Event']:
        """Return up to 20 events of the primary calendar, ordered by start time.

        BUG FIX: the default for `start` was `datetime.datetime.utcnow()`,
        which is evaluated once at import time, so every later call silently
        used a stale timestamp. The default is now None and resolved to "now"
        at call time; passing `start` explicitly behaves as before.
        """
        if start is None:
            start = datetime.datetime.utcnow()
        query = self.client.events().list(calendarId='primary', orderBy='startTime', singleEvents=True, timeMin=_to_utc_iso(start), timeMax=_to_utc_iso(end), maxResults=20)
        raw_events = query.execute()
        raw_events = raw_events.get('items', [])
        return [Event.from_raw_event(raw_event) for raw_event in raw_events]

    def create_event(self, title: str, description: str, attendees: List[str], start: datetime.datetime, duration: datetime.timedelta = datetime.timedelta(seconds=3600)) -> 'Event':
        """Insert an event on the primary calendar and return the created Event.

        `attendees` is a list of e-mail addresses; `duration` defaults to 1h.
        """
        event = {'start': {'dateTime': start.isoformat()}, 'end': {'dateTime': (start + duration).isoformat()}, 'attendees': [{'email': attendee} for attendee in attendees], 'summary': title, 'description': description}
        event = self.client.events().insert(calendarId='primary', body=event).execute()
        return Event.from_raw_event(event)
def usort_command(fn: Callable[..., int]) -> Callable[..., None]:
    """Decorator for CLI commands: turn the wrapped function's integer return
    into the process exit code, printing benchmark timings first when enabled.

    BUG FIX: the original body contained a bare `(fn)` expression where the
    `@wraps(fn)` decorator belonged (lost in extraction), so the returned
    wrapper did not preserve the command's name/docstring, breaking help text.
    """
    # Local import keeps this fix self-contained within the block.
    from functools import wraps

    @wraps(fn)
    def wrapper(*args: Any, **kwargs: Any) -> None:
        exit_code = fn(*args, **kwargs) or 0
        if BENCHMARK:
            print_timings(click.echo, timings=get_timings())
        sys.exit(exit_code)

    return wrapper
def fortios_extension_controller(data, fos):
    """Dispatch the extension-controller/fortigate task and report its outcome.

    Returns (failed, changed, response, empty-dict) for the Ansible module.
    """
    fos.do_member_operation('extension-controller', 'fortigate')
    if data['extension_controller_fortigate']:
        resp = extension_controller_fortigate(data, fos)
    else:
        # fail_json is expected to terminate the module run; if it ever
        # returned, `resp` below would be unbound (NameError) — NOTE(review).
        fos._module.fail_json(msg=('missing task body: %s' % 'extension_controller_fortigate'))
    # changed := success AND (revision_changed flag, defaulting to True).
    return ((not is_successful_status(resp)), (is_successful_status(resp) and (resp['revision_changed'] if ('revision_changed' in resp) else True)), resp, {})
# BUG FIX: the decorator was garbled to a bare leading `.parametrize(...)`
# (a syntax error); the `@pytest.mark` prefix is restored here.
@pytest.mark.parametrize('rate,capacity,etype', [(0, 0, ValueError), (0, 1, ValueError), (1, 0, ValueError), ((- 1), (- 1), ValueError), ((- 1), 0, ValueError), (0, (- 1), ValueError), ((- 2), (- 2), ValueError), ((- 2), 0, ValueError), (0, (- 2), ValueError), ('x', 'y', TypeError), ('x', (- 1), (ValueError, TypeError)), ((- 1), 'y', (ValueError, TypeError)), ('x', 1, TypeError), (1, 'y', TypeError), ('x', None, TypeError), (None, 'y', TypeError), (None, None, TypeError), (None, 1, TypeError), (1, None, TypeError)])
def test_input_validation_rate_and_capacity(rate, capacity, etype):
    """Limiter must reject non-positive or non-numeric rate/capacity."""
    with pytest.raises(etype):
        token_bucket.Limiter(rate, capacity, token_bucket.MemoryStorage())
# NOTE(review): the decorator lines were mangled in extraction — presumably
# @app.route('/nu_heads/', methods=['GET']) and an auth decorator such as
# @auth_required / @login_required; confirm against the original source.
('/nu_heads/', methods=['GET'])
_required
def nu_heads():
    """Render the nu_heads page: unverified NU release items that have at
    least one resolved entry, newest first, with caching disabled."""
    # Aggressively drop any cached ORM state before querying.
    g.session.expire_all()
    g.session.commit()
    g.session.expire_all()
    new_items_q = g.session.query(db.NuReleaseItem).filter((db.NuReleaseItem.reviewed == 'unverified')).order_by(desc(db.NuReleaseItem.first_seen))
    new_items_q = new_items_q.options(joinedload('resolved'))
    print('Fetching items')
    new_items = new_items_q.all()
    print(('Fetched %s items' % len(new_items)))
    # Keep only items that actually have resolved entries.
    new_items = [tmp for tmp in new_items if len(tmp.resolved)]
    print(('Fetched %s items with resolves' % len(new_items)))
    g.session.commit()
    print('Rendering')
    response = make_response(render_template('nu_heads.html', new_items=new_items))
    print('Rendered')
    g.session.expire_all()
    g.session.commit()
    g.session.expire_all()
    # Disable all client/proxy caching for this page.
    response.headers['X-UA-Compatible'] = 'IE=Edge,chrome=1'
    response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate, max-age=0'
    response.headers['Pragma'] = 'no-cache'
    response.headers['Expires'] = 'Thu, 01 Jan 1970 00:00:00'
    return response
def redirection_get_uri(url, method, headers, body, scanid):
    """Fuzz query parameters of `url` for open-redirect vulnerabilities.

    For each query parameter whose name matches a known redirect-parameter
    name, substitute each payload (pointed at the module-level
    `redirection_url`), replay the request, and record a finding when the
    server 3xx-redirects to our target. Returns None; results go to the DB.
    """
    url_query = urllib.parse.urlparse(url)
    parsed_query = urllib.parse.parse_qs(url_query.query)
    for (key, value) in list(parsed_query.items()):
        redirect_name = fetch_redirection_names()
        for name in redirect_name:
            if (name == key):
                redirection_payload = fetch_open_redirect_payload()
                for payload in redirection_payload:
                    # Payloads may be 'param=payload' shaped; keep only the
                    # value part, then point it at our canary URL.
                    if ('=' in payload):
                        payload = payload[(payload.find('=') + 1):].replace('{target}', redirection_url)
                    else:
                        payload = payload.replace('{target}', redirection_url)
                    parsed_url = urllib.parse.urlparse(url)
                    # Rebuild the URL with the parameter value swapped for the payload.
                    redirect_url = (((((parsed_url.scheme + '://') + parsed_url.netloc) + parsed_url.path) + '/?') + parsed_url.query.replace(value[0], payload))
                    fuzz_req = req.api_request(redirect_url, 'GET', headers)
                    # Any 3xx status whose Location points at our canary is a hit.
                    if (str(fuzz_req.status_code)[0] == '3'):
                        if (fuzz_req.headers['Location'].startswith(redirection_url) is True):
                            print(('%s[Medium] {0} is vulnerable to open redirection%s'.format(url) % (api_logger.Y, api_logger.W)))
                            logs.logging.info('%s is vulnerable to open redirection', redirect_url)
                            attack_result = {'id': 12, 'scanid': scanid, 'url': redirect_url, 'alert': 'Open redirection', 'impact': 'Medium', 'req_headers': headers, 'req_body': body, 'res_headers': fuzz_req.headers, 'res_body': 'NA'}
                            dbupdate.insert_record(attack_result)
                            # Stop after the first confirmed finding.
                            return
def colorful_swat(logo: str) -> str:
    """Return *logo* with its subtitle words cycled through four colors.

    The fixed subtitle is re-attached after the first '::' in the logo, with
    each inner word prefixed by the next color in the palette.
    """
    palette = [Fore.RED, Fore.GREEN, Fore.BLUE, Fore.YELLOW]
    subtitle = ':: Simple Workspace ATT&CK Tool ::'
    inner_words = subtitle.split(' ')[1:-1]  # drop the leading/trailing '::'
    pieces = [':: ']
    for idx, word in enumerate(inner_words):
        pieces.append(palette[idx % len(palette)] + word + ' ')
    pieces.append('::')
    return logo.split('::')[0] + ''.join(pieces)
class ClientIdModelsTestCase(TestCase):
    """Model-level tests for ClientId: validation, visibility and permissions."""

    # MQTT wildcard / separator characters that must be rejected as names.
    WRONG_CLIENT_ID_WILDCARD = ['', '/', '+', '#']

    def test_client_id(self):
        # Empty names are allowed only when the setting explicitly enables them.
        if (hasattr(settings, 'MQTT_ALLOW_EMPTY_CLIENT_ID') and settings.MQTT_ALLOW_EMPTY_CLIENT_ID):
            ClientId.objects.create(name='')
        self.assertEqual(str(ClientId.objects.create(name='1234')), '1234')
        ClientId.objects.create(name=gen_client_id())

    def test_wrong_client_id(self):
        # Without the empty-id setting, '' must fail validation.
        if ((not hasattr(settings, 'MQTT_ALLOW_EMPTY_CLIENT_ID')) or (not settings.MQTT_ALLOW_EMPTY_CLIENT_ID)):
            self.assertRaises(ValidationError, ClientId.objects.create, name='')
        for client_id in self.WRONG_CLIENT_ID_WILDCARD:
            self.assertRaises(ValidationError, ClientId.objects.create, name=client_id)

    def test_client_id_is_public(self):
        # A client id is public until a user or group is attached.
        cli = ClientId.objects.create(name='test')
        self.assertEqual(cli.is_public(), True)
        user = User.objects.create_user('test')
        cli.users.add(user)
        self.assertEqual(cli.is_public(), False)
        cli.users.remove(user)
        self.assertEqual(cli.is_public(), True)
        group = Group.objects.create(name='test')
        cli.groups.add(group)
        self.assertEqual(cli.is_public(), False)

    def test_client_id_has_permission(self):
        # Public ids permit anyone (None); restricted ids permit attached
        # users directly or via group membership.
        cli = ClientId.objects.create(name='test')
        self.assertEqual(cli.has_permission(None), True)
        user = User.objects.create_user('test')
        cli.users.add(user)
        self.assertEqual(cli.has_permission(None), False)
        self.assertEqual(cli.has_permission(user), True)
        cli.users.remove(user)
        self.assertEqual(cli.has_permission(None), True)
        group = Group.objects.create(name='test')
        cli.groups.add(group)
        self.assertEqual(cli.has_permission(None), False)
        self.assertEqual(cli.has_permission(user), False)
        user.groups.add(group)
        self.assertEqual(cli.has_permission(user), True)
class StartCLIParserTestCase(unittest.TestCase):
    """CLI argument-validation tests for the dagda 'start' sub-command.

    generate_args(...) positional order appears to be: (server_host,
    server_port, mongodb_host, mongodb_port, mongodb_ssl, mongodb_user,
    mongodb_pass, falco_rules_file, external_falco_output) — TODO confirm
    against the generate_args definition.
    verify_args returns 0 on success, or a positive code per failure kind.
    """

    def test_ok_empty_args(self):
        args = generate_args(None, None, None, None, False, None, None, None, None)
        status = StartCLIParser.verify_args(args)
        self.assertEqual(status, 0)

    def test_ok_server_ports(self):
        args = generate_args(None, 5555, None, 27017, False, None, None, None, None)
        status = StartCLIParser.verify_args(args)
        self.assertEqual(status, 0)

    def test_fail_server_port(self):
        # 65536 is out of the valid TCP port range.
        args = generate_args(None, 65536, None, None, False, None, None, None, None)
        status = StartCLIParser.verify_args(args)
        self.assertEqual(status, 1)

    def test_fail_mongodb_port(self):
        args = generate_args(None, None, None, 65536, False, None, None, None, None)
        status = StartCLIParser.verify_args(args)
        self.assertEqual(status, 2)

    def test_fail_only_mongodb_user(self):
        # A user without a password is rejected (and vice versa below).
        args = generate_args(None, None, None, None, False, 'admin', None, None, None)
        status = StartCLIParser.verify_args(args)
        self.assertEqual(status, 3)

    def test_fail_only_mongodb_pass(self):
        args = generate_args(None, None, None, None, False, None, '1234', None, None)
        status = StartCLIParser.verify_args(args)
        self.assertEqual(status, 4)

    def test_fail_falco_rules(self):
        # '{}$##' is not valid YAML, so the falco rules file must be rejected.
        temporary_dir = tempfile.mkdtemp()
        filename = (temporary_dir + '/fail_falco_rules')
        with open(filename, 'a+') as f:
            f.write('{}$##')
            f.flush()
        args = generate_args(None, None, None, None, False, None, None, open(filename, 'rb+'), None)
        status = StartCLIParser.verify_args(args)
        os.remove(filename)
        shutil.rmtree(temporary_dir)
        self.assertEqual(status, 5)

    def test_fail_external_falco(self):
        temporary_dir = tempfile.mkdtemp()
        filename = (temporary_dir + '/fail_falco_rules')
        with open(filename, 'a+') as f:
            f.write('{}$##')
            f.flush()
        args = generate_args(None, None, None, None, False, None, None, open(filename, 'rb+'), open(filename, 'rb+'))
        status = StartCLIParser.verify_args(args)
        os.remove(filename)
        shutil.rmtree(temporary_dir)
        self.assertEqual(status, 6)

    def test_start_full_happy_path(self):
        # Full argv round-trip through the real parser.
        sys.argv = ['dagda.py', 'start', '-s', '127.0.0.1', '-p', '5000', '-m', '127.0.0.1', '-mp', '27017']
        parsed_args = StartCLIParser()
        self.assertEqual(parsed_args.get_server_host(), '127.0.0.1')
        self.assertEqual(parsed_args.get_server_port(), 5000)
        self.assertEqual(parsed_args.get_mongodb_host(), '127.0.0.1')
        self.assertEqual(parsed_args.get_mongodb_port(), 27017)
        self.assertFalse(parsed_args.is_mongodb_ssl_enabled())
        self.assertIsNone(parsed_args.get_mongodb_user())
        self.assertIsNone(parsed_args.get_mongodb_pass())
        self.assertIsNone(parsed_args.get_falco_rules_filename())

    def test_check_falco_rules_file(self):
        sys.argv = ['dagda.py', 'start', '--falco_rules_file', './tests/mock_files/falco_rules.yaml']
        args = StartCLIParser()
        self.assertEqual(args.get_falco_rules_filename(), './tests/mock_files/falco_rules.yaml')

    def test_check_none_external_falco(self):
        sys.argv = ['dagda.py', 'start']
        args = StartCLIParser()
        self.assertEqual(args.get_external_falco_output_filename(), None)

    def test_check_external_falco(self):
        sys.argv = ['dagda.py', 'start', '--external_falco', './tests/mock_files/falco_rules.yaml']
        args = StartCLIParser()
        self.assertEqual(args.get_external_falco_output_filename(), './tests/mock_files/falco_rules.yaml')

    def test_check_exit_1(self):
        # A negative port makes the parser exit with code 1.
        sys.argv = ['dagda.py', 'start', '-p', '-1']
        with self.assertRaises(SystemExit) as cm:
            StartCLIParser()
        self.assertEqual(cm.exception.code, 1)

    def test_DagdaStartParser_exit_2(self):
        # argparse's error() exits with code 2.
        with self.assertRaises(SystemExit) as cm:
            DagdaStartParser().error('fail')
        self.assertEqual(cm.exception.code, 2)

    def test_DagdaStartParser_format_help(self):
        self.assertEqual(DagdaStartParser().format_help(), start_parser_text)
def generate_key_cert_signed_by_authority(organization_name, common_name, alternative_dns_names=None, key_filename=None, csr_filename=None, cert_filename=None, key_password=None, country_name=None, state_or_province_name=None, locality_name=None, valid_days=None, authority_key_filename=None, authority_cert_filename=None, authority_key_password=None):
    """Generate an RSA key and CSR, then issue a certificate signed by a CA.

    Writes three PEM files (key, CSR, certificate) and reads the CA key/cert
    from disk. Filenames default to key.pem / csr.pem / cert.pem and
    root_ca_key.pem / root_ca_cert.pem. Passwords may be str or bytes.

    BUG FIX: the default validity was 356 days — an evident typo for 365
    (one year); corrected here.
    """
    # --- private key -------------------------------------------------------
    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    encryption_algorithm = serialization.NoEncryption()
    if key_password is not None:
        if not isinstance(key_password, bytes):
            key_password = key_password.encode(default_encoding)
        encryption_algorithm = serialization.BestAvailableEncryption(key_password)
    key_bytes = key.private_bytes(encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=encryption_algorithm)
    if key_filename is None:
        key_filename = 'key.pem'
    with open(key_filename, 'wb') as f:
        f.write(key_bytes)
    # --- subject name ------------------------------------------------------
    name_attributes = []
    if country_name is not None:
        name_attributes.append(x509.NameAttribute(NameOID.COUNTRY_NAME, country_name))
    if state_or_province_name is not None:
        name_attributes.append(x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, state_or_province_name))
    if locality_name is not None:
        name_attributes.append(x509.NameAttribute(NameOID.LOCALITY_NAME, locality_name))
    name_attributes += [x509.NameAttribute(NameOID.ORGANIZATION_NAME, organization_name), x509.NameAttribute(NameOID.COMMON_NAME, common_name)]
    subject = x509.Name(name_attributes)
    # --- CSR ---------------------------------------------------------------
    csr_builder = x509.CertificateSigningRequestBuilder()
    csr_builder = csr_builder.subject_name(subject)
    if alternative_dns_names is None:
        alternative_dns_names = ['localhost']
    csr_builder = csr_builder.add_extension(x509.SubjectAlternativeName([x509.DNSName(name) for name in alternative_dns_names]), critical=False)
    csr = csr_builder.sign(key, hashes.SHA256())
    csr_bytes = csr.public_bytes(serialization.Encoding.PEM)
    if csr_filename is None:
        csr_filename = 'csr.pem'
    with open(csr_filename, 'wb') as f:
        f.write(csr_bytes)
    # --- load the signing authority ---------------------------------------
    if authority_key_password is not None:
        if not isinstance(authority_key_password, bytes):
            authority_key_password = authority_key_password.encode(default_encoding)
    if authority_key_filename is None:
        authority_key_filename = 'root_ca_key.pem'
    with open(authority_key_filename, 'rb') as f:
        authority_key = serialization.load_pem_private_key(f.read(), password=authority_key_password)
    if authority_cert_filename is None:
        authority_cert_filename = 'root_ca_cert.pem'
    with open(authority_cert_filename, 'rb') as f:
        authority_cert = x509.load_pem_x509_certificate(f.read())
    # --- issue the certificate --------------------------------------------
    cert_builder = x509.CertificateBuilder()
    cert_builder = cert_builder.subject_name(subject)
    cert_builder = cert_builder.issuer_name(authority_cert.subject)
    cert_builder = cert_builder.public_key(csr.public_key())
    cert_builder = cert_builder.serial_number(x509.random_serial_number())
    if valid_days is None:
        valid_days = 365  # was 356: typo for one year
    not_valid_before = datetime.datetime.utcnow()
    not_valid_after = not_valid_before + datetime.timedelta(days=valid_days)
    cert_builder = cert_builder.not_valid_before(not_valid_before)
    cert_builder = cert_builder.not_valid_after(not_valid_after)
    # Copy every CSR extension (e.g. the SAN) onto the certificate.
    for extension in csr.extensions:
        cert_builder = cert_builder.add_extension(extension.value, critical=extension.critical)
    cert = cert_builder.sign(authority_key, hashes.SHA256())
    cert_bytes = cert.public_bytes(serialization.Encoding.PEM)
    if cert_filename is None:
        cert_filename = 'cert.pem'
    with open(cert_filename, 'wb') as f:
        f.write(cert_bytes)
class DefaultScript(ScriptBase):
    """Base typeclass for scripts: overridable lifecycle hooks plus a factory.

    NOTE(review): create() takes ``cls`` but shows no @classmethod decorator
    here — presumably applied elsewhere (e.g. a lazy/metaclass mechanism);
    confirm against the original source.
    """

    def create(cls, key, **kwargs):
        """Create a script of this typeclass.

        Returns (obj, errors): obj is the new script or None, errors is a
        list of human-readable error strings.
        """
        errors = []
        obj = None
        kwargs['key'] = key
        # Default the typeclass to this class unless the caller overrides it.
        kwargs['typeclass'] = kwargs.pop('typeclass', cls)
        try:
            obj = create.create_script(**kwargs)
        except Exception:
            logger.log_trace()
            errors.append(("The script '%s' encountered errors and could not be created." % key))
        return (obj, errors)

    def at_script_creation(self):
        """Hook: called once, when the script is first created."""
        pass

    def is_valid(self):
        """Hook: return False to have the script stopped/cleaned up."""
        return True

    def at_start(self, **kwargs):
        """Hook: called when the script starts (including after reloads)."""
        pass

    def at_repeat(self, **kwargs):
        """Hook: called every repeat interval, if any."""
        pass

    def at_pause(self, manual_pause=True, **kwargs):
        """Hook: called when the script is paused."""
        pass

    def at_stop(self, **kwargs):
        """Hook: called when the script stops."""
        pass

    def at_script_delete(self):
        """Hook: return False to abort deletion of the script."""
        return True

    def at_server_reload(self):
        """Hook: called when the server does a soft reload."""
        pass

    def at_server_shutdown(self):
        """Hook: called on a full server shutdown."""
        pass

    def at_server_start(self):
        """Hook: called when the server comes (back) up."""
        pass
class SpatialDataArray(DataArray):
    """Scalar array defined on a 3D cartesian grid with dims (x, y, z)."""

    __slots__ = ()
    _dims = ('x', 'y', 'z')
    _data_attrs = {'long_name': 'field value'}

    def sel_inside(self, bounds: Bound) -> SpatialDataArray:
        """Return the smallest sub-array whose coordinate span covers `bounds`.

        `bounds` is ((xmin, ymin, zmin), (xmax, ymax, zmax)).

        BUG FIX: in the two "bounds entirely outside the data" branches the
        original used `max(2, length)` and `min(0, length - 2)` — min/max
        swapped — which selected (nearly) the whole axis instead of the two
        nearest points, and for a single-point axis produced
        np.arange(-1, 1), i.e. a wrap-around duplicate index. Corrected to
        `min(2, length)` / `max(0, length - 2)`.
        """
        inds_list = []
        for coord, smin, smax in zip(self.coords.values(), bounds[0], bounds[1]):
            length = len(coord)
            if smax < coord[0]:
                # Bounds lie entirely below the data: keep the first <=2 points.
                comp_inds = np.arange(0, min(2, length))
            elif smin > coord[-1]:
                # Bounds lie entirely above the data: keep the last <=2 points.
                comp_inds = np.arange(max(0, length - 2), length)
            else:
                # Clamp to the nearest sample just outside each bound.
                if smin < coord[0]:
                    ind_min = 0
                else:
                    ind_min = max(0, ((coord >= smin).argmax().data - 1))
                if smax > coord[-1]:
                    ind_max = length - 1
                else:
                    ind_max = (coord >= smax).argmax().data
                comp_inds = np.arange(ind_min, ind_max + 1)
            inds_list.append(comp_inds)
        return self.isel(x=inds_list[0], y=inds_list[1], z=inds_list[2])

    def does_cover(self, bounds: Bound) -> bool:
        """True if the data region fully covers `bounds` along every axis
        (a single-point axis is treated as covering)."""
        return all(
            ((coord[0] <= smin and coord[-1] >= smax) or len(coord) == 1)
            for coord, smin, smax in zip(self.coords.values(), bounds[0], bounds[1])
        )

    def reflect(self, axis: Axis, center: float) -> SpatialDataArray:
        """Mirror the data across the plane `center` along `axis`.

        `center` must coincide with the first sample or lie to its left;
        otherwise DataError is raised. When it coincides, the shared sample
        is not duplicated.
        """
        coords = list(self.coords.values())
        data = np.array(self.data)
        if np.isclose(center, coords[axis].data[0]):
            num_duplicates = 1  # mirror plane on the first sample: share it
        elif center > coords[axis].data[0]:
            raise DataError('Reflection center must be outside and on the left of the data region.')
        else:
            num_duplicates = 0
        shape = np.array(np.shape(data))
        old_len = shape[axis]
        shape[axis] = 2 * old_len - num_duplicates
        ind_left = [slice(shape[0]), slice(shape[1]), slice(shape[2])]
        ind_right = [slice(shape[0]), slice(shape[1]), slice(shape[2])]
        ind_left[axis] = slice(old_len - 1, None, -1)            # reversed copy
        ind_right[axis] = slice(old_len - num_duplicates, None)  # original data
        new_data = np.zeros(shape)
        new_data[ind_left[0], ind_left[1], ind_left[2]] = data
        new_data[ind_right[0], ind_right[1], ind_right[2]] = data
        new_coords = np.zeros(shape[axis])
        new_coords[old_len - num_duplicates:] = coords[axis]
        new_coords[old_len - 1::-1] = 2 * center - coords[axis]  # mirrored coords
        coords[axis] = new_coords
        coords_dict = dict(zip('xyz', coords))
        return SpatialDataArray(new_data, coords=coords_dict)
class ConditionHandler(object):
    """Records callback invocations: how many, when, and whether any happened."""

    def __init__(self):
        self.count = 0       # total number of invocations
        self.times = []      # perf_counter() timestamp per invocation
        self.called = False  # becomes True on the first invocation

    def callback(self, event=None):
        """Record one invocation; the event payload is ignored."""
        now = perf_counter()
        self.times.append(now)
        self.count += 1
        self.called = True

    def is_called(self):
        """True once callback() has fired at least once."""
        return self.called

    def called_n(self, repeat):
        """Return a zero-arg predicate that is True once *repeat* invocations
        have been recorded."""
        def _reached():
            return self.count >= repeat
        return _reached
class DiffusersSDXL(Protocol):
    """Structural type for a diffusers SDXL pipeline: the sub-modules and the
    two entry points this code relies on."""

    unet: fl.Module
    text_encoder: fl.Module
    text_encoder_2: fl.Module
    tokenizer: fl.Module
    tokenizer_2: fl.Module
    vae: fl.Module

    def __call__(self, prompt: str, *args: Any, **kwargs: Any) -> Any:
        """Run the pipeline on a prompt."""
        ...

    def encode_prompt(self, prompt: str, prompt_2: (str | None)=None, negative_prompt: (str | None)=None, negative_prompt_2: (str | None)=None) -> tuple[(torch.Tensor, torch.Tensor, torch.Tensor, torch.Tensor)]:
        """Encode positive/negative prompts into the four conditioning tensors."""
        ...
class CarbonFilter(Boxes):
    """Boxes.py generator for a V-pocket charcoal filter (see `description`)."""

    description = 'The filter does not include the top rim. You need some rectangular wooden strip about 2-3cm in size to glue around. The x and y are without this rim and should be about 5 cm smaller that the nominal size.\n\nThe following sizes are currently hard coded:\n\n* Height of rails on top: 50mm\n* Borders on top: 40mm\n* Char coal width (horizontal): 40mm\n* Bottom width: 40 + 20 + 40 mm\n\nFor assembly it is important that all bottom plates are the same way up. This allows the ribs of adjacent pockets to pass beside each other.\n\nThere are three type of ribs:\n\n* Those with staight tops go in the middle of the bottom plates\n* Those pointier angle go at the outer sides and meet with the side bars\n* The less pointy go at all other sides of the bottom plates that will end up on the inside\n\nThe last two types of ribs do not have finger joints on the outside but still need to be glued to the top beam of the adjacent pocket or the left or right side bar.\n'
    ui_group = 'Misc'

    def __init__(self) -> None:
        Boxes.__init__(self)
        self.addSettingsArgs(edges.FingerJointSettings)
        self.buildArgParser(x=550, y=550, h=250)
        self.argparser.add_argument('--pockets', action='store', type=int, default=3, help='number of V shaped filter pockets')
        self.argparser.add_argument('--ribs', action='store', type=int, default=12, help='number of ribs to hold the bottom and the mesh')

    def sideCB(self):
        # Callback for the front/back walls: finger holes for the outer side
        # bars and the angled V-pocket rails. Relies on self.w (pocket width)
        # and self.a (pocket angle) computed in render().
        (x, y, h) = (self.x, self.y, self.h)
        t = self.thickness
        p = self.pockets
        posx = t
        w = self.w
        a = self.a
        self.fingerHolesAt((t / 2), h, 50, (- 90))
        self.fingerHolesAt((x - (t / 2)), h, 50, (- 90))
        for i in range(p):
            # Two rails going down-left and two going down-right per pocket.
            self.fingerHolesAt((posx + (t / 2)), h, 50, ((- 90) + a))
            self.fingerHolesAt(((posx + 40) + (t / 2)), h, 50, ((- 90) + a))
            self.fingerHolesAt(((posx + w) - (t / 2)), h, 50, ((- 90) - a))
            self.fingerHolesAt((((posx + w) - 40) - (t / 2)), h, 50, ((- 90) - a))
            # Horizontal slot for the bottom plate of this pocket.
            self.fingerHolesAt((((posx + (w / 2)) - 50) + t), (3.5 * t), (100 - (2 * t)), 0)
            posx += w

    def bottomCB(self):
        # Callback for the bottom plates: two rows of holes per rib.
        t = self.thickness
        for i in range(self.ribs):
            self.fingerHolesAt(((((i + 1) * self.y) / (self.ribs + 1)) - (1.5 * t)), 0, (4 * t), 90)
            self.fingerHolesAt(((((i + 1) * self.y) / (self.ribs + 1)) - (1.5 * t)), (40 - t), 20, 90)

    def topRailCB(self):
        # Callback for the top rails: one hole row per rib.
        t = self.thickness
        for i in range(self.ribs):
            self.fingerHolesAt(((((i + 1) * self.y) / (self.ribs + 1)) - (1.5 * t)), 0, 30, 90)

    def innerRibs(self, n, move=None):
        """Draw n inner ribs (straight-top ribs for the middle of the bottom plates)."""
        (x, y, h) = (self.x, self.y, self.h)
        t = self.thickness
        a = self.a
        a_ = math.radians(a)
        # l: slanted rib length along the pocket wall.
        l = (((h - (4 * t)) / math.cos(a_)) - ((0.5 * t) * math.sin(a_)))
        tw = ((n * (20 + self.spacing)) + (l * math.sin(a_)))
        th = (((h - (3 * t)) - (20 * math.cos(a_))) + self.spacing)
        if self.move(tw, th, move, True):
            return
        self.moveTo(0, t)
        for i in range(n):
            self.edges['f'](20)
            self.polyline(0, (90 - a), (l - 50), 90, t, (- 90))
            self.edges['f'](30)
            self.polyline(0, (90 + a), (20 - t), (90 - a), ((l - 20) + (t * math.sin(a_))), (90 + a))
            self.moveTo((20 + self.spacing))
        self.ctx.stroke()
        self.move(tw, th, move, label='Inner ribs')

    def sideHolders(self, n, move=None):
        """Draw n simple parallelogram holders for the pocket sides."""
        (x, y, h) = (self.x, self.y, self.h)
        t = self.thickness
        a = self.a
        a_ = math.radians(a)
        l = ((((h - (4 * t)) / math.cos(a_)) - ((0.5 * t) * math.sin(a_))) - 50)
        tw = ((n * (10 + self.spacing)) + (l * math.sin(a_)))
        th = ((h - (4 * t)) - 50)
        if self.move(tw, th, move, True):
            return
        for i in range(n):
            self.polyline(10, (90 - a), l, (90 + a), 10, (90 - a), l, (90 + a))
            self.ctx.stroke()
            self.moveTo((10 + self.spacing))
        self.move(tw, th, move, label='Inner ribs')

    def topStabilizers(self, n, move=None):
        """Draw n H-shaped stabilizers bridging the top rails."""
        t = self.thickness
        l = (((2 * (self.h - 60)) * math.sin(math.radians(self.a))) - 20)
        tw = (n * ((6 * t) + self.spacing))
        th = (l + (4 * t))
        if self.move(tw, th, move, True):
            return
        self.moveTo(t)
        for i in range(n):
            for j in range(2):
                # Two mirrored halves per stabilizer.
                self.polyline(0, 90, (2 * t), (- 90), t, (- 90), (2 * t), 90, (3 * t), (90, t), (l + (2 * t)), (90, t))
            self.ctx.stroke()
            self.moveTo(((6 * t) + self.spacing))
        self.move(tw, th, move, label='Inner ribs')

    def outerRibs(self, n, n_edge, move=None):
        """Draw n outer ribs; the first n_edge use the pointier edge profile."""
        (x, y, h) = (self.x, self.y, self.h)
        t = self.thickness
        a = self.a
        a_ = math.radians(a)
        l = (((h - (4 * t)) / math.cos(a_)) + ((0.5 * t) * math.sin(a_)))
        # dl/dll: tip geometry for inner-position ribs; dl2/dll2 for edge ribs.
        dl = ((20 - t) * (math.tan(((math.pi / 2) - (2 * a_))) + math.sin(a_)))
        dll = ((20 - t) * (1 / math.sin((2 * a_))))
        dl2 = ((20 - t) * (math.tan(((math.pi / 2) - a_)) + math.sin(a_)))
        dll2 = ((20 - t) * (1 / math.sin(a_)))
        tw = (((n // 2) * (40 + t)) + (l * math.sin(a_)))
        th = (h + (5 * t))
        if self.move(tw, th, move, True):
            return
        self.moveTo((2 * t))
        for i in range(n):
            self.polyline(((0 * t) + 20), (90, (2 * t)), (2 * t), (- a))
            if (i < n_edge):
                # Edge ribs meet the side bars at a steeper tip angle.
                self.polyline(((l - dl2) - (t * math.sin(a_))), a, dll2, (180 - a), 20)
            else:
                self.polyline(((l - dl) - (t * math.sin(a_))), (2 * a), dll, (180 - (2 * a)), 20)
            self.edges['f'](30)
            self.polyline(0, (- 90), t, 90, (l - 50), a, t, (- 90))
            self.edges['f']((4 * t))
            self.polyline(0, 90, (1 * t), (90, (2 * t)))
            self.moveTo((t + 40))
            if ((i + 1) == (n // 2)):
                # Start a second row after half the ribs, rotated 180 degrees.
                self.moveTo(((2 * t) + (0.7 * self.spacing)), (h + (5 * t)), 180)
        self.ctx.stroke()
        self.move(tw, th, move, label='Outer ribs')

    def render(self):
        """Generate all parts: walls, side bars, rails, bottoms and ribs."""
        (x, y, h) = (self.x, self.y, self.h)
        self.y = y = self.adjustSize(y)
        t = self.thickness
        # Pocket width and half-angle of the V derived from the user sizes.
        self.w = ((x - (2 * t)) / self.pockets)
        self.a = math.degrees(math.atan((((self.w - 100) / 2) / (h - (4 * t)))))
        for i in range(2):
            self.rectangularWall(x, h, callback=[self.sideCB], move='up')
        for i in range(2):
            self.rectangularWall(y, 50, 'efef', label='Sides', move='up')
        for i in range((self.pockets * 4)):
            self.rectangularWall(y, 50, 'efef', callback=[self.topRailCB], label='Top rails', move='up')
        w = (100 - (2 * t))
        for i in range(self.pockets):
            self.rectangularWall(y, w, 'efef', callback=[self.bottomCB, None, self.bottomCB], label='bottom plate', move='up')
        self.innerRibs(((self.pockets * self.ribs) * 2), move='up')
        self.outerRibs(((self.pockets * self.ribs) * 2), (self.ribs * 2), move='up')
        self.sideHolders((self.pockets * 8), move='up')
        self.topStabilizers((min(3, self.ribs) * self.pockets))
class DeleteIndexParamSource(ParamSource): def __init__(self, track, params, **kwargs): super().__init__(track, params, **kwargs) self.request_params = params.get('request-params', {}) self.only_if_exists = params.get('only-if-exists', True) self.index_definitions = [] target_index = params.get('index') if target_index: if isinstance(target_index, str): target_index = [target_index] for idx in target_index: self.index_definitions.append(idx) elif track.indices: for idx in track.indices: self.index_definitions.append(idx.name) else: raise exceptions.InvalidSyntax('delete-index operation targets no index') def params(self): p = {} p.update(self._params) p.update({'indices': self.index_definitions, 'request-params': self.request_params, 'only-if-exists': self.only_if_exists}) return p
def check_migrations(): (classes, models, table_names) = ([], [], []) for class_ in list(db.Model._decl_class_registry.values()): try: table_names.append(class_.__tablename__) classes.append(class_) except: pass for table in list(db.metadata.tables.items()): if (table[0] in table_names): models.append(classes[table_names.index(table[0])]) for model in models: try: db.session.query(model).first() except: capture_exception() return f'failure,{model} model out of date with migrations' return 'success,database up to date with migrations'
class OFPExperimenterOxmId(OFPOxmId): def __init__(self, type_, exp_id, hasmask=False, length=None): super(OFPExperimenterOxmId, self).__init__(type_=type_, hasmask=hasmask, length=length) self.exp_id = exp_id def serialize(self): buf = super(OFPExperimenterOxmId, self).serialize() msg_pack_into(self._EXPERIMENTER_ID_PACK_STR, buf, struct.calcsize(self._PACK_STR), self.exp_id)
class OptionSeriesPyramidOnpointPosition(Options): def offsetX(self): return self._config_get(None) def offsetX(self, num: float): self._config(num, js_type=False) def offsetY(self): return self._config_get(None) def offsetY(self, num: float): self._config(num, js_type=False) def x(self): return self._config_get(None) def x(self, num: float): self._config(num, js_type=False) def y(self): return self._config_get(None) def y(self, num: float): self._config(num, js_type=False)
def generate_aws(aws_config: AWSConfig, organization: Organization) -> List[Dict[(str, str)]]: from fides.connectors.aws import validate_credentials log.info('Validating AWS credentials') try: validate_credentials(aws_config) except ConnectorAuthFailureException as error: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail=str(error)) log.info('Generating systems from AWS') aws_systems = generate_aws_systems(organization=organization, aws_config=aws_config) return [i.dict(exclude_none=True) for i in aws_systems]
class RootTracker(Generic[TNodeID]):
    """Track, for each node of a forest, which root it belongs to and its depth.

    Wraps a ``Tree`` and maintains per-node ``TreeRoot`` bookkeeping plus a
    cache of ``(root_id, depth_from_root)`` answers; the cache is discarded
    wholesale on every mutation (``add``/``prune``).
    """

    # node id -> shared TreeRoot bookkeeping object for that node's fragment
    _roots: Dict[(TNodeID, TreeRoot[TNodeID])]
    # node id -> depth (relative to its root) recorded when the node was
    # added; TreeRoot.depth_offset corrects this after roots are re-linked
    _original_depth_to_root: Dict[(TNodeID, int)]
    # memoized get_root() answers: node id -> (root id, depth)
    _cache: Dict[(TNodeID, Tuple[(TNodeID, int)])]

    def __init__(self) -> None:
        self._tree = Tree[TNodeID]()
        self._roots = {}
        self._original_depth_to_root = {}
        self._cache = {}

    def add(self, node_id: TNodeID, parent_id: TNodeID) -> None:
        """Insert ``node_id`` under ``parent_id`` and update root bookkeeping."""
        # Any cached (root, depth) answer may be stale after a mutation.
        self._cache = {}
        self._tree.add(node_id, parent_id)
        (node_root, original_depth) = self._get_new_root(node_id, parent_id)
        self._roots[node_id] = node_root
        self._original_depth_to_root[node_id] = original_depth
        # The new node may join fragments: re-link any pre-existing children.
        children = self._tree.children_of(node_id)
        self._link_children(node_root, original_depth, children)

    def get_children(self, node_id: TNodeID) -> Tuple[(TNodeID, ...)]:
        """Return the ids of ``node_id``'s direct children."""
        return self._tree.children_of(node_id)

    def get_root(self, node_id: TNodeID) -> Tuple[(TNodeID, int)]:
        """Return ``(root_id, depth_from_root)`` for ``node_id``.

        Raises:
            ValidationError: when the node is unknown, or when consistency
                checks find a "root" that still has a parent (stale state).
        """
        if (node_id not in self._roots):
            raise ValidationError(f'Node {node_id} is not in the tree')
        elif (node_id in self._cache):
            (root_node_id, root_depth) = self._cache[node_id]
            if self._tree.has_parent(root_node_id):
                # Cached root is no longer a root: evict it and fail loudly.
                self._cache.pop(root_node_id)
                uncached_root = self.get_root(root_node_id)
                raise ValidationError(f'RootTracker had stale and invalid cache for {node_id} root, correct: {root_node_id}, stale: {uncached_root}')
            else:
                return (root_node_id, root_depth)
        else:
            root = self._roots[node_id]
            original_depth = self._original_depth_to_root[node_id]
            # depth_offset compensates for re-links since the node was added.
            (root_node_id, root_depth) = (root.node_id, (original_depth + root.depth_offset))
            if self._tree.has_parent(root_node_id):
                parent = self._tree.parent_of(root_node_id)
                if (parent in self._roots):
                    parent_root = self._roots[parent]
                else:
                    parent_root = None
                raise ValidationError(f'{root_node_id} has parent {parent}, but was going to be returned as a root. {node_id} appears to have that bad root {root!r}, and the parent has bad root {parent_root!r}')
            self._cache[node_id] = (root_node_id, root_depth)
            return self._cache[node_id]

    def prune(self, prune_off_id: TNodeID) -> None:
        """Remove root node ``prune_off_id``; each of its children becomes a root.

        Raises:
            ValidationError: when the id is unknown, is not a root node, or
                still has a parent in the tree.
        """
        if (prune_off_id not in self._original_depth_to_root):
            raise ValidationError(f'prune id {prune_off_id} not in depths')
        elif (prune_off_id not in self._roots):
            raise ValidationError(f'prune id {prune_off_id} not in roots')
        self._cache = {}
        root_to_prune = self._roots[prune_off_id]
        node_id = root_to_prune.node_id
        if (node_id != prune_off_id):
            raise ValidationError(f'Can only prune of a root node, tried to prune {prune_off_id}, but the root is {root_to_prune}')
        elif self._tree.has_parent(node_id):
            parent = self._tree.parent_of(node_id)
            raise ValidationError(f'{node_id} has parent {parent}, but was about to be pruned')
        child_nodes = self._tree.children_of(prune_off_id)
        child_pairs = tuple(((child_node, self._roots[child_node]) for child_node in child_nodes))
        # Promote every child to be the root of its own subtree.
        root_to_prune.prune_to(child_pairs)
        for (child_node, child_root) in child_pairs:
            if (child_node != child_root.node_id):
                raise ValidationError(f'Pruned child node should point to itself {child_node}, instead of {child_root.node_id}')
        self._tree.prune(prune_off_id)
        self._original_depth_to_root.pop(prune_off_id)
        del self._roots[prune_off_id]

    def _get_new_root(self, node_id: TNodeID, parent_id: TNodeID) -> Tuple[(TreeRoot[TNodeID], int)]:
        """Choose or create the TreeRoot for a newly added node, with its depth."""
        if self._tree.has_parent(node_id):
            try:
                parent_root = self._roots[parent_id]
            except KeyError as e:
                tree_parent = self._tree.parent_of(node_id)
                raise ValidationError(f'When adding node {node_id} with parent {parent_id}, The tree says that parent {tree_parent} is present, but the parent is missing from roots.') from e
            if (len(self._tree.children_of(parent_id)) > 1):
                # Siblings exist: give this branch its own root object,
                # chained up to the parent's root at the same depth.
                node_root = TreeRoot(node_id)
                node_root.extend(parent_root, 0)
            else:
                # Only child: share the parent's root object.
                node_root = parent_root
            original_depth = (self._original_depth_to_root[parent_id] + 1)
        else:
            # Parent unknown: the node is (for now) its own root.
            node_root = TreeRoot(node_id)
            original_depth = 0
        return (node_root, original_depth)

    def _link_children(self, parent_root: TreeRoot[TNodeID], parent_original_depth: int, children: Tuple[(TNodeID, ...)]) -> None:
        """Attach previously-rootless children to their new parent's root."""
        for child in children:
            child_root = self._roots[child]
            if ((child_root.depth_offset + self._original_depth_to_root[child]) != 0):
                raise ValidationError(f'children without parents must have net depth 0: but offset was {child_root.depth_offset} and original depth was {self._original_depth_to_root[child]}.')
            else:
                ideal_original_depth = (parent_original_depth + 1)
                actual_original_depth = self._original_depth_to_root[child]
                child_root.extend(parent_root, (ideal_original_depth - actual_original_depth))
class Test_WindowWrapper():
    """Tests for the windowed-table wrapper: relative-timestamp resolution,
    delegation of item access to the backing table, and unsupported ops.

    The minified source had bare ``.parametrize`` remnants where the
    ``@pytest.mark.parametrize`` decorators belong; restored here.
    """

    def test_name(self, *, wtable):
        assert (wtable.name == wtable.table.name)

    def test_relative_to(self, *, wtable):
        relative_to = Mock(name='relative_to')
        w2 = wtable.relative_to(relative_to)
        assert (w2.table is wtable.table)
        assert (w2._get_relative_timestamp is relative_to)

    def test_relative_to_now(self, *, table, wtable):
        w2 = wtable.relative_to_now()
        assert (w2._get_relative_timestamp == wtable.table._relative_now)

    def test_relative_to_field(self, *, table, wtable):
        table._relative_field = Mock(name='_relative_field')
        field = Mock(name='field')
        w2 = wtable.relative_to_field(field)
        table._relative_field.assert_called_once_with(field)
        assert (w2._get_relative_timestamp == table._relative_field())

    def test_relative_to_stream(self, *, table, wtable):
        w2 = wtable.relative_to_stream()
        assert (w2._get_relative_timestamp == wtable.table._relative_event)

    @pytest.mark.parametrize('input,expected', [(DATETIME, DATETIME_TS), (303.333, 303.333), (None, 99999.6)])
    def test_get_timestamp(self, input, expected, *, event, wtable):
        # With no relative handler the event's message timestamp is used.
        event.message.timestamp = 99999.6
        if (input is not None):
            wtable.get_relative_timestamp = (lambda e=None: input)
        else:
            wtable.get_relative_timestamp = None
        assert (wtable.get_timestamp(event) == expected)

    def test_get_timestamp__event_is_None(self, *, event, wtable):
        wtable.get_relative_timestamp = None
        with patch('faust.tables.wrappers.current_event') as ce:
            ce.return_value = None
            with pytest.raises(RuntimeError):
                assert wtable.get_timestamp(None)

    def test_on_recover(self, *, wtable, table):
        cb = Mock(name='callback')
        wtable.on_recover(cb)
        assert (cb in table._recover_callbacks)

    def test_contains(self, *, table, wtable):
        table._windowed_contains = Mock(name='windowed_contains')
        wtable.get_timestamp = Mock(name='get_timestamp')
        ret = wtable.__contains__('k')
        wtable.get_timestamp.assert_called_once_with()
        table._windowed_contains.assert_called_once_with('k', wtable.get_timestamp())
        assert (ret is table._windowed_contains())

    def test_getitem(self, *, wtable):
        w = wtable['k2']
        assert isinstance(w, WindowSet)
        assert (w.key == 'k2')
        assert (w.table is wtable.table)
        assert (w.wrapper is wtable)

    def test_setitem(self, *, table, wtable):
        table._set_windowed = Mock(name='set_windowed')
        wtable.get_timestamp = Mock(name='get_timestamp')
        wtable['foo'] = 300
        wtable.get_timestamp.assert_called_once_with()
        table._set_windowed.assert_called_once_with('foo', 300, wtable.get_timestamp())

    def test_setitem__key_is_WindowSet(self, *, wtable):
        # Assigning a WindowSet back to its own key must be a no-op.
        wtable['k2'] = wtable['k2']

    def test_delitem(self, *, table, wtable):
        table._del_windowed = Mock(name='del_windowed')
        wtable.get_timestamp = Mock(name='get_timestamp')
        del wtable['foo']
        wtable.get_timestamp.assert_called_once_with()
        table._del_windowed.assert_called_once_with('foo', wtable.get_timestamp())

    def test_len__no_key_index_raises(self, *, wtable):
        with pytest.raises(NotImplementedError):
            len(wtable)

    def test_as_ansitable__raises(self, *, wtable):
        with pytest.raises(NotImplementedError):
            wtable.as_ansitable()

    def test_keys_raises(self, *, wtable):
        with pytest.raises(NotImplementedError):
            list(wtable._keys())

    @pytest.mark.parametrize('input', [datetime.now(), 103.33, User.id, (lambda s: s)])
    def test_relative_handler(self, input, *, wtable):
        # datetimes, floats, model fields and callables are all accepted.
        wtable.get_relative_timestamp = input
        assert wtable.get_relative_timestamp

    def test_relative_handler__invalid_handler(self, *, wtable):
        with pytest.raises(ImproperlyConfigured):
            wtable._relative_handler(object())
def pick_dualporo_values(values: np.ndarray, actind: np.ndarray, num_cells: int, fracture: bool) -> np.ndarray: active_size = len(actind) indsize = (num_cells if (len(values) == (2 * num_cells)) else active_size) if fracture: return values[(- indsize):] return values[:indsize]
def ensure_business_categories_functions_exist(force_close_conns: bool=False) -> None: file_path = ((((settings.APP_DIR / 'broker') / 'management') / 'sql') / 'create_business_categories_functions.sql') sql = file_path.read_text() if force_close_conns: db.connections.close_all() with db.connection.cursor() as cursor: cursor.execute(sql)
def downgrade(): connection = op.get_bind() connection.execute('pragma foreign_keys=OFF') for field_name in ['uid', 'type', 'softwareVersion', 'softwareName', 'osName', 'sdkVersion', 'name']: connection.execute(f"UPDATE kronos_gateway SET {field_name} = 'unknown' WHERE {field_name} IS NULL") for field_name in ['uid', 'type', 'softwareVersion', 'softwareName', 'name']: connection.execute(f"UPDATE kronos_device SET {field_name} = 'unknown' WHERE {field_name} IS NULL") with op.batch_alter_table('kronos_gateway') as batch_op: batch_op.alter_column('uid', existing_type=sa.TEXT(), nullable=False) batch_op.alter_column('type', existing_type=sa.TEXT(), nullable=False) batch_op.alter_column('softwareVersion', existing_type=sa.TEXT(), nullable=False) batch_op.alter_column('softwareName', existing_type=sa.TEXT(), nullable=False) batch_op.alter_column('sdkVersion', existing_type=sa.TEXT(), nullable=False) batch_op.alter_column('osName', existing_type=sa.TEXT(), nullable=False) batch_op.alter_column('name', existing_type=sa.TEXT(), nullable=False) with op.batch_alter_table('kronos_device') as batch_op: batch_op.alter_column('uid', existing_type=sa.TEXT(), nullable=False) batch_op.alter_column('type', existing_type=sa.TEXT(), nullable=False) batch_op.alter_column('softwareVersion', existing_type=sa.TEXT(), nullable=False) batch_op.alter_column('softwareName', existing_type=sa.TEXT(), nullable=False) batch_op.alter_column('name', existing_type=sa.TEXT(), nullable=False) connection.execute('pragma foreign_keys=ON')
class AdAccountDsaRecommendations(AbstractObject): def __init__(self, api=None): super(AdAccountDsaRecommendations, self).__init__() self._isAdAccountDsaRecommendations = True self._api = api class Field(AbstractObject.Field): recommendations = 'recommendations' _field_types = {'recommendations': 'list<string>'} def _get_field_enum_info(cls): field_enum_info = {} return field_enum_info
class IPUtils(): def proxy_required(cls, ip: str) -> bool: return False def should_nat(cls, ip: str, service: Optional['FcrServiceBase']=None) -> bool: return False async def translate_address(cls, ip: str, service: Optional['FcrServiceBase']=None) -> str: return ip def check_ip(cls, ip: str, service: Optional['FcrServiceBase']=None) -> bool: return True def is_mgmt_ip(cls, ip: DeviceIP) -> bool: return False def get_ip(cls, options: Dict[(str, Any)], devinfo: 'DeviceInfo', service: 'FcrServiceBase') -> List[IPInfo]: ip_list: List[IPInfo] = [] ip_address = options.get('ip_address') if ip_address: return [IPInfo(ip_address, cls.check_ip(ip_address, service))] use_mgmt_ip = options.get('mgmt_ip', False) if use_mgmt_ip: devinfo.inc_counter('device_info.mgmt_ip') ip_list = cls._get_ip_list(use_mgmt_ip=True, service=service, devinfo=devinfo) if (len(ip_list) == 0): raise LookupErrorException(f"User has set 'mgmt_ip=True' in the request but no mgmt ip is found for {devinfo.hostname}") return ip_list devinfo.inc_counter('device_info.default_ip') ip_list = cls._get_ip_list(use_mgmt_ip=use_mgmt_ip, service=service, devinfo=devinfo) if (len(ip_list) == 0): raise LookupErrorException(f'No Valid IP address was found for the device {devinfo.hostname}') return ip_list def _get_ip_list(cls, devinfo: 'DeviceInfo', service: 'FcrServiceBase', use_mgmt_ip: bool=False) -> List[IPInfo]: pingable_list: List[IPInfo] = [] non_pingable_list: List[IPInfo] = [] for ip in (devinfo._pref_ips + [devinfo._ip]): if (not ip.addr): continue if (use_mgmt_ip and (not cls.is_mgmt_ip(ip))): continue if cls.check_ip(ip, service): pingable_list.append(IPInfo(ip.addr, True)) elif (ip.addr == devinfo._ip.addr): non_pingable_list = ([IPInfo(ip.addr, False)] + non_pingable_list) else: non_pingable_list.append(IPInfo(ip.addr, False)) return (pingable_list + non_pingable_list)
def get_deployment_addresses(manifest: Dict, contract_name: str, genesis_hash: Optional[str]=None) -> List: if (genesis_hash is None): genesis_hash = web3.genesis_hash if ('meta_brownie' not in manifest): manifest = process_manifest(manifest) chain_uri = f'blockchain://{genesis_hash}' key = next((i for i in manifest['deployments'] if i.startswith(chain_uri)), None) if (key is None): return [] return [v['address'] for v in manifest['deployments'][key].values() if (manifest['contract_types'][v['contract_type']]['contract_name'] == contract_name)]
def test_digest_ignore(flyte_project): ignore = IgnoreGroup(flyte_project, [GitIgnore, DockerIgnore, StandardIgnore]) digest1 = compute_digest(flyte_project, ignore.is_ignored) change_file = ((flyte_project / 'data') / 'large.file') assert ignore.is_ignored(change_file) change_file.write_text("I don't matter") digest2 = compute_digest(flyte_project, ignore.is_ignored) assert (digest1 == digest2)
class CssSearch(CssStyle.Style): _attrs = {'width': 'auto', 'display': 'inline-block', 'border': 'none', 'background-repeat': 'no-repeat', 'padding': '5px 0px'} _focus = {'outline': 0} def customize(self): self.css({'border-bottom-color': self.page.theme.greys[3], 'color': self.page.theme.greys[(- 1)]}) self.hover.css({'color': self.page.theme.greys[(- 1)], 'border-bottom-color': self.page.theme.notch(1)})
def parse_gos(gos, target_go_ev, excluded_go_ev): selected_gos = set() for g in gos.strip().split(','): if (not g): continue (gocat, gid, gevidence) = list(map(str, g.strip().split('|'))) if ((not target_go_ev) or (gevidence in target_go_ev)): if ((not excluded_go_ev) or (gevidence not in excluded_go_ev)): selected_gos.add(gid) return selected_gos
class Core(object):
    """Abstract core of an itchat-style WeChat web client.

    Holds the HTTP session, login state and contact/message storage.  Every
    operation is declared here and raises NotImplementedError — presumably
    concrete implementations are attached elsewhere (confirm against the
    modules that register onto this class).
    """

    def __init__(self, user_agent=None):
        # Session flags: logged-in and "currently logging in".
        (self.alive, self.isLogging) = (False, False)
        # Shared contact/message storage and convenience aliases to its lists.
        self.storageClass = storage.Storage(self)
        self.memberList = self.storageClass.memberList
        self.mpList = self.storageClass.mpList
        self.chatroomList = self.storageClass.chatroomList
        self.msgList = self.storageClass.msgList
        self.loginInfo = {}
        # One requests session reused for every HTTP call.
        self.s = requests.Session()
        self.uuid = None  # QR-login uuid
        # Registered message handlers keyed by chat type.
        self.functionDict = {'FriendChat': {}, 'GroupChat': {}, 'MpChat': {}}
        (self.useHotReload, self.hotReloadDir) = (False, 'itchat.pkl')
        self.receivingRetryCount = 5
        # Default to a desktop Chrome user agent when none is supplied.
        if (user_agent is None):
            self.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36'
        else:
            self.user_agent = user_agent

    # --- login / session lifecycle ---
    def login(self, enableCmdQR=False, picDir=None, qrCallback=None, loginCallback=None, exitCallback=None):
        raise NotImplementedError()

    def get_QRuuid(self):
        raise NotImplementedError()

    def get_QR(self, uuid=None, enableCmdQR=False, picDir=None, qrCallback=None):
        raise NotImplementedError()

    def check_login(self, uuid=None):
        raise NotImplementedError()

    def web_init(self):
        raise NotImplementedError()

    def show_mobile_login(self):
        raise NotImplementedError()

    def start_receiving(self, exitCallback=None, getReceivingFnOnly=False):
        raise NotImplementedError()

    def get_msg(self):
        raise NotImplementedError()

    def logout(self):
        raise NotImplementedError()

    # --- contact management ---
    def update_chatroom(self, userName, detailedMember=False):
        raise NotImplementedError()

    def update_friend(self, userName):
        raise NotImplementedError()

    def get_contact(self, update=False):
        raise NotImplementedError()

    def get_friends(self, update=False):
        raise NotImplementedError()

    def get_chatrooms(self, update=False, contactOnly=False):
        raise NotImplementedError()

    def get_mps(self, update=False):
        raise NotImplementedError()

    def set_alias(self, userName, alias):
        raise NotImplementedError()

    def set_pinned(self, userName, isPinned=True):
        raise NotImplementedError()

    def add_friend(self, userName, status=2, verifyContent='', autoUpdate=True):
        raise NotImplementedError()

    def get_head_img(self, userName=None, chatroomUserName=None, picDir=None):
        raise NotImplementedError()

    # --- chatroom management ---
    def create_chatroom(self, memberList, topic=''):
        raise NotImplementedError()

    def set_chatroom_name(self, chatroomUserName, name):
        raise NotImplementedError()

    def delete_member_from_chatroom(self, chatroomUserName, memberList):
        raise NotImplementedError()

    def add_member_into_chatroom(self, chatroomUserName, memberList, useInvitation=False):
        raise NotImplementedError()

    # --- sending messages and media ---
    def send_raw_msg(self, msgType, content, toUserName):
        raise NotImplementedError()

    def send_msg(self, msg='Test Message', toUserName=None):
        raise NotImplementedError()

    def upload_file(self, fileDir, isPicture=False, isVideo=False, toUserName='filehelper', file_=None, preparedFile=None):
        raise NotImplementedError()

    def send_file(self, fileDir, toUserName=None, mediaId=None, file_=None):
        raise NotImplementedError()

    def send_image(self, fileDir=None, toUserName=None, mediaId=None, file_=None):
        raise NotImplementedError()

    def send_video(self, fileDir=None, toUserName=None, mediaId=None, file_=None):
        raise NotImplementedError()

    def send(self, msg, toUserName=None, mediaId=None):
        raise NotImplementedError()

    def revoke(self, msgId, toUserName, localId=None):
        raise NotImplementedError()

    # --- login-status persistence (hot reload) ---
    def dump_login_status(self, fileDir=None):
        raise NotImplementedError()

    def load_login_status(self, fileDir, loginCallback=None, exitCallback=None):
        raise NotImplementedError()

    def auto_login(self, hotReload=False, statusStorageDir='itchat.pkl', enableCmdQR=False, picDir=None, qrCallback=None, loginCallback=None, exitCallback=None):
        raise NotImplementedError()

    # --- message dispatch / main loop ---
    def configured_reply(self):
        raise NotImplementedError()

    def msg_register(self, msgType, isFriendChat=False, isGroupChat=False, isMpChat=False):
        raise NotImplementedError()

    def run(self, debug=True, blockThread=True):
        raise NotImplementedError()

    # --- searches delegated to the storage class ---
    def search_friends(self, name=None, userName=None, remarkName=None, nickName=None, wechatAccount=None):
        return self.storageClass.search_friends(name, userName, remarkName, nickName, wechatAccount)

    def search_chatrooms(self, name=None, userName=None):
        return self.storageClass.search_chatrooms(name, userName)

    def search_mps(self, name=None, userName=None):
        return self.storageClass.search_mps(name, userName)
class EditorFromView(Editor): def init(self, parent): self._ui = ui = self.init_ui(parent) if (ui.history is None): ui.history = self.ui.history self.control = ui.control def init_ui(self, parent): raise NotImplementedError def update_editor(self): pass def dispose(self): self._ui.dispose() super().dispose()
class Main(base.Module): parameters = {'ip': '192.168.1.1/24'} completions = list(parameters.keys()) def do_execute(self, line): arp = ARP(pdst=self.parameters['ip']) ether = Ether(dst='ff:ff:ff:ff:ff:ff') packet = (ether / arp) result = srp(packet, timeout=5)[0] self.cp.green(f"{'IP':<16} {'MAC':^15}") for (_, received) in result: self.cp.yellow(f'{received.psrc:<20} {received.hwsrc:^18}') def complete_set(self, text, line, begidx, endidx): mline = line.partition(' ')[2] offs = (len(mline) - len(text)) return [s[offs:] for s in self.completions if s.startswith(mline)]
class FetchMarkerInputThread(QThread): match_marker = pyqtSignal(str, str) def __init__(self, callback_tag, markers): QThread.__init__(self) self.callback_tag = callback_tag self.running_flag = True self.marker_quit_keys = (get_emacs_var('eaf-marker-quit-keys') or '') self.markers = markers def run(self): while self.running_flag: if self.markers: minibuffer_input = get_emacs_func_result('minibuffer-contents-no-properties', []) marker_input_quit = (minibuffer_input and (len(minibuffer_input) > 0) and (minibuffer_input[(- 1)] in self.marker_quit_keys)) marker_input_finish = (minibuffer_input in self.markers) if (marker_input_quit or marker_input_finish): self.running_flag = False eval_in_emacs('exit-minibuffer', []) message_to_emacs(('Quit marker selection.' if marker_input_quit else 'Marker selected.')) time.sleep(0.1) def stop(self): self.running_flag = False
def test_serialize_transformer_data(): data = {'x': TransformerData.empty()} bytes_data = srsly.msgpack_dumps(data) new_data = srsly.msgpack_loads(bytes_data) assert isinstance(new_data['x'], TransformerData) nlp = Language() nlp.add_pipe('transformer', config={'model': {'name': 'hf-internal-testing/tiny-random-DistilBertModel', 'transformer_config': {'output_attentions': True}}}) nlp.initialize() doc = nlp('This is a test.') b = doc.to_bytes() reloaded_doc = Doc(nlp.vocab) reloaded_doc.from_bytes(b) assert_docs_equal(doc, reloaded_doc) ops = get_current_ops() for key in doc._.trf_data.model_output: assert_array_equal(ops.to_numpy(ops.asarray(doc._.trf_data.model_output[key])), ops.to_numpy(ops.asarray(reloaded_doc._.trf_data.model_output[key])))
def class_separation_traces_agg(df, label, color_options): traces = [] df_name = df[(df['values'] == label)] traces.append(go.Box(lowerfence=df_name['mins'], q1=df_name['lowers'], q3=df_name['uppers'], median=df_name['means'], upperfence=df_name['maxs'], x=df_name['values'].astype(str), marker_color=color_options.get_current_data_color())) df_name = df[(df['values'] == 'others')] traces.append(go.Box(lowerfence=df_name['mins'], q1=df_name['lowers'], q3=df_name['uppers'], median=df_name['means'], upperfence=df_name['maxs'], x=df_name['values'], marker_color=color_options.get_reference_data_color())) return traces
class TestSpockspaceRepr(): def test_repr(self, monkeypatch, capsys): with monkeypatch.context() as m: m.setattr(sys, 'argv', ['', '--config', './tests/conf/yaml/test.yaml']) config = ConfigArgBuilder(*all_configs, desc='Test Builder') print(config.generate()) (out, _) = capsys.readouterr() assert (('NestedListStuff' in out) and ('TypeConfig' in out))
class BaseServing(ErsiliaBase): def __init__(self, model_id, config_json=None, preferred_port=None, url=None): ErsiliaBase.__init__(self, config_json=config_json) self.model_id = model_id self.bundle_tag = self._get_latest_bundle_tag(model_id=self.model_id) self.port = preferred_port def _get_info_from_bento(self): tmp_folder = tempfile.mkdtemp(prefix='ersilia-') tmp_file = os.path.join(tmp_folder, 'info.json') cmd = 'bentoml info --quiet {0}:{1} > {2}'.format(self.model_id, self.bundle_tag, tmp_file) self.logger.debug('Getting info from BentoML and storing in {0}'.format(tmp_file)) run_command(cmd) with open(tmp_file, 'r') as f: info = json.load(f) self.logger.debug('Info {0}'.format(info)) return info def _get_apis_from_bento(self): self.logger.debug('Getting APIs from Bento') info = self._get_info_from_bento() apis_list = [] for item in info['apis']: apis_list += [item['name']] return apis_list def _get_apis_from_apis_list(self): self.logger.debug('Getting APIs from list file') file_name = os.path.join(self._get_bundle_location(self.model_id), APIS_LIST_FILE) if (not os.path.exists(file_name)): return None with open(file_name, 'r') as f: apis_list = [] for l in f: apis_list += [l.rstrip()] print(apis_list) if (len(apis_list) > 0): return apis_list else: return None def _get_apis_from_where_available(self): apis_list = self._get_apis_from_apis_list() if (apis_list is None): apis_list = self._get_apis_from_bento() if (apis_list is None): apis_list = [] for api in apis_list: (yield api) def _api_with_url(self, api_name, input): if (self.url is None): return self.logger.debug('Using URL: {0}'.format(self.url)) response = requests.post('{0}/{1}'.format(self.url, api_name), json=input) return response.json()
def lights(color, indexes): off = True while True: for idx in indexes: tree[idx] = (colored_dot(color) if off else '') mutex.acquire() os.system(('cls' if (os.name == 'nt') else 'clear')) print(''.join(tree)) mutex.release() off = (not off) time.sleep(random.uniform(0.5, 1.5))
def merge(sources=None, paths=None, reader_class=None, **kwargs): assert sources options = infer_open_mfdataset_kwargs(sources=sources, paths=paths, reader_class=reader_class, user_kwargs=kwargs) if ((reader_class is not None) and hasattr(reader_class, 'to_xarray_multi_from_sources')): return reader_class.to_xarray_multi_from_sources(sources, **options) if (paths is not None): if ((reader_class is not None) and hasattr(reader_class, 'to_xarray_multi_from_paths')): return reader_class.to_xarray_multi_from_paths(paths, **options) LOG.debug(f'xr.open_mfdataset with options={options}') return xr.open_mfdataset(paths, **options) LOG.debug(f'xr.open_mfdataset with options= {options}') return xr.open_mfdataset([WrappedSource(s) for s in sources], engine=CMLEngine, **options)
class OptionSeriesXrangeData(Options):
    """Per-point options for an X-range series.

    The source had lost its ``@property`` / ``@<name>.setter`` decorators (a
    stray ``.setter`` fragment remained before ``x2``), so each setter
    definition silently replaced its getter; restored here so every option is
    a read/write property and the sub-option accessors are read-only
    properties.
    """

    @property
    def accessibility(self) -> 'OptionSeriesXrangeDataAccessibility':
        """Accessibility sub-options for the point."""
        return self._config_sub_data('accessibility', OptionSeriesXrangeDataAccessibility)

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def colorIndex(self):
        return self._config_get(None)

    @colorIndex.setter
    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    @property
    def custom(self):
        return self._config_get(None)

    @custom.setter
    def custom(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dataLabels(self) -> 'OptionSeriesXrangeDataDatalabels':
        """Data-label sub-options for the point."""
        return self._config_sub_data('dataLabels', OptionSeriesXrangeDataDatalabels)

    @property
    def description(self):
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def dragDrop(self) -> 'OptionSeriesXrangeDataDragdrop':
        """Drag & drop sub-options for the point."""
        return self._config_sub_data('dragDrop', OptionSeriesXrangeDataDragdrop)

    @property
    def drilldown(self):
        return self._config_get(None)

    @drilldown.setter
    def drilldown(self, text: str):
        self._config(text, js_type=False)

    @property
    def events(self) -> 'OptionSeriesXrangeDataEvents':
        """Event-handler sub-options for the point."""
        return self._config_sub_data('events', OptionSeriesXrangeDataEvents)

    @property
    def id(self):
        return self._config_get(None)

    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)

    @property
    def labelrank(self):
        return self._config_get(None)

    @labelrank.setter
    def labelrank(self, num: float):
        self._config(num, js_type=False)

    @property
    def marker(self) -> 'OptionSeriesXrangeDataMarker':
        """Marker sub-options for the point."""
        return self._config_sub_data('marker', OptionSeriesXrangeDataMarker)

    @property
    def name(self):
        return self._config_get(None)

    @name.setter
    def name(self, text: str):
        self._config(text, js_type=False)

    @property
    def partialFill(self) -> 'OptionSeriesXrangeDataPartialfill':
        """Partial-fill sub-options for the point."""
        return self._config_sub_data('partialFill', OptionSeriesXrangeDataPartialfill)

    @property
    def selected(self):
        return self._config_get(False)

    @selected.setter
    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def x(self):
        return self._config_get(None)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def x2(self):
        return self._config_get(None)

    @x2.setter
    def x2(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get(None)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)
class Scheduler(): groups: List[TaskGroup] def __post_init__(self) -> None: index_provider = DynamicIndexProvider() for group in self.groups: for task in group.iter_tasks(): task.set_run_index(index_provider) def filter_groups(self, status: Status) -> List[TaskGroup]: return [group for group in self.groups if (group.status is status)] def pending_groups(self) -> List[TaskGroup]: return self.filter_groups(Status.PENDING) def __bool__(self) -> bool: return bool(self.pending_groups) def _calculate_score(self, target_group: TaskGroup) -> tuple[(int, int)]: direct_dependants = 0 indirect_dependants = 0 for group in self.pending_groups: if (group is target_group): continue if any(((dependency is target_group) for dependency in group.dependencies)): indirect_dependants += 1 if (len(group.dependencies) == 1): direct_dependants += 1 return (direct_dependants, indirect_dependants) def _stage_group(self, target_group: TaskGroup) -> None: target_group.status = Status.RUNNING def finish(self, target_group: TaskGroup, status: int) -> None: if (status == SUCCESS): self._succeed(target_group) else: self._fail(target_group) def _fail(self, target_group: TaskGroup) -> None: target_group.status = Status.FAILURE for group in self.pending_groups: if (target_group in group.dependencies): group.status = Status.SKIPPED def _succeed(self, target_group: TaskGroup) -> None: target_group.status = Status.SUCCESS for group in self.pending_groups.copy(): if (target_group in group.dependencies): group.dependencies.remove(target_group) def iter_available_groups(self) -> Iterator[TaskGroup]: unblocked_groups = [group for group in self.pending_groups if (not group.dependencies)] unblocked_groups.sort(key=self._calculate_score, reverse=True) for group in unblocked_groups: self._stage_group(group) (yield group)
class OptionPlotoptionsItemDatalabels(Options):
    """Generated Highcharts wrapper for ``plotOptions.item.dataLabels``.

    BUG fix: every option below was defined twice (getter then setter) with
    no decorators, so each setter silently shadowed its getter. The
    ``@property`` / ``@<name>.setter`` decorators, evidently lost in
    extraction, are restored here; no other behavior is changed.
    """

    @property
    def alignTo(self):
        """``alignTo`` option."""
        return self._config_get(None)

    @alignTo.setter
    def alignTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def animation(self) -> 'OptionPlotoptionsItemDatalabelsAnimation':
        """``animation`` sub-options object."""
        return self._config_sub_data('animation', OptionPlotoptionsItemDatalabelsAnimation)

    @property
    def backgroundColor(self):
        """``backgroundColor`` option."""
        return self._config_get(None)

    @backgroundColor.setter
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderColor(self):
        """``borderColor`` option."""
        return self._config_get(None)

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderRadius(self):
        """``borderRadius`` option (default 0)."""
        return self._config_get(0)

    @borderRadius.setter
    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    @property
    def borderWidth(self):
        """``borderWidth`` option (default 0)."""
        return self._config_get(0)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def className(self):
        """``className`` option."""
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """``color`` option."""
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def connectorColor(self):
        """``connectorColor`` option."""
        return self._config_get(None)

    @connectorColor.setter
    def connectorColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def connectorPadding(self):
        """``connectorPadding`` option (default 5)."""
        return self._config_get(5)

    @connectorPadding.setter
    def connectorPadding(self, num: float):
        self._config(num, js_type=False)

    @property
    def connectorShape(self):
        """``connectorShape`` option (default 'crookedLine')."""
        return self._config_get('crookedLine')

    @connectorShape.setter
    def connectorShape(self, text: str):
        self._config(text, js_type=False)

    @property
    def connectorWidth(self):
        """``connectorWidth`` option (default 1)."""
        return self._config_get(1)

    @connectorWidth.setter
    def connectorWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def crookDistance(self):
        """``crookDistance`` option."""
        return self._config_get('undefined')

    @crookDistance.setter
    def crookDistance(self, text: str):
        self._config(text, js_type=False)

    @property
    def crop(self):
        """``crop`` option (default True)."""
        return self._config_get(True)

    @crop.setter
    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def defer(self):
        """``defer`` option (default True)."""
        return self._config_get(True)

    @defer.setter
    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def distance(self):
        """``distance`` option (default 30)."""
        return self._config_get(30)

    @distance.setter
    def distance(self, num: float):
        self._config(num, js_type=False)

    @property
    def enabled(self):
        """``enabled`` option (default True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def filter(self) -> 'OptionPlotoptionsItemDatalabelsFilter':
        """``filter`` sub-options object."""
        return self._config_sub_data('filter', OptionPlotoptionsItemDatalabelsFilter)

    @property
    def format(self):
        """``format`` option."""
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        """``formatter`` callback (stored as raw JavaScript)."""
        return self._config_get('function () { return this.point.isNull ? void 0 : this.point.name; }')

    @formatter.setter
    def formatter(self, text: str):
        # js_type=True: the value is emitted as JavaScript, not as a string.
        self._config(text, js_type=True)

    @property
    def nullFormat(self):
        """``nullFormat`` option."""
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormatter(self):
        """``nullFormatter`` option."""
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def overflow(self):
        """``overflow`` option (default 'justify')."""
        return self._config_get('justify')

    @overflow.setter
    def overflow(self, text: str):
        self._config(text, js_type=False)

    @property
    def padding(self):
        """``padding`` option (default 5)."""
        return self._config_get(5)

    @padding.setter
    def padding(self, num: float):
        self._config(num, js_type=False)

    @property
    def position(self):
        """``position`` option (default 'center')."""
        return self._config_get('center')

    @position.setter
    def position(self, text: str):
        self._config(text, js_type=False)

    @property
    def rotation(self):
        """``rotation`` option (default 0)."""
        return self._config_get(0)

    @rotation.setter
    def rotation(self, num: float):
        self._config(num, js_type=False)

    @property
    def shadow(self):
        """``shadow`` option (default False)."""
        return self._config_get(False)

    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def shape(self):
        """``shape`` option (default 'square')."""
        return self._config_get('square')

    @shape.setter
    def shape(self, text: str):
        self._config(text, js_type=False)

    @property
    def softConnector(self):
        """``softConnector`` option (default True)."""
        return self._config_get(True)

    @softConnector.setter
    def softConnector(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def style(self):
        """``style`` option."""
        return self._config_get(None)

    @style.setter
    def style(self, value: Any):
        self._config(value, js_type=False)

    @property
    def textPath(self) -> 'OptionPlotoptionsItemDatalabelsTextpath':
        """``textPath`` sub-options object."""
        return self._config_sub_data('textPath', OptionPlotoptionsItemDatalabelsTextpath)

    @property
    def useHTML(self):
        """``useHTML`` option (default False)."""
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def verticalAlign(self):
        """``verticalAlign`` option (default 'bottom')."""
        return self._config_get('bottom')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        """``x`` pixel offset (default 0)."""
        return self._config_get(0)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        """``y`` pixel offset (default 0)."""
        return self._config_get(0)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        """``zIndex`` option (default 6)."""
        return self._config_get(6)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class Threading(ProcessBase):
    """Subprocess tests for eventlet's monkey-patching of ``threading``.

    Each test writes a small script to a temp file, runs it in a fresh
    interpreter, and asserts on the captured stdout lines.

    NOTE(review): every expected line count is one more than the number of
    prints in the script — presumably ``launch_subprocess`` captures a
    trailing empty line; confirm against ProcessBase.
    """

    def test_orig_thread(self):
        # A thread created via the *original* (unpatched) threading module
        # must not be tracked by the patched module's _active registry.
        new_mod = "import eventlet\neventlet.monkey_patch()\nfrom eventlet import patcher\nimport threading\n_threading = patcher.original('threading')\ndef test():\n    print(repr(threading.currentThread()))\nt = _threading.Thread(target=test)\nt.start()\nt.join()\nprint(len(threading._active))\nprint(len(_threading._active))\n"
        self.write_to_tempfile('newmod', new_mod)
        (output, lines) = self.launch_subprocess('newmod.py')
        self.assertEqual(len(lines), 4, '\n'.join(lines))
        # Line 0: repr of the current thread inside the OS thread.
        assert lines[0].startswith('<Thread'), lines[0]
        # Lines 1-2: only the main thread is registered in each module.
        assert (lines[1] == '1'), lines
        assert (lines[2] == '1'), lines

    def test_tpool(self):
        # Code run via tpool.execute must not leak entries into _active.
        new_mod = 'import eventlet\neventlet.monkey_patch()\nfrom eventlet import tpool\nimport threading\ndef test():\n    print(repr(threading.currentThread()))\ntpool.execute(test)\nprint(len(threading._active))\n'
        self.write_to_tempfile('newmod', new_mod)
        (output, lines) = self.launch_subprocess('newmod.py')
        self.assertEqual(len(lines), 3, '\n'.join(lines))
        assert lines[0].startswith('<Thread'), lines[0]
        self.assertEqual(lines[1], '1', lines[1])

    def test_greenlet(self):
        # A raw greenlet (spawn_n) runs as the main thread from threading's
        # point of view and must not register a new entry.
        new_mod = 'import eventlet\neventlet.monkey_patch()\nfrom eventlet import event\nimport threading\nevt = event.Event()\ndef test():\n    print(repr(threading.currentThread()))\n    evt.send()\neventlet.spawn_n(test)\nevt.wait()\nprint(len(threading._active))\n'
        self.write_to_tempfile('newmod', new_mod)
        (output, lines) = self.launch_subprocess('newmod.py')
        self.assertEqual(len(lines), 3, '\n'.join(lines))
        assert lines[0].startswith('<_MainThread'), lines[0]
        self.assertEqual(lines[1], '1', lines[1])

    def test_greenthread(self):
        # A GreenThread (spawn) is reported with its own repr but still
        # must not add to _active.
        new_mod = 'import eventlet\neventlet.monkey_patch()\nimport threading\ndef test():\n    print(repr(threading.currentThread()))\nt = eventlet.spawn(test)\nt.wait()\nprint(len(threading._active))\n'
        self.write_to_tempfile('newmod', new_mod)
        (output, lines) = self.launch_subprocess('newmod.py')
        self.assertEqual(len(lines), 3, '\n'.join(lines))
        assert lines[0].startswith('<_GreenThread'), lines[0]
        self.assertEqual(lines[1], '1', lines[1])

    def test_keyerror(self):
        # Monkey-patching alone must not raise (regression: KeyError at exit).
        new_mod = 'import eventlet\neventlet.monkey_patch()\n'
        self.write_to_tempfile('newmod', new_mod)
        (output, lines) = self.launch_subprocess('newmod.py')
        self.assertEqual(len(lines), 1, '\n'.join(lines))
class OptionPlotoptionsSolidgaugeSonificationTracksMappingTremoloSpeed(Options):
    """Generated Highcharts wrapper for the sonification tremolo-speed mapping.

    BUG fix: each option was defined twice (getter then setter) with no
    decorators, so each setter shadowed its getter; the ``@property`` /
    ``@<name>.setter`` decorators are restored — behavior otherwise unchanged.
    """

    @property
    def mapFunction(self):
        """``mapFunction`` option."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """``mapTo`` option."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """``max`` option."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """``min`` option."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """``within`` option."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def import_to_node_single(imp: SortableImport, module: cst.Module) -> cst.BaseStatement:
    """Render one SortableImport as a single cst statement line.

    Comment lines recorded before the import become leading (empty) lines;
    all inline/trailing comment fragments are folded into one trailing
    comment separated by COMMENT_INDENT.
    """
    before: List[cst.EmptyLine] = []
    for text in imp.comments.before:
        if text.startswith('#'):
            before.append(cst.EmptyLine(indent=True, comment=cst.Comment(text)))
        else:
            before.append(cst.EmptyLine(indent=False))

    aliases: List[cst.ImportAlias] = []
    inline_parts = list(imp.comments.first_inline)
    for item in imp.items:
        alias_name = name_to_node(item.name)
        alias_as = cst.AsName(name=cst.Name(item.asname)) if item.asname else None
        aliases.append(cst.ImportAlias(name=alias_name, asname=alias_as))
        # Per-item comments all collapse into the trailing comment.
        inline_parts.extend(item.comments.before)
        inline_parts.extend(item.comments.inline)
        inline_parts.extend(item.comments.following)
    inline_parts.extend(imp.comments.inline)
    inline_parts.extend(imp.comments.final)
    inline_parts.extend(imp.comments.last_inline)

    if inline_parts:
        tws = cst.TrailingWhitespace(
            whitespace=cst.SimpleWhitespace(COMMENT_INDENT),
            comment=cst.Comment(COMMENT_INDENT.join(inline_parts)),
        )
    else:
        tws = cst.TrailingWhitespace()

    if imp.stem:
        # "from <stem> import ..." — stem may be purely relative (dots only).
        (stem, ndots) = split_relative(imp.stem)
        statement = cst.ImportFrom(
            module=(name_to_node(stem) if stem else None),
            names=aliases,
            relative=(cst.Dot(),) * ndots,
        )
    else:
        statement = cst.Import(names=aliases)
    return cst.SimpleStatementLine(
        body=[statement], leading_lines=before, trailing_whitespace=tws
    )
class OptionPlotoptionsGaugeSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Generated Highcharts wrapper for the lowpass-resonance mapping.

    BUG fix: each option was defined twice (getter then setter) with no
    decorators, so each setter shadowed its getter; the ``@property`` /
    ``@<name>.setter`` decorators are restored — behavior otherwise unchanged.
    """

    @property
    def mapFunction(self):
        """``mapFunction`` option."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """``mapTo`` option."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """``max`` option."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """``min`` option."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """``within`` option."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class SpendingMixin():
    """Mixin providing common request parsing for spending endpoints."""

    # Filters every request using this mixin must supply.
    required_filters = ['def_codes', 'query']

    @property
    def query(self):
        """Free-text query filter from the request, or None when absent.

        BUG fix: restored ``@property`` — the mangled ``_property`` token in
        the extracted source shows the decorators were lost.
        """
        return self.filters.get('query')

    @property
    def spending_type(self):
        """Validated ``spending_type`` from the request body ('total' or 'award')."""
        model = [{'key': 'spending_type', 'name': 'spending_type', 'type': 'enum', 'enum_values': ['total', 'award'], 'allow_nulls': False, 'optional': False}]
        return TinyShield(model).block(self.request.data)['spending_type']
def build_data_file_name(source, download_job, piid, assistance_id):
    """Derive the output file name for a download source.

    Monthly downloads reuse the job's existing file name (sans extension);
    everything else is rendered from the source type's name pattern.
    """
    if download_job and download_job.monthly_download:
        return strip_file_extension(download_job.file_name)

    pattern = VALUE_MAPPINGS[source.source_type]['download_name']
    timestamp = datetime.strftime(datetime.now(timezone.utc), '%Y-%m-%d_H%HM%MS%S')

    if source.is_for_idv or source.is_for_contract:
        values = {'piid': slugify_text_for_file_names(piid, 'UNKNOWN', 50)}
    elif source.is_for_assistance:
        values = {'assistance_id': slugify_text_for_file_names(assistance_id, 'UNKNOWN', 50)}
    elif source.source_type == 'disaster_recipient':
        values = {'award_category': source.award_category, 'timestamp': timestamp}
    else:
        level_names = {'d1': 'Contracts', 'd2': 'Assistance', 'treasury_account': 'TAS', 'federal_account': 'FA'}
        agency = 'All' if source.agency_code == 'all' else str(source.agency_code)
        request = json.loads(download_job.json_request)
        filters = request['filters']
        # Limited downloads and disaster ES/subaward downloads carry no
        # agency prefix at all; account downloads keep the bare code.
        is_disaster = request.get('request_type') == 'disaster'
        if request.get('limit') or (is_disaster and source.source_type in ('elasticsearch_awards', 'sub_awards')):
            agency = ''
        elif source.file_type not in ('treasury_account', 'federal_account'):
            agency = f'{agency}_'
        if is_disaster:
            account_filters = request['account_filters']
            latest = f"FY{account_filters['latest_fiscal_year']}P{str(account_filters['latest_fiscal_period']).zfill(2)}"
            data_quarters = f'{latest}-Present'
        else:
            data_quarters = construct_data_date_range(filters)
        values = {
            'agency': agency,
            'data_quarters': data_quarters,
            'level': level_names[source.file_type],
            'timestamp': timestamp,
            'type': level_names[source.file_type],
            'extra_file_type': source.extra_file_type,
        }
    return pattern.format(**values)
def time_func(func, iterations=100, name=None):
    """Benchmark `func`: warm up, then print the mean wall time per call (ms)."""
    # Warm-up calls so first-call effects don't skew the measurement.
    for _ in range(5):
        func()
    start = time.perf_counter()
    for _ in range(iterations):
        func()
    per_call_ms = ((time.perf_counter() - start) * 1000.0) / iterations
    label = 'function' if name is None else name
    print('"{}" took {:.3f} (ms)'.format(label, per_call_ms))
class TestExtractDistribution(object):
    """Tests for to_list() normalization and extract_dist stdout serialization.

    NOTE(review): the bare ``.parametrize(...)`` fragments below are mangled
    ``@pytest.mark.parametrize`` decorators lost in extraction; restore them
    from the original source — as written this block does not parse.
    """

    .parametrize(('var', 'expected'), [(['pkg'], ['pkg']), (None, []), ('pkg >= 2.5\npkg2', ['pkg >= 2.5', 'pkg2']), ('pkg', ['pkg']), (('pkg',), ['pkg']), ((p for p in ('pkg',)), ['pkg'])])
    def test_list(self, var, expected):
        # to_list must normalize list/None/multiline-str/str/tuple/generator
        # inputs into a plain list.
        assert (to_list(var) == expected)

    .parametrize('metadata', [{'foo': (lambda : None)}, {'foo': ['bar', (lambda : None)]}])
    def test_serializing_metadata_to_stdout_success(self, metadata, capsys):
        # Metadata containing non-serializable values (lambdas) must still be
        # written to stdout without producing anything on stderr.
        flexmock(extract_dist).should_receive('__init__').and_return(None)
        command = extract_dist()
        command.metadata = metadata
        command.stdout = True
        command.run()
        (out, err) = capsys.readouterr()
        assert (not err)
class HasMentions():
    """Mixin that creates a GP Notification for each @mention in a document."""

    def notify_mentions(self):
        """Create one notification per mentioned user; skips duplicates."""
        mentions_field = getattr(self, 'mentions_field', None)
        if not mentions_field:
            return
        for mention in extract_mentions(self.get(mentions_field)):
            values = frappe._dict(from_user=self.owner, to_user=mention.email)
            # Attach the relevant discussion/task/comment references.
            if self.doctype == 'GP Discussion':
                values.discussion = self.name
            if self.doctype == 'GP Task':
                values.task = self.name
                values.project = self.project
            elif self.doctype == 'GP Comment':
                values.comment = self.name
                if self.reference_doctype == 'GP Discussion':
                    values.discussion = self.reference_name
                elif self.reference_doctype == 'GP Task':
                    values.task = self.reference_name
                    values.project = frappe.db.get_value('GP Task', self.reference_name, 'project')
            # Don't notify twice for the same mention context.
            if frappe.db.exists('GP Notification', values):
                continue
            notification = frappe.get_doc(doctype='GP Notification')
            involved_doctypes = [self.doctype, self.get('reference_doctype')]
            if 'GP Task' in involved_doctypes:
                notification.message = f'{get_fullname(self.owner)} mentioned you in a task'
            elif 'GP Discussion' in involved_doctypes:
                notification.message = f'{get_fullname(self.owner)} mentioned you in a post'
            notification.update(values)
            notification.insert(ignore_permissions=True)
class MergeSmallDimsTest(unittest.TestCase):
    """Unit tests for merge_small_dims()."""

    def _test_merge_small_dims(self, dims, merged_dims, threshold) -> None:
        # Shared assertion: merging `dims` under `threshold` gives `merged_dims`.
        self.assertEqual(merge_small_dims(dims, threshold), merged_dims)

    def test_merge_all_small_dims(self) -> None:
        # Everything fits under the threshold -> collapses to one dimension.
        self._test_merge_small_dims([1, 2, 5, 1], [10], threshold=10)

    def test_merge_some_small_dims(self) -> None:
        # Tight threshold -> only the unit dims get absorbed.
        self._test_merge_small_dims([1, 2, 5, 1], [2, 5], threshold=1)

    def test_merge_small_dims_for_single_dim(self) -> None:
        # A tensor input with a single dimension is returned as a plain list.
        self._test_merge_small_dims(torch.tensor([2]), [2], threshold=10)
def production_processor_live(json_tot, json_ren):
    """Combine total generation with the renewable breakdown (live data).

    Returns a list of datapoints carrying datetime, wind, solar, and the
    remaining generation bucketed as 'unknown'.

    Raises ParserException when the renewable series labels are not the
    expected 'ENERGIA SOLAR' / 'ENERGIA EOLICA'.
    """
    gen_total = json_tot['data'][0]['values']
    if (json_ren['data'][1]['key'] == 'ENERGIA SOLAR'):
        rawgen_sol = json_ren['data'][1]['values']
    else:
        raise ParserException('CL.py', f"Unexpected data label. Expected 'ENERGIA SOLAR' and got {json_ren['data'][1]['key']}", 'CL')
    if (json_ren['data'][0]['key'] == 'ENERGIA EOLICA'):
        rawgen_wind = json_ren['data'][0]['values']
    else:
        raise ParserException('CL.py', f"Unexpected data label. Expected 'ENERGIA EOLICA' and got {json_ren['data'][0]['key']}", 'CL')
    # BUG fix: the original scanned the renewable lists per timestamp and,
    # when no match existed, silently reused the previous iteration's
    # wind/solar values (or raised NameError on the very first timestamp).
    # Index both series once and skip timestamps lacking a renewable reading.
    solar_by_ts = {pair[0]: pair[1] for pair in rawgen_sol}
    wind_by_ts = {pair[0]: pair[1] for pair in rawgen_wind}
    mapped_totals = []
    for total in gen_total:
        dt = total[0]
        if (dt not in solar_by_ts) or (dt not in wind_by_ts):
            continue
        solar = solar_by_ts[dt]
        wind = wind_by_ts[dt]
        mapped_totals.append({
            # `dt` is a millisecond epoch; arrow wants seconds.
            'datetime': arrow.get(dt / 1000, tzinfo='Chile/Continental').datetime,
            'unknown': (total[1] - wind) - solar,
            'wind': wind,
            'solar': solar,
        })
    return mapped_totals
def check_verkle_proof(trie, keys, values, proof, display_times=True):
    """Check a multi-key Verkle proof against a serialized root commitment.

    `trie` is the serialized root commitment (it is deserialized with
    blst.P1 below, like the other commitments); `proof` bundles the
    per-key depths, the serialized inner-node commitments sorted by node
    index, and the KZG multiproof elements (D, y, sigma). Returns the
    boolean result of check_kzg_multiproof.
    """
    start_logging_time_if_eligible(' Starting proof check', display_times)
    (depths, commitments_sorted_by_index_serialized, D_serialized, y, sigma_serialized) = proof
    # Deserialize the root plus every proved inner-node commitment.
    commitments_sorted_by_index = ([blst.P1(trie)] + [blst.P1(x) for x in commitments_sorted_by_index_serialized])
    all_indices = set()
    all_indices_and_subindices = set()
    leaf_values_by_index_and_subindex = {}
    # Reconstruct, from the keys alone, which node indices (and which child
    # sub-indices of each) the proof must cover, plus the claimed leaf
    # hashes at the bottom of each path.
    for (key, value, depth) in zip(keys, values, depths):
        verkle_indices = get_verkle_indices(key)
        for i in range(depth):
            all_indices.add(verkle_indices[:i])
            all_indices_and_subindices.add((verkle_indices[:i], verkle_indices[i]))
        leaf_values_by_index_and_subindex[(verkle_indices[:(depth - 1)], verkle_indices[(depth - 1)])] = hash([key, value])
    # Sorting fixes the order both prover and verifier agree on.
    all_indices = sorted(all_indices)
    all_indices_and_subindices = sorted(all_indices_and_subindices)
    log_time_if_eligible(' Computed indices', 30, display_times)
    # Pair node indices with their commitments (same sort order on both
    # sides), then resolve the commitment for every (index, subindex).
    commitments_by_index = {index: commitment for (index, commitment) in zip(all_indices, commitments_sorted_by_index)}
    commitments_by_index_and_subindex = {index_and_subindex: commitments_by_index[index_and_subindex[0]] for index_and_subindex in all_indices_and_subindices}
    subhashes_by_index_and_subindex = {}
    # The claimed value at each (index, subindex) is the hash of the child
    # commitment for inner nodes, or the leaf hash computed above.
    for index_and_subindex in all_indices_and_subindices:
        full_subindex = (index_and_subindex[0] + (index_and_subindex[1],))
        if (full_subindex in commitments_by_index):
            subhashes_by_index_and_subindex[index_and_subindex] = hash(commitments_by_index[full_subindex])
        else:
            subhashes_by_index_and_subindex[index_and_subindex] = leaf_values_by_index_and_subindex[index_and_subindex]
    # Assemble the parallel lists (identically sorted) for the KZG check.
    Cs = list(map((lambda x: x[1]), sorted(commitments_by_index_and_subindex.items())))
    indices = list(map((lambda x: x[1]), sorted(all_indices_and_subindices)))
    ys = list(map((lambda x: int.from_bytes(x[1], 'little')), sorted(subhashes_by_index_and_subindex.items())))
    log_time_if_eligible(' Recreated commitment lists', 30, display_times)
    return check_kzg_multiproof(Cs, indices, ys, [D_serialized, y, sigma_serialized], display_times)
# NOTE(review): the bare "(scope='session')" fragment below is a mangled
# "@pytest.fixture(scope='session')" decorator lost in extraction; restore
# it from the original source — as written this line does not parse.
(scope='session')
def django_db_modify_db_settings_xdist_suffix(request):
    """Give each pytest-xdist worker its own Django test-database suffix."""
    # Bail out of collection when Django isn't importable/configured.
    skip_if_no_django()
    # Apply a suffix only inside an xdist worker, and not in the process
    # that merely triggers the initial test-DB setup.
    if (is_xdist_worker(request) and (not is_test_db_setup_trigger(request))):
        worker_id = get_xdist_worker_id(request)
        suffix = transform_xdist_worker_id_to_django_test_db_id(worker_id)
        _set_suffix_to_test_databases(suffix=suffix)
def test_envelope_serialization():
    """An Envelope survives a write_envelope / envelope_from_bytes round trip."""
    original = Envelope(
        to='to',
        sender='sender',
        protocol_specification_id=PublicId('author', 'name', '0.1.0'),
        message=b'',
    )
    with tempfile.TemporaryDirectory() as workdir:
        target = Path(os.path.join(workdir, 'output_file'))
        # Serialize to disk, then read the raw bytes back.
        with target.open(mode='wb') as stream:
            write_envelope(original, stream)
        restored = envelope_from_bytes(target.read_bytes())
        assert original == restored
def test_dump_load_with_password():
    """Dump an encrypted key, then reload it only with the correct password."""
    import re  # local: escape the literal error message used as a regex below
    with tempfile.TemporaryDirectory() as dirname:
        encrypted_file_name = Path(dirname, 'eth_key_encrypted')
        password = 'somePwd'
        ec = EthereumCrypto()
        ec.dump(encrypted_file_name, password)
        assert encrypted_file_name.exists()
        # BUG fix: `match` is a regular expression, so the bare trailing '?'
        # made the final character optional instead of matching a literal
        # question mark; escape the whole message.
        with pytest.raises(DecryptError, match=re.escape('Decrypt error! Bad password?')):
            EthereumCrypto.load_private_key_from_path(encrypted_file_name, 'wrongPassw')
        # The correct password must reproduce the identical private key.
        ec2 = EthereumCrypto(encrypted_file_name, password)
        assert (ec2.private_key == ec.private_key)
class OptionSeriesHeatmapSonificationContexttracksMappingLowpassResonance(Options):
    """Generated Highcharts wrapper for the lowpass-resonance mapping.

    BUG fix: each option was defined twice (getter then setter) with no
    decorators, so each setter shadowed its getter; the ``@property`` /
    ``@<name>.setter`` decorators are restored — behavior otherwise unchanged.
    """

    @property
    def mapFunction(self):
        """``mapFunction`` option."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """``mapTo`` option."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """``max`` option."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """``min`` option."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """``within`` option."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class OpUtil(object):
    """Utility helpers for Elyra's generic-operation bootstrapper.

    NOTE(review): every method below takes ``cls`` yet carries no decorator —
    the ``@classmethod`` decorators were evidently lost in extraction and
    should be restored from the original source before executing this.
    """

    def package_install(cls, user_volume_path) -> None:
        """Reconcile required Elyra packages against what is installed and
        pip-install anything missing or out of date."""
        OpUtil.log_operation_info('Installing packages')
        t0 = time.time()
        requirements_file = cls.determine_elyra_requirements()
        elyra_packages = cls.package_list_to_dict(requirements_file)
        current_packages = cls.package_list_to_dict('requirements-current.txt')
        to_install_list = []
        for (package, ver) in elyra_packages.items():
            if (package in current_packages):
                # Editable installs carry no recorded version — leave them alone.
                if (current_packages[package] is None):
                    logger.warning(f"WARNING: Source package '{package}' found already installed as an editable package. This may conflict with the required version: {ver} . Skipping...")
                    continue
                # Unparseable versions (e.g. direct URLs) are also skipped.
                try:
                    version.Version(current_packages[package])
                except version.InvalidVersion:
                    logger.warning(f"WARNING: Source package '{package}' found already installed from {current_packages[package]}. This may conflict with the required version: {ver} . Skipping...")
                    continue
                if (version.Version(ver) > version.Version(current_packages[package])):
                    logger.info(f'Updating {package} package from version {current_packages[package]} to {ver}...')
                    to_install_list.append(f'{package}=={ver}')
                elif (version.Version(ver) < version.Version(current_packages[package])):
                    # A newer version is already present; keep it.
                    logger.info(f'Newer {package} package with version {current_packages[package]} already installed. Skipping...')
            else:
                logger.info(f'Package not found. Installing {package} package with version {ver}...')
                to_install_list.append(f'{package}=={ver}')
        if to_install_list:
            if user_volume_path:
                # Install into the user volume rather than the image.
                to_install_list.insert(0, f'--target={user_volume_path}')
                to_install_list.append('--no-cache-dir')
            subprocess.run(([sys.executable, '-m', 'pip', 'install'] + to_install_list), check=True)
        if user_volume_path:
            os.environ['PIP_CONFIG_FILE'] = f'{user_volume_path}/pip.conf'
        # Log the final environment for diagnostics.
        subprocess.run([sys.executable, '-m', 'pip', 'freeze'])
        duration = (time.time() - t0)
        OpUtil.log_operation_info('Packages installed', duration)

    def determine_elyra_requirements(cls) -> Any:
        """Return the requirements file for supported Python versions,
        or None (with an error logged) when unsupported."""
        if (sys.version_info.major == 3):
            if (sys.version_info.minor in [8, 9, 10, 11]):
                return 'requirements-elyra.txt'
        logger.error(f"This version of Python '{sys.version_info.major}.{sys.version_info.minor}' is not supported for Elyra generic components")
        return None

    def package_list_to_dict(cls, filename: str) -> dict:
        """Parse a requirements-style file into {package: version-or-None}.

        NOTE(review): lines are indexed with ``line[0]`` — an empty line
        would raise IndexError; confirm input files never contain one.
        """
        package_dict = {}
        with open(filename) as fh:
            for line in fh:
                if (line[0] != '#'):
                    # Separator precedence: space, '===', '=='; editable
                    # installs ('-e' / '--editable') get a None version.
                    if (' ' in line):
                        (package_name, package_version) = line.strip('\n').split(sep=' ')
                    elif ('===' in line):
                        (package_name, package_version) = line.strip('\n').split(sep='===')
                    elif ('==' in line):
                        (package_name, package_version) = line.strip('\n').split(sep='==')
                    elif (line.startswith('-e ') or line.startswith('--editable ')):
                        package_name = line.strip('\n').replace('-e ', '').replace('--editable ', '')
                        if ('#egg=' in package_name):
                            # Use the egg fragment as the package name.
                            package_name = package_name.split('=')[(- 1)]
                        elif ('/' in package_name):
                            # Fall back to the last path component.
                            package_name = os.path.basename(package_name)
                        package_version = None
                    else:
                        # Unrecognized line shapes are ignored.
                        continue
                    package_dict[package_name] = package_version
        return package_dict

    def parse_arguments(cls, args) -> dict:
        """Parse bootstrapper CLI arguments.

        Side effect: sets the module-level ``pipeline_name`` and
        ``operation_name`` used as log prefixes.
        """
        import argparse
        global pipeline_name, operation_name
        logger.debug('Parsing Arguments.....')
        parser = argparse.ArgumentParser()
        parser.add_argument('-e', '--cos-endpoint', dest='cos-endpoint', help='Cloud object storage endpoint', required=True)
        parser.add_argument('-b', '--cos-bucket', dest='cos-bucket', help='Cloud object storage bucket to use', required=True)
        parser.add_argument('-d', '--cos-directory', dest='cos-directory', help='Working directory in cloud object storage bucket to use', required=True)
        parser.add_argument('-t', '--cos-dependencies-archive', dest='cos-dependencies-archive', help='Archive containing notebook and dependency artifacts', required=True)
        parser.add_argument('-f', '--file', dest='filepath', help='File to execute', required=True)
        parser.add_argument('-o', '--outputs', dest='outputs', help='Files to output to object store', required=False)
        parser.add_argument('-i', '--inputs', dest='inputs', help='Files to pull in from parent node', required=False)
        parser.add_argument('-p', '--user-volume-path', dest='user-volume-path', help='Directory in Volume to install python libraries into', required=False)
        parser.add_argument('-n', '--pipeline-name', dest='pipeline-name', help='Pipeline name', required=True)
        parser.add_argument('-r', '--pipeline-parameters', dest='pipeline_parameters', help='Pipeline parameters that apply to this node', required=False)
        parser.add_argument('-m', '--parameter-pass-method', dest='parameter_pass_method', choices=['env'], help='The method by which pipeline parameters should be applied to this node.', required=False)
        parsed_args = vars(parser.parse_args(args))
        pipeline_name = parsed_args.get('pipeline-name')
        # Operation name is derived from the executed file's basename.
        operation_name = os.path.basename(os.path.splitext(parsed_args.get('filepath'))[0])
        return parsed_args

    def log_operation_info(cls, action_clause: str, duration_secs: Optional[float]=None) -> None:
        """Emit a structured info log line, optionally with an elapsed-time
        clause, gated on the module-level ``enable_pipeline_info`` flag."""
        global pipeline_name, operation_name
        if enable_pipeline_info:
            duration_clause = (f'({duration_secs:.3f} secs)' if duration_secs else '')
            logger.info(f"'{pipeline_name}':'{operation_name}' - {action_clause} {duration_clause}")
class OptionPlotoptionsFunnelSonificationDefaultinstrumentoptionsMappingPitch(Options):
    """Generated Highcharts wrapper for the sonification pitch mapping.

    BUG fix: each option was defined twice (getter then setter) with no
    decorators, so each setter shadowed its getter; the ``@property`` /
    ``@<name>.setter`` decorators are restored — behavior otherwise unchanged.
    """

    @property
    def mapFunction(self):
        """``mapFunction`` option."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """``mapTo`` option (default 'y')."""
        return self._config_get('y')

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """``max`` option (default 'c6')."""
        return self._config_get('c6')

    @max.setter
    def max(self, text: str):
        self._config(text, js_type=False)

    @property
    def min(self):
        """``min`` option (default 'c2')."""
        return self._config_get('c2')

    @min.setter
    def min(self, text: str):
        self._config(text, js_type=False)

    @property
    def scale(self):
        """``scale`` option."""
        return self._config_get(None)

    @scale.setter
    def scale(self, value: Any):
        self._config(value, js_type=False)

    @property
    def within(self):
        """``within`` option (default 'yAxis')."""
        return self._config_get('yAxis')

    @within.setter
    def within(self, text: str):
        self._config(text, js_type=False)
class OptionSeriesWindbarbDatalabelsTextpath(Options):
    """Generated Highcharts wrapper for ``series.windbarb.dataLabels.textPath``.

    BUG fix: each option was defined twice (getter then setter) with no
    decorators, so each setter shadowed its getter; the ``@property`` /
    ``@<name>.setter`` decorators are restored — behavior otherwise unchanged.
    """

    @property
    def attributes(self):
        """``attributes`` option."""
        return self._config_get(None)

    @attributes.setter
    def attributes(self, value: Any):
        self._config(value, js_type=False)

    @property
    def enabled(self):
        """``enabled`` option (default False)."""
        return self._config_get(False)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
def show_update_warning():
    """Show the update-notification dialog and exit per the user's choice."""
    app = QApplication([])
    dialog = NotifyApp.NotifyDialog(Util.is_sdapp_halted())
    choice = dialog.run()
    if choice == NotifyApp.NotifyStatus.CHECK_UPDATES:
        # Hand off to the updater UI and exit with Qt's event-loop status.
        log.info('Launching Preflight Updater')
        updater = UpdaterApp.UpdaterApp()
        updater.show()
        sys.exit(app.exec_())
    if choice == NotifyApp.NotifyStatus.DEFER_UPDATES:
        log.info('User has deferred update check. sdw-notify will run again at the next scheduled interval.')
        sys.exit(0)
    # Any other result is unexpected; propagate it as the exit status.
    log.info('Unexpected result from NotifyDialog. sdw-notify will run again at the next scheduled interval.')
    sys.exit(choice)
# BUG fix: restored the '@' on register_type — as a bare statement the call
# would run but never attach/register the class defined below.
@_PathAttribute.register_type(BGP_ATTR_TYPE_NEXT_HOP)
class BGPPathAttributeNextHop(_PathAttribute):
    """BGP NEXT_HOP path attribute: a 4-byte IPv4 address."""

    _VALUE_PACK_STR = '!4s'
    _ATTR_FLAGS = BGP_ATTR_FLAG_TRANSITIVE
    _TYPE = {'ascii': ['value']}

    # BUG fix: parse_value takes `cls` and is used as an alternate
    # constructor hook; the `@classmethod` decorator was lost.
    @classmethod
    def parse_value(cls, buf):
        """Unpack the 4-byte address and return it dotted-quad."""
        (ip_addr,) = struct.unpack_from(cls._VALUE_PACK_STR, six.binary_type(buf))
        return {'value': addrconv.ipv4.bin_to_text(ip_addr)}

    def serialize_value(self):
        """Pack self.value (dotted-quad string) back into 4 network bytes."""
        buf = bytearray()
        msg_pack_into(self._VALUE_PACK_STR, buf, 0, addrconv.ipv4.text_to_bin(self.value))
        return buf
class OptionPlotoptionsBellcurveSonificationDefaultinstrumentoptionsPointgrouping(Options):
    """Generated Highcharts wrapper for sonification point-grouping options.

    BUG fix: each option was defined twice (getter then setter) with no
    decorators, so each setter shadowed its getter; the ``@property`` /
    ``@<name>.setter`` decorators are restored — behavior otherwise unchanged.
    """

    @property
    def algorithm(self):
        """``algorithm`` option (default 'minmax')."""
        return self._config_get('minmax')

    @algorithm.setter
    def algorithm(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """``enabled`` option (default True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def groupTimespan(self):
        """``groupTimespan`` option (default 15)."""
        return self._config_get(15)

    @groupTimespan.setter
    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    @property
    def prop(self):
        """``prop`` option (default 'y')."""
        return self._config_get('y')

    @prop.setter
    def prop(self, text: str):
        self._config(text, js_type=False)
def pql_table_substract(t1: T.table, t2: T.table):
    """Table subtraction via SQL EXCEPT, dialect-aware.

    MySQL has no EXCEPT support at all; BigQuery requires the DISTINCT
    qualifier.
    """
    if get_db().target is sql.mysql:
        raise Signal.make(T.NotImplementedError, t1, "MySQL doesn't support EXCEPT (yeah, really!)")
    op = 'EXCEPT DISTINCT' if get_db().target == sql.bigquery else 'EXCEPT'
    return sql_bin_op(op, t1, t2, 'subtract')
class OptionSeriesPackedbubbleStatesHoverHalo(Options):
    """Generated Highcharts wrapper for ``series.packedbubble.states.hover.halo``.

    BUG fix: each option was defined twice (getter then setter) with no
    decorators, so each setter shadowed its getter; the ``@property`` /
    ``@<name>.setter`` decorators are restored — behavior otherwise unchanged.
    """

    @property
    def attributes(self):
        """``attributes`` option."""
        return self._config_get(None)

    @attributes.setter
    def attributes(self, value: Any):
        self._config(value, js_type=False)

    @property
    def opacity(self):
        """``opacity`` option (default 0.25)."""
        return self._config_get(0.25)

    @opacity.setter
    def opacity(self, num: float):
        self._config(num, js_type=False)

    @property
    def size(self):
        """``size`` option (default 5)."""
        return self._config_get(5)

    @size.setter
    def size(self, num: float):
        self._config(num, js_type=False)
class HDF5Reader():
    """Lazily parses typed message logs back out of an HDF5 file.

    NOTE(review): ``logs`` follows the cached-lazy-accessor pattern but
    carries no decorator here — confirm whether the original declared it
    ``@property`` before calling it without parentheses.
    """

    def __init__(self, path: FILELIKE_T, log_types: Dict[(str, Type[Message])]) -> None:
        self.path = path
        # Map of HDF5 dataset key -> Message subclass used to decode its rows.
        self.log_types = log_types
        # Parse cache; populated on first access via _parse().
        self._logs: Optional[Dict[(str, List[Message])]] = None

    def logs(self) -> Optional[Dict[(str, List[Message])]]:
        """Return the parsed logs, parsing the file on first use."""
        if (self._logs is None):
            self._parse()
        return self._logs

    def _parse(self) -> None:
        """Decode every configured dataset into a list of Message instances."""
        self._logs = {}
        with h5py.File(self.path, 'r') as f:
            for (key, type_) in self.log_types.items():
                if (key not in f):
                    # Missing datasets are tolerated, with a warning.
                    LOGGER.warning(f'{key} not found in h5 file, skipping.')
                    continue
                messages = []
                for raw in f[key]:
                    kwargs = {}
                    raw_values = tuple(raw)
                    # Fields are decoded positionally, in declaration order.
                    for (index, field) in enumerate(type_.__message_fields__.values()):
                        if isinstance(field.data_type, SERIALIZABLE_DYNAMIC_TYPES):
                            # Dynamic types round-trip through raw bytes.
                            value = field.data_type.postprocess(bytes(raw_values[index]))
                        else:
                            value = get_deserialized_value(raw_values[index], field.data_type)
                        kwargs[field.name] = value
                    messages.append(type_(**kwargs))
                self._logs[key] = messages
def set_owner_defaults(apps, schema_editor):
    """Data migration: backfill Activity.owner from speaker_contact emails.

    For each owner-less Activity, get or create the User (username derived
    from the email local part plus a random suffix) and its EventUser, then
    attach that EventUser as the activity owner.
    """
    Activity = apps.get_model('manager', 'Activity')
    EventUser = apps.get_model('manager', 'EventUser')
    User = apps.get_model(settings.AUTH_USER_MODEL)
    qs = Activity.objects.filter(owner=None)
    for activity in qs:
        email = activity.speaker_contact
        # BUG fix: split('') raises ValueError (empty separator is illegal);
        # the intent is the local part of the address, i.e. split('@').
        username = '{}-{}'.format(email.split('@')[0], get_random_string(4))
        password = make_password(None)
        defaults = {'username': username, 'password': password}
        try:
            (user, created) = User.objects.get_or_create(email=email, defaults=defaults)
            if created:
                print('new user created: {}, (activity: {})'.format(user.email, activity.title))
        except MultipleObjectsReturned:
            # BUG fix: `user` was referenced here before being bound
            # (get_or_create raised) — report the email instead, then pick
            # a deterministic fallback row.
            print('Multiple EventUser returned: {}, (activity: {})'.format(email, activity.title))
            user = User.objects.filter(email=email).first()
        try:
            (event_user, created) = EventUser.objects.get_or_create(user=user, defaults={'event': activity.event})
            if created:
                print('new EventUser created: {}, (activity: {})'.format(event_user, activity.title))
        except MultipleObjectsReturned:
            print('Multiple EventUser returned: {}, (activity: {})'.format(user, activity.title))
            event_user = EventUser.objects.filter(user=user).first()
        activity.owner = event_user
        activity.save()
class VersionUid(str, StorageKeyMixIn['VersionUid']):
    """A validated version identifier, stored as a plain (immutable) string.

    BUG fix: ``storage_prefix`` and ``_storage_key_to_object`` take ``cls``
    but carried no ``@classmethod`` decorator, so instance calls would have
    received the instance as ``cls``; the decorators are restored.
    """

    def __new__(cls, uid: str):
        # Validate at construction time; the value is immutable afterwards.
        if (not isinstance(uid, str)):
            raise InternalError(f'Unexpected type {type(uid)} in constructor.')
        if (not InputValidation.is_version_uid(uid)):
            raise InputDataError('Version name {} is invalid.'.format(uid))
        return str.__new__(cls, uid)

    # Key prefix under which versions live in the storage backend.
    _STORAGE_PREFIX = 'versions/'

    @classmethod
    def storage_prefix(cls) -> str:
        """Storage key prefix for all version objects."""
        return cls._STORAGE_PREFIX

    def _storage_object_to_key(self) -> str:
        """This object's storage key is simply its string value."""
        return str(self)

    @classmethod
    def _storage_key_to_object(cls, key: str) -> 'VersionUid':
        """Reconstruct (and re-validate) a VersionUid from a storage key."""
        return VersionUid(key)
class CmdNewPassword(COMMAND_DEFAULT_CLASS):
    """Admin command: set a new password on another account.

    Usage: userpassword <user obj> = <new password>
    """

    key = 'userpassword'
    locks = 'cmd:perm(newpassword) or perm(Admin)'
    help_category = 'Admin'

    def func(self):
        """Validate and apply the new password, notifying all parties."""
        caller = self.caller
        if (not self.rhs):
            self.msg('Usage: userpassword <user obj> = <new password>')
            return
        account = caller.search_account(self.lhs)
        if (not account):
            return
        newpass = self.rhs
        (validated, error) = account.validate_password(newpass)
        if (not validated):
            # BUG fix: the original comprehension iterated error.messages in
            # both loops ([e for suberror in error.messages for e in
            # error.messages]), duplicating every message len(messages)
            # times; the flat message list is already what we want.
            caller.msg('\n'.join(error.messages))
            return
        account.set_password(newpass)
        account.save()
        self.msg(f"{account.name} - new password set to '{newpass}'.")
        if (account.character != caller):
            account.msg(f"{caller.name} has changed your password to '{newpass}'.")
        logger.log_sec(f'Password Changed: {account} (Caller: {caller}, IP: {self.session.address}).')
def test_branch(converter):
    """Each comparison operator in a Branch maps to the matching world predicate."""
    world = converter._world
    lhs = world.variable('x', 32)
    rhs = world.constant(1, 32)

    # (source operation, expected logic-world predicate)
    cases = [
        (OperationType.equal, world.bool_equal),
        (OperationType.not_equal, world.bool_unequal),
        (OperationType.less, world.signed_lt),
        (OperationType.less_or_equal, world.signed_le),
        (OperationType.greater, world.signed_gt),
        (OperationType.greater_or_equal, world.signed_ge),
    ]
    for operation, predicate in cases:
        converted = converter.convert(Branch(Condition(operation, [var_x.copy(), const_1.copy()])))
        assert (converted == predicate(lhs, rhs))
# NOTE(review): the bare `_dict` below looks like a mangled decorator
# (likely eth-utils `@to_dict`, which collects the yielded pairs into a
# dict) whose '@to' prefix was lost in extraction — confirm upstream.
_dict
def normalize_vmtest_fixture(fixture: Dict[(str, Any)]) -> Iterable[Tuple[(str, Any)]]:
    """Yield (key, normalized-value) pairs for a VM-test JSON fixture.

    'env', 'exec' and 'pre' are mandatory; the remaining keys are only
    emitted when present in the fixture.
    """
    (yield ('env', normalize_environment(fixture['env'])))
    (yield ('exec', normalize_exec(fixture['exec'])))
    (yield ('pre', normalize_account_state(fixture['pre'])))
    if ('post' in fixture):
        (yield ('post', normalize_account_state(fixture['post'])))
    if ('callcreates' in fixture):
        (yield ('callcreates', normalize_callcreates(fixture['callcreates'])))
    if ('gas' in fixture):
        # gas arrives hex/str-encoded; convert to int.
        (yield ('gas', to_int(fixture['gas'])))
    if ('out' in fixture):
        # out/logs arrive as 0x-prefixed hex strings; decode to bytes.
        (yield ('out', decode_hex(fixture['out'])))
    if ('logs' in fixture):
        (yield ('logs', decode_hex(fixture['logs'])))
class ASMLinesmoothPC(ASMPatchPC):
    """Patch-based smoother collecting vertical-line patches on extruded meshes."""

    _prefix = 'pc_linesmooth_'

    def get_patches(self, V):
        """Return one PETSc IS of dof indices per mesh point of the requested codimensions."""
        mesh = V._mesh
        assert mesh.cell_set._extruded  # only meaningful on extruded meshes
        dm = mesh.topology_dm
        section = V.dm.getDefaultSection()
        # Codimensions to patch over, user-configurable, default "0, 1".
        raw = PETSc.Options().getString((self.prefix + 'codims'), '0, 1')
        codims = [int(token) for token in raw.split(',')]
        patches = []
        for codim in codims:
            start, end = dm.getHeightStratum(codim)
            for point in range(start, end):
                # Skip ghost (halo) points; they belong to another process.
                if (dm.getLabelValue('pyop2_ghost', point) != (- 1)):
                    continue
                ndof = section.getDof(point)
                if (ndof <= 0):
                    continue
                offset = section.getOffset(point)
                block = numpy.arange((offset * V.value_size), (V.value_size * (offset + ndof)), dtype=IntType)
                patches.append(PETSc.IS().createGeneral(block, comm=PETSc.COMM_SELF))
        return patches
def test_destroy_delete(db):
    """destroy() must delete the saved row and clear its key attributes in memory."""

    def destroyed(model, **fields):
        # Save a fresh row, destroy it, and check the common post-conditions.
        row = model.new(**fields)
        row.save()
        assert row.destroy()
        assert (not row._concrete)
        return row

    # Default single-key model: id cleared, data fields retained.
    row = destroyed(Standard, foo='test1', bar='test2')
    assert (row.id is None)
    assert (row.foo == 'test1')

    # User-supplied id: id cleared, data fields retained.
    row = destroyed(CustomType, id='test1', foo='test2', bar='test3')
    assert (row.id is None)
    assert (row.foo == 'test2')

    # Custom key name: the key field is cleared instead of id.
    row = destroyed(CustomName, foo='test1', bar='test2')
    assert (row.foo is None)
    assert (row.bar == 'test2')

    # Composite key: every key field is cleared.
    row = destroyed(CustomMulti, foo='test1', bar='test2', baz='test3')
    assert (row.foo is None)
    assert (row.bar is None)
    assert (row.baz == 'test3')
class Backend(ModelNormal):
    """Auto-generated OpenAPI model describing a Fastly service backend.

    NOTE(review): this class is machine-generated (openapi-generator style).
    The bare names `_property` / `_js_args_to_python_args` preceding the
    `def`s below read like decorators (e.g. `@cached_property`,
    `@convert_js_args_to_python_args`) whose leading characters were lost
    in extraction — confirm against the generated original before merging.
    """

    # No enum-constrained attributes on this model.
    allowed_values = {}

    # Per-attribute validation rules applied on assignment.
    validations = {('share_key',): {'regex': {'pattern': '^[A-Za-z0-9]+$'}}}

    _property
    def additional_properties_type():
        # Types accepted for properties not declared in openapi_types.
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property
    def openapi_types():
        # Attribute name -> tuple of accepted Python types.
        return {'address': (str,), 'auto_loadbalance': (bool,), 'between_bytes_timeout': (int,),
                'client_cert': (str, none_type), 'comment': (str, none_type), 'connect_timeout': (int,),
                'first_byte_timeout': (int,), 'healthcheck': (str, none_type), 'hostname': (str, none_type),
                'ipv4': (str, none_type), 'ipv6': (str, none_type), 'keepalive_time': (int, none_type),
                'max_conn': (int,), 'max_tls_version': (str, none_type), 'min_tls_version': (str, none_type),
                'name': (str,), 'override_host': (str, none_type), 'port': (int,),
                'request_condition': (str,), 'share_key': (str, none_type), 'shield': (str, none_type),
                'ssl_ca_cert': (str, none_type), 'ssl_cert_hostname': (str, none_type),
                'ssl_check_cert': (bool, none_type), 'ssl_ciphers': (str, none_type),
                'ssl_client_cert': (str, none_type), 'ssl_client_key': (str, none_type),
                'ssl_hostname': (str, none_type), 'ssl_sni_hostname': (str, none_type),
                'use_ssl': (bool,), 'weight': (int,)}

    _property
    def discriminator():
        # No polymorphic discriminator for this model.
        return None

    # Python attribute name -> JSON key (identity mapping here).
    attribute_map = {'address': 'address', 'auto_loadbalance': 'auto_loadbalance',
                     'between_bytes_timeout': 'between_bytes_timeout', 'client_cert': 'client_cert',
                     'comment': 'comment', 'connect_timeout': 'connect_timeout',
                     'first_byte_timeout': 'first_byte_timeout', 'healthcheck': 'healthcheck',
                     'hostname': 'hostname', 'ipv4': 'ipv4', 'ipv6': 'ipv6',
                     'keepalive_time': 'keepalive_time', 'max_conn': 'max_conn',
                     'max_tls_version': 'max_tls_version', 'min_tls_version': 'min_tls_version',
                     'name': 'name', 'override_host': 'override_host', 'port': 'port',
                     'request_condition': 'request_condition', 'share_key': 'share_key',
                     'shield': 'shield', 'ssl_ca_cert': 'ssl_ca_cert',
                     'ssl_cert_hostname': 'ssl_cert_hostname', 'ssl_check_cert': 'ssl_check_cert',
                     'ssl_ciphers': 'ssl_ciphers', 'ssl_client_cert': 'ssl_client_cert',
                     'ssl_client_key': 'ssl_client_key', 'ssl_hostname': 'ssl_hostname',
                     'ssl_sni_hostname': 'ssl_sni_hostname', 'use_ssl': 'use_ssl', 'weight': 'weight'}

    read_only_vars = {}

    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        # Deserialization constructor used when building the model from API data.
        # Pop framework-internal kwargs before treating the rest as attributes.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            # Positional arguments are never valid for generated models.
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when the configuration says so and
            # the model declares no additional-properties type.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attributes that may be set even on read-only models.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        # User-facing constructor; mirrors _from_openapi_data but additionally
        # rejects assignment of read-only attributes.
        # NOTE(review): the ApiTypeError message below was split mid-string by
        # extraction; rejoined to match the identical message above — confirm.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
def _to_select_type(selector: str) -> SelectType:
    """Classify a selector string into its SelectType.

    A ':'-prefixed form is either a TAG selector ('tag:...') or a COMPLEX
    one; otherwise script nodes map to SCRIPT and everything else to MODEL.
    """
    head, sep, _tail = selector.partition(':')
    if sep:
        return SelectType.TAG if head == 'tag' else SelectType.COMPLEX
    if _is_script_node(selector):
        return SelectType.SCRIPT
    return SelectType.MODEL
def load_archived_murs(mur_no=None):
    """Index archived MUR documents into Elasticsearch under ARCH_MUR_ALIAS.

    When ``mur_no`` is None all archived MURs are loaded; otherwise only the
    given one. Per-document failures are logged and do not stop the loop.
    """
    es_client = create_es_client()
    mur_count = 0
    # Refuse to load anything if the target alias does not exist.
    if es_client.indices.exists(index=ARCH_MUR_ALIAS):
        for mur in get_murs(mur_no):
            if (mur is not None):
                try:
                    logger.info('Loading archived MUR No: {0}'.format(mur['no']))
                    es_client.index(ARCH_MUR_ALIAS, mur, id=mur['doc_id'])
                    mur_count += 1
                    logger.info('{0} Archived Mur(s) loaded'.format(mur_count))
                except Exception as err:
                    logger.error('An error occurred while uploading archived mur:\nmur no={0} \nerr={1}'.format(mur['no'], err))
                    # NOTE(review): the collapsed source makes the placement of
                    # the two debug lines ambiguous; they are kept inside the
                    # except block, the only place mur_debug_data is surely
                    # bound — confirm against the original file.
                    mur_debug_data = mur
                    logger.debug(('mur_data count=' + str(mur_count)))
                    logger.debug(('mur_debug_data =' + json.dumps(mur_debug_data, indent=3, cls=DateTimeEncoder)))
    else:
        logger.error(" The index alias '{0}' is not found, cannot load arch mur".format(ARCH_MUR_ALIAS))
def create_site_in_bench(args):
    """Create a new Frappe site inside the bench ``args.bench_name``.

    Configures the bench's global db_host for the chosen database type,
    builds the ``bench new-site`` command (installing every app already
    present in the bench's ``apps`` directory except frappe, which
    new-site installs implicitly), and runs it.
    """
    # Compute the bench path once instead of rebuilding it four times.
    bench_path = os.path.join(os.getcwd(), args.bench_name)

    # The two db types differed only in host name and one mariadb-specific flag;
    # fold the duplicated branches. f-prefixes on placeholder-less literals removed.
    if args.db_type == 'mariadb':
        db_host = 'mariadb'
        extra_flags = ['--no-mariadb-socket']
    else:
        db_host = 'postgresql'
        extra_flags = []

    cprint('Set db_host', level=3)
    subprocess.call(['bench', 'set-config', '-g', 'db_host', db_host], cwd=bench_path)
    new_site_cmd = ['bench', 'new-site',
                    f'--db-host={db_host}',
                    f'--db-type={args.db_type}',
                    *extra_flags,
                    '--db-root-password=123',
                    f'--admin-password={args.admin_password}']

    apps = os.listdir(os.path.join(bench_path, 'apps'))
    apps.remove('frappe')  # installed implicitly by `bench new-site`
    new_site_cmd.extend(f'--install-app={app}' for app in apps)
    new_site_cmd.append(args.site_name)

    cprint(f'Creating Site {args.site_name} ...', level=2)
    subprocess.call(new_site_cmd, cwd=bench_path)
class TestRegexUrlPath(URLPatternsTestCase, TestCase):
    """Regex-based URL kwargs must be forwarded to list and detail views."""

    urlpatterns = [path('regex/', include(regex_url_path_router.urls))]

    def test_regex_url_path_list(self):
        kwarg = '1234'
        response = self.client.get('/regex/list/{}/'.format(kwarg))
        payload = json.loads(response.content.decode())
        assert (response.status_code == 200)
        assert (payload == {'kwarg': kwarg})

    def test_regex_url_path_detail(self):
        pk = '1'
        kwarg = '1234'
        response = self.client.get('/regex/{}/detail/{}/'.format(pk, kwarg))
        payload = json.loads(response.content.decode())
        assert (response.status_code == 200)
        assert (payload == {'pk': pk, 'kwarg': kwarg})
('/render_rsc', methods=['GET']) def render_resource(): req_url = request.args.get('url') if (not req_url): return render_template('error.html', title='Resource Render', message='Error! No page specified!') ignore_cache = request.args.get('nocache') try: print(("Returning content response for '%s'" % (req_url,))) (mimetype, fname, content, cachestate) = WebMirror.API.getResource(req_url, ignore_cache=ignore_cache) print(("Response for '%s' read from disk" % (req_url,))) except sqlalchemy.exc.InvalidRequestError: sys.exit(1) if (mimetype == 'image/webp'): img = Image.open(io.BytesIO(content)) if (hasattr(img, 'rgb_mode') and (img.rgb_mode == 'RGBX')): img = img.convert('RGBA') try: out = io.BytesIO() img.save(out, format='png') except (KeyError, OSError): img = img.convert('RGBA') out = io.BytesIO() img.save(out, format='png') content = out.getvalue() mimetype = 'img/png' fname = (fname + '.png') response = make_response(content) response.headers['Content-Type'] = mimetype response.headers['Content-Disposition'] = 'attachment; filename="{}"'.format(fname) return set_cache_control_headers(response)
def test_linear_solves_equivalent():
    """Variational and pre-assembled linear solves of the same problem must agree."""
    mesh = UnitSquareMesh(50, 50)
    space = FunctionSpace(mesh, 'CG', 1)
    source = Function(space)
    source.assign(1)
    source.vector()[:] = 1.0
    v = TestFunction(space)
    u = TrialFunction(space)
    a = inner(u, v) * dx
    L = inner(source, v) * dx

    # Two variational solves must be bitwise identical.
    first = Function(space)
    solve(a == L, first)
    second = Function(space)
    solve(a == L, second)
    assert (np_norm((first.vector()[:] - second.vector()[:])) == 0)

    # Assembled solve into a Function agrees to round-off.
    third = Function(space)
    solve(assemble(a), third, assemble(L))
    assert (np_norm((first.vector()[:] - third.vector()[:])) < 5e-14)

    # Assembled solve into a raw Vector likewise agrees.
    vec = third.vector()
    solve(assemble(a), vec, assemble(L))
    assert (np_norm((first.vector()[:] - vec[:])) < 5e-14)
def init_model(name: str='htdemucs', device: Optional[Union[(str, torch.device)]]=None, segment: Optional[int]=None) -> torch.nn.Module:
    """Load a Demucs model by name in eval mode.

    Optionally moves it to *device* and overrides the segment length on the
    model (or on every sub-model when the model is a bag of models).
    """
    model = get_model(name)
    model.eval()
    if device is not None:
        model.to(device)
        logger.info(f'Model {name} loaded on {device}')
    if isinstance(model, BagOfModels) and (len(model.models) > 1):
        logger.info(f'Selected model is a bag of {len(model.models)} models. You will see {len(model.models)} progress bars per track.')
    if segment is not None:
        # Apply the override uniformly, treating a single model as a bag of one.
        targets = model.models if isinstance(model, BagOfModels) else [model]
        for sub_model in targets:
            sub_model.segment = segment
    return model
def make_aea_bumper(new_aea_version: Version) -> PythonPackageVersionBumper:
    """Build a version bumper for the AEA package rooted at ROOT_DIR."""
    # Regexes locating aea version specifiers in the tracked files.
    specifier_patterns = [
        '(?<=aea_version:) *({specifier_set})',
        '(?<={package_name})({specifier_set})',
    ]
    return PythonPackageVersionBumper(
        ROOT_DIR,
        AEA_DIR,
        new_aea_version,
        specifier_set_patterns=specifier_patterns,
        files_to_pattern=AEA_PATHS,
    )
class TariffPrice(models.Model):
    """Drug Tariff price (in pence) for a VMPP on a given date."""

    date = models.DateField(db_index=True)
    # dm+d Virtual Medicinal Product Pack the price applies to.
    vmpp = models.ForeignKey('dmd.VMPP', on_delete=models.DO_NOTHING)
    # Drug Tariff payment category for this price entry.
    tariff_category = models.ForeignKey('dmd.DtPaymentCategory', on_delete=models.DO_NOTHING)
    # Price in pence (integer, avoids float rounding of currency).
    price_pence = models.IntegerField()

    class Meta():
        # At most one price per VMPP per date.
        unique_together = ('date', 'vmpp')
def test_sign_and_recover_message(fetchai_private_key_file):
    """Signing a message then recovering addresses must yield the signer's address."""
    crypto = FetchAICrypto(fetchai_private_key_file)
    payload = b'hello'
    signature = crypto.sign_message(message=payload)
    assert (len(signature) > 0), 'The len(signature) must not be 0'
    recovered = FetchAIApi.recover_message(message=payload, signature=signature)
    assert (crypto.address in recovered), 'Failed to recover the correct address.'
def wipe_node_shutdown_metadata(client):
    """Best-effort removal of all node-shutdown records on the cluster.

    Authorization failures anywhere in the process are silently ignored.
    """
    try:
        status = client.shutdown.get_node()
        # Clusters without the shutdown API route the call to a generic nodes
        # endpoint, recognizable by these top-level keys; nothing to wipe then.
        if ('_nodes' in status) and ('cluster_name' in status):
            return
        for record in status.get('nodes', []):
            client.shutdown.delete_node(node_id=record['node_id'])
    except AuthorizationException:
        # Caller may lack the privilege; treat the wipe as best-effort.
        pass