code
stringlengths
281
23.7M
def generate_dates(end_str, months=None):
    """Return 'YYYY-MM-01' strings for the `months` months ending at `end_str`.

    `months` defaults to DEFAULT_NUM_MONTHS; the list is ordered oldest first
    and includes the month of `end_str` itself.
    """
    if months is None:
        months = DEFAULT_NUM_MONTHS
    end_date = parse_date(end_str)
    assert months > 0
    dates = []
    # Offsets run from -(months-1) up to 0, i.e. oldest month first.
    for offset in range(1 - months, 1):
        d = increment_months(end_date, offset)
        dates.append(f'{d[0]:04d}-{d[1]:02d}-01')
    return dates
def _alchemy_mock_engine(adapter: SQLAdapter):
    """Build a SQLAlchemy mock engine whose dialect matches the dbt adapter.

    Athena needs a fully-populated awsathena URL so the dialect can be
    resolved; every other adapter type only needs its scheme.
    """
    url_string = f'{adapter.type()}://'
    if adapter.type() == 'athena':
        creds = adapter.config.credentials
        url_string = (
            'awsathena+rest://athena.{region_name}.amazonaws.com:443/'
            '{schema_name}?s3_staging_dir={s3_staging_dir}&work_group=primary'
        ).format(
            region_name=creds.region_name,
            schema_name=creds.schema,
            # The staging dir is an s3:// URI and must be URL-encoded.
            s3_staging_dir=quote_plus(creds.s3_staging_dir),
        )

    def null_dump(sql, *multiparams, **params):
        # Swallow all emitted SQL: the engine exists only for compilation.
        pass

    return sqlalchemy.create_mock_engine(url_string, executor=null_dump)
def extractFourseasonsforestWordpressCom(item):
    """Build a release message from a feed item.

    Returns None for previews or items with no chapter/volume info,
    False when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(
            item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type
        )
    return False
@_renderer(wrap_type=TestRecallByClass)  # NOTE(review): '@' restored; lost in formatting
class TestRecallByClassRenderer(TestRenderer):
    """Renderer that augments the base test HTML with a confusion-matrix plot."""

    def render_html(self, obj: TestRecallByClass) -> TestHtmlInfo:
        info = super().render_html(obj)
        # Plot current vs. reference confusion matrices side by side.
        curr_matrix = obj.conf_matrix.get_result().current_matrix
        ref_matrix = obj.conf_matrix.get_result().reference_matrix
        matrix_fig = plot_conf_mtrx(curr_matrix, ref_matrix)
        info.with_details('Recall by Class', plotly_figure(title='', figure=matrix_fig))
        return info
def run_emulated(simulation: td.Simulation, path=None, **kwargs) -> td.SimulationData:
    """Emulate a Tidy3D solver run: fill every monitor with smoothed random data.

    No actual FDTD solve happens; each monitor type gets a maker function that
    produces plausibly-shaped random arrays (fixed seed, gaussian-smoothed).
    If `path` is given the resulting SimulationData is also written to file.
    """
    from scipy.ndimage.filters import gaussian_filter

    def make_data(coords: dict, data_array_type: type, is_complex: bool=False) -> td.components.data.data_array.DataArray:
        """Random DataArray shaped by `coords`; seeded so results are reproducible."""
        data_shape = [len(coords[k]) for k in data_array_type._dims]
        np.random.seed(1)
        data = np.random.random(data_shape)
        data = (((1 + 1j) * data) if is_complex else data)
        # Smooth so the fake data varies gently, like physical fields would.
        data = gaussian_filter(data, sigma=1.0)
        data_array = data_array_type(data, coords=coords)
        return data_array

    def make_field_data(monitor: td.FieldMonitor) -> td.FieldData:
        """Fake FieldData on the monitor's discretized grid."""
        field_cmps = {}
        coords = {}
        grid = simulation.discretize_monitor(monitor)
        for field_name in monitor.fields:
            spatial_coords_dict = grid[field_name].dict()
            for (axis, dim) in enumerate('xyz'):
                if (monitor.size[axis] == 0):
                    # Zero-size dimension collapses onto the monitor center plane.
                    coords[dim] = [monitor.center[axis]]
                else:
                    coords[dim] = np.array(spatial_coords_dict[dim])
            coords['f'] = list(monitor.freqs)
            field_cmps[field_name] = make_data(coords=coords, data_array_type=td.ScalarFieldDataArray, is_complex=True)
        return td.FieldData(monitor=monitor, symmetry=(0, 0, 0), symmetry_center=simulation.center, grid_expanded=grid, **field_cmps)

    def make_eps_data(monitor: td.PermittivityMonitor) -> td.PermittivityData:
        """Fake permittivity data, reusing the field-data maker via a FieldMonitor clone."""
        field_mnt = td.FieldMonitor(**monitor.dict(exclude={'type', 'fields'}))
        field_data = make_field_data(monitor=field_mnt)
        return td.PermittivityData(monitor=monitor, eps_xx=field_data.Ex, eps_yy=field_data.Ey, eps_zz=field_data.Ez, grid_expanded=simulation.discretize_monitor(monitor))

    def make_diff_data(monitor: td.DiffractionMonitor) -> td.DiffractionData:
        """Fake diffraction data on a fixed 3x5 grid of diffraction orders."""
        f = list(monitor.freqs)
        orders_x = np.linspace((- 1), 1, 3)
        orders_y = np.linspace((- 2), 2, 5)
        coords = dict(orders_x=orders_x, orders_y=orders_y, f=f)
        values = np.random.random((len(orders_x), len(orders_y), len(f)))
        data = td.DiffractionDataArray(values, coords=coords)
        # Same random array reused for all six field components.
        field_data = {field: data for field in ('Er', 'Etheta', 'Ephi', 'Hr', 'Htheta', 'Hphi')}
        return td.DiffractionData(monitor=monitor, sim_size=(1, 1), bloch_vecs=(0, 0), **field_data)

    def make_mode_data(monitor: td.ModeMonitor) -> td.ModeData:
        """Fake mode indices and amplitudes for each frequency/mode index."""
        _ = np.arange(monitor.mode_spec.num_modes)
        coords_ind = {'f': list(monitor.freqs), 'mode_index': np.arange(monitor.mode_spec.num_modes)}
        n_complex = make_data(coords=coords_ind, data_array_type=td.ModeIndexDataArray, is_complex=True)
        coords_amps = dict(direction=['+', '-'])
        coords_amps.update(coords_ind)
        amps = make_data(coords=coords_amps, data_array_type=td.ModeAmpsDataArray, is_complex=True)
        return td.ModeData(monitor=monitor, n_complex=n_complex, amps=amps)

    # Dispatch exact monitor type -> data maker; unknown types raise KeyError.
    MONITOR_MAKER_MAP = {td.FieldMonitor: make_field_data, td.ModeMonitor: make_mode_data, td.PermittivityMonitor: make_eps_data, td.DiffractionMonitor: make_diff_data}
    data = [MONITOR_MAKER_MAP[type(mnt)](mnt) for mnt in simulation.monitors]
    sim_data = td.SimulationData(simulation=simulation, data=data)
    if (path is not None):
        sim_data.to_file(str(path))
    return sim_data
def setup_hooks():
    """Register SIAC UI/scheduling callbacks on their lifecycle hooks."""
    hook_table = [
        ('editor-with-siac-initialised', lambda: set_editor_ready()),
        ('user-note-created', lambda: UI.sidebar.refresh_tab(1)),
        ('user-note-created', lambda: try_repeat_last_search()),
        ('user-note-deleted', lambda: UI.sidebar.refresh_tab(1)),
        ('user-note-deleted', lambda: recalculate_priority_queue()),
        ('user-note-deleted', lambda: try_repeat_last_search()),
        ('user-note-edited', lambda: UI.sidebar.refresh_tab(1)),
        ('user-note-edited', lambda: Reader.reload_bottom_bar()),
        ('user-note-edited', lambda: try_repeat_last_search()),
        ('updated-schedule', lambda: recalculate_priority_queue()),
        ('updated-schedule', lambda: Reader.reload_bottom_bar()),
        ('reading-modal-closed', lambda: UI.sidebar.refresh_tab(1)),
        ('reading-modal-closed', lambda: try_repeat_last_search()),
    ]
    # Registration order matters within one event name, so iterate in order.
    for event, callback in hook_table:
        add_hook(event, callback)
def __b85_decode(data, lut, byte_order, special_values=None):
    """Decode base-85 encoded bytes back to raw bytes.

    Each 5-character group encodes one 32-bit integer with digits weighted
    85**4 .. 85**0, packed with `byte_order` (b'>' or b'<').

    Args:
        data: encoded bytes, length a multiple of 5 (after substitutions).
        lut: maps each single-byte character (bytes of length 1) to its digit.
        byte_order: struct byte-order prefix, e.g. b'>'.
        special_values: optional map of canonical group -> shorthand sequence;
            shorthands are expanded back before decoding.

    Fix: the 85**4 coefficient (52200625) was missing from the first digit's
    term, so the most significant digit contributed nothing (and the line was
    a syntax error).
    """
    if special_values:
        for key in special_values.keys():
            data = data.replace(special_values[key], key)
    parts = []
    parts_append = parts.append  # hoisted bound methods for the hot loop
    pack = struct.pack
    byte_format = (b'%sI' % byte_order)
    for i in range(0, len(data), 5):
        int_sum = (
            (52200625 * lut[data[i:(i + 1)]])        # 85**4
            + (614125 * lut[data[(i + 1):(i + 2)]])  # 85**3
            + (7225 * lut[data[(i + 2):(i + 3)]])    # 85**2
            + (85 * lut[data[(i + 3):(i + 4)]])
            + lut[data[(i + 4):(i + 5)]]
        )
        parts_append(pack(byte_format, int_sum))
    return b''.join(parts)
def main():
    """CLI entry point: convert a word-list TSV into the merge TSV format.

    Reads `--input`, fills in missing columns (interactively guessing
    'new_para' when absent) and writes lemma/homonym/new_para/origin rows
    to `--output`.
    """
    ap = argparse.ArgumentParser(description='Prepare list of words into TSV format for merge')
    ap.add_argument('--quiet', '-q', action='store_false', dest='verbose', default=False,
                    help='do not print output to stdout while processing')
    ap.add_argument('--verbose', '-v', action='store_true', default=False,
                    help='print each step to stdout while processing')
    ap.add_argument('--version', '-V', action='version')
    ap.add_argument('--input', '-i', required=True, dest='infilename', metavar='IFILE',
                    help='read dictionary data from IFILE')
    ap.add_argument('--output', '-o', action='store', required=True, dest='outfilename',
                    metavar='OFILE', help='write resulting data to OFILE')
    ap.add_argument('--fields', '-f', action='store', type=int, default=3, metavar='N',
                    help='read N fields from master')
    ap.add_argument('--separator', '-s', action='store', default='\t', metavar='SEP',
                    help='use SEP as separator')
    # Fix: help text read 'COMMENT thatdo not have'.
    ap.add_argument('--comment', '-C', action='append', default=['#'], metavar='COMMENT',
                    help='skip lines starting with COMMENT that do not have SEPs')
    ap.add_argument('--strip', '-S', action='store', metavar='STRIP',
                    help='strip STRIP from fields before using')
    ap.add_argument('--ignore-errors', '-I', action='store_true', default=False,
                    help='silently ignore references to entries missing from master file')
    args = ap.parse_args()
    # NOTE(review): 'quoting' is computed but never passed to a csv reader or
    # writer anywhere in this function — confirm whether it was meant to be used.
    if (args.strip == '"') or (args.strip == "'"):
        quoting = csv.QUOTE_ALL
    else:
        quoting = csv.QUOTE_NONE
    entry_count = 0
    # Context managers guarantee both files are closed on every exit path
    # (the output file was previously leaked on error).
    with open(args.outfilename, 'w') as output:
        print('lemma', 'homonym', 'new_para', 'origin', sep='\t', file=output)
        with open(args.infilename, 'r', newline='') as tsv_file:
            if args.verbose:
                print('Reading dictionary from', args.infilename)
            tsv_reader = csv.DictReader(tsv_file, delimiter=args.separator, strict=True)
            for tsv_parts in tsv_reader:
                if 'lemma' not in tsv_parts:
                    print('Need at least a lemma column...:', tsv_parts)
                    exit(1)
                if 'new_para' not in tsv_parts:
                    tsv_parts['new_para'] = guess_new_para_interactive(tsv_parts)
                # Entries without a paradigm cannot be merged; skip them.
                if not tsv_parts['new_para']:
                    continue
                if 'homonym' not in tsv_parts:
                    # Paradigm names are '<homonym>_<class>'; take the first part.
                    tsv_parts['homonym'] = tsv_parts['new_para'].split('_')[0]
                if 'origin' not in tsv_parts:
                    tsv_parts['origin'] = 'omorfi'
                entry_count += 1
                print(tsv_parts['lemma'], tsv_parts['homonym'], tsv_parts['new_para'],
                      tsv_parts['origin'], sep='\t', file=output)
    if args.verbose:
        print('\n', entry_count, 'entries in database')
    exit()
def test_observation_keys(facade):
    """Every data-type key maps to exactly its known observation keys (or [])."""
    expected_obs = {
        'FOPR': ['FOPR'],
        'WOPR:OP1': ['WOPR_OP1_108', 'WOPR_OP1_144', 'WOPR_OP1_190',
                     'WOPR_OP1_36', 'WOPR_OP1_72', 'WOPR_OP1_9'],
        'SNAKE_OIL_WPR_': ['WPR_DIFF_1'],
    }
    for data_key in facade.all_data_type_keys():
        assert facade.observation_keys(data_key) == expected_obs.get(data_key, [])
class PhoneNumberField(TelField):
    """TelField that validates input and normalizes it to E.164 format."""

    def process_data(self, value):
        # Object-data path: missing/unset values clear the field.
        if value is None or value is unset_value:
            self.data = None
            return
        self.data = self._validate_phone_number(value)

    def process_formdata(self, valuelist):
        # Form-data path: nothing submitted leaves self.data untouched.
        if not valuelist:
            return
        self.data = self._validate_phone_number(valuelist[0])

    def _validate_phone_number(self, value: str) -> str:
        """Parse and validate `value`; return it formatted as E.164.

        Raises ValueError when parsing fails (missing country code) or the
        number is not valid.
        """
        try:
            parsed_number = phonenumbers.parse(value)
        except phonenumbers.phonenumberutil.NumberParseException as exc:
            raise ValueError(_('The country code is missing.')) from exc
        if not phonenumbers.is_valid_number(parsed_number):
            raise ValueError(_('This phone number is invalid.'))
        return phonenumbers.format_number(parsed_number, phonenumbers.PhoneNumberFormat.E164)
def assert_config_keystore(config):
    """Raise FDroidException unless `config` describes a usable signing key.

    Each missing/invalid setting is logged individually before raising.
    """
    missing_key = False
    if 'repo_keyalias' not in config:
        missing_key = True
        logging.critical(_("'repo_keyalias' not found in config.yml!"))
    if 'keystore' not in config:
        missing_key = True
        logging.critical(_("'keystore' not found in config.yml!"))
    elif config['keystore'] == 'NONE':
        # keystore NONE means a smartcard must be configured instead.
        if not config.get('smartcardoptions'):
            missing_key = True
            logging.critical(_("'keystore' is NONE and 'smartcardoptions' is blank!"))
    elif not os.path.exists(config['keystore']):
        missing_key = True
        logging.critical("'" + config['keystore'] + "' does not exist!")
    if 'keystorepass' not in config:
        missing_key = True
        logging.critical(_("'keystorepass' not found in config.yml!"))
    if 'keypass' not in config and config.get('keystore') != 'NONE':
        missing_key = True
        logging.critical(_("'keypass' not found in config.yml!"))
    if missing_key:
        raise FDroidException(
            'This command requires a signing key, '
            + 'you can create one using: fdroid update --create-key'
        )
def get_matrix_png(matrix, module_width_px=10):
    """Render a module matrix (e.g. a barcode/QR grid) into an in-memory PNG.

    matrix: dict mapping (x, y) -> module value; 0 paints white, any other
    value — including missing cells, which read as -1 — paints black.
    A one-module white border is painted around the content.
    Returns a BytesIO containing the encoded PNG.
    """
    max_x = max([x for (x, y) in matrix.keys()])
    min_x = min([x for (x, y) in matrix.keys()])
    max_y = max([y for (x, y) in matrix.keys()])
    min_y = min([y for (x, y) in matrix.keys()])
    # Canvas: content extent plus borders (hence the +3 modules per axis).
    img = Image.new('RGB', ((module_width_px * ((max_x - min_x) + 3)), (module_width_px * ((max_y - min_y) + 3))))
    # Top border row.
    img.paste('white', (0, 0, (module_width_px * ((max_x - min_x) + 3)), module_width_px))
    for y in range(min_y, (max_y + 1)):
        posy = ((y + 1) * module_width_px)
        # Left border column for this row.
        img.paste('white', (0, posy, module_width_px, (posy + module_width_px)))
        for x in range(min_x, (max_x + 1)):
            posx = ((x + 1) * module_width_px)
            color = ('white' if (matrix.get((x, y), (- 1)) == 0) else 'black')
            img.paste(color, (posx, posy, (posx + module_width_px), (posy + module_width_px)))
        # Right border column (reuses this row's posy).
        img.paste('white', (((max_x + 2) * module_width_px), posy, ((max_x + 3) * module_width_px), (posy + module_width_px)))
    # Bottom border row.
    # NOTE(review): pixel positions use (x + 1)/(y + 1) directly, which is only
    # consistent with the canvas size when min_x == 0 and min_y == 0; negative
    # minima would paint off-canvas — confirm against callers.
    img.paste('white', (0, (module_width_px * ((max_y - min_y) + 2)), (module_width_px * ((max_x - min_x) + 3)), (module_width_px * ((max_y - min_y) + 3))))
    image_bytes = BytesIO()
    img.save(image_bytes, 'png')
    return image_bytes
# NOTE(review): the two bare string expressions below look like mock.patch
# decorators that lost their '@mock.patch(' prefix during formatting — the test
# function takes mock_run/mock_delete arguments, which only decorators would
# inject. Restore them upstream before relying on this test.
('shutil.rmtree')
('ciftify.bidsapp.fmriprep_ciftify.run')
def test_ux16_will_rerun_incomplete_ciftify_recon_all_for_ds005(mock_run, mock_delete, outputdir):
    """--rerun-if-incomplete must delete the stale subject dir and rerun ciftify_recon_all once."""
    participant_label = '14'
    # Fake a previous run whose log tail marks it incomplete.
    sub_dir = fake_complete_default_ciftify_recon_all_dir(outputdir, participant_label, logtext=incomplete_log_tail, with_T1w32k=False)
    uargs = [ds005_bids, outputdir, 'participant', '--participant_label={}'.format(participant_label), '--rerun-if-incomplete', '--surf-reg', 'FS']
    ret = simple_main_run(uargs)
    call_list = parse_call_list_into_strings(mock_run.call_args_list)
    assert (count_calls_to('ciftify_recon_all', call_list) == 1)
    # The stale subject directory must be the first thing removed.
    assert (mock_delete.call_args_list[0][0][0] == sub_dir)
def log_disk_setting(data, fos):
    """Push the 'log.disk setting' configuration to FortiOS.

    Flattens multilist attributes, filters to supported keys and converts
    underscores to hyphens before calling `fos.set`.
    """
    setting = flatten_multilists_attributes(data['log_disk_setting'])
    payload = underscore_to_hyphen(filter_log_disk_setting_data(setting))
    return fos.set('log.disk', 'setting', data=payload, vdom=data['vdom'])
def python_2_unicode_compatible(klass):
    """Class decorator: on Python 2, route __str__ through a UTF-8-encoding shim.

    On Python 3 the class is returned unchanged. On Python 2 the class must
    define __str__ (returning text); it becomes __unicode__ and __str__ is
    replaced with a UTF-8 encoder.
    """
    if sys.version_info[0] != 2:
        # Python 3: str already means text, nothing to do.
        return klass
    if '__str__' not in klass.__dict__:
        raise ValueError(("_2_unicode_compatible cannot be applied to %s because it doesn't define __str__()." % klass.__name__))
    klass.__unicode__ = klass.__str__
    klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass
class ValueRecordFactory(object):
    """Compile/decompile OpenType GPOS ValueRecords for one ValueFormat bitmask."""

    def __init__(self, valueFormat):
        # Keep only the (name, isDevice, signed) entries whose bit is set in
        # valueFormat; this fixes the field order used by read/write below.
        format = []
        for (mask, name, isDevice, signed) in valueRecordFormat:
            if (valueFormat & mask):
                format.append((name, isDevice, signed))
        self.format = format

    def __len__(self):
        # Number of 16-bit fields a record of this format occupies.
        return len(self.format)

    def readValueRecord(self, reader, font):
        """Read one ValueRecord from `reader`; returns None for an empty format."""
        format = self.format
        if (not format):
            return None
        valueRecord = ValueRecord()
        for (name, isDevice, signed) in format:
            if signed:
                value = reader.readShort()
            else:
                value = reader.readUShort()
            if isDevice:
                if value:
                    # Non-zero means an offset to a Device subtable: decompile it.
                    from . import otTables
                    subReader = reader.getSubReader(value)
                    value = getattr(otTables, name)()
                    value.decompile(subReader, font)
                else:
                    # NULL offset: no device table for this field.
                    value = None
            setattr(valueRecord, name, value)
        return valueRecord

    def writeValueRecord(self, writer, font, valueRecord):
        """Write valueRecord's fields in format order; absent fields write as 0."""
        for (name, isDevice, signed) in self.format:
            value = getattr(valueRecord, name, 0)
            if isDevice:
                if value:
                    # Emit a 2-byte offset to the compiled Device subtable.
                    subWriter = writer.getSubWriter()
                    writer.writeSubTable(subWriter, offsetSize=2)
                    value.compile(subWriter, font)
                else:
                    writer.writeUShort(0)
            elif signed:
                writer.writeShort(value)
            else:
                writer.writeUShort(value)
class Notmuch(Backend):
    """Unread-mail counter backed by a local notmuch database."""

    settings = (
        ('db_path', 'Path to the directory of your notmuch database'),
        ('query', "Same query notmuch would accept, by default 'tag:unread and tag:inbox'"),
    )
    db_path = None
    query = 'tag:unread and tag:inbox'

    def init(self):
        # Without an explicit db_path, read it from the notmuch config
        # ($NOTMUCH_CONFIG if set, falling back to ~/.notmuch-config).
        if not self.db_path:
            default_config = os.path.expanduser('~/.notmuch-config')
            parser = configparser.RawConfigParser()
            parser.read([os.environ.get('NOTMUCH_CONFIG', default_config), default_config])
            self.db_path = parser.get('database', 'path')

    def unread(self):
        """Return the number of messages matching self.query."""
        db = notmuch.Database(self.db_path)
        count = notmuch.Query(db, self.query).count_messages()
        db.close()
        return count
def test_liquid_rescale():
    """liquid_rescale to 600x600 must resize, unless the LQR delegate is absent."""
    with Image(filename='wizard:') as orig, orig.clone() as img:
        try:
            img.liquid_rescale(600, 600)
        except MissingDelegateError:
            # ImageMagick built without liblqr: warn and skip instead of failing.
            warnings.warn('skip liquid_rescale test; has no LQR delegate')
        else:
            assert img.size == (600, 600)
def test_simple_renaming_relation(graph_with_relation):
    """Renaming appends each variable's SSA label; related var_1c copies share a name."""
    task, interference_graph = graph_with_relation
    renamer = SimpleVariableRenamer(task, interference_graph)
    var_28 = Variable('var_28', Pointer(Integer(32, True), 32), 1, False, None)
    var_1c = [Variable('var_1c', Integer(32, True), i, True, None) for i in range(5)]
    var_1c_new = [Variable(f'var_1c_{i}', Integer(32, True)) for i in range(5)]
    edx_3 = Variable('edx_3', Integer(32, True), 4, False, None)
    eax_7 = Variable('eax_7', Integer(32, True), 8, False, None)
    expected = {
        var_28: Variable('var_28_1', Pointer(Integer(32, True), 32)),
        edx_3: Variable('edx_3_4', Integer(32, True)),
        eax_7: Variable('eax_7_8', Integer(32, True)),
        # var_1c copies 0 and 2 are related and collapse to the same new name.
        var_1c[0]: var_1c_new[0],
        var_1c[2]: var_1c_new[0],
        var_1c[3]: var_1c_new[3],
        var_1c[4]: var_1c_new[4],
    }
    assert renamer.renaming_map == expected
def test_migrate_surface(data, storage, parameter, ens_config):
    """Surface parameters migrated via block_fs must round-trip through new storage."""
    parameters = bf._migrate_surface_info(parameter, ens_config)
    experiment = storage.create_experiment(parameters=parameters)
    ensemble = experiment.create_ensemble(name='default', ensemble_size=5)
    bf._migrate_surface(ensemble, parameter, ens_config)
    for key, var in data['/REAL_0/SURFACE'].groups.items():
        expected = list(sorted_surface(var))
        loaded = ensemble.load_parameters(key, 0)['values'].values.ravel()
        assert expected == list(loaded), key
def extractSomesoltranslationsBlogspotCom(item):
    """Build a release message from a feed item.

    Returns None for previews or items with no chapter/volume info,
    False when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(
            item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type
        )
    return False
def extractWwwJukepopCom(item):
    """Build a release message from a feed item.

    Returns None for previews or items with no chapter/volume info,
    False when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(
            item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type
        )
    return False
def validate_saas_secrets_external_references(db: Session, schema: SaaSSchema, connection_secrets: ConnectionConfigSecretsSchema) -> None:
    """Validate every external reference declared by the SaaS schema.

    Each reference must point at a valid dataset and use direction 'from';
    a 'to' direction raises ValidationError.
    """
    for reference_name in schema.external_references():
        dataset_reference: FidesDatasetReference = getattr(connection_secrets, reference_name)
        if dataset_reference.direction == 'to':
            raise ValidationError("External references can only have a direction of 'from', found 'to'")
        validate_dataset_reference(db, dataset_reference)
def shape(array: Union[(float, ArrayLike)]) -> Tuple[(int, ...)]:
    """Return the shape of a (possibly nested) sequence; scalars yield ().

    A level that mixes scalars and sequences raises ValueError ('ragged').
    """
    if isinstance(array, Sequence):
        s = (len(array),)
        if (not s[0]):
            # Empty sequence: shape is simply (0,).
            return s
        is_scalar = False
        all_scalar = True
        # Classify elements: all scalars, all sequences, or a ragged mix.
        for a in array:
            if (not isinstance(a, Sequence)):
                is_scalar = True
                if (not all_scalar):
                    break
            else:
                all_scalar = False
        if is_scalar:
            if all_scalar:
                return s
            raise ValueError('Ragged lists are not supported')
        # NOTE(review): recurses via a helper `_shape(array, n)` defined
        # elsewhere, passing len(array[0]) as a second argument — this function
        # itself only takes one argument, so confirm `_shape`'s contract.
        return (s + _shape(array, len(cast(ArrayLike, array[0]))))
    else:
        # Non-sequence (scalar) input.
        return tuple()
@_stats_reply_type(ofproto.OFPST_FLOW)  # NOTE(review): '@' restored; lost in formatting
class OFPFlowStats(StringifyMixin):
    """Body of one OFPST_FLOW stats-reply entry."""

    def __init__(self, table_id, duration_sec, duration_nsec, priority,
                 idle_timeout, hard_timeout, cookie, packet_count, byte_count,
                 match, instructions=None, length=None):
        super(OFPFlowStats, self).__init__()
        self.table_id = table_id
        self.duration_sec = duration_sec
        self.duration_nsec = duration_nsec
        self.priority = priority
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.cookie = cookie
        self.packet_count = packet_count
        self.byte_count = byte_count
        self.match = match
        self.instructions = instructions
        self.length = length

    # NOTE(review): @classmethod restored — the first parameter is `cls` and the
    # body ends with `cls(...)`, so this was clearly a classmethod that lost its
    # decorator in formatting.
    @classmethod
    def parser(cls, buf, offset):
        """Parse one flow-stats entry from `buf` at `offset`; returns an instance."""
        (length, table_id, duration_sec, duration_nsec, priority,
         idle_timeout, hard_timeout, cookie, packet_count,
         byte_count) = struct.unpack_from(ofproto.OFP_FLOW_STATS_PACK_STR, buf, offset)
        # Skip the fixed header, minus the match placeholder it embeds.
        offset += (ofproto.OFP_FLOW_STATS_SIZE - ofproto.OFP_MATCH_SIZE)
        match = OFPMatch.parser(buf, offset)
        # Matches are padded to 8-byte alignment on the wire.
        match_length = utils.round_up(match.length, 8)
        # Whatever remains of `length` after header + match is instructions.
        inst_length = (length - ((ofproto.OFP_FLOW_STATS_SIZE - ofproto.OFP_MATCH_SIZE) + match_length))
        offset += match_length
        instructions = []
        while inst_length > 0:
            inst = OFPInstruction.parser(buf, offset)
            instructions.append(inst)
            offset += inst.len
            inst_length -= inst.len
        o = cls(table_id, duration_sec, duration_nsec, priority, idle_timeout,
                hard_timeout, cookie, packet_count, byte_count, match, instructions)
        o.length = length
        return o
def main(n_epochs) -> None:
    """Train a CartPole policy with (locally emulated) evolution strategies.

    Builds the env, a single-step torch policy, the ES trainer, and runs
    `n_epochs` of training with dummy distributed rollouts.
    """
    env = GymMazeEnv(env='CartPole-v0')
    distribution_mapper = DistributionMapper(action_space=env.action_space,
                                             distribution_mapper_config={})
    obs_shapes = observation_spaces_to_in_shapes(env.observation_spaces_dict)
    action_shapes = {
        step_key: {
            action_head: distribution_mapper.required_logits_shape(action_head)
            for action_head in env.action_spaces_dict[step_key].spaces.keys()
        }
        for step_key in env.action_spaces_dict.keys()
    }
    # Single-step env: only step key 0 needs a network.
    policies = [PolicyNet(obs_shapes=obs_shapes[0],
                          action_logits_shapes=action_shapes[0],
                          non_lin=nn.SELU)]
    policy = TorchPolicy(networks=list_to_dict(policies),
                         distribution_mapper=distribution_mapper,
                         device='cpu')
    shared_noise = SharedNoiseTable(count=1000000)
    algorithm_config = ESAlgorithmConfig(
        n_rollouts_per_update=100,
        n_timesteps_per_update=0,
        max_steps=0,
        optimizer=Adam(step_size=0.01),
        l2_penalty=0.005,
        noise_stddev=0.02,
        n_epochs=n_epochs,
        policy_wrapper=None,
    )
    trainer = ESTrainer(algorithm_config=algorithm_config,
                        torch_policy=policy,
                        shared_noise=shared_noise,
                        normalization_stats=None)
    setup_logging(job_config=None)
    maze_rng = np.random.RandomState(None)
    rollouts = ESDummyDistributedRollouts(
        env=env,
        n_eval_rollouts=10,
        shared_noise=shared_noise,
        agent_instance_seed=MazeSeeding.generate_seed_from_random_state(maze_rng),
    )
    trainer.train(rollouts, model_selection=None)
def gen_proto_py(out_path):
    """Generate the Python proto-factory module at `out_path`.

    From the module-level `method_to_id` mapping, emits four code sections
    (id->name map, name->id map, str_ name constants, name->type map) and
    writes them into PY_PROTO_FACTORY_TEMPLATE.

    Fix: the output file is now opened with a context manager so it is closed
    even if formatting/writing raises.
    """
    output_def = io.StringIO()
    output_mid = io.StringIO()
    output_name = io.StringIO()
    output_obj = io.StringIO()
    for (k, v) in method_to_id.items():
        output_def.write("proto_id2name[{}] = '{}'\n".format(v, k))
        output_mid.write("proto_name2id['{}'] = {}\n".format(k, v))
        output_name.write("str_{0} = '{0}'\n".format(k))
        output_obj.write("proto_name2type['{0}'] = {0}\n".format(k))
    final_str = (output_def.getvalue() + output_mid.getvalue()
                 + output_name.getvalue() + output_obj.getvalue())
    with open(out_path, 'w', encoding='UTF-8', newline='') as fout:
        fout.write(PY_PROTO_FACTORY_TEMPLATE.format(proto_id_code=final_str))
def test_address_email_address():
    """Bracketed IP 'domains' count as both email addresses and plain IPs."""
    iocs = find_iocs('>[192.168.2.1]<')
    assert iocs['email_addresses_complete'] == ['[192.168.2.1]']
    assert iocs['email_addresses'] == ['[192.168.2.1]']
    assert iocs['ipv4s'] == ['192.168.2.1']

    iocs = find_iocs('[192.168.7.3]')
    assert iocs['ipv4s'] == ['192.168.7.3']
    assert iocs['email_addresses_complete'] == ['[192.168.7.3]']
    assert iocs['email_addresses'] == ['[192.168.7.3]']

    # Trailing junk after the bracket must not break the match.
    iocs = find_iocs('[192.168.7.3]aaaaa')
    assert iocs['email_addresses_complete'] == ['[192.168.7.3]']
    assert iocs['email_addresses'] == ['[192.168.7.3]']

    # IPv6 literal form with its required 'IPv6:' prefix.
    iocs = find_iocs('[IPv6:2001:db8::1]')
    assert iocs['email_addresses_complete'] == ['[IPv6:2001:db8::1]']
    assert iocs['email_addresses'] == ['[IPv6:2001:db8::1]']
    assert iocs['ipv6s'] == ['2001:db8::1']
def extractWwwLightbringersandlamplightersCom(item):
    """Build a release message from a feed item.

    Returns None for previews or items with no chapter/volume info,
    False when no known tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(
            item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type
        )
    return False
def test_cli_defaults():
    """A registered command receives positional, defaulted, list and optional-list args."""
    cli = Radicli()
    ran = False

    # NOTE(review): '@cli.command' restored — the decorator lost its '@' and
    # attribute prefix in formatting; without it the inner function is never
    # registered and cli.run(...) cannot dispatch to it.
    @cli.command('test', a=Arg(), b=Arg(), c=Arg('--c'), d=Arg('--d'))
    def test(a: str, b: str = 'hey', *, c: List[str], d: Optional[List[int]] = None):
        assert a == 'yo'
        assert b == 'hey'          # default preserved when not supplied
        assert c == ['one']
        assert d is None           # optional list stays None when absent
        nonlocal ran
        ran = True

    cli.run(['', 'test', 'yo', '--c', 'one'])
    assert ran
class BaseThrottle():
    """Base rate throttle: subclasses implement the actual decision logic."""

    def allow_request(self, request, view):
        # Must be implemented by subclasses.
        raise NotImplementedError('.allow_request() must be overridden')

    def get_ident(self, request):
        """Best-effort client address, honouring NUM_PROXIES for X-Forwarded-For."""
        xff = request.META.get('HTTP_X_FORWARDED_FOR')
        remote_addr = request.META.get('REMOTE_ADDR')
        num_proxies = api_settings.NUM_PROXIES
        if num_proxies is None:
            # No proxy configuration: trust XFF wholesale (whitespace removed).
            return ''.join(xff.split()) if xff else remote_addr
        if num_proxies == 0 or xff is None:
            return remote_addr
        # Take the address added by the first trusted proxy from the right.
        addrs = xff.split(',')
        client_addr = addrs[-min(num_proxies, len(addrs))]
        return client_addr.strip()

    def wait(self):
        # Optional: seconds until the next request is allowed; None = unknown.
        return None
def int3c2e3d_sph_122(ax, da, A, bx, db, B, cx, dc, C):
    """Machine-generated three-center two-electron integral kernel.

    Fills and returns a ``(3, 6, 6)`` numpy array of integral components
    from the Gaussian exponents (``ax``, ``bx``, ``cx``), the scalar
    prefactors (``da``, ``db``, ``dc``) and the three centres ``A``, ``B``,
    ``C`` (indexable as 3-vectors).  Depends on the module-level ``boys``
    function (Boys function values) and ``numpy``.

    NOTE(review): generated code (common-subexpression form) — do not
    hand-edit the expressions.  The bare float literals ``17.``, ``0.`` and
    ``1.`` look like truncated constants (cf. ``x58 = 0. * x57`` which
    zeroes many terms) — verify against the code generator's output.
    The ``_sph_122`` suffix presumably encodes the angular-momentum pattern
    of the shells — TODO confirm against the generator.
    """
    result = numpy.zeros((3, 6, 6), dtype=float)
    # --- common subexpressions x0..x341 emitted by the generator's CSE ---
    x0 = (A[0] - B[0])
    x1 = (ax + bx)
    x2 = (x1 ** (- 1.0))
    x3 = ((- x2) * ((ax * A[0]) + (bx * B[0])))
    x4 = (x3 + C[0])
    x5 = (- x4)
    x6 = (cx + x1)
    x7 = (x6 ** (- 1.0))
    x8 = ((- x2) * ((ax * A[1]) + (bx * B[1])))
    x9 = (x8 + C[1])
    x10 = (- x9)
    x11 = ((- x2) * ((ax * A[2]) + (bx * B[2])))
    x12 = (x11 + C[2])
    x13 = (- x12)
    x14 = (cx * x7)
    x15 = (x1 * x14)
    x16 = (x15 * (((x10 ** 2) + (x13 ** 2)) + (x5 ** 2)))
    x17 = boys(3, x16)
    x18 = (x17 * x7)
    x19 = (x3 + A[0])
    x20 = (- x19)
    x21 = (cx ** (- 1.0))
    x22 = (x21 * boys(2, x16))
    x23 = ((x18 * x5) - (x20 * x22))
    x24 = (x23 * x4)
    x25 = (x4 ** 2)
    x26 = (x9 ** 2)
    x27 = (x12 ** 2)
    x28 = (x15 * ((x25 + x26) + x27))
    x29 = boys(2, x28)
    x30 = ((x2 * x21) * x29)
    x31 = ((2.0 * x24) + x30)
    x32 = (x18 - x22)
    x33 = (x2 * x32)
    x34 = (2.0 * x19)
    x35 = boys(4, x16)
    x36 = (x35 * x7)
    x37 = (x17 * x21)
    x38 = (((- x20) * x37) + (x36 * x5))
    x39 = (2.0 * x4)
    x40 = (x14 * x39)
    x41 = ((((- x23) * x34) + x33) + (x38 * x40))
    x42 = (- x41)
    x43 = (x2 * x23)
    x44 = ((x4 * x42) + (2.0 * x43))
    x45 = ((x2 * x31) + (x4 * x44))
    x46 = (x24 + x30)
    x47 = (x0 * x39)
    x48 = (1.5 * x2)
    x49 = (x36 - x37)
    x50 = (x2 * x49)
    x51 = (x7 * boys(5, x16))
    x52 = (x14 * x4)
    x53 = (((x19 * x42) - ((2.0 * x2) * ((x14 * x38) - x23))) - (x52 * ((((2.0 * x19) * x38) + (x40 * (((x20 * x21) * x35) - (x5 * x51)))) - x50)))
    x54 = (0.5 * x4)
    x55 = (A[1] - B[1])
    x56 = (A[2] - B[2])
    x57 = ((((((17. * da) * db) * dc) * x1) * (x6 ** (- 2.5))) * numpy.exp(((((- ax) * bx) * x2) * (((x0 ** 2) + (x55 ** 2)) + (x56 ** 2)))))
    x58 = (0. * x57)
    x59 = (x7 * boys(3, x28))
    x60 = (((x19 * x21) * x29) - (x4 * x59))
    x61 = ((((- 2.0) * x2) * x60) + (x4 * x41))
    x62 = (- x61)
    x63 = 1.
    x64 = (x58 * x63)
    x65 = (x64 * ((((x0 * x62) - (x0 * ((x0 * x31) + x61))) - (x4 * x53)) + (x41 * x48)))
    x66 = (2.0 * x0)
    x67 = (x58 * (((x0 * x41) + (x0 * ((x23 * x66) + x41))) + x53))
    x68 = (x8 + A[1])
    x69 = (- x68)
    x70 = ((x10 * x18) - (x22 * x69))
    x71 = (x2 * x70)
    x72 = (- x71)
    x73 = (- x70)
    x74 = ((x10 * x36) - ((x17 * x21) * x69))
    x75 = (- x74)
    x76 = (x14 * x75)
    x77 = (((- x20) * x73) + (x5 * x76))
    x78 = (x4 * x77)
    x79 = (x72 + (2.0 * x78))
    x80 = (x2 * (x70 + x76))
    x81 = ((x10 * x51) - ((x21 * x35) * x69))
    x82 = (- x81)
    x83 = (x14 * x5)
    x84 = ((((- x34) * x77) - (x40 * ((x20 * x75) - (x82 * x83)))) + x80)
    x85 = (- x84)
    x86 = (x2 * x77)
    x87 = (2.0 * x86)
    x88 = ((x4 * x85) + x87)
    x89 = (x72 + x78)
    x90 = ((x46 * x55) + x89)
    x91 = (x31 * x55)
    x92 = (2.0 * x9)
    x93 = (x70 * x9)
    x94 = (x30 + (2.0 * x93))
    x95 = (x2 * x94)
    x96 = (- x43)
    x97 = (x77 * x9)
    x98 = (x96 + (2.0 * x97))
    x99 = ((x39 * x98) - x95)
    x100 = ((x91 * x92) + x99)
    x101 = (0.5 * x0)
    x102 = ((- x2) * x42)
    x103 = (x85 * x9)
    x104 = (x2 * x98)
    x105 = (- x104)
    x106 = (x105 - (x54 * (x102 + (2.0 * x103))))
    x107 = (x79 + x91)
    x108 = (((- x19) * x70) + (x52 * x74))
    x109 = ((((- 2.0) * x108) * x2) + (x4 * x84))
    x110 = (x12 * x57)
    x111 = (x23 * x55)
    x112 = (x111 * x9)
    x113 = (x96 + x97)
    x114 = (x112 + x113)
    x115 = (x41 * x55)
    x116 = (x115 * x9)
    x117 = (x2 * x42)
    x118 = ((- x103) + x117)
    x119 = (x64 * x9)
    x120 = ((2.0 * x112) + x98)
    x121 = ((((- 0.5) * x2) * x41) + (x84 * x9))
    x122 = (x111 + x77)
    x123 = (x27 * x64)
    x124 = (x11 + A[2])
    x125 = (- x124)
    x126 = (((- x125) * x22) + (x13 * x18))
    x127 = (x126 * x2)
    x128 = (- x127)
    x129 = (- x126)
    x130 = ((((- x125) * x17) * x21) + (x13 * x36))
    x131 = (- x130)
    x132 = (x131 * x14)
    x133 = (((- x129) * x20) + (x132 * x5))
    x134 = (x133 * x4)
    x135 = (x128 + (2.0 * x134))
    x136 = (x2 * (x126 + x132))
    x137 = ((((- x125) * x21) * x35) + (x13 * x51))
    x138 = (- x137)
    x139 = ((((- x133) * x34) + x136) - (x40 * ((x131 * x20) - (x138 * x83))))
    x140 = (- x139)
    x141 = (x133 * x2)
    x142 = (2.0 * x141)
    x143 = ((x140 * x4) + x142)
    x144 = (x128 + x134)
    x145 = (x144 + (x46 * x56))
    x146 = (x31 * x56)
    x147 = (x135 + x146)
    x148 = (((- x126) * x19) + (x130 * x52))
    x149 = ((x139 * x4) - ((2.0 * x148) * x2))
    x150 = (x57 * x9)
    x151 = (2.0 * x12)
    x152 = (x12 * x126)
    x153 = ((2.0 * x152) + x30)
    x154 = (x153 * x2)
    x155 = (- x154)
    x156 = (x12 * x133)
    x157 = ((2.0 * x156) + x96)
    x158 = (x155 + (x157 * x39))
    x159 = ((x146 * x151) + x158)
    x160 = (x12 * x140)
    x161 = (x157 * x2)
    x162 = (- x161)
    x163 = (x162 - (x54 * (x102 + (2.0 * x160))))
    x164 = (x41 * x56)
    x165 = (x23 * x56)
    x166 = (x133 + x165)
    x167 = (x26 * x64)
    x168 = (x12 * x165)
    x169 = (x157 + (2.0 * x168))
    x170 = (0.5 * x117)
    x171 = (- x160)
    x172 = ((x12 * x164) + x171)
    x173 = (x156 + x96)
    x174 = (x168 + x173)
    x175 = (x12 * x64)
    x176 = (2.0 * x55)
    x177 = (2.0 * x70)
    x178 = (x14 * x92)
    x179 = ((((- x177) * x68) + (x178 * x74)) + x33)
    x180 = (- x179)
    x181 = (x180 * x2)
    x182 = ((- x2) * x32)
    x183 = (2.0 * x69)
    x184 = ((- x2) * x49)
    x185 = (x10 * x14)
    x186 = (((- x20) * (((((- 2.0) * x10) * x76) + x182) + (x183 * x73))) + (x83 * (((x183 * x75) + x184) - ((2.0 * x185) * x82))))
    x187 = (x186 * x4)
    x188 = (((x176 * x89) + x181) + x187)
    x189 = (x4 * x58)
    x190 = (0.5 * x55)
    x191 = (x186 * x9)
    x192 = (x191 - x87)
    x193 = ((x180 * x9) + (2.0 * x71))
    x194 = (0.5 * x2)
    x195 = (((x190 * x99) + (x192 * x4)) + (x193 * x194))
    x196 = (0.5 * x181)
    x197 = ((x187 + x196) + (x55 * x79))
    x198 = (x192 * x9)
    x199 = (x176 * x9)
    x200 = (x113 * x199)
    x201 = (x55 * x98)
    x202 = ((x176 * x77) + x186)
    x203 = (x27 * x58)
    x204 = ((x10 * x132) - (x129 * x69))
    x205 = (((- x131) * x69) + (x138 * x185))
    x206 = ((x20 * x204) - (x205 * x83))
    x207 = (x2 * x204)
    x208 = ((x206 * x4) - x207)
    x209 = (x208 + (x56 * x89))
    x210 = (x4 * x57)
    x211 = (0. * x63)
    x212 = (x210 * x211)
    x213 = (x55 * x9)
    x214 = (0.5 * x56)
    x215 = (- x141)
    x216 = (x206 * x9)
    x217 = (x204 * x9)
    x218 = (x128 + (2.0 * x217))
    x219 = (x2 * x218)
    x220 = (((- 0.5) * x219) + (x4 * (x215 + (2.0 * x216))))
    x221 = ((x214 * x99) + x220)
    x222 = (x12 * x56)
    x223 = (- x86)
    x224 = (x12 * x206)
    x225 = (x223 + (2.0 * x224))
    x226 = (x12 * x204)
    x227 = ((2.0 * x226) + x72)
    x228 = (x2 * x227)
    x229 = ((x225 * x4) - (0.5 * x228))
    x230 = ((x222 * x79) + x229)
    x231 = (x215 + x216)
    x232 = ((x113 * x56) + x231)
    x233 = (x150 * x211)
    x234 = (((- 0.5) * x161) + (x225 * x9))
    x235 = ((x222 * x98) + x234)
    x236 = (x223 + x224)
    x237 = ((x108 * x222) + x236)
    x238 = (x110 * x211)
    x239 = (2.0 * x56)
    x240 = (2.0 * x126)
    x241 = (x14 * x151)
    x242 = ((((- x124) * x240) + (x130 * x241)) + x33)
    x243 = (- x242)
    x244 = (x2 * x243)
    x245 = (2.0 * x125)
    x246 = (2.0 * x13)
    x247 = (((x129 * x245) - (x132 * x246)) + x182)
    x248 = (((x131 * x245) - ((x138 * x14) * x246)) + x184)
    x249 = (((- x20) * x247) + (x248 * x83))
    x250 = (x249 * x4)
    x251 = (((x144 * x239) + x244) + x250)
    x252 = (0.5 * x244)
    x253 = (((x135 * x56) + x250) + x252)
    x254 = ((x12 * x249) - x142)
    x255 = ((x12 * x243) + (2.0 * x127))
    x256 = (x194 * x255)
    x257 = (((x158 * x214) + (x254 * x4)) + x256)
    x258 = ((x133 * x239) + x249)
    x259 = (x26 * x58)
    x260 = (x12 * x254)
    x261 = (2.0 * x222)
    x262 = (x173 * x261)
    x263 = (x0 * x70)
    x264 = (x0 * x94)
    x265 = (x30 + x93)
    x266 = (x58 * x9)
    x267 = ((x177 * x55) + x179)
    x268 = (x0 * x4)
    x269 = (x267 * x268)
    x270 = (x4 * x64)
    x271 = (x55 * x94)
    x272 = (x193 - x271)
    x273 = ((x193 * x9) + x95)
    x274 = (((- x199) * x265) + x273)
    x275 = ((x179 * x9) - ((2.0 * x2) * (((x21 * x29) * x68) - (x59 * x9))))
    x276 = ((- x271) - x275)
    x277 = (x56 * x70)
    x278 = (x204 + x277)
    x279 = (x56 * x94)
    x280 = (x218 + x279)
    x281 = (x12 * x277)
    x282 = (x227 + (2.0 * x281))
    x283 = (x128 + x217)
    x284 = ((x265 * x56) + x283)
    x285 = (x155 + (x227 * x92))
    x286 = ((x151 * x279) + x285)
    x287 = (x226 + x72)
    x288 = (x281 + x287)
    x289 = (x14 * x9)
    x290 = (((x180 * x68) + (x289 * (((x178 * x81) + x50) - ((2.0 * x68) * x74)))) + (2.0 * x80))
    x291 = (((x179 * x55) + (x267 * x55)) + x290)
    x292 = (x25 * x58)
    x293 = ((- x290) * x9)
    x294 = (0.5 * x9)
    x295 = (- x275)
    x296 = (x179 * x56)
    x297 = ((x136 + (x178 * x205)) - ((2.0 * x204) * x68))
    x298 = (x25 * x64)
    x299 = (- x297)
    x300 = (x299 * x9)
    x301 = (2.0 * x207)
    x302 = (x300 + x301)
    x303 = (x12 * x299)
    x304 = (- x303)
    x305 = ((x12 * x296) + x304)
    x306 = (- x228)
    x307 = ((x294 * ((x180 * x2) - (2.0 * x303))) + x306)
    x308 = ((x185 * x248) - (x247 * x69))
    x309 = ((x204 * x239) + x308)
    x310 = (x308 * x9)
    x311 = (((x218 * x56) + x252) + x310)
    x312 = (- x301)
    x313 = ((x12 * x308) + x312)
    x314 = (((x239 * x283) + x244) + x310)
    x315 = (((x214 * x285) + x256) + (x313 * x9))
    x316 = (x12 * x313)
    x317 = (x261 * x287)
    x318 = (x0 * x126)
    x319 = (x0 * x153)
    x320 = (x152 + x30)
    x321 = (x12 * x58)
    x322 = (x126 * x55)
    x323 = (x204 + x322)
    x324 = (x218 + (x322 * x92))
    x325 = (x153 * x55)
    x326 = (x227 + x325)
    x327 = (x283 + (x322 * x9))
    x328 = (x285 + (x325 * x92))
    x329 = (x287 + (x320 * x55))
    x330 = ((x240 * x56) + x242)
    x331 = (x268 * x330)
    x332 = (((- x153) * x56) + x255)
    x333 = (- x332)
    x334 = (- x330)
    x335 = ((x12 * x255) + x154)
    x336 = (((- x261) * x320) + x335)
    x337 = (x334 * x55)
    x338 = ((((x12 * x14) * (((((- 2.0) * x124) * x130) + (x137 * x241)) + x50)) + (x124 * x243)) + (2.0 * x136))
    x339 = (((x242 * x56) + (x330 * x56)) + x338)
    x340 = ((- x12) * x338)
    x341 = (((((1.5 * x2) * x243) - (x255 * x56)) - (x332 * x56)) - x340)
    # --- assemble the (component, i, j) entries of the result block ---
    result[(0, 0, 0)] = numpy.sum((x58 * ((((x0 * x45) + (x0 * (x45 - (x46 * x47)))) - (x44 * x48)) - (x54 * (((3.0 * x2) * x42) + (x39 * x53))))))
    result[(0, 0, 1)] = numpy.sum((x65 * x9))
    result[(0, 0, 2)] = numpy.sum((x12 * x65))
    result[(0, 0, 3)] = numpy.sum(((- x26) * x67))
    result[(0, 0, 4)] = numpy.sum(((((- x12) * x63) * x67) * x9))
    result[(0, 0, 5)] = numpy.sum(((- x27) * x67))
    result[(0, 1, 0)] = numpy.sum((x64 * ((((x2 * x79) + (x4 * x88)) + (x45 * x55)) - (x47 * x90))))
    result[(0, 1, 1)] = numpy.sum((x57 * ((((- x100) * x101) - x106) + ((x55 * x62) * x9))))
    result[(0, 1, 2)] = numpy.sum((x110 * ((((- x0) * x107) - x109) + (x55 * x62))))
    result[(0, 1, 3)] = numpy.sum(((- x119) * (((x114 * x66) + x116) + x118)))
    result[(0, 1, 4)] = numpy.sum(((- x110) * (((x0 * x120) + x116) + x121)))
    result[(0, 1, 5)] = numpy.sum(((- x123) * ((x115 + (x122 * x66)) + x84)))
    result[(0, 2, 0)] = numpy.sum((x64 * ((((x135 * x2) + (x143 * x4)) - (x145 * x47)) + (x45 * x56))))
    result[(0, 2, 1)] = numpy.sum((x150 * ((((- x0) * x147) - x149) + (x56 * x62))))
    result[(0, 2, 2)] = numpy.sum((x57 * ((((- x101) * x159) + ((x12 * x56) * x62)) - x163)))
    result[(0, 2, 3)] = numpy.sum(((- x167) * ((x139 + x164) + (x166 * x66))))
    result[(0, 2, 4)] = numpy.sum(((- x150) * (((x0 * x169) + x170) + x172)))
    result[(0, 2, 5)] = numpy.sum(((- x175) * ((x117 + x172) + (x174 * x66))))
    result[(0, 3, 0)] = numpy.sum(((- x189) * ((x176 * x90) + x188)))
    result[(0, 3, 1)] = numpy.sum(((- x64) * ((x100 * x190) + x195)))
    result[(0, 3, 2)] = numpy.sum(((- x175) * ((x107 * x55) + x197)))
    result[(0, 3, 3)] = numpy.sum(((- x58) * (((x105 + (x114 * x199)) + x198) + x200)))
    result[(0, 3, 4)] = numpy.sum(((- x175) * (((x120 * x55) + x192) + x201)))
    result[(0, 3, 5)] = numpy.sum(((- x203) * ((x122 * x176) + x202)))
    result[(0, 4, 0)] = numpy.sum(((- x212) * ((x145 * x55) + x209)))
    result[(0, 4, 1)] = numpy.sum(((- x57) * ((x147 * x213) + x221)))
    result[(0, 4, 2)] = numpy.sum(((- x57) * ((x159 * x190) + x230)))
    result[(0, 4, 3)] = numpy.sum(((- x233) * ((x166 * x213) + x232)))
    result[(0, 4, 4)] = numpy.sum(((- x57) * ((x169 * x213) + x235)))
    result[(0, 4, 5)] = numpy.sum(((- x238) * ((x174 * x55) + x237)))
    result[(0, 5, 0)] = numpy.sum(((- x189) * ((x145 * x239) + x251)))
    result[(0, 5, 1)] = numpy.sum(((- x119) * ((x147 * x56) + x253)))
    result[(0, 5, 2)] = numpy.sum(((- x64) * ((x159 * x214) + x257)))
    result[(0, 5, 3)] = numpy.sum(((- x259) * ((x166 * x239) + x258)))
    result[(0, 5, 4)] = numpy.sum(((- x119) * (((x157 * x56) + (x169 * x56)) + x254)))
    result[(0, 5, 5)] = numpy.sum(((- x58) * (((x162 + (x174 * x261)) + x260) + x262)))
    result[(1, 0, 0)] = numpy.sum((x58 * ((((x2 * x79) + (x4 * x88)) - (x47 * x89)) - (x47 * ((x263 * x4) + x89)))))
    result[(1, 0, 1)] = numpy.sum(((- x64) * (((x101 * x99) + (x101 * ((x264 * x39) + x99))) + x106)))
    result[(1, 0, 2)] = numpy.sum(((- x175) * (((x0 * x79) + (x0 * ((x263 * x39) + x79))) + x109)))
    result[(1, 0, 3)] = numpy.sum(((- x266) * (((x113 * x66) + x118) + (x66 * ((x0 * x265) + x113)))))
    result[(1, 0, 4)] = numpy.sum(((- x175) * (((x0 * x98) + (x0 * (x264 + x98))) + x121)))
    result[(1, 0, 5)] = numpy.sum(((- x203) * (((x108 * x66) + (x66 * (x263 + x77))) + x84)))
    result[(1, 1, 0)] = numpy.sum(((- x270) * (x188 + x269)))
    result[(1, 1, 1)] = numpy.sum((x57 * ((- x195) + (x268 * x272))))
    result[(1, 1, 2)] = numpy.sum(((- x110) * (x197 + x269)))
    result[(1, 1, 3)] = numpy.sum((x64 * ((((x0 * x274) + x104) - x198) - x200)))
    result[(1, 1, 4)] = numpy.sum((x110 * ((((x0 * x276) - x191) - x201) + x87)))
    result[(1, 1, 5)] = numpy.sum(((- x123) * ((x0 * x267) + x202)))
    result[(1, 2, 0)] = numpy.sum(((- x212) * (x209 + (x268 * x278))))
    result[(1, 2, 1)] = numpy.sum(((- x57) * (x221 + (x268 * x280))))
    result[(1, 2, 2)] = numpy.sum(((- x57) * (x230 + (x268 * x282))))
    result[(1, 2, 3)] = numpy.sum(((- x233) * ((x0 * x284) + x232)))
    result[(1, 2, 4)] = numpy.sum(((- x57) * ((x101 * x286) + x235)))
    result[(1, 2, 5)] = numpy.sum(((- x238) * ((x0 * x288) + x237)))
    result[(1, 3, 0)] = numpy.sum(((- x291) * x292))
    result[(1, 3, 1)] = numpy.sum(((0.5 * x270) * ((((((- 3.0) * x180) * x2) + ((2.0 * x193) * x55)) + ((2.0 * x272) * x55)) + (2.0 * x293))))
    result[(1, 3, 2)] = numpy.sum((((- x175) * x291) * x4))
    result[(1, 3, 3)] = numpy.sum(((- x58) * ((((x193 * x48) - (x273 * x55)) - (x274 * x55)) + (x294 * (((3.0 * x180) * x2) - (2.0 * x293))))))
    result[(1, 3, 4)] = numpy.sum((x175 * ((((x179 * x48) + (x276 * x55)) - (x290 * x9)) + (x295 * x55))))
    result[(1, 3, 5)] = numpy.sum(((- x203) * x291))
    result[(1, 4, 0)] = numpy.sum(((- x298) * (((x176 * x278) + x296) + x297)))
    result[(1, 4, 1)] = numpy.sum((x210 * (((x193 * x56) - (x280 * x55)) + x302)))
    result[(1, 4, 2)] = numpy.sum(((- x210) * ((x196 + (x282 * x55)) + x305)))
    result[(1, 4, 3)] = numpy.sum((x64 * (((((- x199) * x284) + x219) + (x273 * x56)) + (x302 * x9))))
    result[(1, 4, 4)] = numpy.sum((x57 * ((((x12 * x295) * x56) - (x190 * x286)) - x307)))
    result[(1, 4, 5)] = numpy.sum(((- x175) * (((x176 * x288) + x181) + x305)))
    result[(1, 5, 0)] = numpy.sum(((- x292) * ((x239 * x278) + x309)))
    result[(1, 5, 1)] = numpy.sum(((- x270) * ((x280 * x56) + x311)))
    result[(1, 5, 2)] = numpy.sum(((- x270) * (((x227 * x56) + (x282 * x56)) + x313)))
    result[(1, 5, 3)] = numpy.sum(((- x266) * ((x239 * x284) + x314)))
    result[(1, 5, 4)] = numpy.sum(((- x64) * ((x214 * x286) + x315)))
    result[(1, 5, 5)] = numpy.sum(((- x58) * ((((x261 * x288) + x306) + x316) + x317)))
    result[(2, 0, 0)] = numpy.sum((x58 * ((((x135 * x2) + (x143 * x4)) - (x144 * x47)) - (x47 * (x144 + (x318 * x4))))))
    result[(2, 0, 1)] = numpy.sum(((- x119) * (((x0 * x135) + (x0 * (x135 + (x318 * x39)))) + x149)))
    result[(2, 0, 2)] = numpy.sum(((- x64) * (((x101 * x158) + (x101 * (x158 + (x319 * x39)))) + x163)))
    result[(2, 0, 3)] = numpy.sum(((- x259) * ((x139 + (x148 * x66)) + (x66 * (x133 + x318)))))
    result[(2, 0, 4)] = numpy.sum(((- x119) * ((((x0 * x157) + (x0 * (x157 + x319))) + x170) + x171)))
    result[(2, 0, 5)] = numpy.sum(((- x321) * (((x117 + x171) + (x173 * x66)) + (x66 * ((x0 * x320) + x173)))))
    result[(2, 1, 0)] = numpy.sum(((- x212) * (((x144 * x55) + x208) + (x268 * x323))))
    result[(2, 1, 1)] = numpy.sum(((- x57) * (((x135 * x213) + x220) + (x268 * x324))))
    result[(2, 1, 2)] = numpy.sum(((- x57) * (((x158 * x190) + x229) + (x268 * x326))))
    result[(2, 1, 3)] = numpy.sum(((- x233) * (((x0 * x327) + (x148 * x213)) + x231)))
    result[(2, 1, 4)] = numpy.sum(((- x57) * (((x101 * x328) + (x213 * ((x148 * x151) - (x2 * x60)))) + x234)))
    result[(2, 1, 5)] = numpy.sum(((- x238) * (((x0 * x329) + (x173 * x55)) + x236)))
    result[(2, 2, 0)] = numpy.sum(((- x270) * (x251 + x331)))
    result[(2, 2, 1)] = numpy.sum(((- x150) * (x253 + x331)))
    result[(2, 2, 2)] = numpy.sum(((- x57) * (x257 + (x268 * x333))))
    result[(2, 2, 3)] = numpy.sum((x167 * ((x0 * x334) - x258)))
    result[(2, 2, 4)] = numpy.sum((x150 * ((((x0 * x332) - (x12 * x249)) + x142) - (x157 * x56))))
    result[(2, 2, 5)] = numpy.sum((x64 * ((((x0 * x336) + x161) - x260) - x262)))
    result[(2, 3, 0)] = numpy.sum(((- x292) * (((x176 * x323) - (x176 * ((x126 * x68) - (x130 * x289)))) + x297)))
    result[(2, 3, 1)] = numpy.sum((x270 * (((((- x218) * x55) + x300) - x312) - (x324 * x55))))
    result[(2, 3, 2)] = numpy.sum(((- x270) * (((x196 + (x227 * x55)) + x304) + (x326 * x55))))
    result[(2, 3, 3)] = numpy.sum((x58 * (((((- x199) * x283) - (x199 * x327)) + (x2 * x218)) + (x302 * x9))))
    result[(2, 3, 4)] = numpy.sum(((- x64) * (((x190 * x285) + (x190 * x328)) + x307)))
    result[(2, 3, 5)] = numpy.sum(((- x321) * ((((x176 * x287) + (x176 * x329)) + x181) + x304)))
    result[(2, 4, 0)] = numpy.sum((x298 * ((- x309) + x337)))
    result[(2, 4, 1)] = numpy.sum((x210 * ((- x311) + (x337 * x9))))
    result[(2, 4, 2)] = numpy.sum((x210 * (((((- x12) * x308) - (x227 * x56)) + x301) + (x332 * x55))))
    result[(2, 4, 3)] = numpy.sum(((- x119) * ((x213 * x330) + x314)))
    result[(2, 4, 4)] = numpy.sum(((- x57) * ((x213 * x333) + x315)))
    result[(2, 4, 5)] = numpy.sum((x64 * (((x228 - x316) - x317) + (x336 * x55))))
    result[(2, 5, 0)] = numpy.sum(((- x292) * x339))
    result[(2, 5, 1)] = numpy.sum((((- x119) * x339) * x4))
    result[(2, 5, 2)] = numpy.sum(((- x270) * x341))
    result[(2, 5, 3)] = numpy.sum(((- x259) * x339))
    result[(2, 5, 4)] = numpy.sum(((- x119) * x341))
    result[(2, 5, 5)] = numpy.sum((((- 0.5) * x58) * ((((x12 * (((3.0 * x2) * x243) - (2.0 * x340))) + ((2.0 * x255) * x48)) - ((2.0 * x335) * x56)) - ((2.0 * x336) * x56))))
    return result
class FormCommand(Command):
    """Command flavour whose input validation is delegated to a Django-style
    form class exposed as ``self.Form``."""

    def _setup(self):
        # Instantiate the configured form with the raw input payload.
        self.form = self._form_class()(self.input_data)

    def _form_class(self):
        # Hook point: subclasses may override to select the form dynamically.
        return self.Form

    def _check_if_valid(self):
        return self._check_if_form_valid()

    def _check_if_form_valid(self):
        is_valid = self.form.is_valid()
        self._append_form_errors()
        self.cleaned_data = self.form.cleaned_data
        return is_valid

    def _append_form_errors(self):
        # Copy every form validation message onto the command's error list.
        if not self.form.errors:
            return
        for field_name, messages in self.form.errors.items():
            for message in messages:
                self.add_error(field_name, message)
@api_view(['POST'])
@permission_classes([AllowAny])
def run2(request):
    """Proxy a model-run request to the ML backend service.

    Builds the backend URL as ``<ML_ROOT_URL>run/<username>_<model_name>``
    from the POST form data, forwards the uploaded ``file`` and the
    ``columns`` field, and relays the backend's body and status code.

    NOTE(review): the decorators had been mangled to the undefined names
    ``_view`` / ``_classes``; restored as DRF's ``@api_view`` /
    ``@permission_classes``.  Missing ``username``/``model_name`` POST
    fields would raise a TypeError here — presumably guaranteed by the
    caller; verify.
    """
    url = ((((settings.ML_ROOT_URL + 'run/') + request.POST.get('username')) + '_') + request.POST.get('model_name'))
    file = request.FILES['file']
    response = requests.post(url, files={'file': file}, data={'columns': request.data['columns']})
    return Response(response.text, status=response.status_code)
class _IdentityGraph(DiGraph):
    """Directed graph whose edges point from a defined variable to the values
    it is directly assigned from; weakly connected components of size > 1
    are candidate identity groups (sets of variables holding the same value).

    Fix (review): ``_is_required_variable_for_assignment``,
    ``_not_all_variables_have_same_name`` and ``_is_aliased_variable_in``
    take no ``self`` yet are invoked via ``self.<name>(...)`` — their
    stripped ``@staticmethod`` decorators have been restored, otherwise each
    call would pass ``self`` as the first positional argument and fail.
    """

    def __init__(self, function_parameters: List[Variable]):
        super().__init__()
        # Names of the function's formal parameters; such variables count as
        # defining values for a group.
        self.function_parameters: Set[str] = {arg.name for arg in function_parameters}
        # Maps a defined variable to the set of variables it must NOT be
        # identified with (requirements of a non-copy assignment).
        self.no_identity_of: Dict[Variable, Set[Variable]] = dict()

    def add_assignment(self, assignment: Assignment, basic_block: BasicBlock) -> None:
        """Add the assignment's destination as a node and edges to the values
        it directly copies; globals and non-variable destinations are ignored."""
        if (not isinstance((defined_value := assignment.destination), Variable)) or isinstance(
            defined_value, GlobalVariable
        ):
            return
        required_values = self._get_variables_utilized_for_direct_assignment(assignment)
        self.add_node(defined_value, definition=assignment, block=basic_block, is_phi=isinstance(assignment, Phi))
        for required_value in required_values:
            if isinstance(required_value, UnknownExpression):
                continue
            self.add_edge(defined_value, required_value)

    def initialize_no_identity_of(self, assignment: Assignment):
        """Record which variables each definition must not be identified with.

        Pure copies and phi-functions impose no restriction; any other
        assignment forbids identity with its requirements.
        """
        if isinstance(assignment.value, Variable) or isinstance(assignment, Phi):
            no_identity_of_def = set()
        else:
            no_identity_of_def = set(assignment.requirements)
        for definition in assignment.definitions:
            self.no_identity_of[definition] = no_identity_of_def

    def prune_non_identity_phi_functions(self, consider_undefined_variables: bool = False):
        """Disconnect phi-functions that can reach more than one defining value.

        Uses a temporary sentinel node ``'leaf'`` attached to all defining
        (out-degree-zero) nodes of a component; a phi with two node-disjoint
        paths to the sentinel merges distinct values and is cut loose.
        """
        for weakly_cc in list(weakly_connected_components(self)):
            phi_nodes, out_degree_zero_nodes = self._parse_weakly_connected_component(
                weakly_cc, consider_undefined_variables
            )
            if len(out_degree_zero_nodes) <= 1:
                continue
            for leaf in out_degree_zero_nodes:
                self.add_edge(leaf, 'leaf')
            for phi in phi_nodes:
                try:
                    if (self.out_degree(phi) <= 1) or (
                        len(list(node_disjoint_paths(self, phi, 'leaf', cutoff=2))) <= 1
                    ):
                        continue
                except NetworkXNoPath:
                    continue
                for node in list(self.successors(phi)):
                    self.remove_edge(phi, node)
                self.add_edge(phi, 'leaf')
            self.remove_node('leaf')

    def _parse_weakly_connected_component(
        self, connected_component: Set[Variable], consider_undefined_variables: bool = False
    ) -> Tuple[List[Variable], List[Union[Variable, Constant]]]:
        """Split a component into its phi-function nodes and its defining
        (out-degree-zero) nodes."""
        has_conflict = consider_undefined_variables and self._has_conflict(connected_component)
        phi_nodes: List[Variable] = list()
        out_degree_zero_nodes: List[Union[Variable, Constant]] = list()
        for node in connected_component:
            if self.nodes[node].get('is_phi', False):
                phi_nodes.append(node)
            if (self.out_degree(node) == 0) and (has_conflict or self._is_defining_value(node)):
                out_degree_zero_nodes.append(node)
        return phi_nodes, out_degree_zero_nodes

    def _has_conflict(self, connected_component: Set[Variable]) -> bool:
        """True if some variable of the component forbids identity with
        another member of the same component."""
        for variable in [var for var in connected_component if var in self.no_identity_of]:
            if self.no_identity_of[variable] & connected_component:
                return True
        return False

    def _is_defining_value(self, expression: Union[Variable, Constant]) -> bool:
        """True if the node can act as the group's defining value (constant,
        a variable with a recorded definition, or a function parameter)."""
        return (not isinstance(expression, UnknownExpression)) and (
            isinstance(expression, Constant)
            or (expression in self.no_identity_of)
            or (expression.name in self.function_parameters)
        )

    def yield_identities(self) -> Iterator[Set[Variable]]:
        """Prune non-identity phis (twice: strict, then conflict-aware) and
        yield each remaining component with more than one member."""
        self.prune_non_identity_phi_functions(False)
        self.prune_non_identity_phi_functions(True)
        for identity_candidates in weakly_connected_components(self):
            if len(identity_candidates) > 1:
                yield identity_candidates

    def _get_variables_utilized_for_direct_assignment(
        self, assignment: Assignment
    ) -> Set[Union[Constant, Variable]]:
        """Values the destination directly takes its content from (phi
        operands, or the single copied variable)."""
        defined_variable: Variable = assignment.destination
        required_variables = set()
        if isinstance(assignment, Phi):
            required_variables = self._required_variables_for_phi_function(assignment, defined_variable)
        elif isinstance((required_variable := assignment.value), Variable) and self._is_required_variable_for_assignment(
            required_variable, defined_variable
        ):
            required_variables = {required_variable}
        return required_variables

    def _required_variables_for_phi_function(
        self, phi_function: Phi, defined_variable: Variable
    ) -> Set[Union[Constant, Variable]]:
        """Phi operands eligible as identity edges; aliased definitions must
        only merge with same-named, equally aliased values."""
        if (defined_variable.is_aliased is True) and self._not_all_variables_have_same_name(phi_function):
            return set()
        if (defined_variable.is_aliased is False) and self._is_aliased_variable_in(phi_function.value):
            return set()
        return set(phi_function.value)

    @staticmethod
    def _is_required_variable_for_assignment(required_variable: Variable, defined_variable: Variable) -> bool:
        """A copy is identity-relevant when neither side is aliased, or both
        share the same name."""
        return ((defined_variable.is_aliased is False) and (required_variable.is_aliased is False)) or (
            required_variable.name == defined_variable.name
        )

    @staticmethod
    def _not_all_variables_have_same_name(assignment: Assignment) -> bool:
        """True if some phi operand is a constant or named differently from
        the destination."""
        defined_variable = assignment.destination
        required_values = assignment.value
        return any(
            (
                (not isinstance(required_value, UnknownExpression))
                and (isinstance(required_value, Constant) or (required_value.name != defined_variable.name))
            )
            for required_value in required_values
        )

    @staticmethod
    def _is_aliased_variable_in(required_values: List[Union[Constant, Variable]]) -> bool:
        """True if any operand is aliased (or unknown)."""
        return not all(
            (
                (not isinstance(required_variable, UnknownExpression))
                and (isinstance(required_variable, Constant) or (required_variable.is_aliased is False))
            )
            for required_variable in required_values
        )

    def find_replacement_variable_of_group(self, identity_group: Set[Variable]) -> Variable:
        """Pick the single defining (out-degree-zero) member of an identity
        group; raise ValueError when there are several or none."""
        replacement_variable = None
        optional_variable = None
        for variable in identity_group:
            if self.out_degree(variable):
                continue
            if not self._is_defining_value(variable):
                # Undefined value: only usable when no proper defining value exists.
                optional_variable = variable
                continue
            if replacement_variable is None:
                replacement_variable = variable
            else:
                message = f'At least two variables in the identity group {identity_group} have out degree zero, namely {replacement_variable} and {variable}, i.e., these set of vertices is not an identity group'
                error(message)
                raise ValueError(message)
        if replacement_variable:
            return replacement_variable
        elif optional_variable:
            return optional_variable
        else:
            # (review) fixed message typo: 'initialdefinition' -> 'initial definition'
            message = f'No variable in the identity group {identity_group} has out degree zero, i.e., these set of vertices has no initial definition.'
            error(message)
            raise ValueError(message)
def create_test_firmware(device_class='Router', device_name='test_router', vendor='test_vendor', bin_path='container/test.zip', all_files_included_set=False, version='0.1'):
    """Build a ``Firmware`` fixture backed by a binary from the test-data
    directory, pre-populated with dummy analysis results.

    When ``all_files_included_set`` is true, ``list_of_all_included_files``
    is filled with the included files plus the firmware's own UID.
    """
    firmware = Firmware(file_path=os.path.join(get_test_data_dir(), bin_path))
    firmware.device_class = device_class
    firmware.device_name = device_name
    firmware.vendor = vendor
    firmware.tags = {'test_tag': TagColor.GRAY}
    firmware.release_date = '1970-01-01'
    firmware.version = version
    # Canned results for three analysis plugins.
    firmware.processed_analysis.update({
        'dummy': {'summary': ['sum a', 'fw exclusive sum a'], 'result': {'content': 'abcd'}, 'plugin_version': '0', 'analysis_date': 0.0},
        'unpacker': {'result': {'plugin_used': 'used_unpack_plugin'}, 'plugin_version': '1.0', 'analysis_date': 0.0},
        'file_type': {'result': {'mime': 'test_type', 'full': 'Not a PE file'}, 'summary': ['a summary'], 'plugin_version': '1.0', 'analysis_date': 0.0},
    })
    if all_files_included_set:
        firmware.list_of_all_included_files = [*firmware.files_included, firmware.uid]
    return firmware
class TestCommandHandler(AsyncTestCase): def setUp(self) -> None: super().setUp() self.mock_options = {} self._mocks = MockService(self.mock_options, loop=self._loop) self.stats_mgr = Mock() self.cmd_handler = CommandHandler(self._mocks) def tearDown(self) -> None: self._mocks.tearDown() super().tearDown() def mock_device(self, name: str, console: str='', command_prompts: typing.Optional[typing.Dict[(str, str)]]=None) -> Mock: return Mock(hostname=name, console=console, command_prompts=command_prompts, pre_setup_commands=[], clear_command=None) _test async def test_run_success(self) -> None: device = self.mock_device('test-dev-1') result = (await self.cmd_handler.run('show version\n', device, 5, 5, client_ip, client_port, uuid)) self.assertEqual(result.status, 'success') self.assertEqual(result.output, '$ show version\nMock response for show version') _test async def test_run_no_device(self) -> None: device = self.mock_device('test-dev-100') with self.assertRaises(ttypes.SessionException) as exc: (await self.cmd_handler.run('show version\n', device, 5, 5, client_ip, client_port, uuid)) self.assertEqual(exc.exception.code, ttypes.FcrErrorCode.LOOKUP_ERROR) self.assertIn("'Device not found', 'test-dev-100'", exc.exception.message) _test async def test_run_connect_timeout(self) -> None: device = self.mock_device('test-dev-2') self.mock_options['connect_drop'] = True with self.assertRaises(ttypes.SessionException) as exc: (await self.cmd_handler.run('show version\n', device, 0, 0, client_ip, client_port, uuid)) self.assertEqual(exc.exception.code, ttypes.FcrErrorCode.CONNECTION_TIMEOUT_ERROR) self.assertIn('Timeout during connection setup', exc.exception.message) _test async def test_run_command_timeout(self) -> None: device = self.mock_device('test-dev-2') with self.assertRaises(ttypes.SessionException) as exc: (await self.cmd_handler.run('command timeout\n', device, 0, 0, client_ip, client_port, uuid)) self.assertEqual(exc.exception.code, 
ttypes.FcrErrorCode.CONNECTION_TIMEOUT_ERROR) self.assertIn('Timeout during connection setup', exc.exception.message) _test async def test_run_success_user_prompt(self) -> None: command_prompts = {'user prompt test': '<<<User Magic Prompt>>>'} device = self.mock_device('test-dev-1', command_prompts=command_prompts) result = (await self.cmd_handler.run('show version\n', device, 5, 5, client_ip, client_port, uuid)) self.assertEqual(result.status, 'success') self.assertEqual(result.output, '$ show version\nMock response for show version') result = (await self.cmd_handler.run('user prompt test\n', device, 5, 5, client_ip, client_port, uuid)) self.assertEqual(result.status, 'success') self.assertEqual(result.output, '<<<User Magic Prompt>>> user prompt test\nTest for user prompts') _test async def test_run_success_user_prompt_failed(self) -> None: command_prompts = {'user prompt test': '<<<XX User Magic Prompt>>>'} device = self.mock_device('test-dev-1', command_prompts=command_prompts) result = (await self.cmd_handler.run('show version\n', device, 5, 5, client_ip, client_port, uuid)) self.assertEqual(result.status, 'success') self.assertEqual(result.output, '$ show version\nMock response for show version') with self.assertRaises(ttypes.SessionException) as exc: result = (await self.cmd_handler.run('user prompt test\n', device, 1, 1, client_ip, client_port, uuid)) self.assertEqual(exc.exception.code, ttypes.FcrErrorCode.COMMAND_EXECUTION_TIMEOUT_ERROR) self.assertIn("'Command Response Timeout', b'user prompt test\\nTest for user prompts\\n<<<User Magic Prompt>>>'", exc.exception.message) self.assertIn('Failed (session: MockCommandSession, peer: (test-ip, True, 22))', exc.exception.message) _test async def test_open_session(self) -> None: device = self.mock_device('test-dev-1') session = (await self.cmd_handler.open_session(device, 5, 5, client_ip, client_port, uuid)) self.assertIsNotNone(session) self.assertEqual(session.name, device.hostname) 
# NOTE(review): this span is the interior of a CommandHandler test class whose
# `class` header (and the start of the first method) lies above this chunk.
# The bare `_test` markers in the original stream were almost certainly
# `@_test` decorators whose '@' was lost in extraction — restored below.
self.assertEqual(session.hostname, device.hostname)  # tail of a method defined above this chunk

@_test
async def test_open_session_no_device(self) -> None:
    """open_session on an unknown device raises a LOOKUP_ERROR SessionException."""
    device = self.mock_device('test-dev-10')
    with self.assertRaises(ttypes.SessionException) as exc:
        (await self.cmd_handler.open_session(device, 0.01, 0.01, client_ip, client_port, uuid))
    self.assertEqual(exc.exception.code, ttypes.FcrErrorCode.LOOKUP_ERROR)
    self.assertIn('open_session failed', exc.exception.message)
    self.assertIn('Device not found', exc.exception.message)
    self.assertIn('test-dev-10', exc.exception.message)

@_test
async def test_open_session_timeout(self) -> None:
    """A dropped connection surfaces as CONNECTION_TIMEOUT_ERROR."""
    device = self.mock_device('test-dev-2')
    self.mock_options['connect_drop'] = True
    with self.assertRaises(ttypes.SessionException) as exc:
        (await self.cmd_handler.open_session(device, 0.01, 0.01, client_ip, client_port, uuid))
    self.assertEqual(exc.exception.code, ttypes.FcrErrorCode.CONNECTION_TIMEOUT_ERROR)
    self.assertIn('open_session failed', exc.exception.message)
    self.assertIn('Timeout during connection setup', exc.exception.message)

@_test
async def test_run_session(self) -> None:
    """Happy path: open a session and run one command."""
    device = self.mock_device('test-dev-1')
    session = (await self.cmd_handler.open_session(device, 5, 5, client_ip, client_port, uuid))
    result = (await self.cmd_handler.run_session(session, 'show version\n', 5, client_ip, client_port, uuid))
    self.assertEqual(result.status, 'success')
    self.assertEqual(result.output, '$ show version\nMock response for show version')

@_test
async def test_run_session_invalid(self) -> None:
    """run_session with an unknown session id raises LOOKUP_ERROR."""
    session = Mock(id=1234)
    with self.assertRaises(ttypes.SessionException) as exc:
        (await self.cmd_handler.run_session(session, 'show version\n', 5, client_ip, client_port, uuid))
    self.assertEqual(exc.exception.code, ttypes.FcrErrorCode.LOOKUP_ERROR)
    self.assertIn('run_session failed', exc.exception.message)
    self.assertIn("'Session not found', (1234, '127.0.0.1', 5000)", exc.exception.message)

@_test
async def test_run_session_command_timeout(self) -> None:
    """A command that never completes raises COMMAND_EXECUTION_TIMEOUT_ERROR."""
    device = self.mock_device('test-dev-1')
    session = (await self.cmd_handler.open_session(device, 5, 5, client_ip, client_port, uuid))
    with self.assertRaises(ttypes.SessionException) as exc:
        (await self.cmd_handler.run_session(session, 'command timeout\n', 1, client_ip, client_port, uuid))
    self.assertEqual(exc.exception.code, ttypes.FcrErrorCode.COMMAND_EXECUTION_TIMEOUT_ERROR)
    self.assertIn('run_session failed', exc.exception.message)
    self.assertIn('Command Response Timeout', exc.exception.message)
    self.assertIn('command timeout\\nMock response for command timeout', exc.exception.message)

@_test
async def test_close_session(self) -> None:
    """Closing a freshly opened session succeeds without error."""
    device = self.mock_device('test-dev-1')
    session = (await self.cmd_handler.open_session(device, 5, 5, client_ip, client_port, uuid))
    (await self.cmd_handler.close_session(session, client_ip, client_port, uuid))

@_test
async def test_close_session_invalid(self) -> None:
    """close_session with an unknown session id raises LOOKUP_ERROR."""
    session = Mock(id=1234)
    with self.assertRaises(ttypes.SessionException) as exc:
        (await self.cmd_handler.close_session(session, client_ip, client_port, uuid))
    self.assertEqual(exc.exception.code, ttypes.FcrErrorCode.LOOKUP_ERROR)
    self.assertIn('close_session failed', exc.exception.message)
    self.assertIn("'Session not found', (1234, '127.0.0.1', 5000)", exc.exception.message)

@_test
async def test_bulk_run_local(self) -> None:
    """bulk_run_local succeeds for a batch of valid devices."""
    devices = [('test-dev-%d' % i) for i in range(1, 5)]
    commands = {self.mock_device(name): ['show version\n'] for name in devices}
    all_results = (await self.cmd_handler.bulk_run_local(commands, 1, 1, client_ip, client_port, uuid))
    for host in devices:
        for result in all_results[host]:
            self.assert_command_result(result)

@_test
async def test_bulk_run_local_with_invalid_devices(self) -> None:
    """One unknown device ('test-dev-0') fails with LOOKUP_ERROR; others succeed."""
    devices = [('test-dev-%d' % i) for i in range(0, 5)]
    commands = {self.mock_device(name): ['show version\n'] for name in devices}
    all_results = (await self.cmd_handler.bulk_run_local(commands, 1, 1, client_ip, client_port, uuid))
    for host in devices:
        if (host == 'test-dev-0'):
            result = all_results[host][0]
            self.assertIn(('code=%s' % ttypes.FcrErrorCode.LOOKUP_ERROR), result.status)
            self.assertIn("'Device not found', 'test-dev-0'", result.status)
            continue
        for result in all_results[host]:
            self.assert_command_result(result)
    # NOTE(review): this assignment sat between methods in the original stream;
    # placement inside this method is a best guess — confirm against upstream.
    Option.config.lb_threshold = 20

@_test
async def test_bulk_run_local_with_command_timeout(self) -> None:
    """A timing-out command on one host does not break results for the rest."""
    devices = [('test-dev-%d' % i) for i in range(0, 5)]
    commands = {self.mock_device(name): ['show version\n'] for name in devices}
    onehost = next(iter(commands))
    commands[onehost] = ['command timeout\n']
    all_results = (await self.cmd_handler.bulk_run_local(commands, 1, 1, client_ip, client_port, uuid))
    for host in devices:
        if (host == 'test-dev-0'):
            result = all_results[host][0]
            self.assertIn(('code=%s' % ttypes.FcrErrorCode.LOOKUP_ERROR), result.status)
            self.assertIn("'Device not found', 'test-dev-0'", result.status)
            continue
        for result in all_results[host]:
            self.assert_command_result(result)

@_test
async def test_bulk_run_local_with_connect_timeout(self) -> None:
    """With connections dropped, every reachable host reports a connect timeout."""
    devices = [('test-dev-%d' % i) for i in range(0, 2)]
    commands = {self.mock_device(name): ['show version\n'] for name in devices}
    onehost = next(iter(commands))
    commands[onehost] = ['command timeout\n']
    self.mock_options['connect_drop'] = True
    all_results = (await self.cmd_handler.bulk_run_local(commands, 1, 1, client_ip, client_port, uuid))
    for host in devices:
        if (host == 'test-dev-0'):
            result = all_results[host][0]
            self.assertIn(('code=%s' % ttypes.FcrErrorCode.LOOKUP_ERROR), result.status)
            self.assertIn("'Device not found', 'test-dev-0'", result.status)
            continue
        for result in all_results[host]:
            self.assertIn(('code=%s' % ttypes.FcrErrorCode.CONNECTION_TIMEOUT_ERROR), result.status)

@_test
async def test_bulk_run_local_overload(self) -> None:
    """Exceeding bulk_session_limit raises InstanceOverloaded."""
    devices = [('test-dev-%d' % i) for i in range(1, 5)]
    commands = {self.mock_device(name): ['show version\n'] for name in devices}
    Option.config.bulk_session_limit = 4
    CommandHandler._bulk_session_count = 4
    with self.assertRaises(ttypes.InstanceOverloaded) as exc:
        (await self.cmd_handler.bulk_run_local(commands, 1, 1, client_ip, client_port, uuid))
    self.assertIn('Too many session open: 4', exc.exception.message)

@_test
async def test_bulk_run_load_balance(self) -> None:
    """Above lb_threshold, bulk_run fans the commands out in remote chunks."""
    Option.config.lb_threshold = 2
    device_names = {('test-dev-%d' % i) for i in range(0, 10)}
    commands = {self.mock_device(name): ['show version\n'] for name in device_names}
    command_chunks = []

    async def _bulk_run_remote(chunk, *args):
        # Record each chunk so the test can assert on the chunking behaviour.
        command_chunks.append(chunk)
        return {dev: ('%s: Success' % dev.hostname) for dev in chunk.keys()}

    self.cmd_handler._bulk_run_remote = _bulk_run_remote
    all_results = (await self.cmd_handler.bulk_run(commands, 10, 10, client_ip, client_port, uuid))
    self.assertEqual(len(command_chunks), 5, 'Commands are run in chunks')
    devices = set(commands.keys())
    res_devices = set(all_results.keys())
    self.assertEqual(res_devices, devices, 'Responses are received for all devices')
    for (dev, resp) in all_results.items():
        self.assertEqual(resp, ('%s: Success' % dev.hostname), 'Correct response is received')

@_test
async def test_bulk_run_below_threshold(self) -> None:
    """Below lb_threshold, bulk_run executes everything locally, in one batch."""
    Option.config.lb_threshold = 20
    device_names = {('test-dev-%d' % i) for i in range(0, 10)}
    commands = {self.mock_device(name): ['show version\n'] for name in device_names}
    command_chunks = []
    local_commands = []

    async def _bulk_run_remote(chunk, *args):
        command_chunks.append(chunk)
        return {dev: ('%s: Success' % dev.hostname) for dev in chunk.keys()}

    async def _bulk_run_local(chunk, *args):
        local_commands.append(chunk)
        return {dev: ('%s: Success' % dev.hostname) for dev in chunk.keys()}

    self.cmd_handler._bulk_run_remote = _bulk_run_remote
    self.cmd_handler._bulk_run_local = _bulk_run_local
    all_results = (await self.cmd_handler.bulk_run(commands, 10, 10, client_ip, client_port, uuid))
    self.assertEqual(len(command_chunks), 0, 'Commands are not run in chunks')
    self.assertEqual(len(local_commands), 1, 'Commands are run locally')
    self.assertEqual(len(local_commands[0]), 10, 'Commands are run locally')
    devices = set(commands.keys())
    res_devices = set(all_results.keys())
    self.assertEqual(res_devices, devices, 'Responses are received for all devices')
    for (dev, resp) in all_results.items():
        self.assertEqual(resp, ('%s: Success' % dev.hostname), 'Correct response is received')

def assert_command_result(self, result: ttypes.CommandResult) -> None:
    """Shared assertion helper: validate one CommandResult by its command."""
    if (result.command == 'show version\n'):
        self.assertEqual(result.status, 'success')
        self.assertEqual(result.output, '$ show version\nMock response for show version')
    elif (result.command == 'command timeout\n'):
        # BUGFIX: original template used '{2}' with only two .format() args,
        # which raises IndexError; '{1}' is the second argument (the command),
        # matching the message asserted in test_run_session_command_timeout.
        status_fmt = "'{0}', b'{1}\\nMock response for {1}'"
        self.assertIn(status_fmt.format('Command Response Timeout', 'command timeout'), result.status)
        self.assertIn('Failed (session: MockCommandSession, peer: (test-ip 22))', result.status)
        self.assertIn(('code=%s' % ttypes.FcrErrorCode.COMMAND_EXECUTION_TIMEOUT_ERROR), result.status)
    else:
        self.fail(('unexpected result: %r' % result))
def apply_info_from_latest_apk(apps, apks):
    """Fill in per-app metadata derived from each app's newest apk.

    For every (appid, app) in *apps*, scan *apks* for entries whose
    'packageName' matches and pick the one with the highest 'versionCode'.
    Sets app['icon'] (None when no package exists) and, when unset,
    app['CurrentVersionCode'].  Logs debug notes for missing 'added' /
    'lastUpdated' timestamps.

    :param apps: mapping of appid -> app record (dict-like, mutated in place)
    :param apks: iterable of apk dicts with 'packageName' and 'versionCode'
    """
    for appid, app in apps.items():
        bestver = UNSET_VERSION_CODE
        bestapk = None  # defensive: only dereferenced when bestver was raised
        for apk in apks:
            if apk['packageName'] == appid and apk['versionCode'] > bestver:
                bestver = apk['versionCode']
                bestapk = apk

        if not app['added']:
            logging.debug("Don't know when " + appid + ' was added')
        if not app['lastUpdated']:
            logging.debug("Don't know when " + appid + ' was last updated')

        if bestver == UNSET_VERSION_CODE:
            # No packages at all for this app.
            app['icon'] = None
            logging.debug('Application ' + appid + ' has no packages')
        else:
            # BUGFIX/consistency: was `app.icon = ...` while every other access
            # in this function uses item syntax; use item access so plain dicts
            # work too.  .get() replicates the "if 'icon' in bestapk else None".
            app['icon'] = bestapk.get('icon')
            if app.get('CurrentVersionCode') is None:
                app['CurrentVersionCode'] = bestver
# NOTE(review): the original line began with a bare `.parametrize(...)` —
# a stripped `@pytest.mark.parametrize` decorator (invalid syntax as-is);
# restored below.  The file is a pytest suite, so pytest is already a dep.
@pytest.mark.parametrize('pattern,input,match', [('a*c', 'abc', True), ('a*c', 'abcd', False), ('a*c*', 'abcd', True), ('a.c', 'abc', False), ('a?c', 'abc', False)])
def test_starmatch_to_regex(pattern, input, match):
    """Star-glob patterns translate to regexes with the expected matches.

    '*' is the only wildcard; '.', '?' and other regex metacharacters must
    be treated literally (hence the False expectations for 'a.c' / 'a?c').
    """
    re_pattern = starmatch_to_regex(pattern)
    assert (bool(re_pattern.match(input)) is match), re_pattern.pattern
# NOTE(review): bare `.parametrize(...)` was a stripped `@pytest.mark.parametrize`
# decorator (invalid syntax as-is); restored below.
@pytest.mark.parametrize('typ,_131,_132', [(str, '131', '132'), (int, 131, 132), (float, 131.0, 132.0)])
def test_aliases_grib_paramid_mutiple_true(typ, _131, _132):
    """normalize(..., aliases=..., multiple=True) resolves aliases to lists.

    Scalars, strings, tuples and lists must all normalize to a list of the
    target type; aliases ('u', 'v') resolve to 131/132.  (Function name keeps
    the historical 'mutiple' typo so pytest test IDs stay stable.)
    """
    aliases_grib_paramid = normalize('x', type=typ, aliases={'u': typ(131), 'v': typ(132)}, multiple=True)(func_x)
    assert (aliases_grib_paramid('u') == [_131])
    assert (aliases_grib_paramid(131) == [_131])
    assert (aliases_grib_paramid('131') == [_131])
    assert (aliases_grib_paramid(('131',)) == [_131])
    assert (aliases_grib_paramid(['131']) == [_131])
    assert (aliases_grib_paramid(['131', 'v']) == [_131, _132])
    assert (aliases_grib_paramid([131, 'v']) == [_131, _132])
    assert (aliases_grib_paramid(['u', 'v']) == [_131, _132])
    assert (aliases_grib_paramid(('u', 'v')) == [_131, _132])
    # Empty inputs normalize to an empty list, not an error.
    assert (aliases_grib_paramid([]) == [])
    assert (aliases_grib_paramid(tuple([])) == [])
class SpotifyPersonalisation(SpotifyBase):
    """Spotify personalisation endpoints: a user's top artists and tracks."""

    # NOTE(review): the three bare expressions before each method look like
    # decorator remnants whose names/'@' were lost in extraction (presumably
    # a scope-requirement decorator, a response-processing decorator and a
    # max-limit decorator, tekore-style) — confirm against upstream source.
    ([scope.user_top_read])
    _and_process(single(FullArtistOffsetPaging))
    _limit(50)
    def current_user_top_artists(self, time_range: str='medium_term', limit: int=20, offset: int=0) -> FullArtistOffsetPaging:
        # GET me/top/artists; time_range is one of the API's period presets
        # (the default 'medium_term' is the only value visible here).
        return self._get('me/top/artists', time_range=time_range, limit=limit, offset=offset)

    ([scope.user_top_read])
    _and_process(single(FullTrackPaging))
    _limit(50)
    def current_user_top_tracks(self, time_range: str='medium_term', limit: int=20, offset: int=0) -> FullTrackPaging:
        # GET me/top/tracks with identical paging semantics.
        return self._get('me/top/tracks', time_range=time_range, limit=limit, offset=offset)
def test_get_zone_tops_one_well_w_undef(testpath):
    """points_from_wells: use_undef toggles the first Zone value; repeated calls agree."""
    well = xtgeo.well_from_file((testpath / WFILES1), zonelogname='Zonelog')
    wells = [well]

    with_undef_first = xtgeo.points_from_wells(wells, use_undef=True)
    with_undef_second = xtgeo.points_from_wells(wells, use_undef=True)
    without_undef = xtgeo.points_from_wells(wells, use_undef=False)

    # Deterministic: two identical calls yield identical dataframes.
    assert with_undef_first.dataframe.equals(with_undef_second.dataframe)
    # With undefined zones included the first row is zone 0; excluded, zone 1.
    assert (with_undef_second.dataframe['Zone'][0] == 0)
    assert (without_undef.dataframe['Zone'][0] == 1)
def init_process(rank, world_size, aggregator, models, file_loc, pipe, distributed_op):
    """Worker entry point for a multi-process aggregation test.

    Initializes the (file-rendezvous, CPU-only) process group, feeds this
    rank's share of *models* into *aggregator*, aggregates with
    *distributed_op*, and sends the weighted mean of all aggregated
    parameter values back through *pipe*.

    Args:
        rank: this worker's rank in the process group.
        world_size: total number of workers.
        aggregator: object with zero_weights / apply_weight_to_update /
            add_update / aggregate (project type — semantics per its docs).
        models: sequence of models; model i is handled by rank i % world_size.
        file_loc: filesystem path used as the dist init rendezvous.
        pipe: multiprocessing connection used to report the result.
        distributed_op: reduction op passed to aggregator.aggregate.
    """
    FLDistributedUtils.dist_init(rank=rank, world_size=world_size, init_method=f'file://{file_loc}', use_cuda=False)
    aggregator.zero_weights()
    for (i, m) in enumerate(models):
        # Round-robin assignment: each rank contributes every world_size-th model.
        if ((i % world_size) == rank):
            weight = (i + 1)  # weight grows with model index (1-based)
            aggregator.apply_weight_to_update(delta=m, weight=weight)
            aggregator.add_update(m, weight=weight)
    module = aggregator.aggregate(distributed_op)
    # Weighted mean of all parameter entries: sum of values / total element count.
    (sums, weights) = (0.0, 0.0)
    all_sum = [(p.sum(), p.numel()) for p in module.parameters()]
    for (s, w) in all_sum:
        sums += float(s)
        weights += float(w)
    pipe.send((sums / weights))
    # Tear down the process group so the worker exits cleanly.
    dist.destroy_process_group()
def _glib_wait_inner(timeout, glib_timeout_func): id_by_obj = weakref.WeakKeyDictionary() def waiter(function): callargs = inspect.getfullargspec(function) if ((len(callargs.args) == 0) or (callargs.args[0] != 'self')): raise RuntimeError('Must only use glib_wait* on instance methods!') def thunk(*args, **kwargs): id_by_obj[args[0]] = None if function(*args, **kwargs): delayer(*args, **kwargs) def delayer(*args, **kwargs): self = args[0] srcid = id_by_obj.get(self) if srcid: GLib.source_remove(srcid) id_by_obj[self] = glib_timeout_func(timeout, thunk, *args, **kwargs) return delayer return waiter
def create_frontmatter(path: Path, nb_metadata: Dict[(str, Dict[(str, str)])]) -> str:
    """Render the '---'-delimited MyST frontmatter block for a notebook page.

    Metadata is looked up in *nb_metadata* by the file's stem; when absent,
    a full set of empty fields is emitted so the frontmatter shape is stable.
    The block is normalized with mdformat (MyST extension, wrap at 88) and
    returned with a trailing newline.
    """
    empty_fields = {'title': '', 'sidebar_label': '', 'path': '', 'nb_path': '', 'github': '', 'colab': ''}
    metadata = nb_metadata.get(path.stem, empty_fields)

    lines = ['---']
    for key, value in metadata.items():
        lines.append(f'{key}: {value}')
    lines.append('---')

    mdx = mdformat.text('\n'.join(lines), options={'wrap': 88}, extensions={'myst'})
    return mdx + '\n'
class EventNode(DjangoObjectType):
    """GraphQL node for the Event model, with a computed `url` field."""

    # Extra (non-model) field, resolved by resolve_url below.
    url = graphene.String()

    class Meta():
        model = Event
        # Fields the API allows filtering on.
        filter_fields = ['slug', 'title']

    def resolve_url(self, info, **kwargs):
        # Builds: /locations/<location-slug>/events/<event-id>/<event-slug>/
        return (((((('/locations/' + self.location.slug) + '/events/') + str(self.id)) + '/') + self.slug) + '/')
class Migration(migrations.Migration):
    """Redefine BlogPost.body as a StreamField with heading/rich-text/image blocks."""

    dependencies = [('blog', '0005_blogpost_convert_richtextfield_to_streamfield')]

    # Single schema change: the rich_text block's allowed features are pinned
    # to the list below; images go through the standard image chooser.
    operations = [migrations.AlterField(model_name='blogpost', name='body', field=wagtail.core.fields.StreamField([('heading', wagtail.core.blocks.CharBlock()), ('rich_text', wagtail.core.blocks.RichTextBlock(features=['bold', 'italic', 'h2', 'h3', 'h4', 'ol', 'ul', 'hr', 'embed', 'link', 'document-link', 'image', 'code'])), ('image', wagtail.images.blocks.ImageChooserBlock())]))]
# NOTE(review): bare `.parametrize(...)` was a stripped `@pytest.mark.parametrize`
# decorator (invalid syntax as-is); restored below.
@pytest.mark.parametrize('filename', _test_files())
def test_file(filename):
    """Round-trip every sample Avro file: read, rewrite, re-read, migrate.

    Asserts that records survive a write/read cycle unchanged, that the
    rewritten schema equals the original minus legacy fields, that the codec
    is preserved, and that schema migration with the original writer schema
    reproduces the records.
    """
    with open(filename, 'rb') as fo:
        reader = fastavro.reader(fo)
        assert hasattr(reader, 'writer_schema'), 'no schema on file'
        if (basename(filename) in NO_DATA):
            # Some fixtures intentionally carry a schema but no records.
            return
        records = list(reader)
        assert (len(records) > 0), 'no records found'

    # Rewrite the records with the same schema and codec...
    new_file = BytesIO()
    fastavro.writer(new_file, reader.writer_schema, records, reader.codec)
    new_file_bytes = new_file.getvalue()

    # ...and read them back through a non-seekable stream.
    new_file = NoSeekBytesIO(new_file_bytes)
    new_reader = fastavro.reader(new_file)
    assert hasattr(new_reader, 'writer_schema'), "schema wasn't written"
    assert (new_reader.writer_schema == remove_legacy_fields(copy.deepcopy(reader.writer_schema)))
    assert (new_reader.codec == reader.codec)
    new_records = list(new_reader)
    assert (new_records == records)

    # Re-read with an explicit reader schema to exercise schema migration.
    new_file = NoSeekBytesIO(new_file_bytes)
    schema_migration_reader = fastavro.reader(new_file, reader.writer_schema)
    assert (schema_migration_reader.reader_schema == reader.writer_schema)
    new_records = list(schema_migration_reader)
    assert (new_records == records)
class RMTTestRecordTxt3(object):
    """Parse a txt requirement record (fixture `tc1i`) and check its fields."""

    def rmttest_pos_01(self):
        # Route log output to an in-memory buffer for the duration of the test.
        mstderr = StringIO()
        init_logger(mstderr)
        txt_doc = TxtRecord.from_string(tc1i, u'rmtoo', TxtIOConfig())
        d = txt_doc.get_dict()
        # Multi-paragraph Rationale is preserved verbatim (\par separators).
        assert ("The world needs a good, usable and free Requirements Management Tool.\\par It looks that there are no such programs out.\\par But: it's complex!" == d['Rationale'].get_content())
        # Fixture-specific expectations: 11 entries, no record-level comment.
        assert (11 == len(txt_doc))
        assert ('' == txt_doc.get_comment())
        tear_down_log_handler()
class Post(AbstractCrudObject):
    """Graph API `Post` node (auto-generated facebook_business-style class).

    `Field` holds the API field names; the nested enum classes hold allowed
    values for enum-typed parameters.  Every request method below follows
    the same generated pattern: build a FacebookRequest for the node/edge,
    attach params and fields, then either enqueue on a batch, return it
    pending, or execute immediately.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isPost = True
        super(Post, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # API field-name constants (attribute name == field name, except
        # `field_from`, which maps to the reserved word 'from').
        actions = 'actions'
        admin_creator = 'admin_creator'
        allowed_advertising_objectives = 'allowed_advertising_objectives'
        application = 'application'
        backdated_time = 'backdated_time'
        call_to_action = 'call_to_action'
        can_reply_privately = 'can_reply_privately'
        caption = 'caption'
        child_attachments = 'child_attachments'
        comments_mirroring_domain = 'comments_mirroring_domain'
        coordinates = 'coordinates'
        created_time = 'created_time'
        description = 'description'
        event = 'event'
        expanded_height = 'expanded_height'
        expanded_width = 'expanded_width'
        feed_targeting = 'feed_targeting'
        field_from = 'from'
        full_picture = 'full_picture'
        height = 'height'
        icon = 'icon'
        id = 'id'
        instagram_eligibility = 'instagram_eligibility'
        is_app_share = 'is_app_share'
        is_eligible_for_promotion = 'is_eligible_for_promotion'
        is_expired = 'is_expired'
        is_hidden = 'is_hidden'
        is_inline_created = 'is_inline_created'
        is_instagram_eligible = 'is_instagram_eligible'
        is_popular = 'is_popular'
        is_published = 'is_published'
        is_spherical = 'is_spherical'
        link = 'link'
        message = 'message'
        message_tags = 'message_tags'
        multi_share_end_card = 'multi_share_end_card'
        multi_share_optimized = 'multi_share_optimized'
        name = 'name'
        object_id = 'object_id'
        parent_id = 'parent_id'
        permalink_url = 'permalink_url'
        picture = 'picture'
        place = 'place'
        privacy = 'privacy'
        promotable_id = 'promotable_id'
        promotion_status = 'promotion_status'
        properties = 'properties'
        scheduled_publish_time = 'scheduled_publish_time'
        shares = 'shares'
        source = 'source'
        status_type = 'status_type'
        story = 'story'
        story_tags = 'story_tags'
        subscribed = 'subscribed'
        target = 'target'
        targeting = 'targeting'
        timeline_visibility = 'timeline_visibility'
        type = 'type'
        updated_time = 'updated_time'
        via = 'via'
        video_buying_eligibility = 'video_buying_eligibility'
        width = 'width'

    # Enum value containers used via `<Enum>.__dict__.values()` below.
    class BackdatedTimeGranularity():
        day = 'day'
        hour = 'hour'
        min = 'min'
        month = 'month'
        none = 'none'
        year = 'year'

    class Formatting():
        markdown = 'MARKDOWN'
        plaintext = 'PLAINTEXT'

    class PlaceAttachmentSetting():
        value_1 = '1'
        value_2 = '2'

    class PostSurfacesBlacklist():
        value_1 = '1'
        value_2 = '2'
        value_3 = '3'
        value_4 = '4'
        value_5 = '5'

    class PostingToRedspace():
        disabled = 'disabled'
        enabled = 'enabled'

    class TargetSurface():
        story = 'STORY'
        timeline = 'TIMELINE'

    class UnpublishedContentType():
        ads_post = 'ADS_POST'
        draft = 'DRAFT'
        inline_created = 'INLINE_CREATED'
        published = 'PUBLISHED'
        reviewable_branded_content = 'REVIEWABLE_BRANDED_CONTENT'
        scheduled = 'SCHEDULED'
        scheduled_recurring = 'SCHEDULED_RECURRING'

    class FeedStoryVisibility():
        hidden = 'hidden'
        visible = 'visible'

    class TimelineVisibility():
        forced_allow = 'forced_allow'
        hidden = 'hidden'
        normal = 'normal'

    def api_delete(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        # DELETE on the node itself.
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        # GET the node, reusing this object for the parsed response.
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Post, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def api_update(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        # POST on the node; param_types drives client-side type checking.
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'attached_media': 'list<Object>', 'backdated_time': 'datetime', 'backdated_time_granularity': 'backdated_time_granularity_enum', 'composer_session_id': 'string', 'direct_share_status': 'unsigned int', 'explicitly_added_mentionee_ids': 'list<unsigned int>', 'feed_story_visibility': 'feed_story_visibility_enum', 'is_explicit_location': 'bool', 'is_hidden': 'bool', 'is_pinned': 'bool', 'is_published': 'bool', 'message': 'string', 'og_action_type_id': 'string', 'og_hide_object_attachment': 'bool', 'og_icon_id': 'string', 'og_object_id': 'string', 'og_phrase': 'string', 'og_set_profile_badge': 'bool', 'og_suggestion_mechanism': 'string', 'place': 'Object', 'privacy': 'string', 'product_item': 'Object', 'scheduled_publish_time': 'unsigned int', 'should_sync_product_edit': 'bool', 'source_type': 'string', 'sponsor_id': 'string', 'sponsor_relationship': 'unsigned int', 'tags': 'list<int>', 'text_format_preset_id': 'string', 'timeline_visibility': 'timeline_visibility_enum', 'tracking': 'string'}
        enums = {'backdated_time_granularity_enum': Post.BackdatedTimeGranularity.__dict__.values(), 'feed_story_visibility_enum': Post.FeedStoryVisibility.__dict__.values(), 'timeline_visibility_enum': Post.TimelineVisibility.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Post, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_attachments(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/attachments', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_comments(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.comment import Comment
        param_types = {'filter': 'filter_enum', 'live_filter': 'live_filter_enum', 'order': 'order_enum', 'since': 'datetime'}
        enums = {'filter_enum': Comment.Filter.__dict__.values(), 'live_filter_enum': Comment.LiveFilter.__dict__.values(), 'order_enum': Comment.Order.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/comments', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Comment, api_type='EDGE', response_parser=ObjectParser(target_class=Comment, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def create_comment(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.comment import Comment
        param_types = {'attachment_id': 'string', 'attachment_share_url': 'string', 'attachment_url': 'string', 'comment': 'string', 'comment_privacy_value': 'comment_privacy_value_enum', 'feedback_source': 'string', 'message': 'string', 'nectar_module': 'string', 'parent_comment_id': 'Object', 'post_id': 'string', 'tracking': 'string'}
        enums = {'comment_privacy_value_enum': Comment.CommentPrivacyValue.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/comments', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Comment, api_type='EDGE', response_parser=ObjectParser(target_class=Comment, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_dynamic_posts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.rtbdynamicpost import RTBDynamicPost
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/dynamic_posts', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=RTBDynamicPost, api_type='EDGE', response_parser=ObjectParser(target_class=RTBDynamicPost, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_insights(self, fields=None, params=None, is_async=False, batch=None, success=None, failure=None, pending=False):
        # is_async=True delegates to get_insights_async (defined elsewhere).
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.insightsresult import InsightsResult
        if is_async:
            return self.get_insights_async(fields, params, batch, success, failure, pending)
        param_types = {'date_preset': 'date_preset_enum', 'metric': 'list<Object>', 'period': 'period_enum', 'since': 'datetime', 'until': 'datetime'}
        enums = {'date_preset_enum': InsightsResult.DatePreset.__dict__.values(), 'period_enum': InsightsResult.Period.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/insights', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=InsightsResult, api_type='EDGE', response_parser=ObjectParser(target_class=InsightsResult, api=self._api), include_summary=False)
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def delete_likes(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'nectar_module': 'string', 'tracking': 'string'}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='DELETE', endpoint='/likes', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=AbstractCrudObject, api_type='EDGE', response_parser=ObjectParser(target_class=AbstractCrudObject, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def create_like(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {'feedback_source': 'string', 'nectar_module': 'string', 'tracking': 'string'}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/likes', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Post, api_type='EDGE', response_parser=ObjectParser(target_class=Post, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_reactions(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.profile import Profile
        param_types = {'type': 'type_enum'}
        enums = {'type_enum': Profile.Type.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/reactions', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Profile, api_type='EDGE', response_parser=ObjectParser(target_class=Profile, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_shared_posts(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/sharedposts', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Post, api_type='EDGE', response_parser=ObjectParser(target_class=Post, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_sponsor_tags(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.page import Page
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/sponsor_tags', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Page, api_type='EDGE', response_parser=ObjectParser(target_class=Page, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_to(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.profile import Profile
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/to', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Profile, api_type='EDGE', response_parser=ObjectParser(target_class=Profile, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # Field-name -> API type mapping used by the SDK's (de)serialization.
    _field_types = {'actions': 'list', 'admin_creator': 'Object', 'allowed_advertising_objectives': 'list<string>', 'application': 'Application', 'backdated_time': 'datetime', 'call_to_action': 'Object', 'can_reply_privately': 'bool', 'caption': 'string', 'child_attachments': 'list', 'comments_mirroring_domain': 'string', 'coordinates': 'Object', 'created_time': 'datetime', 'description': 'string', 'event': 'Event', 'expanded_height': 'unsigned int', 'expanded_width': 'unsigned int', 'feed_targeting': 'Object', 'from': 'Object', 'full_picture': 'string', 'height': 'unsigned int', 'icon': 'string', 'id': 'string', 'instagram_eligibility': 'string', 'is_app_share': 'bool', 'is_eligible_for_promotion': 'bool', 'is_expired': 'bool', 'is_hidden': 'bool', 'is_inline_created': 'bool', 'is_instagram_eligible': 'bool', 'is_popular': 'bool', 'is_published': 'bool', 'is_spherical': 'bool', 'link': 'string', 'message': 'string', 'message_tags': 'list', 'multi_share_end_card': 'bool', 'multi_share_optimized': 'bool', 'name': 'string', 'object_id': 'string', 'parent_id': 'string', 'permalink_url': 'string', 'picture': 'string', 'place': 'Place', 'privacy': 'Privacy', 'promotable_id': 'string', 'promotion_status': 'string', 'properties': 'list', 'scheduled_publish_time': 'float', 'shares': 'Object', 'source': 'string', 'status_type': 'string', 'story': 'string', 'story_tags': 'list', 'subscribed': 'bool', 'target': 'Profile', 'targeting': 'Object', 'timeline_visibility': 'string', 'type': 'string', 'updated_time': 'datetime', 'via': 'Object', 'video_buying_eligibility': 'list<string>', 'width': 'unsigned int'}

    # NOTE(review): takes `cls` but carries no @classmethod in this source —
    # the decorator was probably lost in extraction; confirm against upstream.
    def _get_field_enum_info(cls):
        # Map enum-class name -> iterable of its allowed values.
        field_enum_info = {}
        field_enum_info['BackdatedTimeGranularity'] = Post.BackdatedTimeGranularity.__dict__.values()
        field_enum_info['Formatting'] = Post.Formatting.__dict__.values()
        field_enum_info['PlaceAttachmentSetting'] = Post.PlaceAttachmentSetting.__dict__.values()
        field_enum_info['PostSurfacesBlacklist'] = Post.PostSurfacesBlacklist.__dict__.values()
        field_enum_info['PostingToRedspace'] = Post.PostingToRedspace.__dict__.values()
        field_enum_info['TargetSurface'] = Post.TargetSurface.__dict__.values()
        field_enum_info['UnpublishedContentType'] = Post.UnpublishedContentType.__dict__.values()
        field_enum_info['FeedStoryVisibility'] = Post.FeedStoryVisibility.__dict__.values()
        field_enum_info['TimelineVisibility'] = Post.TimelineVisibility.__dict__.values()
        return field_enum_info
class ActivityContext():
    """Per-activity execution context, stashed in a context variable.

    NOTE(review): the bare class-level annotations with defaults and the
    self-less get/set suggest stripped `@dataclass` and `@staticmethod`
    decorators — confirm against the upstream source.
    """

    client: 'WorkflowClient' = None
    activity_task: ActivityTask = None
    namespace: str = None
    do_not_complete: bool = False

    # Likely @staticmethod: read the current context from the contextvar.
    def get() -> 'ActivityContext':
        return current_activity_context.get()

    # Likely @staticmethod: install (or clear) the current context.
    def set(context: Optional['ActivityContext']):
        current_activity_context.set(context)

    async def heartbeat(self, details: object):
        # Delegate to the module-level heartbeat() with this task's token.
        (await heartbeat(self.client, self.activity_task.task_token, details))

    def get_heartbeat_details(self) -> object:
        """Return the first recorded heartbeat detail, or None when absent."""
        details: Payloads = self.activity_task.heartbeat_details
        if (not self.activity_task.heartbeat_details):
            return None
        payloads: List[object] = self.client.data_converter.from_payloads(details)
        return payloads[0]

    def do_not_complete_on_return(self):
        # Flag that the activity should stay open after the function returns.
        self.do_not_complete = True
def awards_over_different_date_ranges():
    """Seed one AwardSearch (plus its latest TransactionSearch) per category/date-range pair.

    Award ids are assigned sequentially across the full category x date-range
    product; contracts/idvs get 'abcdefg<N>' display ids, all others 'xyz<N>'.
    """
    categories = ['contracts', 'direct_payments', 'grants', 'idvs', 'loans', 'other_financial_assistance']
    date_spans = [
        {'date_signed': datetime(2014, 1, 1), 'action_date': datetime(2014, 5, 1)},
        {'date_signed': datetime(2014, 3, 1), 'action_date': datetime(2015, 4, 15)},
        {'date_signed': datetime(2015, 2, 1), 'action_date': datetime(2015, 7, 1)},
        {'date_signed': datetime(2015, 2, 1), 'action_date': datetime(2015, 4, 17)},
        {'date_signed': datetime(2014, 12, 1), 'action_date': datetime(2016, 1, 1)},
        {'date_signed': datetime(2015, 11, 1), 'action_date': datetime(2016, 3, 1)},
        {'date_signed': datetime(2016, 2, 23), 'action_date': datetime(2016, 7, 19)},
        {'date_signed': datetime(2016, 11, 26), 'action_date': datetime(2017, 3, 1)},
        {'date_signed': datetime(2017, 5, 1), 'action_date': datetime(2017, 7, 1)},
        {'date_signed': datetime(2017, 1, 1), 'action_date': datetime(2017, 12, 1)},
        {'date_signed': datetime(2017, 9, 1), 'action_date': datetime(2017, 12, 17)},
        {'date_signed': datetime(2018, 2, 1), 'action_date': datetime(2018, 7, 1)},
        {'date_signed': datetime(2014, 12, 1), 'action_date': datetime(2017, 12, 5)},
        {'date_signed': datetime(2015, 7, 1), 'action_date': datetime(2017, 5, 1)},
        {'date_signed': datetime(2014, 10, 3), 'action_date': datetime(2017, 4, 8)},
        {'date_signed': datetime(2015, 8, 1), 'action_date': datetime(2018, 1, 2)},
    ]
    counter = 0
    for category in categories:
        for span in date_spans:
            counter += 1
            # Cycle through the category's award types by id.
            type_choices = all_award_types_mappings[category]
            chosen_type = type_choices[counter % len(type_choices)]
            if category in ('contracts', 'idvs'):
                display_id = f'abcdefg{counter}'
            else:
                display_id = f'xyz{counter}'
            award = baker.make(
                'search.AwardSearch',
                award_id=counter,
                generated_unique_award_id=f'AWARD_{counter}',
                type=chosen_type,
                category=category,
                latest_transaction_id=1000 + counter,
                date_signed=span['date_signed'],
                display_award_id=display_id,
                piid=f'abcdefg{counter}',
                fain=f'xyz{counter}',
                uri=f'abcxyx{counter}',
                action_date=span['action_date'],
            )
            baker.make(
                'search.TransactionSearch',
                transaction_id=1000 + counter,
                award=award,
                action_date=span['action_date'],
            )
def _get_neighs_mean_score(model, neighs): scores = [] for (neigh, score) in neighs: if (not scores): scores.append(score) similar = model.most_similar(neigh, topn=1) if (not similar): continue scores.extend([s[1] for s in similar]) return (sum(scores) / len(scores))
def parse_message(msg: IncomingMessage) -> dict:
    """Extend the base-attribute payload with forces, (N,3) positions and ids.

    Read order from the message is significant: forces, positions, id count,
    then that many ids.
    """
    data = attr.base_attr.parse_message(msg)
    data['forces'] = msg.read_float32_list()
    flat_positions = msg.read_float32_list()
    data['positions'] = np.array(flat_positions).reshape(-1, 3).tolist()
    id_count = msg.read_int32()
    ids = []
    for _ in range(id_count):
        ids.append(msg.read_int32())
    data['ids'] = ids
    return data
def deploy():
    """Publish the built HTML docs to the gh-pages branch.

    Reads latest/legacy version names from source/conf.py, wipes everything on
    gh-pages except the legacy version dirs and build/, copies build/html in,
    symlinks the latest version's index, and (unless DISABLE_GIT_PUSH) commits
    and pushes.
    """
    conf_file = importlib.machinery.SourceFileLoader('conf', 'source/conf.py').load_module()
    latest_version = conf_file.latest_version
    legacy_versions = conf_file.legacy_versions
    # NOTE(review): `git status --porcelain` exits 0 even when changes exist,
    # so this guard likely only fires on git errors -- confirm intent.
    if subprocess.call(['git', 'status', '--untracked=no', '--porcelain']):
        print('There are uncommitted or untracked changes. Make sure to commit everything in your current branch first.')
        sys.exit(1)
    os.system('git fetch')
    os.system('git checkout gh-pages')
    os.system('pwd')
    os.system('ls')
    # Keep legacy version folders and the fresh build output; delete the rest.
    names_to_skip = (legacy_versions + ['build'])
    for file_path in glob.glob('*'):
        if (file_path in names_to_skip):
            continue
        else:
            os.system(f'rm -Rf {file_path}')
            print(f'removed file_path: {file_path}')
    os.system('ls')
    os.system('cp -Rf build/html/* .')
    os.system('ls')
    # Root index points at the latest version's index page.
    os.system(f'ln -s {latest_version}/index.html .')
    os.system('ls')
    if (not DISABLE_GIT_PUSH):
        print('committing and pushing docs ...')
        os.system('git add .')
        os.system('git commit -a -m "Updated HTML docs."')
        os.system('git push origin gh-pages')
    else:
        print('Skipped git push.')
    print('Deployed to evennia.github.io/evennia/')
class Jupyter_queue(models.Model):
    """Queue entry pairing a Django user with a Jupyter access token."""

    # Owning user; the row is deleted when the user is deleted.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # Token text; blank=False only affects form validation, NULL is allowed in the DB.
    token = models.TextField(blank=False, null=True)
    # Set automatically when the row is first created.
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta():
        db_table = 'jupyter_queue'

    def __str__(self):
        return ('user: %s' % self.user.id)
def get_module_names_from_code(paths: List[str]) -> List[ImportInfo]:
    """Collect ImportInfo entries from the given files/directories, de-duplicated in order.

    Directories are searched recursively for *.py; plain files are resolved to
    absolute paths before parsing.
    """
    candidate_files: List[Path] = []
    for raw_path in paths:
        as_path = Path(raw_path)
        if as_path.is_dir():
            candidate_files.extend(as_path.glob('**/*.py'))
        else:
            candidate_files.append(as_path.resolve())
    all_imports = []
    for candidate in candidate_files:
        logger.debug(f'Parsing {candidate}')
        all_imports.extend(get_module_info_from_python_file(path=str(candidate)))
    # Order-preserving de-dup; ImportInfo may be unhashable, so no set here.
    unique_imports = []
    for info in all_imports:
        if (info not in unique_imports):
            unique_imports.append(info)
    logger.debug('Imports found in code:')
    for info in unique_imports:
        logger.debug(f'- {info}')
    return unique_imports
def test_action_python_only():
    """Actions reject non-callables, are read-only attributes, and repr as actions."""
    obj = MyObject()
    with raises(TypeError):
        event.action(3)
    # PyPy lets builtins like isinstance be wrapped; CPython raises.
    if '__pypy__' not in sys.builtin_module_names:
        with raises(TypeError):
            event.action(isinstance)
    assert isinstance(obj.set_foo, event._action.Action)
    with raises(AttributeError):
        obj.set_foo = 3
    with raises(AttributeError):
        del obj.set_foo
    # Explicit and auto-action classes both expose action-flavored reprs.
    for instance in (obj, MyObject_autoaction()):
        assert 'action' in repr(instance.__class__.set_foo).lower()
        assert 'action' in repr(instance.set_foo).lower()
        assert 'foo' in repr(instance.set_foo)
# NOTE(review): the bare call below looks like the argument list of a stripped
# decorator (e.g. `@..._op([...])`) describing the operand signature
# (alloc cursor, two new expressions, bool) -- confirm against the original
# source; as written it executes once at import time.
_op([AllocCursorA, NewExprA('buf_cursor'), NewExprA('buf_cursor'), BoolA])

def expand_dim(proc, buf_cursor, alloc_dim, indexing_expr, unsafe_disable_checks=False):
    """Scheduling primitive: add a dimension of extent `alloc_dim` to the
    allocation at `buf_cursor`, indexed by `indexing_expr`.

    Returns a new Procedure whose provenance and forwarding point back at
    `proc`. `unsafe_disable_checks` is accepted but unused in this body --
    presumably consumed by the stripped decorator; TODO confirm.
    """
    stmt_c = buf_cursor._impl
    (ir, fwd) = scheduling.DoExpandDim(stmt_c, alloc_dim, indexing_expr)
    return Procedure(ir, _provenance_eq_Procedure=proc, _forward=fwd)
class SaaSConfig(SaaSConfigBase):
    """Full SaaS connector configuration: endpoints, client config, and the
    optional consent/erasure requests, convertible into a GraphDataset.

    BUGFIX: `resolve_param_reference` was defined without `self` yet invoked
    as `self.resolve_param_reference(...)` in `_process_param_values`, which
    raised TypeError (three args to a two-parameter function). It is now a
    @staticmethod, which keeps both `self.`- and class-level call styles
    working.
    """

    description: str
    version: str
    replaceable: bool = False
    connector_params: List[ConnectorParam]
    external_references: Optional[List[ExternalDatasetReference]]
    client_config: ClientConfig
    endpoints: List[Endpoint]
    test_request: SaaSRequest
    data_protection_request: Optional[SaaSRequest] = None
    rate_limit_config: Optional[RateLimitConfig]
    consent_requests: Optional[ConsentRequestMap]
    user_guide: Optional[str]

    def top_level_endpoint_dict(self) -> Dict[(str, Endpoint)]:
        """Map endpoint name -> Endpoint for direct lookup."""
        return {endpoint.name: endpoint for endpoint in self.endpoints}

    def get_graph(self, secrets: Dict[(str, Any)]) -> GraphDataset:
        """Build a GraphDataset with one Collection per endpoint that yields fields.

        Fields come from read requests' param_values (or the delete request's,
        when no read requests exist); the first field of each collection is
        marked as the primary key.
        """
        collections = []
        for endpoint in self.endpoints:
            fields: List[Field] = []
            read_requests: List[SaaSRequest] = []
            if endpoint.requests.read:
                # `read` may be a single request or a list; normalize to a list.
                read_requests = (endpoint.requests.read if isinstance(endpoint.requests.read, list) else [endpoint.requests.read])
            delete_request = endpoint.requests.delete
            for read_request in read_requests:
                self._process_param_values(fields, read_request.param_values, secrets)
            if ((not read_requests) and delete_request):
                # Fall back to the delete request's params when there is no read.
                self._process_param_values(fields, delete_request.param_values, secrets)
            if fields:
                fields[0].primary_key = True
            if fields:
                # grouped_inputs only come from the first read request.
                grouped_inputs: Set[str] = set()
                if read_requests:
                    grouped_inputs = set((read_requests[0].grouped_inputs or []))
                collections.append(Collection(name=endpoint.name, fields=fields, grouped_inputs=grouped_inputs, skip_processing=endpoint.skip_processing, after={CollectionAddress(*s.split('.')) for s in endpoint.after}, erase_after={CollectionAddress(*s.split('.')) for s in endpoint.erase_after}))
        return GraphDataset(name=super().name_prop, collections=collections, connection_key=super().fides_key_prop)

    def _process_param_values(self, fields: List[Field], param_values: Optional[List[ParamValue]], secrets: Dict[(str, Any)]) -> None:
        """Append ScalarFields (with references and/or identity) for each param value."""
        for param in (param_values or []):
            if param.references:
                references = []
                for reference in param.references:
                    resolved_reference = self.resolve_param_reference(reference, secrets)
                    # 'dataset.collection.field[...]' -> address parts.
                    (first, *rest) = resolved_reference.field.split('.')
                    references.append((FieldAddress(resolved_reference.dataset, first, *rest), resolved_reference.direction))
                fields.append(ScalarField(name=param.name, references=references))
            if param.identity:
                fields.append(ScalarField(name=param.name, identity=param.identity))

    @staticmethod
    def resolve_param_reference(reference: Union[(str, FidesDatasetReference)], secrets: Dict[(str, Any)]) -> FidesDatasetReference:
        """Resolve a string reference through the connector's secrets; pass
        FidesDatasetReference instances through unchanged.

        Raises ValidationError when the named external reference is missing.
        """
        if isinstance(reference, str):
            if (reference not in secrets.keys()):
                raise ValidationError(f"External dataset reference with provided name {reference} not found in connector's secrets.")
            reference = FidesDatasetReference.parse_obj(secrets[reference])
        return reference

    def supported_actions(self) -> List[ActionType]:
        """Derive the action types this connector supports from its requests."""
        supported_actions = []
        if any((requests.read for requests in [endpoint.requests for endpoint in self.endpoints])):
            supported_actions.append(ActionType.access)
        if (any(((request.update or request.delete) for request in [endpoint.requests for endpoint in self.endpoints])) or self.data_protection_request):
            supported_actions.append(ActionType.erasure)
        if self.consent_requests:
            supported_actions.append(ActionType.consent)
        return supported_actions
class ExecutablesBuilder(threading.Thread):
    """Background thread that pulls new commits from a repo and builds benchmark
    executables for each requested platform, queueing build info for consumers.

    Fixes applied in review:
    - the error string in _saveOneCommitExecutable was broken across lines in
      the checked-in file (a SyntaxError); restored to one literal;
    - self.info is read in _buildOneCommitExecutable but was never
      initialized, raising AttributeError the first time meta was truthy;
    - removed an unreachable `return None` after a raise in _getControlCommit.
    """

    def __init__(self, repo, work_queue, queue_lock, **kwargs):
        threading.Thread.__init__(self)
        raw_args = kwargs.get('raw_args', None)
        (self.args, self.unknowns) = parser.parse_known_args(raw_args)
        self.repo = repo
        self.work_queue = work_queue
        self.queue_lock = queue_lock
        self.current_commit_hash = None
        # BUGFIX: read below via self.info['meta']; was never initialized.
        self.info = {}

    def run(self):
        try:
            if self.args.interval:
                # Poll-and-build loop until the status file signals a stop.
                while (not stopRun(self.args.status_file)):
                    self._buildExecutables()
                    time.sleep(self.args.interval)
            else:
                self._buildExecutables()
        except Exception:
            setRunStatus(2)
            getLogger().exception('Error building executable.')

    def _buildExecutables(self):
        """Build every unprocessed commit for every configured platform."""
        platforms = self.args.platforms.split(',')
        while ((not stopRun(self.args.status_file)) and self._pullNewCommits()):
            for platform in platforms:
                self._saveOneCommitExecutable(platform)

    def _saveOneCommitExecutable(self, platform):
        """Build the current commit on `platform` and enqueue the repo info."""
        getLogger().info(('Building executable on {} '.format(platform) + ' {}'.format(self.current_commit_hash)))
        same_host = self.args.same_host
        if same_host:
            # Serialize the whole build when consumer shares this host.
            self.queue_lock.acquire()
        repo_info = self._buildOneCommitExecutable(platform, self.current_commit_hash)
        if (repo_info is None):
            getLogger().error('Failed to extract repo commands. Skip this commit.')
        else:
            if (not same_host):
                # Only guard the queue append otherwise.
                self.queue_lock.acquire()
            self.work_queue.append(repo_info)
        if self.queue_lock.locked():
            self.queue_lock.release()

    def _buildOneCommitExecutable(self, platform, commit_hash):
        """Assemble treatment (and optional control / regression) build info."""
        repo_info = {}
        repo_info_treatment = self._setupRepoStep(platform, commit_hash)
        if (repo_info_treatment is None):
            return None
        repo_info['treatment'] = repo_info_treatment
        if self.args.ab_testing:
            control_commit_hash = self._getControlCommit(repo_info_treatment['commit_time'], self.args.base_commit)
            repo_info_control = self._setupRepoStep(platform, control_commit_hash)
            if (repo_info_control is None):
                return None
            repo_info['control'] = repo_info_control
        meta = getMeta(self.args, platform)
        if meta:
            assert ('meta' not in self.info), 'info field already has a meta field'
            self.info['meta'] = meta
        if self.args.regression:
            repo_info['regression_commits'] = self._getCompareCommits(repo_info_treatment['commit'])
        repo_info['platform'] = platform
        return repo_info

    def _getCompareCommits(self, latest_commit):
        """Return up to 12 prior commits as {'commit', 'commit_time'} dicts."""
        commits = self.repo.getPriorCommits(latest_commit, 12)
        if (not commits):
            return []
        commits = commits.split('\n')
        if (commits[(- 1)] == ''):
            commits.pop()
        res = []
        for commit in commits:
            c = commit.split(':')
            assert (len(c) == 2), 'Length is incorrect'
            res.append({'commit': c[0], 'commit_time': int(float(c[1]))})
        return res

    def _pullNewCommits(self):
        """Advance self.current_commit_hash to the next commit to process.

        Returns False when there is nothing new to build.
        """
        new_commit_hash = None
        if _runIndividual(self.args.interval, self.args.regression, self.args.ab_testing):
            new_commit_hash = self.repo.getCurrentCommitHash()
            if (new_commit_hash is None):
                getLogger().error('Commit is not specified')
                return False
        else:
            self.repo.checkout(self.args.branch)
            self.repo.pull(self.args.remote_repository, self.args.branch)
            if (self.current_commit_hash is None):
                self.current_commit_hash = self._getSavedCommit()
            if (self.current_commit_hash is None):
                new_commit_hash = self.repo.getCommitHash(self.args.commit)
            else:
                new_commit_hash = self.repo.getNextCommitHash(self.current_commit_hash, self.args.step)
        if (new_commit_hash == self.current_commit_hash):
            getLogger().info('Commit %s is already processed, sleeping...', new_commit_hash)
            return False
        self.current_commit_hash = new_commit_hash
        return True

    def _getSavedCommit(self):
        """Read a previously-saved commit hash from args.commit_file, if any."""
        if (self.args.commit_file and os.path.isfile(self.args.commit_file)):
            with open(self.args.commit_file, 'r') as file:
                commit_hash = file.read().strip()
                return self.repo.getCommitHash(commit_hash)
        else:
            return None

    def _setupRepoStep(self, platform, commit):
        """Resolve commit metadata and build the program; None on failure."""
        repo_info = {}
        repo_info['commit'] = self.repo.getCommitHash(commit)
        repo_info['commit_time'] = self.repo.getCommitTime(repo_info['commit'])
        return (repo_info if self._buildProgram(platform, repo_info) else None)

    def _buildProgram(self, platform, repo_info):
        """Compute the output path, reuse an existing binary when allowed, else build."""
        directory = getDirectory(repo_info['commit'], repo_info['commit_time'])
        program = (self.args.framework + '_benchmark')
        if (os.name == 'nt'):
            program = (program + '.exe')
        elif platform.startswith('ios'):
            program = (program + '.ipa')
        dst = os.path.join(self.args.exec_dir, self.args.framework, platform, directory, program)
        repo_info['program'] = dst
        repo_info['programs'] = {'program': {'location': dst}}
        filedir = os.path.dirname(dst)
        if ((not _runIndividual(self.args.interval, self.args.regression, self.args.ab_testing)) and os.path.isfile(dst)):
            # Continuous mode can reuse a binary that was already built.
            return True
        else:
            result = self._buildProgramPlatform(repo_info, dst, platform)
            for fn in os.listdir(filedir):
                if (fn != program):
                    repo_info['programs'][fn] = {'location': os.path.join(filedir, fn)}
            return result

    def _buildProgramPlatform(self, repo_info, dst, platform):
        self.repo.checkout(repo_info['commit'])
        return buildProgramPlatform(dst, self.args.repo_dir, self.args.framework, self.args.frameworks_dir, platform)

    def _getControlCommit(self, reference_time, base_commit):
        """Pick the control commit for A/B testing: base_commit if it falls in
        the same week as reference_time, otherwise the first commit at/after
        the start of that week."""
        dt = datetime.datetime.utcfromtimestamp(reference_time)
        monday = (dt - datetime.timedelta(days=dt.weekday()))
        start_of_week = monday.replace(hour=0, minute=0, second=0, microsecond=0)
        if base_commit:
            base_commit_time = self.repo.getCommitTime(base_commit)
            base_commit_datetime = datetime.datetime.utcfromtimestamp(base_commit_time)
            if (base_commit_datetime >= start_of_week):
                return base_commit
        # Widen the search window hour by hour until some commits appear.
        start = end = start_of_week
        repeat = True
        while repeat:
            logs_str = self.repo.getCommitsInRange(start, end)
            if (logs_str == ''):
                end = (end + datetime.timedelta(hours=1))
            else:
                repeat = False
        logs = logs_str.split('\n')
        for row in logs:
            items = row.strip().split(':')
            assert (len(items) == 2), 'Repo log format is wrong'
            commit_hash = items[0].strip()
            unix_time = int(float(items[1].strip()))
            unix_datetime = datetime.datetime.utcfromtimestamp(unix_time)
            if (unix_datetime >= start_of_week):
                return commit_hash
        raise AssertionError('Cannot find the control commit')
def task_msgfmt():
    """doit task: compile .po catalogs to .mo and regenerate translated READMEs."""
    translation_dirs = glob.glob('./readme_translations/locale/*_*')
    languages = [d[d.rfind('/') + 1:] for d in translation_dirs]
    with suppress(ValueError):
        # NOTE: one shared suppress means that if 'zh_CN' is absent the
        # 'en_US' removal is skipped too (original behavior, preserved).
        languages.remove('zh_CN')
        languages.remove('en_US')
    sources = glob.glob('./**/*.po', recursive=True)
    dests = [src[:-3] + '.mo' for src in sources]
    actions = [['msgfmt', src, '-o', dst] for src, dst in zip(sources, dests)]
    actions.append(['mkdir', './.cache/source'])
    actions.append(['cp', README_BASE, './.cache/source/README.rst'])
    locale_dirs = ((Path('.') / 'readme_translations') / 'locale').absolute()
    for lang in languages:
        actions.extend([
            ['sphinx-build', '-E', '-b', 'rst', '-C', '-D', f'language={lang}', '-D', f'locale_dirs={locale_dirs}', '-D', 'extensions=sphinxcontrib.restbuilder', '-D', 'master_doc=README', './.cache/source', f'./.cache/{lang}'],
            ['mv', f'./.cache/{lang}/README.rst', f'./readme_translations/{lang}.rst'],
            ['rm', '-rf', f'./.cache/{lang}'],
        ])
    actions.append(['rm', '-rf', './.cache/source'])
    return {'actions': actions, 'targets': dests, 'file_dep': sources, 'task_dep': ['crowdin', 'crowdin_pull']}
def make_complex_fourier_basis(grid, fourier_grid, sort_by_energy=True):
    """Build complex-exponential (Fourier) modes on `grid`, one per point of
    `fourier_grid`, optionally ordered by increasing |k|^2."""
    coords = np.array(grid.coords)
    modes = []
    energies = []
    for wave_vector in fourier_grid.points:
        modes.append(Field(np.exp(1j * np.dot(wave_vector, coords)), grid))
        # Energy here is the squared wave-vector norm, used only for ordering.
        energies.append(np.dot(wave_vector, wave_vector))
    if sort_by_energy:
        order = np.argsort(energies)
        modes = [modes[idx] for idx in order]
    return ModeBasis(modes, grid)
class Solution():
    def mergeKLists(self, lists: List[ListNode]) -> ListNode:
        """Merge k sorted linked lists via a min-heap of (value, list index).

        The index in the heap tuple both breaks value ties deterministically
        and avoids comparing ListNode objects directly.
        """
        cursors = list(lists)
        heap = [(node.val, i) for i, node in enumerate(cursors) if node is not None]
        heapq.heapify(heap)
        merged_head = None
        tail = None
        while heap:
            _, i = heapq.heappop(heap)
            node = cursors[i]
            if merged_head is None:
                merged_head = node
            if tail is not None:
                tail.next = node
            # Advance this list's cursor and re-seed the heap from it.
            cursors[i] = node.next
            if cursors[i] is not None:
                heapq.heappush(heap, (cursors[i].val, i))
            tail = node
        return merged_head
def _check_inputs(form, tensor, bcs, diagonal):
    """Validate form/tensor/bcs combinations before assembly.

    Raises NotImplementedError for diagonal assembly with EquationBC, and
    ValueError when the result tensor's spaces/arguments do not match the
    form or when coefficient dtypes differ from the PETSc scalar type.
    """
    # Ensure every mesh the form lives on is initialized.
    for mesh in form.ufl_domains():
        mesh.init()
    if diagonal and any(isinstance(bc, EquationBCSplit) for bc in bcs):
        raise NotImplementedError('Diagonal assembly and EquationBC not supported')
    rank = len(form.arguments())
    if rank == 0:
        # 0-forms produce a scalar: no tensor, no bcs.
        assert tensor is None
        assert not bcs
    elif rank == 1:
        (test_arg,) = form.arguments()
        if tensor is not None and test_arg.function_space() != tensor.function_space():
            raise ValueError("Form's argument does not match provided result tensor")
    elif rank == 2 and diagonal:
        test_arg, trial_arg = form.arguments()
        if test_arg.function_space() != trial_arg.function_space():
            raise ValueError('Can only assemble the diagonal of 2-form if the function spaces match')
    elif rank == 2:
        if tensor is not None and tensor.a.arguments() != form.arguments():
            raise ValueError("Form's arguments do not match provided result tensor")
    else:
        raise AssertionError
    if any(coeff.dat.dtype != ScalarType for coeff in form.coefficients()):
        raise ValueError('Cannot assemble a form containing coefficients where the dtype is not the PETSc scalar type.')
def test_create_full_time_periods():
    """Grouping 2020-09-30..2020-10-01 spans the FY2020/FY2021 boundary for
    fy, quarter and month groupings."""
    start, end = date(2020, 9, 30), date(2020, 10, 1)
    expectations = {
        'fy': [{'fy': '2020'}, {'fy': '2021'}],
        'quarter': [{'fy': '2020', 'quarter': '4'}, {'fy': '2021', 'quarter': '1'}],
        'month': [{'fy': '2020', 'month': '12'}, {'fy': '2021', 'month': '1'}],
    }
    for group, expected in expectations.items():
        periods = fyh.create_full_time_periods(start, end, group, {})
        assert len(periods) == len(expected)
        for actual, want in zip(periods, expected):
            assert actual['time_period'] == want
class flow_stats_entry(loxi.OFObject):
    """Generated OpenFlow flow-stats entry (loxi style): pack/unpack/eq/pretty-print.

    BUGFIX: the `'duration_nsec = '` label in pretty_print was split across
    physical lines in the checked-in file (a SyntaxError); restored to one
    literal consistent with every sibling label. No other code changed.

    NOTE(review): this is py2-era generated code -- `''.join(packed)` mixes
    str padding with `struct.pack` output, and `unpack` lacks the usual
    @staticmethod decorator; both look like artifacts of the source dump --
    confirm against the generator output before modernizing.
    """

    def __init__(self, table_id=None, duration_sec=None, duration_nsec=None, priority=None, idle_timeout=None, hard_timeout=None, flags=None, importance=None, cookie=None, packet_count=None, byte_count=None, match=None, instructions=None):
        # Every field defaults to 0 / empty when not supplied.
        if (table_id != None):
            self.table_id = table_id
        else:
            self.table_id = 0
        if (duration_sec != None):
            self.duration_sec = duration_sec
        else:
            self.duration_sec = 0
        if (duration_nsec != None):
            self.duration_nsec = duration_nsec
        else:
            self.duration_nsec = 0
        if (priority != None):
            self.priority = priority
        else:
            self.priority = 0
        if (idle_timeout != None):
            self.idle_timeout = idle_timeout
        else:
            self.idle_timeout = 0
        if (hard_timeout != None):
            self.hard_timeout = hard_timeout
        else:
            self.hard_timeout = 0
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (importance != None):
            self.importance = importance
        else:
            self.importance = 0
        if (cookie != None):
            self.cookie = cookie
        else:
            self.cookie = 0
        if (packet_count != None):
            self.packet_count = packet_count
        else:
            self.packet_count = 0
        if (byte_count != None):
            self.byte_count = byte_count
        else:
            self.byte_count = 0
        if (match != None):
            self.match = match
        else:
            self.match = ofp.match()
        if (instructions != None):
            self.instructions = instructions
        else:
            self.instructions = []
        return

    def pack(self):
        """Serialize to wire format; the leading length field is backfilled."""
        packed = []
        packed.append(struct.pack('!H', 0))
        packed.append(struct.pack('!B', self.table_id))
        packed.append(('\x00' * 1))
        packed.append(struct.pack('!L', self.duration_sec))
        packed.append(struct.pack('!L', self.duration_nsec))
        packed.append(struct.pack('!H', self.priority))
        packed.append(struct.pack('!H', self.idle_timeout))
        packed.append(struct.pack('!H', self.hard_timeout))
        packed.append(struct.pack('!H', self.flags))
        packed.append(struct.pack('!H', self.importance))
        packed.append(('\x00' * 2))
        packed.append(struct.pack('!Q', self.cookie))
        packed.append(struct.pack('!Q', self.packet_count))
        packed.append(struct.pack('!Q', self.byte_count))
        packed.append(self.match.pack())
        packed.append(loxi.generic_util.pack_list(self.instructions))
        length = sum([len(x) for x in packed])
        packed[0] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Deserialize from a reader positioned at the entry's length field."""
        obj = flow_stats_entry()
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict the reader to this entry (length includes the 2-byte header).
        reader = orig_reader.slice(_length, 2)
        obj.table_id = reader.read('!B')[0]
        reader.skip(1)
        obj.duration_sec = reader.read('!L')[0]
        obj.duration_nsec = reader.read('!L')[0]
        obj.priority = reader.read('!H')[0]
        obj.idle_timeout = reader.read('!H')[0]
        obj.hard_timeout = reader.read('!H')[0]
        obj.flags = reader.read('!H')[0]
        obj.importance = reader.read('!H')[0]
        reader.skip(2)
        obj.cookie = reader.read('!Q')[0]
        obj.packet_count = reader.read('!Q')[0]
        obj.byte_count = reader.read('!Q')[0]
        obj.match = ofp.match.unpack(reader)
        obj.instructions = loxi.generic_util.unpack_list(reader, ofp.instruction.instruction.unpack)
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.table_id != other.table_id):
            return False
        if (self.duration_sec != other.duration_sec):
            return False
        if (self.duration_nsec != other.duration_nsec):
            return False
        if (self.priority != other.priority):
            return False
        if (self.idle_timeout != other.idle_timeout):
            return False
        if (self.hard_timeout != other.hard_timeout):
            return False
        if (self.flags != other.flags):
            return False
        if (self.importance != other.importance):
            return False
        if (self.cookie != other.cookie):
            return False
        if (self.packet_count != other.packet_count):
            return False
        if (self.byte_count != other.byte_count):
            return False
        if (self.match != other.match):
            return False
        if (self.instructions != other.instructions):
            return False
        return True

    def pretty_print(self, q):
        q.text('flow_stats_entry {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('table_id = ')
                q.text(('%#x' % self.table_id))
                q.text(',')
                q.breakable()
                q.text('duration_sec = ')
                q.text(('%#x' % self.duration_sec))
                q.text(',')
                q.breakable()
                q.text('duration_nsec = ')
                q.text(('%#x' % self.duration_nsec))
                q.text(',')
                q.breakable()
                q.text('priority = ')
                q.text(('%#x' % self.priority))
                q.text(',')
                q.breakable()
                q.text('idle_timeout = ')
                q.text(('%#x' % self.idle_timeout))
                q.text(',')
                q.breakable()
                q.text('hard_timeout = ')
                q.text(('%#x' % self.hard_timeout))
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPFF_SEND_FLOW_REM', 2: 'OFPFF_CHECK_OVERLAP', 4: 'OFPFF_RESET_COUNTS', 8: 'OFPFF_NO_PKT_COUNTS', 16: 'OFPFF_NO_BYT_COUNTS', 128: 'OFPFF_BSN_SEND_IDLE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('importance = ')
                q.text(('%#x' % self.importance))
                q.text(',')
                q.breakable()
                q.text('cookie = ')
                q.text(('%#x' % self.cookie))
                q.text(',')
                q.breakable()
                q.text('packet_count = ')
                q.text(('%#x' % self.packet_count))
                q.text(',')
                q.breakable()
                q.text('byte_count = ')
                q.text(('%#x' % self.byte_count))
                q.text(',')
                q.breakable()
                q.text('match = ')
                q.pp(self.match)
                q.text(',')
                q.breakable()
                q.text('instructions = ')
                q.pp(self.instructions)
                q.breakable()
        q.text('}')
def monotone(p0: float, p1: float, p2: float, p3: float, t: float) -> float:
    """Monotone cubic interpolation between p1 and p2 at parameter t in [0, 1].

    Tangents are central-difference estimates, zeroed and clamped in the
    Fritsch-Carlson style so the segment cannot overshoot; the result is
    additionally clamped to [min(p1, p2), max(p1, p2)].
    """
    t_sq = t * t
    t_cu = t_sq * t
    d0 = p1 - p0
    d1 = p2 - p1
    d2 = p3 - p2
    m1 = (d0 + d1) * 0.5
    m2 = (d1 + d2) * 0.5
    if _math_isclose(p1, p2):
        # Flat segment: both tangents zero.
        m1 = m2 = 0.0
    else:
        # Zero a tangent at a plateau or slope-sign change, else clamp it.
        if _math_isclose(p0, p1) or math.copysign(1.0, d0) != math.copysign(1.0, d1):
            m1 = 0.0
        else:
            m1 *= min(3.0 * d0 / m1, 3.0 * d1 / m1, 1.0)
        if _math_isclose(p2, p3) or math.copysign(1.0, d1) != math.copysign(1.0, d2):
            m2 = 0.0
        else:
            m2 *= min(3.0 * d1 / m2, 3.0 * d2 / m2, 1.0)
    value = (((m1 + m2) - (2 * d1)) * t_cu) + ((((3.0 * d1) - (2.0 * m1)) - m2) * t_sq) + (m1 * t) + p1
    low = min(p1, p2)
    high = max(p1, p2)
    return min(max(value, low), high)
def test_missing_envs_required(config, yaml_config_file_3):
    """from_yaml(envs_required=True) must raise when a referenced env var is undefined."""
    with open(yaml_config_file_3, 'w') as yaml_file:
        yaml_file.write('section:\n undefined: ${UNDEFINED}\n')
    expected_message = 'Missing required environment variable "UNDEFINED"'
    with raises(ValueError, match=expected_message):
        config.from_yaml(yaml_config_file_3, envs_required=True)
class EventType():
    """ASGI event type string constants.

    BUGFIX: the four HTTP constants were garbled in the checked-in file
    (their string values were collapsed into each other); restored to the
    standard ASGI HTTP event names from the ASGI specification, matching the
    pattern of the lifespan and websocket constants below.
    """

    HTTP_REQUEST = 'http.request'
    HTTP_RESPONSE_START = 'http.response.start'
    HTTP_RESPONSE_BODY = 'http.response.body'
    HTTP_DISCONNECT = 'http.disconnect'
    LIFESPAN_STARTUP = 'lifespan.startup'
    LIFESPAN_STARTUP_COMPLETE = 'lifespan.startup.complete'
    LIFESPAN_STARTUP_FAILED = 'lifespan.startup.failed'
    LIFESPAN_SHUTDOWN = 'lifespan.shutdown'
    LIFESPAN_SHUTDOWN_COMPLETE = 'lifespan.shutdown.complete'
    LIFESPAN_SHUTDOWN_FAILED = 'lifespan.shutdown.failed'
    WS_CONNECT = 'websocket.connect'
    WS_ACCEPT = 'websocket.accept'
    WS_RECEIVE = 'websocket.receive'
    WS_SEND = 'websocket.send'
    WS_DISCONNECT = 'websocket.disconnect'
    WS_CLOSE = 'websocket.close'
def test_convert_dataset_to_graph(example_datasets):
    """The example postgres dataset converts to a GraphDataset with the
    expected collections, field metadata, references, and primary keys."""
    dataset = Dataset(**example_datasets[0])
    graph = convert_dataset_to_graph(dataset, 'mock_connection_config_key')
    assert graph is not None
    assert graph.name == 'postgres_example_test_dataset'
    assert len(graph.collections) == 11

    def _collection(name):
        # Same lookup the original performed via filter(...)[0].
        return [coll for coll in graph.collections if coll.name == name][0]

    customer = _collection('customer')
    assert customer
    address_id_field = customer.fields[0]
    assert address_id_field.name == 'address_id'
    assert address_id_field.data_categories == ['system.operations']
    assert address_id_field.identity is None
    assert address_id_field.references == [(FieldAddress('postgres_example_test_dataset', 'address', 'id'), 'to')]

    employee = _collection('employee')
    assert employee
    email_field = employee.fields[1]
    assert email_field.name == 'email'
    assert email_field.data_categories == ['user.contact.email']
    assert email_field.identity == 'email'
    assert email_field.references == []

    login = _collection('login')
    assert login
    customer_id_field = login.fields[0]
    assert customer_id_field.name == 'customer_id'
    assert customer_id_field.data_categories == ['user.unique_id']
    assert customer_id_field.identity is None
    assert customer_id_field.references == [(FieldAddress('postgres_example_test_dataset', 'customer', 'id'), 'from')]

    for collection_name, field_name, expect_pk in [('address', 'id', True), ('customer', 'id', True), ('employee', 'id', True), ('visit', 'email', False)]:
        assert field([graph], 'postgres_example_test_dataset', collection_name, field_name).primary_key is expect_pk
def _validate_job_control_object_with_options(job_control):
    """Assert a dockerized-benchmark job control object has the expected
    Envoy source settings and the required bazel options.

    Fixes applied in review:
    - saw_jobs / saw_gperftools are now initialized before the loop; they
      were previously unbound (NameError, not an assert failure) when the
      corresponding option was absent;
    - the source presence check used `or`, making it vacuously true
      (None != [] holds); it now uses `and`.
    """
    assert job_control is not None
    assert not job_control.remote
    assert job_control.dockerized_benchmark
    assert not job_control.scavenging_benchmark
    assert not job_control.binary_benchmark
    # BUGFIX: was `or`, which could never fail.
    assert (job_control.source is not None) and (job_control.source != [])
    assert len(job_control.source) == 1
    source = job_control.source[0]
    assert source.identity == proto_source.SourceRepository.SourceIdentity.SRCID_ENVOY
    assert source.source_path == '/home/ubuntu/envoy'
    assert not source.source_url
    assert source.branch == 'some_random_branch'
    assert source.commit_hash == 'random_commit_hash_string'
    bazel_options = source.bazel_options
    assert bazel_options is not None
    # BUGFIX: previously unbound when the matching option was missing.
    saw_jobs = False
    saw_gperftools = False
    for option in bazel_options:
        assert option.parameter
        if option.parameter == '--jobs 4':
            saw_jobs = True
        elif option.parameter == '--define tcmalloc=gperftools':
            saw_gperftools = True
    assert saw_jobs
    assert saw_gperftools
# NOTE(review): the bare parenthesized string below looks like the argument of
# a stripped registry decorator (e.g. `@registry.reg('cuda.gemm_rcr_bias_permute.gen_profiler')`)
# -- confirm against the original source; as written it is a no-op expression.
('cuda.gemm_rcr_bias_permute.gen_profiler')

def gen_profiler(func_attrs, workdir, profiler_filename, dim_info_dict):
    """Generate the profiler for gemm_rcr_bias_permute by delegating to the
    shared gemm_rcr_permute profiler generator with the bias source template,
    the permute problem-args template, and the bias pointer bound to tensor
    index 3 in the memory pool."""
    return gemm_rcr_permute.common_gen_profiler(func_attrs, workdir, profiler_filename, dim_info_dict, common_bias.SRC_TEMPLATE, PROBLEM_ARGS_TEMPLATE, bias_ptr_arg='memory_pool->RequestTensorByIdx(3)', extra_code=common_permute.EXTRA_CODE.render())
def extractEntirelypresentingyouWordpressCom(item):
    """Parser for entirelypresentingyou.wordpress.com release titles.

    Returns None for previews or untagged chapters, a release message when a
    known tag matches, and False when no mapping applies.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tag_mappings = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tag, series_name, tl_type in tag_mappings:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class OptionPlotoptionsWindbarbSonificationDefaultspeechoptionsMappingTime(Options):
    """Config accessors for the windbarb sonification default-speech time mapping.

    NOTE(review): each name below is defined twice -- a no-argument reader and
    a one-argument writer. As plain defs the second shadows the first, so only
    the writers survive; this reads like stripped @property / @<name>.setter
    decorators -- confirm against the original source.
    """

    def mapFunction(self):
        # Reader: current value, defaulting to None.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Writer: store a raw (non-JS) value.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def upgrade():
    """Alembic upgrade: purge rows with empty/NULL required values, then make
    those columns NOT NULL. Statement order mirrors the original migration."""
    cleanup_statements = [
        "DELETE FROM event_copyrights WHERE licence='' or licence IS NULL ",
        "DELETE FROM sessions WHERE title='' or title IS NULL",
        "DELETE FROM social_links WHERE link='' or link IS NULL",
        "DELETE FROM speakers_calls WHERE announcement='' or announcement IS NULL",
        "DELETE FROM ticket_holders WHERE firstname='' or firstname IS NULL",
        "DELETE FROM tickets WHERE name='' or name IS NULL",
        'DELETE FROM tickets WHERE sales_ends_at IS NULL',
        'DELETE FROM tickets WHERE sales_starts_at IS NULL',
        "DELETE FROM tickets WHERE type='' or type IS NULL",
        "DELETE FROM users WHERE _email='' or _email IS NULL",
        "DELETE FROM users WHERE _password='' or _password IS NULL",
    ]
    for statement in cleanup_statements:
        op.execute(statement, execution_options=None)
    not_null_columns = [
        ('event_copyrights', 'licence', sa.VARCHAR()),
        ('sessions', 'title', sa.VARCHAR()),
        ('social_links', 'link', sa.VARCHAR()),
        ('speakers_calls', 'announcement', sa.TEXT()),
        ('ticket_holders', 'firstname', sa.VARCHAR()),
        ('tickets', 'name', sa.VARCHAR()),
        ('tickets', 'sales_ends_at', postgresql.TIMESTAMP(timezone=True)),
        ('tickets', 'sales_starts_at', postgresql.TIMESTAMP(timezone=True)),
        ('tickets', 'type', sa.VARCHAR()),
        ('users', '_email', sa.VARCHAR(length=120)),
        ('users', '_password', sa.VARCHAR(length=128)),
    ]
    for table, column, column_type in not_null_columns:
        op.alter_column(table, column, existing_type=column_type, nullable=False)
def test_ndarray_methods():
    """Fxp objects should mirror numpy ndarray reductions and methods on the
    same data (exactly, or within 2**-8 for var/std)."""
    values = [[1, 2, 3], [-1, 0, 1]]
    w = Fxp(values, True, 16, 8)
    wa = np.array(values)
    axis_variants = ({}, {'axis': 0}, {'axis': 1})

    for name in ['all', 'any', 'max', 'min', 'mean', 'sum', 'cumsum', 'cumprod', 'prod']:
        for kwargs in axis_variants:
            got = np.array(getattr(w, name)(**kwargs))
            want = np.array(getattr(wa, name)(**kwargs))
            assert (got == want).all()

    tolerance = 1 / (2 ** 8)
    for name in ['var', 'std']:
        for kwargs in axis_variants:
            got = np.array(getattr(w, name)(**kwargs))
            want = np.array(getattr(wa, name)(**kwargs))
            assert np.allclose(got, want, rtol=tolerance)

    for name in ['conjugate', 'transpose', 'diagonal', 'trace']:
        assert (np.array(getattr(w, name)()) == np.array(getattr(wa, name)())).all()

    # In-place sort mutates both containers; contents must agree afterwards.
    w.sort()
    wa.sort()
    assert (np.array(w) == np.array(wa)).all()

    for name in ['argmin', 'argmax', 'argsort', 'nonzero']:
        got = getattr(w, name)()
        want = getattr(wa, name)()
        if isinstance(got, tuple):
            # nonzero returns one index array per dimension.
            for got_part, want_part in zip(got, want):
                assert (got_part == want_part).all()
        else:
            assert (got == want).all()
def _add_java_paths_to_config(pathlist, thisconfig): def path_version_key(s): versionlist = [] for u in re.split('[^0-9]+', s): try: versionlist.append(int(u)) except ValueError: pass return versionlist for d in sorted(pathlist, key=path_version_key): if os.path.islink(d): continue j = os.path.basename(d) for regex in ['^1\\.([126-9][0-9]?)\\.0\\.jdk$', '^jdk1\\.([126-9][0-9]?)\\.0_[0-9]+.jdk$', '^jdk1\\.([126-9][0-9]?)\\.0_[0-9]+$', '^jdk([126-9][0-9]?)-openjdk$', '^java-([126-9][0-9]?)-openjdk$', '^java-([126-9][0-9]?)-jdk$', '^java-1\\.([126-9][0-9]?)\\.0-.*$', '^java-([126-9][0-9]?)-oracle$', '^jdk-([126-9][0-9]?)-oracle-.*$', '^java-([126-9][0-9]?)-openjdk-.*$', '^oracle-jdk-bin-1\\.([126-9][0-9]?).*$', '^icedtea-bin-([126-9][0-9]?).*$']: m = re.match(regex, j) if (not m): continue for p in [d, os.path.join(d, 'Contents', 'Home')]: if os.path.exists(os.path.join(p, 'bin', 'javac')): thisconfig['java_paths'][m.group(1)] = p
# NOTE(review): this class was decorated in the original source; the decorator
# name was lost during extraction. The argument list matches feature_engine's
# docstring-substitution pattern, so it is reconstructed as @Substitution —
# confirm against upstream before merging.
@Substitution(
    confirm_variables=_confirm_variables_docstring,
    variables=_variables_all_docstring,
    missing_values=_missing_values_docstring,
    variables_=_variables_attribute_docstring,
    feature_names_in_=_feature_names_in_docstring,
    n_features_in_=_n_features_in_docstring,
    fit_transform=_fit_transform_docstring,
    get_support=_get_support_docstring,
)
class DropDuplicateFeatures(BaseSelector):
    """Find duplicated features (columns holding identical values) and mark
    all but the first occurrence of each duplicate group for removal.

    Parameters
    ----------
    variables:
        Variables to examine; None means all variables in the dataframe.
    missing_values:
        'raise' to fail on NaN in the examined variables, 'ignore' otherwise.
    confirm_variables:
        Whether to filter the variable list against the columns present in X.
    """

    def __init__(self, variables: Variables=None, missing_values: str='ignore', confirm_variables: bool=False):
        if missing_values not in ['raise', 'ignore']:
            raise ValueError("missing_values takes only values 'raise' or 'ignore'.")
        super().__init__(confirm_variables)
        self.variables = _check_variables_input_value(variables)
        self.missing_values = missing_values

    def fit(self, X: pd.DataFrame, y: pd.Series=None):
        """Learn the duplicated feature groups and the features to drop.

        `y` is ignored; it exists only for sklearn pipeline compatibility.
        """
        X = check_X(X)
        self._confirm_variables(X)
        if self.variables is None:
            self.variables_ = find_all_variables(X)
        else:
            # BUG FIX: this branch previously validated `self.variables_`,
            # which has not been assigned yet at this point; the user-supplied
            # `self.variables` must be validated instead.
            self.variables_ = check_all_variables(X, self.variables)
        self._check_variable_number()
        if self.missing_values == 'raise':
            _check_contains_na(X, self.variables_)
        # Group columns by a hash of their values: columns sharing a hash are
        # (with overwhelming probability) identical.
        _features_hashmap = defaultdict(list)
        _X_hash = pd.util.hash_pandas_object(X[self.variables_].T, index=False)
        for feature, feature_hash in _X_hash.items():
            _features_hashmap[feature_hash].append(feature)
        self.duplicated_feature_sets_ = [
            set(duplicate)
            for duplicate in _features_hashmap.values()
            if len(duplicate) > 1
        ]
        # Keep the first feature of every group; drop the rest.
        self.features_to_drop_ = {
            item
            for duplicates in _features_hashmap.values()
            for item in duplicates[1:]
        }
        self._get_feature_names_in(X)
        return self

    def _more_tags(self):
        tags_dict = _return_tags()
        tags_dict['allow_nan'] = True
        tags_dict['variables'] = 'all'
        msg = 'transformers need more than 1 feature to work'
        tags_dict['_xfail_checks']['check_fit2d_1feature'] = msg
        return tags_dict
def version_callback(ctx, param, value):
    """Click eager-option callback: print the versions of Jupyter Book and
    its main dependencies, then exit the CLI.

    A falsy value, or resilient parsing (e.g. during shell completion),
    turns this into a no-op.
    """
    if not value or ctx.resilient_parsing:
        return
    # Imports are deferred so the CLI does not pay their cost on every run.
    from jupyter_cache import __version__ as jupyter_cache_version
    from myst_nb import __version__ as myst_nb_version
    from myst_parser import __version__ as myst_parser_version
    from nbclient import __version__ as nbclient_version
    from sphinx_book_theme import __version__ as book_theme_version
    from sphinx_external_toc import __version__ as external_toc_version
    from jupyter_book import __version__ as jupyter_book_version

    versions = {
        'Jupyter Book': jupyter_book_version,
        'External ToC': external_toc_version,
        'MyST-Parser': myst_parser_version,
        'MyST-NB': myst_nb_version,
        'Sphinx Book Theme': book_theme_version,
        'Jupyter-Cache': jupyter_cache_version,
        'NbClient': nbclient_version,
    }
    report_lines = [f'{tt:<18}: {vv}' for tt, vv in versions.items()]
    click.echo('\n'.join(report_lines))
    ctx.exit()
# NOTE(review): the line below is the argument list of a decorator whose name
# was lost during extraction; it sets the function attributes read inside the
# body (side, window, threshold). Restore the decorator name from upstream.
(side='long', window=400, threshold=0.5)
def mmi_filter(ohlcv):
    """Market Meanness Index (MMI) filter over an OHLCV frame.

    Reads its tunables from attributes attached to the function object
    (mmi_filter.side / .window / .threshold, set by the decorator above).
    Returns (signal, figures) where signal is a boolean Series that is True
    when the rolling MMI exceeds the threshold.
    """
    window = mmi_filter.window
    side = mmi_filter.side
    threshold = mmi_filter.threshold
    # Rolling median of closing prices over the lookback window.
    median = ohlcv.close.rolling(window).median()
    p_gt_m = (ohlcv.close > median)  # current close above the rolling median
    yp_gt_m = (ohlcv.close.shift() > median)  # previous close above the median
    # Fraction of bars where price stayed on the same side of the median for
    # two consecutive bars: above it for the long side, below it otherwise.
    mmi = ((p_gt_m & yp_gt_m).rolling(window).mean() if (side == 'long') else ((~ p_gt_m) & (~ yp_gt_m)).rolling(window).mean())
    figures = {'figures': {'mmi': mmi}}
    return ((mmi > threshold), figures)
class VanillaCrossAttention(Module):
    """Unfused ("vanilla") multi-head cross-attention block.

    Projects separate query / key / value inputs, splits them into heads,
    runs `vanilla_attention`, and applies the output projection, optionally
    fusing a residual addition into that projection.
    """

    def __init__(self, dim, seq_len, seq_len_kv, num_heads, qkv_bias=False, attn_drop=0.0, proj_drop=0.0, has_residual=True, causal=False):
        super().__init__()
        assert ((dim % num_heads) == 0), f'dim {dim} should be divisible by num_heads {num_heads}'
        self.num_heads = num_heads
        self.causal = causal
        self.has_residual = has_residual
        self.dim = dim
        self.seqlen = seq_len  # query sequence length
        self.seqlen_kv = seq_len_kv  # key/value sequence length
        assert (not causal), 'Causal not implemented'
        self.op = vanilla_attention
        # Independent projections for q, k, v (cross-attention inputs differ).
        self.proj_q = Linear(dim, dim, bias=qkv_bias)
        self.proj_k = Linear(dim, dim, bias=qkv_bias)
        self.proj_v = Linear(dim, dim, bias=qkv_bias)
        # NOTE(review): attn_drop is stored here but never applied in
        # attention() below — confirm against upstream whether that is intended.
        self.attn_drop = Dropout(attn_drop)
        # The 'add' specialization fuses the residual addition into the
        # output projection.
        self.proj = Linear(dim, dim, specialization=('add' if has_residual else None))
        self.proj_drop = Dropout(proj_drop)

    def attention(self, q, k, v):
        """Project the inputs, split them into heads and run the attention op."""
        seqlen = self.seqlen
        seqlen_kv = self.seqlen_kv
        head_dim = (self.dim // self.num_heads)
        query = self.proj_q(q)
        key = self.proj_k(k)
        value = self.proj_v(v)
        # Reshape to (batch, seq, heads, head_dim) as expected by the op.
        query = ops.reshape()(query, [(- 1), seqlen, self.num_heads, head_dim])
        key = ops.reshape()(key, [(- 1), seqlen_kv, self.num_heads, head_dim])
        value = ops.reshape()(value, [(- 1), seqlen_kv, self.num_heads, head_dim])
        return self.op(query, key, value)

    def forward(self, *args):
        """args = (q, k, v), plus a residual tensor when has_residual is set."""
        assert (len(args) >= 3)
        x = args[0]
        seq = self.seqlen
        attn_output = self.attention(args[0], args[1], args[2])
        attn_output = ops.reshape()(attn_output, [(- 1), seq, self.dim])
        if self.has_residual:
            assert (len(args) == 4)
            # Fused output projection + residual addition (args[3]).
            x = self.proj(attn_output, args[3])
        else:
            x = self.proj(attn_output)
        x = self.proj_drop(x)
        x = ops.reshape()(x, [(- 1), seq, self.dim])
        return x
def _save_vm_status(task_id, vm, new_state, old_state=None, **kwargs):
    """Persist a VM status change via vm_status_changed().

    When the caller did not capture the previous state, the change was
    detected externally: log it and default old_state to the VM's current
    status before delegating.
    """
    if old_state is None:
        # logger.warn is a deprecated alias of logger.warning — use the
        # canonical name.
        logger.warning('Detected status change %s->%s from vm_status(%s)', vm.status, new_state, vm.uuid)
        old_state = vm.status
    vm_status_changed(task_id, vm, new_state, old_state=old_state, **kwargs)
def access_services_nondefault():
    """Demonstrate initializing and tearing down multiple Firebase apps
    (the default app plus a named one)."""
    cred = credentials.Certificate('path/to/service.json')
    # NOTE(review): other_cred is created but never used — the named app below
    # is initialized with `cred`. Confirm whether it should use other_cred.
    other_cred = credentials.Certificate('path/to/other_service.json')
    default_app = firebase_admin.initialize_app(cred)
    other_app = firebase_admin.initialize_app(cred, name='other')
    print(default_app.name)
    print(other_app.name)
    # Clean up both app instances.
    firebase_admin.delete_app(default_app)
    firebase_admin.delete_app(other_app)
def _get_simple_privacy_field_value(obj, field_info):
    """Return the field's value, masking it when the field is tagged 'privacy'.

    Numeric privacy fields are replaced by sentinel values (-999 / -999.0 /
    False); other (string-like) values are redacted, keeping the first and
    last character only when long enough.
    """
    raw_tags = field_info.metadata.get('tags')
    tag_list = raw_tags.split(',') if raw_tags else []
    value = getattr(obj, field_info.name)
    # Non-privacy fields and empty/falsy values pass through unchanged.
    if 'privacy' not in tag_list or not value:
        return value
    field_type = EnvArgumentParser._get_argparse_type(field_info.type)
    if field_type is int:
        return (- 999)
    if field_type is float:
        return (- 999.0)
    if field_type is bool:
        return False
    # From here on the value is assumed to be string-like.
    if len(value) > 5:
        return value[0] + '******' + value[(- 1)]
    return '******'
class User(Model):
    """Application user persisted through the ORM Model base."""
    id = IntField(pk=True)
    username = CharField(128, unique=True)  # login name, unique per user
    password_hash = BinaryField(null=True)  # None for externally authenticated accounts
    is_admin = BooleanField(default=False)

    def check_password(self, to_check: str) -> bool:
        """Return True when to_check matches the stored password hash."""
        if (self.password_hash is None):
            return False
        return check_password_hash(self.password_hash.decode(), to_check)

    def set_password(self, new_password: str):
        """Hash and store a new password; the public account is immutable."""
        # NOTE(review): is_public_account is defined below as a plain method,
        # so `self.is_public_account` here is a bound-method object and always
        # truthy — as written this raises unconditionally. It was most likely
        # a @property whose decorator was lost; confirm against upstream.
        if self.is_public_account:
            raise ValueError('Cannot set password of public account')
        self.password_hash = generate_password_hash(new_password).encode()

    def is_public_account(self):
        # True for the shared/public login.
        return (self.username == PUBLIC_ACCOUNT_USERNAME)

    def is_external_account(self):
        # Externally authenticated accounts carry no local password hash.
        return ((self.password_hash is None) and (self.username != PUBLIC_ACCOUNT_USERNAME))
class TestGenericCarver(TestUnpackerBase):
    """Integration tests for the generic/carver unpacker plugin."""

    def test_unpacker_selection_generic(self):
        # The generic carver must be selected for the 'generic/carver' type.
        self.check_unpacker_selection('generic/carver', 'generic_carver')

    def test_extraction(self):
        """Carving the test image must yield exactly the embedded zip file."""
        in_file = f'{get_test_data_dir()}/generic_carver_test'
        (files, meta_data) = self.unpacker._extract_files_from_file_using_specific_unpacker(in_file, self.tmp_dir.name, self.unpacker.unpacker_plugins['generic/carver'])
        files = set(files)
        assert (len(files) == 1), 'file number incorrect'
        assert (files == {f'{self.tmp_dir.name}/64.zip'}), 'not all files found'
        assert ('output' in meta_data)
        assert ('filter_log' in meta_data)

    def test_extraction_of_filtered_files(self):
        """False-positive carve results (a fake xz) must be filtered and logged."""
        in_file = str((TEST_DATA_DIR / 'fake_xz.bin'))
        (files, meta_data) = self.unpacker._extract_files_from_file_using_specific_unpacker(in_file, self.tmp_dir.name, self.unpacker.unpacker_plugins['generic/carver'])
        assert (len(files) == 0)
        assert ('was removed' in meta_data['filter_log'])
class Chain(ContextModule):
    """Sequential container of Modules with shared context management.

    Calling the chain threads the arguments through each child module in
    order; tuple results are splatted into the next call. Errors raised by a
    child are re-raised as ChainError with a filtered traceback, a rendered
    module tree and the offending arguments.

    NOTE(review): several decorators appear to have been stripped from this
    source during extraction — the bare `... ` overload stubs for __getitem__
    and walk were presumably @overload, and accessors such as provider /
    device / dtype read like @property. Confirm against upstream.
    """
    _modules: dict[(str, Module)]
    _provider: ContextProvider
    _tag = 'CHAIN'

    def __init__(self, *args: (Module | Iterable[Module])) -> None:
        super().__init__()
        self._provider = ContextProvider()
        # Accept either Chain(m1, m2, ...) or Chain(iterable_of_modules);
        # a Chain argument is treated as a single module, not unpacked.
        modules = cast(tuple[Module], (tuple(args[0]) if ((len(args) == 1) and isinstance(args[0], Iterable) and (not isinstance(args[0], Chain))) else tuple(args)))
        for module in modules:
            assert ((not isinstance(module, ContextModule)) or (not module._can_refresh_parent) or (module.parent is None) or (module.parent == self)), f'{module.__class__.__name__} already has parent {module.parent.__class__.__name__}'
        self._regenerate_keys(modules)
        self._reset_context()
        for module in self:
            if (isinstance(module, ContextModule) and (module.parent != self)):
                module._set_parent(self)

    def __setattr__(self, name: str, value: Any) -> None:
        # Prevent pytorch's implicit submodule registration via attributes.
        if isinstance(value, torch.nn.Module):
            raise ValueError('Chain does not support setting modules by attribute. Instead, use a mutation method like `append` or wrap it within a single element list to prevent pytorch from registering it as a submodule.')
        super().__setattr__(name, value)

    def provider(self) -> ContextProvider:
        return self._provider

    def init_context(self) -> Contexts:
        # Subclasses override this to seed their context namespaces.
        return {}

    def _register_provider(self, context: (Contexts | None)=None) -> None:
        # Push this chain's contexts down into every nested Chain.
        if context:
            self._provider.update_contexts(context)
        for module in self:
            if isinstance(module, Chain):
                module._register_provider(context=self._provider.contexts)

    def _reset_context(self) -> None:
        self._register_provider(self.init_context())

    def set_context(self, context: str, value: Any) -> None:
        self._provider.set_context(context, value)
        self._register_provider()

    def _show_error_in_tree(self, name: str, /, max_lines: int=20) -> str:
        """Render the module tree with the failing layer marked '>>>'."""
        tree = ModuleTree(module=self)
        classname_counter: dict[(str, int)] = defaultdict(int)
        first_ancestor = (self.get_parents()[(- 1)] if self.get_parents() else self)

        def find_state_dict_key(module: Module, /) -> (str | None):
            # Map this chain back to its fully qualified state-dict prefix.
            for (key, layer) in module.named_modules():
                if (layer == self):
                    return '.'.join((key, name))
            return None

        for child in tree:
            # Layer keys are like 'Linear_2': split name and occurrence count.
            (classname, count) = (name.rsplit(sep='_', maxsplit=1) if ('_' in name) else (name, '1'))
            if (child['class_name'] == classname):
                classname_counter[classname] += 1
                if (classname_counter[classname] == int(count)):
                    state_dict_key = find_state_dict_key(first_ancestor)
                    child['value'] = f">>> {child['value']} | {state_dict_key}"
                    break
        tree_repr = tree._generate_tree_repr(tree.root, depth=3)
        lines = tree_repr.split(sep='\n')
        error_line_idx = next((idx for (idx, line) in enumerate(iterable=lines) if line.startswith('>>>')), 0)
        return ModuleTree.shorten_tree_repr(tree_repr, line_index=error_line_idx, max_lines=max_lines)

    def _pretty_print_args(*args: Any) -> str:
        """One line per (flattened) argument, summarizing tensors."""
        def _flatten_tuple(t: (Tensor | tuple[(Any, ...)]), /) -> list[Any]:
            if isinstance(t, tuple):
                return [item for subtuple in t for item in _flatten_tuple(subtuple)]
            else:
                return [t]
        flat_args = _flatten_tuple(args)
        return '\n'.join([f'{idx}: {(summarize_tensor(arg) if isinstance(arg, Tensor) else arg)}' for (idx, arg) in enumerate(iterable=flat_args)])

    def _filter_traceback(self, *frames: traceback.FrameSummary) -> list[traceback.FrameSummary]:
        """Drop framework-internal frames to keep error output readable."""
        patterns_to_exclude = [('torch/nn/modules/', '^_call_impl$'), ('torch/nn/functional\\.py', ''), ('refiners/fluxion/layers/', '^_call_layer$'), ('refiners/fluxion/layers/', '^forward$'), ('refiners/fluxion/layers/chain\\.py', ''), ('', '^_')]

        def should_exclude(frame: traceback.FrameSummary, /) -> bool:
            for (filename_pattern, name_pattern) in patterns_to_exclude:
                if (re.search(pattern=filename_pattern, string=frame.filename) and re.search(pattern=name_pattern, string=frame.name)):
                    return True
            return False

        return [frame for frame in frames if (not should_exclude(frame))]

    def _call_layer(self, layer: Module, name: str, /, *args: Any) -> Any:
        """Invoke one child; on failure raise a ChainError with diagnostics."""
        try:
            return layer(*args)
        except Exception as e:
            (exc_type, _, exc_traceback) = sys.exc_info()
            assert exc_type
            tb_list = traceback.extract_tb(tb=exc_traceback)
            filtered_tb_list = self._filter_traceback(*tb_list)
            formatted_tb = ''.join(traceback.format_list(extracted_list=filtered_tb_list))
            pretty_args = Chain._pretty_print_args(args)
            error_tree = self._show_error_in_tree(name)
            # Collapse runs of blank lines in the original exception text.
            exception_str = re.sub(pattern='\\n\\s*\\n', repl='\n', string=str(object=e))
            # NOTE(review): the exact whitespace between these fields was lost
            # in extraction; newline separators are assumed — confirm upstream.
            message = f'''{formatted_tb}
{exception_str}
{error_tree}
{pretty_args}'''
            if ('Error' not in exception_str):
                message = f'''{exc_type.__name__}: {message}'''
            raise ChainError(message) from None

    def forward(self, *args: Any) -> Any:
        """Run children in order, splatting tuple results into the next call."""
        result: (tuple[Any] | Any) = None
        intermediate_args: tuple[(Any, ...)] = args
        for (name, layer) in self._modules.items():
            result = self._call_layer(layer, name, *intermediate_args)
            intermediate_args = ((result,) if (not isinstance(result, tuple)) else result)
        self._reset_context()
        return result

    def _regenerate_keys(self, modules: Iterable[Module]) -> None:
        # Rebuild the name->module mapping with unique generated names.
        self._modules = generate_unique_names(tuple(modules))

    def __add__(self, other: 'Chain | Module | list[Module]') -> 'Chain':
        if isinstance(other, Module):
            other = Chain(other)
        if isinstance(other, list):
            other = Chain(*other)
        return Chain(*self, *other)

    # NOTE(review): the three stub definitions below were presumably decorated
    # with @overload in the original source.
    def __getitem__(self, key: int) -> Module: ...
    def __getitem__(self, key: str) -> Module: ...
    def __getitem__(self, key: slice) -> 'Chain': ...

    def __getitem__(self, key: ((int | str) | slice)) -> Module:
        """Index by position, generated name, or slice (returns a copy)."""
        if isinstance(key, slice):
            copy = self.structural_copy()
            copy._regenerate_keys(modules=list(copy)[key])
            return copy
        elif isinstance(key, str):
            return self._modules[key]
        else:
            return list(self)[key]

    def __iter__(self) -> Iterator[Module]:
        return iter(self._modules.values())

    def __len__(self) -> int:
        return len(self._modules)

    def device(self) -> (Device | None):
        # Device of the first weighted module found, if any.
        wm = self.find(WeightedModule)
        return (None if (wm is None) else wm.device)

    def dtype(self) -> (DType | None):
        # Dtype of the first weighted module found, if any.
        wm = self.find(WeightedModule)
        return (None if (wm is None) else wm.dtype)

    def _walk(self, predicate: (Callable[([Module, 'Chain'], bool)] | None)=None, recurse: bool=False) -> Iterator[tuple[(Module, 'Chain')]]:
        """Yield (module, parent) pairs matching predicate; a predicate that
        raises StopIteration prunes that subtree."""
        if (predicate is None):
            predicate = (lambda _m, _p: True)
        for module in self:
            try:
                p = predicate(module, self)
            except StopIteration:
                continue
            if p:
                (yield (module, self))
                if (not recurse):
                    continue
            if isinstance(module, Chain):
                (yield from module.walk(predicate, recurse))

    # NOTE(review): the two stub definitions below were presumably @overload.
    def walk(self, predicate: (Callable[([Module, 'Chain'], bool)] | None)=None, recurse: bool=False) -> Iterator[tuple[(Module, 'Chain')]]: ...
    def walk(self, predicate: type[T], recurse: bool=False) -> Iterator[tuple[(T, 'Chain')]]: ...

    def walk(self, predicate: ((type[T] | Callable[([Module, 'Chain'], bool)]) | None)=None, recurse: bool=False) -> (Iterator[tuple[(T, 'Chain')]] | Iterator[tuple[(Module, 'Chain')]]):
        """Walk children; a type predicate matches by isinstance."""
        if isinstance(predicate, type):
            return self._walk((lambda m, _: isinstance(m, predicate)), recurse)
        else:
            return self._walk(predicate, recurse)

    def layers(self, layer_type: type[T], recurse: bool=False) -> Iterator[T]:
        for (module, _) in self.walk(layer_type, recurse):
            (yield module)

    def find(self, layer_type: type[T]) -> (T | None):
        # First matching layer anywhere in the (non-recursive) walk, or None.
        return next(self.layers(layer_type=layer_type), None)

    def ensure_find(self, layer_type: type[T]) -> T:
        r = self.find(layer_type)
        assert (r is not None), f'could not find {layer_type} in {self}'
        return r

    def find_parent(self, module: Module) -> 'Chain | None':
        if (module in self):
            return self
        for (_, parent) in self.walk((lambda m, _: (m == module))):
            return parent
        return None

    def ensure_find_parent(self, module: Module) -> 'Chain':
        r = self.find_parent(module)
        assert (r is not None), f'could not find {module} in {self}'
        return r

    def insert(self, index: int, module: Module) -> None:
        """Insert a module; negative indices count from the end (-1 appends)."""
        if (index < 0):
            index = max(0, ((len(self._modules) + index) + 1))
        modules = list(self)
        modules.insert(index, module)
        self._regenerate_keys(modules)
        if isinstance(module, ContextModule):
            module._set_parent(self)
        self._register_provider()

    def insert_before_type(self, module_type: type[Module], new_module: Module) -> None:
        for (i, module) in enumerate(self):
            if isinstance(module, module_type):
                self.insert(i, new_module)
                return
        raise ValueError(f'No module of type {module_type.__name__} found in the chain.')

    def insert_after_type(self, module_type: type[Module], new_module: Module) -> None:
        for (i, module) in enumerate(self):
            if isinstance(module, module_type):
                self.insert((i + 1), new_module)
                return
        raise ValueError(f'No module of type {module_type.__name__} found in the chain.')

    def append(self, module: Module) -> None:
        # insert(-1) maps to index len(self), i.e. append at the end.
        self.insert((- 1), module)

    def pop(self, index: int=(- 1)) -> (Module | tuple[Module]):
        """Remove and return the module at index (default: last)."""
        modules = list(self)
        if (index < 0):
            index = (len(modules) + index)
        if ((index < 0) or (index >= len(modules))):
            raise IndexError('Index out of range.')
        removed_module = modules.pop(index)
        if isinstance(removed_module, ContextModule):
            removed_module._set_parent(None)
        self._regenerate_keys(modules)
        return removed_module

    def remove(self, module: Module) -> None:
        """Remove a module from the chain; raises ValueError if absent."""
        modules = list(self)
        try:
            modules.remove(module)
        except ValueError:
            raise ValueError(f'{module} is not in {self}')
        self._regenerate_keys(modules)
        if isinstance(module, ContextModule):
            module._set_parent(None)

    def replace(self, old_module: Module, new_module: Module, old_module_parent: 'Chain | None'=None) -> None:
        """Swap old_module for new_module in place, re-parenting both."""
        modules = list(self)
        try:
            modules[modules.index(old_module)] = new_module
        except ValueError:
            raise ValueError(f'{old_module} is not in {self}')
        self._regenerate_keys(modules)
        if isinstance(new_module, ContextModule):
            new_module._set_parent(self)
        if isinstance(old_module, ContextModule):
            old_module._set_parent(old_module_parent)

    def structural_copy(self: TChain) -> TChain:
        """Deep structural copy with fresh context provider; hooks
        _pre/_post_structural_copy are called when defined."""
        if hasattr(self, '_pre_structural_copy'):
            self._pre_structural_copy()
        modules = [structural_copy(m) for m in self]
        clone = super().structural_copy()
        clone._provider = ContextProvider.create(clone.init_context())
        for module in modules:
            clone.append(module=module)
        if hasattr(clone, '_post_structural_copy'):
            clone._post_structural_copy(self)
        return clone

    def _show_only_tag(self) -> bool:
        # Only the base Chain class is rendered by tag alone.
        return (self.__class__ == Chain)
def validate_header_is_on_intended_dao_fork(support_dao_fork: bool, dao_fork_at: BlockNumber, header: BlockHeaderAPI) -> None:
    """Validate a header's extra_data against the declared DAO-fork stance.

    For the ten blocks starting at the fork block, pro-fork clients must
    stamp the canonical DAO extra data and anti-fork clients must not.
    Raises ValidationError on a mismatch; otherwise returns None.
    """
    fork_window = range(dao_fork_at, (dao_fork_at + 10))
    if header.block_number not in fork_window:
        return
    has_dao_extra_data = (header.extra_data == DAO_FORK_MAINNET_EXTRA_DATA)
    if support_dao_fork and not has_dao_extra_data:
        raise ValidationError(f'Block {header!r} must have extra data {encode_hex(DAO_FORK_MAINNET_EXTRA_DATA)} not {encode_hex(header.extra_data)} when supporting DAO fork')
    if (not support_dao_fork) and has_dao_extra_data:
        raise ValidationError(f'Block {header!r} must not have extra data {encode_hex(DAO_FORK_MAINNET_EXTRA_DATA)} when declining the DAO fork')
def setLM(drive, threshold_a, threshold_b, threshold_c, threshold_d, enable_lm):
    """Run `nvme ocp set-latency-monitor-feature` on the given drive with the
    supplied active bucket thresholds and enable flag; returns cmdline()'s
    output."""
    parts = ['nvme ocp set-latency-monitor-feature']
    parts.append('%s' % drive)
    # Fixed bucket timer threshold and latency config used by this tool.
    parts.append('--active_bucket_timer_threshold=%d' % 2016)
    parts.append('--active_threshold_a=%d' % threshold_a)
    parts.append('--active_threshold_b=%d' % threshold_b)
    parts.append('--active_threshold_c=%d' % threshold_c)
    parts.append('--active_threshold_d=%d' % threshold_d)
    parts.append('--active_latency_config=%s' % '0x0fff')
    parts.append('--active_latency_minimum_window=%d' % 0)
    # Debug logging is left disabled and any pending debug log discarded.
    parts.append('--debug_log_trigger_enable=%d' % 0)
    parts.append('--discard_debug_log=%d' % 1)
    parts.append('--latency_monitor_feature_enable=%d' % enable_lm)
    return cmdline(' '.join(parts))
# NOTE(review): the line below is the argument list of a class decorator whose
# name was lost during extraction (a card metadata registration decorator) —
# restore it from the upstream source.
_meta(definition.DollControlCard)
class DollControlCard():
    """UI metadata and targeting validation for the Doll Control card."""
    name = ''
    illustrator = ''
    cv = ''
    # NOTE(review): the display strings in this class appear garbled (their
    # non-ASCII text was stripped during extraction); they are kept
    # byte-identical here.
    description = ',,:<style=Card.Name></style>,'
    custom_ray = True

    def is_action_valid(self, c, tl):
        """Validate the target list tl; returns (ok, hint_message)."""
        n = len(tl)
        if (n == 0):
            return (False, '')
        if (tl[0] is self.me):
            # The controlled player cannot be yourself.
            return (False, '')
        if all(((e.equipment_category != 'weapon') for e in tl[0].equips)):
            # The first target must have a weapon equipped.
            return (False, '!')
        if (n == 1):
            # A second target (the attack victim) is still required.
            return (False, '')
        elif (n == 2):
            from thb.actions import LaunchCard
            from thb.cards.definition import AttackCard
            # The forced attack from target 0 onto target 1 must be legal.
            c = AttackCard()
            lc = LaunchCard(tl[0], [tl[1]], c)
            if (not lc.can_fire()):
                return (False, '<style=Card.Name></style>!')
        return (True, ',!')

    def sound_effect(self, act):
        return 'thb-cv-card_dollcontrol'
# NOTE(review): the fragment below is the remains of stripped pytest mark
# decorators (most likely @pytest.mark.skip and @pytest.mark.django_db) —
# restore them from the upstream source.
.skip .django_db
def test_federal_account_spending_by_category_filter_date(client, financial_spending_data):
    """spending_by_category must honor the time_period end_date filter."""
    payload = deepcopy(base_payload)
    payload['filters']['time_period'][0]['end_date'] = '2016-12-31'
    resp = client.post('/api/v2/federal_accounts/1/spending_by_category', content_type='application/json', data=json.dumps(payload))
    results = resp.json()['results']
    # Expected per-category totals within the filtered date range.
    assert (results['Office of the Secretary'] == 1000000)
    assert (results['Under/Assistant Secretaries'] == 2000000)
class Radio(Html.Html):
    """HTML radio-button component: an <input type='radio'> with a label,
    optional icon and helper text, grouped under group_name (or its own
    html code when no group is given)."""
    name = 'Radio'

    def __init__(self, page: primitives.PageModel, flag, label, group_name, icon, width, height, html_code, helper, options, profile):
        super(Radio, self).__init__(page, {'value': flag, 'text': label}, html_code=html_code, css_attrs={'width': width, 'height': height}, profile=profile)
        # Build the inner <input> and its <label>.
        self.add_input('', position='before', css={'width': 'none', 'vertical-align': 'middle', 'margin-bottom': 0, 'height': '{}px'.format(Defaults.LINE_HEIGHT)})
        self.add_label(label, html_code=self.htmlCode, position='after', css={'display': 'inline-block', 'width': 'None', 'float': 'none'})
        self.input.set_attrs(name='data-content', value=label)
        if flag:
            # Pre-check the radio when the initial flag is truthy.
            self.input.set_attrs({'checked': json.dumps(flag)})
        self.input.style.clear()
        # Radios are grouped by the shared name attribute.
        if (group_name is not None):
            self.input.set_attrs(name='name', value=group_name)
        else:
            self.input.set_attrs(name='name', value=self.htmlCode)
        self.input.set_attrs(attrs={'type': 'radio'})
        self.add_helper(helper, css={'line-height': ('%spx' % Defaults.LINE_HEIGHT)})
        self.input.css({'cursor': 'pointer', 'display': 'inline-block', 'vertical-align': 'middle', 'min-width': 'none'})
        self.css({'vertical-align': 'middle', 'text-align': 'left'})
        self.add_icon(icon, html_code=self.htmlCode, position='after', family=options.get('icon_family'), css={'margin-left': '5px', 'color': self.page.theme.success[1]})
        self.style.css.line_height = Defaults.LINE_HEIGHT

    # NOTE(review): dom and js below follow the lazily-cached accessor pattern
    # and are used without parentheses elsewhere in this framework; they were
    # most likely @property whose decorators were lost — confirm upstream.
    def dom(self) -> JsHtmlField.Radio:
        # Lazily created DOM helper for this component.
        if (self._dom is None):
            self._dom = JsHtmlField.Radio(self, page=self.page)
        return self._dom

    def js(self) -> JsComponents.Radio:
        # Lazily created JavaScript API wrapper for this component.
        if (self._js is None):
            self._js = JsComponents.Radio(self, page=self.page)
        return self._js

    def __str__(self):
        return ('<div %(strAttr)s>%(helper)s</div>' % {'strAttr': self.get_attrs(css_class_names=self.style.get_classes()), 'helper': self.helper})
class OptionSeriesAreaSonificationPointgrouping(Options):
    """Configuration options for sonification point grouping.

    NOTE(review): each option below is defined twice with the same name
    (a getter reading a default and a setter writing the config); the
    duplicated names indicate stripped @property / @<name>.setter decorators —
    restore them from the upstream source.
    """

    def algorithm(self):
        # Grouping algorithm; defaults to 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Point grouping is enabled by default.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Timespan (ms) covered by one group; defaults to 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Data property used for grouping; defaults to 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
def saveSettings():
    """Reconcile interdependent SerienRecorder settings, persist every config
    entry in the original order, and flush the config file."""
    srec = config.plugins.serienRec

    # --- reconcile interdependent options --------------------------------
    # timeUpdate is forced off for auto-check type '0', on otherwise.
    srec.timeUpdate.value = (srec.autochecktype.value != '0')
    if not srec.selectBouquets.value:
        srec.MainBouquet.value = None
        srec.AlternativeBouquet.value = None
        srec.useAlternativeChannel.value = False
    if not srec.seriensubdir.value:
        srec.seasonsubdir.value = False
    if srec.autochecktype.value != '1':
        srec.wakeUpDSB.value = False
    if not srec.downloadCover.value:
        srec.showCover.value = False
    if srec.TimerName.value == '1':
        srec.sucheAufnahme.value = False
    # The regular-timer span must cover at least the check-ahead days.
    if int(srec.checkfordays.value) > int(srec.TimeSpanForRegularTimer.value):
        srec.TimeSpanForRegularTimer.value = int(srec.checkfordays.value)
    if srec.preferMainBouquet.value:
        srec.sucheAufnahme.value = False

    # Every path setting gets a trailing separator.
    for path_entry in (srec.savetopath, srec.tvplaner_movies_filepath, srec.LogFilePath,
                       srec.BackupPath, srec.databasePath, srec.coverPath, srec.piconPath):
        path_entry.value = os.path.join(str(path_entry.value), '')

    def _save_all(names):
        # Persist the listed config entries in the given order.
        for entry_name in names:
            getattr(srec, entry_name).save()

    _save_all((
        'BoxID', 'activateNewOnThisSTBOnly', 'savetopath', 'seriensubdir',
        'seriensubdirwithyear', 'seasonsubdir', 'seasonsubdirnumerlength',
        'seasonsubdirfillchar', 'Autoupdate', 'databasePath', 'AutoBackup',
        'backupAtManualCheck', 'BackupPath', 'deleteBackupFilesOlderThan',
        'createCompressedBackup', 'autochecktype', 'deltime',
        'maxDelayForAutocheck', 'checkfordays', 'globalFromTime',
        'globalToTime', 'eventid', 'epgTimeSpan', 'forceRecording',
        'TimeSpanForRegularTimer', 'NoOfRecords', 'selectNoOfTuners', 'tuner',
        'wakeUpDSB', 'afterAutocheck', 'DSBTimeout', 'tvplaner',
        'imap_server', 'imap_server_ssl', 'imap_server_port',
    ))

    # Hide the IMAP credentials: store them encrypted with the box MAC and
    # keep only a '*' placeholder in the visible setting.
    if srec.imap_login.value != '*':
        srec.imap_login_hidden.value = encrypt(STBHelpers.getmac('eth0'), srec.imap_login.value)
        srec.imap_login.value = '*'
    _save_all(('imap_login', 'imap_login_hidden'))
    if srec.imap_password.value != '*':
        srec.imap_password_hidden.value = encrypt(STBHelpers.getmac('eth0'), srec.imap_password.value)
        srec.imap_password.value = '*'
    _save_all(('imap_password', 'imap_password_hidden'))

    _save_all((
        'imap_mailbox', 'imap_mail_subject', 'imap_mail_age',
        'tvplaner_full_check', 'tvplaner_skipSerienServer', 'tvplaner_series',
        'tvplaner_series_activeSTB', 'tvplaner_movies',
        'tvplaner_movies_activeSTB', 'tvplaner_movies_filepath',
        'tvplaner_movies_createsubdir', 'afterEvent', 'margin_before',
        'margin_after', 'TimerName', 'TimerDescription',
        'forceManualRecording', 'forceBookmarkRecording', 'splitEventTimer',
        'splitEventTimerCompareTitle', 'addSingleTimersForEvent',
        'selectBouquets', 'MainBouquet', 'AlternativeBouquet',
        'useAlternativeChannel', 'preferMainBouquet', 'intensiveTimersuche',
        'sucheAufnahme', 'SkinType', 'showAllButtons', 'DisplayRefreshRate',
        'firstscreen', 'showPicons', 'piconPath', 'downloadCover',
        'coverPath', 'showCover', 'createPlaceholderCover',
        'refreshPlaceholderCover', 'copyCoverToFolder', 'listFontsize',
        'markerColumnWidth', 'markerNameInset', 'showDeactivatedBoxIDs',
        'seasonFilter', 'timerFilter', 'markerSort', 'max_season',
        'openMarkerScreen', 'confirmOnDelete', 'alphaSortBoxChannels',
    ))

    # The web interface cannot run without its static assets.
    if os.path.isdir('%s/web-data' % os.path.dirname(__file__)) is False:
        srec.enableWebinterface.value = False
    srec.enableWebinterface.save()

    _save_all((
        'showNotification', 'showMessageOnConflicts',
        'showMessageOnTVPlanerError', 'showMessageOnEventNotFound',
        'showMessageTimeout', 'channelUpdateNotification', 'LogFilePath',
        'longLogFileName', 'deleteLogFilesOlderThan', 'writeLog',
        'writeLogVersion', 'writeLogChannels', 'writeLogAllowedEpisodes',
        'writeLogAdded', 'writeLogDisk', 'writeLogTimeRange',
        'writeLogTimeLimit', 'writeLogTimerDebug', 'tvplaner_backupHTML',
        'logScrollLast', 'logWrapAround', 'timeUpdate', 'kindOfTimer',
        'dbversion', 'bouquetList', 'deleteOlderThan', 'imap_check_interval',
        'planerCacheEnabled', 'planerCacheSize', 'readdatafromfiles',
        'refreshViews', 'setupType', 'tvplaner_create_marker',
        'updateInterval', 'justplay', 'justremind', 'zapbeforerecord',
    ))

    configfile.save()
class TextDescriptorsCorrelationMetric(Metric[TextDescriptorsCorrelationMetricResult]):
    """Correlate text descriptors of ``column_name`` with the numerical feature columns.

    For each descriptor (e.g. text length, OOV %), a feature column is generated
    from the text column and correlated against every numerical feature, for the
    current data and — when present — the reference data.
    """

    # Name of the text column the descriptors are computed from.
    column_name: str
    # Generated lazily in required_features(); empty until then.
    _generated_text_features: Dict[str, GeneratedFeature]
    # Descriptor display name -> descriptor used to build a generated feature.
    descriptors: Dict[str, FeatureDescriptor]

    def __init__(
        self,
        column_name: str,
        descriptors: Optional[Dict[str, FeatureDescriptor]] = None,
        options: AnyOptions = None,
    ) -> None:
        self.column_name = column_name
        # Fall back to a default descriptor set when none is supplied.
        if descriptors:
            self.descriptors = descriptors
        else:
            self.descriptors = {
                'Text Length': TextLength(),
                'Non Letter Character %': NonLetterCharacterPercentage(),
                'OOV %': OOV(),
            }
        super().__init__(options=options)
        self._generated_text_features = {}

    @property
    def generated_text_features(self):
        """Mapping of descriptor name -> generated feature.

        BUG FIX: this must be a property — call sites access
        ``self.generated_text_features.values()`` / ``.keys()`` directly.
        """
        return self._generated_text_features

    def required_features(self, data_definition: DataDefinition):
        # Descriptors only apply to text columns; otherwise nothing is generated.
        column_type = data_definition.get_column(self.column_name).column_type
        if column_type == ColumnType_data.Text:
            self._generated_text_features = {
                name: desc.feature(self.column_name)
                for name, desc in self.descriptors.items()
            }
            return list(self.generated_text_features.values())
        return []

    def get_parameters(self) -> tuple:
        return (self.column_name,)

    def calculate(self, data: InputData) -> TextDescriptorsCorrelationMetricResult:
        if self.column_name not in data.current_data:
            raise ValueError(f"Column '{self.column_name}' was not found in current data.")
        if data.reference_data is not None:
            if self.column_name not in data.reference_data:
                raise ValueError(f"Column '{self.column_name}' was not found in reference data.")
        # Assemble the generated descriptor columns for the current data.
        curr_text_df = pd.concat(
            [data.get_current_column(x.feature_name()) for x in self.generated_text_features.values()],
            axis=1,
        )
        curr_text_df.columns = list(self.generated_text_features.keys())
        ref_df = None
        if data.reference_data is not None:
            ref_text_df = pd.concat(
                [data.get_reference_column(x.feature_name()) for x in self.generated_text_features.values()],
                axis=1,
            )
            ref_text_df.columns = list(self.generated_text_features.keys())
            ref_df = pd.concat(
                [data.reference_data.copy().reset_index(drop=True), ref_text_df.reset_index(drop=True)],
                axis=1,
            )
        curr_result: dict = {}
        ref_result: Optional[dict] = None
        if ref_df is not None:
            ref_result = {}
        num_features = data.data_definition.get_columns(ColumnType.Numerical, features_only=True)
        # Hoist the column list; also avoids shadowing the loop variable below.
        num_cols = [col.column_name for col in num_features]
        for name, feature in self.generated_text_features.items():
            correlations = calculate_numerical_correlation(
                name,
                data.get_current_column(feature.feature_name()),
                data.current_data[num_cols],
            )
            curr_result[name] = {value.kind: value for value in correlations}
            if ref_df is not None and ref_result is not None:
                # BUG FIX: reference correlations must use the reference data,
                # not the current data.
                correlations = calculate_numerical_correlation(
                    name,
                    data.get_reference_column(feature.feature_name()),
                    data.reference_data[num_cols],
                )
                ref_result[name] = {value.kind: value for value in correlations}
        return TextDescriptorsCorrelationMetricResult(
            column_name=self.column_name,
            current=curr_result,
            reference=ref_result,
        )
def test_avg03(tmpdir, generate_plot):
    """Average PORO onto a regular surface and compare the mean to a benchmark."""
    # Load the grid and its PORO property from the INIT file.
    grid = xtgeo.grid_from_file(GFILE2, fformat='egrid')
    poro = xtgeo.gridproperty_from_file(IFILE2, fformat='init', name='PORO', grid=grid)
    cell_dz = grid.get_dz(asmasked=False)
    xcoord, ycoord, _zcoord = grid.get_xyz(asmasked=False)
    # Raw 3D numpy views of the properties (inactive cells weighted to zero).
    active = grid.get_actnum().get_npvalues3d()
    x_vals = xcoord.get_npvalues3d()
    y_vals = ycoord.get_npvalues3d()
    dz_vals = cell_dz.get_npvalues3d(fill_value=0.0)
    poro_vals = poro.get_npvalues3d(fill_value=0.0)
    inactive = active < 0.5
    dz_vals[inactive] = 0.0
    poro_vals[inactive] = 0.0
    # Single pseudo-zone covering the whole grid.
    zone_vals = np.ones(x_vals.shape)
    surface = RegularSurface(
        ncol=200, nrow=250, xinc=50, yinc=50, xori=457000, yori=5927000,
        values=np.zeros((200, 250)),
    )
    surface.avg_from_3dprop(
        xprop=x_vals, yprop=y_vals, zoneprop=zone_vals, zone_minmax=(1, 1),
        mprop=poro_vals, dzprop=dz_vals, truncate_le=None, zone_avg=True,
    )
    faults = xtgeo.polygons_from_file(FFILE1, fformat='zmap')
    fault_spec = {'faults': faults}
    if generate_plot:
        surface.quickplot(filename=join(tmpdir, 'tmp_poro3.png'), xlabelrotation=30, faults=fault_spec)
    surface.to_file(join(tmpdir, 'tmp.poro3.gri'), fformat='irap_ascii')
    logger.info(surface.values.mean())
    assert surface.values.mean() == pytest.approx(0.1653, abs=0.01)
class OptionPlotoptionsScatter3dSonificationContexttracksMappingTremoloSpeed(Options):
    """Tremolo-speed mapping options, backed by the ``Options`` config store.

    BUG FIX: each option previously had two plain ``def``s with the same name
    (getter then setter), so the second silently replaced the first and the
    getter was unreachable. The ``@property`` / ``@<name>.setter`` pairing is
    restored so each option reads via ``_config_get`` and writes via ``_config``.
    """

    @property
    def mapFunction(self):
        """Get/set the ``mapFunction`` option (stored as a plain value)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Get/set the ``mapTo`` option."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Get/set the ``max`` option."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Get/set the ``min`` option."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Get/set the ``within`` option."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def evm_trace(evm: object, event: TraceEvent, trace_memory: bool=False, trace_stack: bool=True, trace_return_data: bool=False) -> None:
    """Append/update trace records on ``evm.env.traces`` for an interpreter event.

    Dispatches on the concrete ``event`` type. The ``trace_memory``,
    ``trace_stack`` and ``trace_return_data`` flags control which optional
    snapshots (memory, stack, return data) are captured on each new trace entry.
    """
    assert isinstance(evm, (EvmWithoutReturnData, EvmWithReturnData))
    # The most recent trace entry, if any; several event branches mutate it.
    last_trace = None
    if evm.env.traces:
        last_trace = evm.env.traces[(- 1)]
    # Accumulate the refund counter across all parent call frames.
    refund_counter = evm.refund_counter
    parent_evm = evm.message.parent_evm
    while (parent_evm is not None):
        refund_counter += parent_evm.refund_counter
        parent_evm = parent_evm.message.parent_evm
    len_memory = len(evm.memory)
    # Optional hex snapshots — captured only when the corresponding flag is set.
    return_data = None
    if (isinstance(evm, EvmWithReturnData) and trace_return_data):
        return_data = ('0x' + evm.return_data.hex())
    memory = None
    if (trace_memory and (len_memory > 0)):
        memory = ('0x' + evm.memory.hex())
    stack = None
    if trace_stack:
        stack = [hex(i) for i in evm.stack]
    if isinstance(event, TransactionStart):
        # Nothing is recorded at transaction start.
        pass
    elif isinstance(event, TransactionEnd):
        # Close out the trace with the transaction's summary record.
        final_trace = FinalTrace(event.gas_used, event.output, event.error)
        evm.env.traces.append(final_trace)
    elif isinstance(event, PrecompileStart):
        # Precompile calls are recorded with the precompile address as op/opName.
        new_trace = Trace(pc=evm.pc, op=('0x' + event.address.hex().lstrip('0')), gas=hex(evm.gas_left), gasCost='0x0', memory=memory, memSize=len_memory, stack=stack, returnData=return_data, depth=(evm.message.depth + 1), refund=refund_counter, opName=('0x' + event.address.hex().lstrip('0')), precompile=True)
        evm.env.traces.append(new_trace)
    elif isinstance(event, PrecompileEnd):
        # Mark the precompile entry as fully traced (no further gas/error updates).
        assert isinstance(last_trace, Trace)
        last_trace.gasCostTraced = True
        last_trace.errorTraced = True
    elif isinstance(event, OpStart):
        # One trace entry per executed opcode; opName is the enum member name.
        new_trace = Trace(pc=evm.pc, op=event.op.value, gas=hex(evm.gas_left), gasCost='0x0', memory=memory, memSize=len_memory, stack=stack, returnData=return_data, depth=(evm.message.depth + 1), refund=refund_counter, opName=str(event.op).split('.')[(- 1)])
        evm.env.traces.append(new_trace)
    elif isinstance(event, OpEnd):
        # The opcode finished; freeze its gas/error fields.
        assert isinstance(last_trace, Trace)
        last_trace.gasCostTraced = True
        last_trace.errorTraced = True
    elif isinstance(event, OpException):
        if (last_trace is not None):
            assert isinstance(last_trace, Trace)
        # If there is no entry to attach the error to (or it already carries an
        # error, or it belongs to the parent frame), record a standalone
        # InvalidOpcode entry; otherwise attach the error to the last entry.
        if ((not last_trace) or last_trace.errorTraced or (last_trace.depth == evm.message.depth)):
            new_trace = Trace(pc=evm.pc, op='InvalidOpcode', gas=hex(evm.gas_left), gasCost='0x0', memory=memory, memSize=len_memory, stack=stack, returnData=return_data, depth=(evm.message.depth + 1), refund=refund_counter, opName='InvalidOpcode', gasCostTraced=True, errorTraced=True, error=type(event.error).__name__)
            evm.env.traces.append(new_trace)
        elif (not last_trace.errorTraced):
            last_trace.error = type(event.error).__name__
            last_trace.errorTraced = True
    elif isinstance(event, EvmStop):
        if (not evm.running):
            return
        elif (len(evm.code) == 0):
            return
        else:
            # A stop while still running with code present is traced as an
            # ordinary opcode start for the stopping op.
            evm_trace(evm, OpStart(event.op), trace_memory, trace_stack, trace_return_data)
    elif isinstance(event, GasAndRefund):
        if (not evm.env.traces):
            # gas could be charged before any opcode is traced; nothing to update.
            return
        assert isinstance(last_trace, Trace)
        # Only the first gas charge per entry is recorded.
        if (not last_trace.gasCostTraced):
            last_trace.gasCost = hex(event.gas_cost)
            last_trace.refund = refund_counter
            last_trace.gasCostTraced = True
class Depot(QWidget): ps_update = pyqtSignal(bool) ps_export = pyqtSignal(int) ps_status_changed = pyqtSignal(tuple) ps_dropped = pyqtSignal(tuple) ps_appended = pyqtSignal(tuple) ps_linked = pyqtSignal(bool) ps_history_backup = pyqtSignal(bool) ps_undo = pyqtSignal(bool) ps_open_image_url = pyqtSignal(tuple) def __init__(self, wget, args): super().__init__(wget) wget.setProperty('class', 'WorkArea') self._args = args self._left_click = False self._double_click = False self._start_hig = None self._start_pt = None self._current_idx = None self._fetched_cell = None self._connected_keymaps = {} self.init_key() self._func_tr_() self.setFocusPolicy(Qt.StrongFocus) self.setAcceptDrops(True) grid_layout = QGridLayout(self) grid_layout.setContentsMargins(0, 0, 0, 0) self._scroll_area = QScrollArea(self) self._scroll_area.setProperty('class', 'WorkArea') self._scroll_area.setFrameShape(QFrame.Box) self._scroll_area.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn) self._scroll_area.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff) self._scroll_area.setWidgetResizable(True) grid_layout.addWidget(self._scroll_area) self._scroll_bar = self._scroll_area.verticalScrollBar() self._scroll_contents = QWidget() self._scroll_contents.setProperty('class', 'WorkArea') self._scroll_grid_layout = QGridLayout(self._scroll_contents) self._scroll_grid_layout.setContentsMargins(0, 0, 0, 0) self._scroll_area.setWidget(self._scroll_contents) self.initialize() self._info = Info(self, self._args) self.create_menu() self.update_text() self._stab_column_wid = None self._pl_wid = 0 self._tot_rows = 0 def paintEvent(self, event): self._pl_wid = int(((self.width() - (self._scroll_bar.width() * 1.25)) / self._args.stab_column)) self._tot_rows = ((len(self._args.stab_ucells) // self._args.stab_column) if ((len(self._args.stab_ucells) % self._args.stab_column) == 0) else ((len(self._args.stab_ucells) // self._args.stab_column) + 1)) if (not self._stab_column_wid): self._stab_column_wid = 
int(self._pl_wid) height = (self._pl_wid * self._tot_rows) height = (height if (height > self._scroll_area.height()) else self._scroll_area.height()) self._scroll_contents.setMinimumSize((self._pl_wid * self._args.stab_column), height) self._scroll_contents.setMaximumSize((self._pl_wid * self._args.stab_column), height) for i in range(self._tot_rows): for j in range(self._args.stab_column): idx = ((self._args.stab_column * i) + j) if ((idx < len(self._args.stab_ucells)) and isinstance(self._args.stab_ucells[idx], UnitCell)): self._args.stab_ucells[idx].setGeometry((self._pl_wid * j), (self._pl_wid * i), self._pl_wid, self._pl_wid) status_idx = self._current_idx if (status_idx == None): status_idx = 0 else: status_idx = (status_idx + 1) self.ps_status_changed.emit((self._tot_rows, self._args.stab_column, (len(self._args.stab_ucells) - 1), status_idx)) def keyPressEvent(self, event): if (event.key() == Qt.Key_Shift): self._press_key = 1 self.setCursor(QCursor(Qt.PointingHandCursor)) event.accept() elif (event.key() == Qt.Key_Control): self._press_key = 2 self.setCursor(QCursor(Qt.PointingHandCursor)) event.accept() elif (event.key() == Qt.Key_Space): self._press_key = 3 self.setCursor(QCursor(Qt.ClosedHandCursor)) event.accept() elif (event.key() == Qt.Key_Alt): self._press_key = 4 self.setCursor(QCursor(Qt.PointingHandCursor)) event.accept() else: self._press_key = 0 self.setCursor(QCursor(Qt.ArrowCursor)) event.ignore() def keyReleaseEvent(self, event): self._press_key = 0 self.setCursor(QCursor(Qt.ArrowCursor)) event.ignore() def mousePressEvent(self, event): point = np.array(((event.x() - self._scroll_contents.x()), (event.y() - self._scroll_contents.y()))) col = (point[0] // self._pl_wid) row = (point[1] // self._pl_wid) if ((self._press_key == 2) and (event.button() == Qt.LeftButton)): color_list = [] for unit_cell in self._args.stab_ucells[:(- 1)]: if isinstance(unit_cell, UnitCell): color_list.append((unit_cell.color_set, unit_cell.hm_rule, unit_cell.name, 
unit_cell.desc, unit_cell.cr_time, unit_cell.grid_locations, unit_cell.grid_assitlocs, unit_cell.grid_list, unit_cell.grid_values)) color_dict = {'version': self._args.info_version_en, 'site': self._args.info_main_site, 'type': 'depot'} color_dict['palettes'] = export_list(color_list) color_path = os.sep.join((self._args.global_temp_dir.path(), 'Rickrack_Depot_{}.dpc'.format(abs(hash(str(color_dict)))))) with open(color_path, 'w', encoding='utf-8') as f: json.dump(color_dict, f, indent=4, ensure_ascii=False) self._drag_file = True drag = QDrag(self) mimedata = QMimeData() mimedata.setUrls([QUrl.fromLocalFile(color_path)]) drag.setMimeData(mimedata) pixmap = QPixmap(':/images/images/file_depot_128.png') drag.setPixmap(pixmap) drag.setHotSpot(QPoint((pixmap.width() / 2), (pixmap.height() / 2))) drag.exec_((Qt.CopyAction | Qt.MoveAction)) self._drag_file = False elif (col <= self._args.stab_column): idx = ((self._args.stab_column * row) + col) if ((event.button() == Qt.MidButton) or ((self._press_key == 3) and (event.button() == Qt.LeftButton))): if (event.button() == Qt.MidButton): self.setCursor(QCursor(Qt.ClosedHandCursor)) self._start_hig = (self._scroll_bar.value() + event.y()) elif (idx < len(self._args.stab_ucells)): self.activate_idx(idx) if self._fetched_cell: self._args.stab_ucells[self._current_idx] = self._fetched_cell self._fetched_cell = None self._left_click = False self._start_pt = None elif ((event.button() == Qt.LeftButton) and (idx < (len(self._args.stab_ucells) - 1))): self._left_click = True self._start_pt = np.array((event.x(), event.y())) self._fetched_cell = self._args.stab_ucells[self._current_idx] self._args.stab_ucells[self._current_idx] = None self._fetched_cell.raise_() else: self.activate_idx(None) else: self.activate_idx(None) event.accept() def mouseMoveEvent(self, event): if self._double_click: event.accept() elif ((self._press_key == 1) and self._left_click): color_dict = {'version': self._args.info_version_en, 'site': 
self._args.info_main_site, 'type': 'set'} color_dict['palettes'] = export_list([(self._fetched_cell.color_set, self._fetched_cell.hm_rule, self._fetched_cell.name, self._fetched_cell.desc, self._fetched_cell.cr_time, self._fetched_cell.grid_locations, self._fetched_cell.grid_assitlocs, self._fetched_cell.grid_list, self._fetched_cell.grid_values)]) color_path = os.sep.join((self._args.global_temp_dir.path(), 'Rickrack_Set_{}.dps'.format(abs(hash(str(color_dict)))))) with open(color_path, 'w', encoding='utf-8') as f: json.dump(color_dict, f, indent=4, ensure_ascii=False) self._drag_file = True drag = QDrag(self) mimedata = QMimeData() mimedata.setUrls([QUrl.fromLocalFile(color_path)]) drag.setMimeData(mimedata) pixmap = QPixmap(':/images/images/file_set_128.png') drag.setPixmap(pixmap) drag.setHotSpot(QPoint((pixmap.width() / 2), (pixmap.height() / 2))) drag.exec_((Qt.CopyAction | Qt.MoveAction)) self._args.stab_ucells[self._current_idx] = self._fetched_cell self._fetched_cell = None self._left_click = False self._start_pt = None self._drag_file = False self._press_key = 0 self.setCursor(QCursor(Qt.ArrowCursor)) self.update() event.accept() elif self._left_click: point = (event.x(), event.y()) pl_wid_5 = (self._pl_wid / 5) if (isinstance(self._start_pt, np.ndarray) and (np.sum(((self._start_pt - point) ** 2)) < (pl_wid_5 ** 2))): event.ignore() else: (x, y) = point x = (x if (x > pl_wid_5) else pl_wid_5) x = (x if (x < (self.width() - pl_wid_5)) else (self.width() - pl_wid_5)) y = (y if (y > pl_wid_5) else pl_wid_5) y = (y if (y < (self.height() - pl_wid_5)) else (self.height() - pl_wid_5)) x = (int(x) - self._scroll_contents.x()) y = (int(y) - self._scroll_contents.y()) self._start_pt = None col = (x // self._pl_wid) row = (y // self._pl_wid) if (col <= self._args.stab_column): idx = ((self._args.stab_column * row) + col) if (idx < (len(self._args.stab_ucells) - 1)): self._args.stab_ucells.pop(self._current_idx) self._current_idx = idx 
self._args.stab_ucells.insert(idx, None) self._fetched_cell.setGeometry((x - (self._pl_wid / 2)), (y - (self._pl_wid / 2)), self._pl_wid, self._pl_wid) self.update() event.accept() elif (self._start_hig != None): self._scroll_bar.setValue((self._start_hig - event.y())) self.update() event.accept() else: event.ignore() def mouseReleaseEvent(self, event): if self._start_hig: self.setCursor(QCursor(Qt.ArrowCursor)) self._start_hig = None if self._left_click: if isinstance(self._fetched_cell, UnitCell): self._args.stab_ucells[self._current_idx] = self._fetched_cell self._fetched_cell = None self._left_click = False self._start_pt = None self.update() event.accept() else: event.ignore() def mouseDoubleClickEvent(self, event): if (event.button() == Qt.LeftButton): self._double_click = True point = ((event.x() - self._scroll_contents.x()), (event.y() - self._scroll_contents.y())) col = (point[0] // self._pl_wid) row = (point[1] // self._pl_wid) if (col <= self._args.stab_column): idx = ((self._args.stab_column * row) + col) if (idx < len(self._args.stab_ucells)): self.activate_idx(idx) if (idx == (len(self._args.stab_ucells) - 1)): self.attach_set() else: self.import_set() else: self.activate_idx(None) else: self.activate_idx(None) self._double_click = False event.accept() else: event.ignore() def dragEnterEvent(self, event): if self._drag_file: event.ignore() return try: depot_file = event.mimeData().urls()[0].toLocalFile() except Exception as err: event.ignore() return if (depot_file.split('.')[(- 1)].lower() in ('dpc', 'dps', 'json', 'txt', 'aco', 'ase', 'gpl', 'xml')): self._drop_file = depot_file event.accept() else: event.ignore() def dropEvent(self, event): if self._drop_file: if (self._drop_file.split('.')[(- 1)].lower() in ('dpc', 'json')): self.ps_dropped.emit((self._drop_file, False)) else: self.ps_appended.emit((self._drop_file, False)) self._drop_file = None event.accept() else: event.ignore() def resizeEvent(self, event): if self._stab_column_wid: wid = 
self.width() stab_column = int((wid / self._stab_column_wid)) stab_column = (1 if (stab_column < 1) else stab_column) if (stab_column != self._args.stab_column): self._args.modify_settings('stab_column', stab_column) self.update() event.ignore() def init_key(self): self._press_key = 0 self._drag_file = False self._drop_file = None def initialize(self): unit_cells = [] for cset in self._args.stab_ucells: unit_cell = UnitCell(self._scroll_contents, self._args, *cset) unit_cells.append(unit_cell) self._scroll_grid_layout.addWidget(unit_cell) empty_cell = UnitCell(self._scroll_contents, self._args) unit_cells.append(empty_cell) self._scroll_grid_layout.addWidget(empty_cell) self._args.stab_ucells = unit_cells self._current_idx = None self.ps_history_backup.emit(True) for unit_cell in self._args.stab_ucells: if isinstance(unit_cell, UnitCell): unit_cell._func_tr_() unit_cell.update_text() def activate_idx(self, idx): if ((self._current_idx != None) and isinstance(self._args.stab_ucells[self._current_idx], UnitCell)): self._args.stab_ucells[self._current_idx].activated = False self._args.stab_ucells[self._current_idx].update() self._current_idx = idx if (self._current_idx != None): self._current_idx = (self._current_idx if (self._current_idx > 0) else 0) self._current_idx = (self._current_idx if (self._current_idx < (len(self._args.stab_ucells) - 1)) else (len(self._args.stab_ucells) - 1)) if ((self._current_idx != None) and isinstance(self._args.stab_ucells[self._current_idx], UnitCell)): self._args.stab_ucells[self._current_idx].activated = True self._args.stab_ucells[self._current_idx].update() upp_pos = (self._scroll_contents.y() + self._args.stab_ucells[self._current_idx].y()) low_pos = ((self._scroll_contents.y() + self._args.stab_ucells[self._current_idx].y()) + self._args.stab_ucells[self._current_idx].height()) if (upp_pos <= 0): self._scroll_bar.setValue(self._args.stab_ucells[self._current_idx].y()) elif (low_pos >= self._scroll_area.height()): 
self._scroll_bar.setValue(((self._args.stab_ucells[self._current_idx].y() + self._pl_wid) - self._scroll_area.height())) status_idx = self._current_idx if (status_idx == None): status_idx = 0 else: status_idx = (status_idx + 1) self.ps_status_changed.emit((self._tot_rows, self._args.stab_column, (len(self._args.stab_ucells) - 1), status_idx)) if (self._current_idx == None): self._info.hide() else: self._info.clone_cell(self._args.stab_ucells[self._current_idx]) def move(self, shift_x, shift_y): if (not self.isVisible()): return if (isinstance(self._fetched_cell, UnitCell) or self._left_click): return if (self._current_idx == None): self.activate_idx(0) elif (shift_x < 0): self.activate_idx((self._current_idx - 1)) elif (shift_x > 0): self.activate_idx((self._current_idx + 1)) elif (shift_y < 0): self.activate_idx((self._current_idx - self._args.stab_column)) elif (shift_y > 0): self.activate_idx((self._current_idx + self._args.stab_column)) def zoom(self, ratio): if (not self.isVisible()): return if (isinstance(self._fetched_cell, UnitCell) or self._left_click): return stab_column = self._args.stab_column if True: if (ratio > 1): stab_column = (stab_column - 1) elif (ratio < 1): stab_column = (stab_column + 1) elif (ratio > 1): stab_column = (stab_column + 1) elif (ratio < 1): stab_column = (stab_column - 1) self._args.modify_settings('stab_column', stab_column) self._stab_column_wid = None self.update() def home(self): if (not self.isVisible()): return if (isinstance(self._fetched_cell, UnitCell) or self._left_click): return if (self._scroll_bar.value() < self._pl_wid): self.activate_idx(0) else: self._scroll_bar.setValue(0) self.update() def page_up(self): if (not self.isVisible()): return if (isinstance(self._fetched_cell, UnitCell) or self._left_click): return self._scroll_bar.setValue((self._scroll_bar.value() - self._scroll_area.height())) self.update() def page_down(self): if (not self.isVisible()): return if (isinstance(self._fetched_cell, UnitCell) or 
self._left_click): return self._scroll_bar.setValue((self._scroll_bar.value() + self._scroll_area.height())) self.update() def page_end(self): if (not self.isVisible()): return if (isinstance(self._fetched_cell, UnitCell) or self._left_click): return if (self._scroll_bar.value() > (self._args.stab_ucells[(len(self._args.stab_ucells) - 1)].y() - self._scroll_area.height())): self.activate_idx((len(self._args.stab_ucells) - 1)) else: self._scroll_bar.setValue(self._scroll_contents.height()) self.update() def insert_set(self): if (self._current_idx == (len(self._args.stab_ucells) - 1)): self.attach_set() else: self.import_set() def delete_set(self): if (not self.isVisible()): return if (isinstance(self._fetched_cell, UnitCell) or self._left_click): return if ((self._current_idx == None) or (self._current_idx > (len(self._args.stab_ucells) - 2))): return if isinstance(self._args.stab_ucells[self._current_idx], UnitCell): self._args.stab_ucells[self._current_idx].close() self._args.stab_ucells.pop(self._current_idx) self._args.stab_ucells[self._current_idx].activated = True self.activate_idx(self._current_idx) self.update() def confirm_delete_set(self): if (not self.isVisible()): return if (isinstance(self._fetched_cell, UnitCell) or self._left_click): return self.prompt(self._operation_warns[3], self.delete_set) def link_set(self, link): if (not self.isVisible()): return if (isinstance(self._fetched_cell, UnitCell) or self._left_click): return if ((self._current_idx == None) or (self._current_idx > (len(self._args.stab_ucells) - 2))): return self._args.sys_link_colors[1] = bool(link) self.ps_linked.emit(True) def import_set(self, rev_import=False): if (not self.isVisible()): return if (isinstance(self._fetched_cell, UnitCell) or self._left_click): return if ((self._current_idx == None) or (self._current_idx > (len(self._args.stab_ucells) - 2))): return if isinstance(self._args.stab_ucells[self._current_idx], UnitCell): self.activate_idx(self._current_idx) if 
((self._press_key in (2, 4)) or rev_import): self._args.stab_ucells[self._current_idx].update_colors([i.hsv for i in self._args.sys_color_set], self._args.hm_rule, self._args.sys_grid_locations, self._args.sys_grid_assitlocs, self._args.sys_grid_list, self._args.sys_grid_values) if self._info.isVisible(): self._info.update_values() elif (self._press_key == 1): self.attach_set(location_idx=self._current_idx) else: self._args.sys_color_set.recover(self._args.stab_ucells[self._current_idx].color_set) self._args.hm_rule = str(self._args.stab_ucells[self._current_idx].hm_rule) (self._args.sys_grid_locations, self._args.sys_grid_assitlocs) = norm_grid_locations(self._args.stab_ucells[self._current_idx].grid_locations, self._args.stab_ucells[self._current_idx].grid_assitlocs) self._args.sys_grid_list = norm_grid_list(self._args.stab_ucells[self._current_idx].grid_list) self._args.sys_grid_values = dict(self._args.stab_ucells[self._current_idx].grid_values) self._args.sys_activated_assit_idx = (- 1) self._args.sys_assit_color_locs = [[None for j in self._args.sys_grid_assitlocs[i]] for i in range(5)] self.ps_update.emit(True) self.ps_history_backup.emit(True) (image_url, full_loc) = check_image_desc(self._args.stab_ucells[self._current_idx].desc) if image_url: self.ps_open_image_url.emit((image_url, full_loc)) if (self._press_key == 2): self.link_set((not self._args.sys_link_colors[1])) elif self._args.sys_link_colors[1]: self.link_set(False) def export_set(self): if (not self.isVisible()): return if (isinstance(self._fetched_cell, UnitCell) or self._left_click): return if ((self._current_idx == None) or (self._current_idx > (len(self._args.stab_ucells) - 2))): return if isinstance(self._args.stab_ucells[self._current_idx], UnitCell): self.activate_idx(self._current_idx) self.ps_export.emit(self._current_idx) def detail_set(self): if (not self.isVisible()): return if (isinstance(self._fetched_cell, UnitCell) or self._left_click): return if ((self._current_idx == None) or 
(self._current_idx > (len(self._args.stab_ucells) - 2))): return if isinstance(self._args.stab_ucells[self._current_idx], UnitCell): self.activate_idx(self._current_idx) self._info.show() def attach_set(self, color_list=None, location_idx=(- 1)): if (not self.isVisible()): return if (isinstance(self._fetched_cell, UnitCell) or self._left_click): return if color_list: hsv_set = tuple((color_list[0][i].hsv for i in range(5))) unit_cell = UnitCell(self._scroll_contents, self._args, hsv_set, color_list[1], color_list[2], color_list[3], color_list[4], color_list[5], color_list[6], color_list[7], color_list[8]) else: hsv_set = tuple((self._args.sys_color_set[i].hsv for i in range(5))) unit_cell = UnitCell(self._scroll_contents, self._args, hsv_set, self._args.hm_rule, '', '', (time.time(), time.time()), self._args.sys_grid_locations, self._args.sys_grid_assitlocs, self._args.sys_grid_list, self._args.sys_grid_values) self._scroll_grid_layout.addWidget(unit_cell) loc_idx = (- 1) if isinstance(location_idx, (int, np.int_)): loc_idx = int(location_idx) loc_idx = ((- 1) if (loc_idx < 0) else loc_idx) loc_idx = ((- 1) if (loc_idx > (len(self._args.stab_ucells) - 2)) else loc_idx) location_cell = self._args.stab_ucells[loc_idx] location_cell.activated = False unit_cell._func_tr_() unit_cell.update_text() unit_cell.activated = False unit_cell.setGeometry(location_cell.geometry()) total_len = len(self._args.stab_ucells) self._args.stab_ucells = ((self._args.stab_ucells[:loc_idx] + [unit_cell]) + self._args.stab_ucells[loc_idx:]) self.update() self.activate_idx(((total_len + loc_idx) % total_len)) def detail_state(self): return self._info.isVisible() def hide_detail(self): if self._info.isVisible(): self._info.hide() return True else: return False def clean_up(self): for unit_cell in self._args.stab_ucells[:(- 1)]: if isinstance(unit_cell, UnitCell): unit_cell.close() self._args.stab_ucells = self._args.stab_ucells[(- 1):] self._args.stab_ucells[0].activated = False 
self._current_idx = None self.update() def update_index(self): if (self._args.sys_link_colors[1] and isinstance(self._args.stab_ucells[self._current_idx], UnitCell) and (self._current_idx < (len(self._args.stab_ucells) - 1))): self._args.stab_ucells[self._current_idx].update_colors([i.hsv for i in self._args.sys_color_set], self._args.hm_rule, self._args.sys_grid_locations, self._args.sys_grid_assitlocs, self._args.sys_grid_list, self._args.sys_grid_values) if self._info.isVisible(): self._info.update_values() self.update() def clipboard_in(self): clipboard = QApplication.clipboard().mimeData() if clipboard.hasUrls(): try: depot_file = clipboard.urls()[0].toLocalFile() except Exception as err: return if ((depot_file.split('.')[(- 1)].lower() in ('dpc', 'json')) and os.path.isfile(depot_file)): self.ps_dropped.emit((depot_file, False)) elif ((depot_file.split('.')[(- 1)].lower() in ('dps', 'json', 'txt', 'aco', 'ase', 'gpl', 'xml')) and os.path.isfile(depot_file)): self.ps_appended.emit((depot_file, False)) else: try: color_dict = json.loads(clipboard.text(), encoding='utf-8') except Exception as err: return if (isinstance(color_dict, dict) and ('type' in color_dict) and ('palettes' in color_dict)): if (color_dict['type'] == 'depot'): self.ps_dropped.emit((color_dict, True)) elif (color_dict['type'] == 'set'): self.ps_appended.emit((color_dict, True)) def clipboard_cur(self, ctp): def _func_(): data_lst = [] if ((self._current_idx == None) or (self._args.stab_ucells[self._current_idx] == None) or (self._current_idx >= (len(self._args.stab_ucells) - 1))): for i in (2, 1, 0, 3, 4): color = self._args.sys_color_set[i].getti(ctp) if (ctp == 'hec'): color = ((self._args.hec_prefix[0] + str(color)) + self._args.hec_prefix[1]) else: color = self._args.rgb_prefix[1].join([((self._args.r_prefix[0] + str(color[coi])) + self._args.r_prefix[1]) for coi in range(3)]) color = ((self._args.rgb_prefix[0] + color) + self._args.rgb_prefix[2]) data_lst.append(color) else: for i in (2, 
# NOTE(review): this chunk arrived flattened (original newlines lost); the
# layout below is reconstructed.  The first statements are the tail of an
# inner closure (apparently the `_func_` returned by a clipboard_cur(ctp)
# factory) whose `def` and loop header lie before this chunk, so the first
# line is kept as a verbatim fragment — do not "repair" it without seeing
# the preceding lines.
            1, 0, 3, 4):  # fragment: tail of a `for i in (...):` header truncated above
                # Fetch the i-th color of the current cell in the requested
                # color-type representation (ctp is 'rgb', 'hsv' or 'hec').
                color = getattr(self._args.stab_ucells[self._current_idx].color_set[i], ctp)
                if (ctp == 'hec'):
                    # Hex code: wrap the single value in the configured hex prefix/suffix.
                    color = ((self._args.hec_prefix[0] + str(color)) + self._args.hec_prefix[1])
                else:
                    # rgb/hsv: format each of the three channels with the
                    # per-channel prefix/suffix, join with the separator, then
                    # wrap the whole triple in the rgb prefix/suffix.
                    color = self._args.rgb_prefix[1].join([((self._args.r_prefix[0] + str(color[coi])) + self._args.r_prefix[1]) for coi in range(3)])
                    color = ((self._args.rgb_prefix[0] + color) + self._args.rgb_prefix[2])
                data_lst.append(color)
            # Join the formatted colors with the list separator and wrap in the
            # list prefix/suffix, then place the text on the system clipboard.
            data = self._args.lst_prefix[1].join(data_lst)
            data = ((self._args.lst_prefix[0] + data) + self._args.lst_prefix[2])
            mimedata = QMimeData()
            mimedata.setText(data)
            clipboard = QApplication.clipboard()
            clipboard.setMimeData(mimedata)
        return _func_

    def update_all(self):
        """Repaint every unit cell in the depot, then repaint this widget."""
        for unit_cell in self._args.stab_ucells:
            # stab_ucells may hold placeholders (e.g. None / the trailing "add"
            # slot) — only real UnitCell instances get repainted.
            if isinstance(unit_cell, UnitCell):
                unit_cell.update()
        self.update()

    def prompt(self, text, accept_action):
        """Show a warning dialog with OK/Cancel; run accept_action on OK.

        Args:
            text: message body shown in the dialog.
            accept_action: zero-argument callable invoked if the user accepts.
        """
        box = QMessageBox(self)
        box.setWindowTitle(self._operation_warns[0])  # "Warning"
        box.setText(text)
        box.setIcon(QMessageBox.Warning)
        box.addButton(self._operation_warns[1], QMessageBox.AcceptRole)  # "OK"
        box.addButton(self._operation_warns[2], QMessageBox.RejectRole)  # "Cancel"
        # NOTE(review): relies on exec_() returning 0 for the first-added
        # (accept) button; an explicit clickedButton() check would be safer.
        if (box.exec_() == 0):
            accept_action()

    def create_menu(self):
        """Build the right-click context menu and wire up all of its actions.

        Action texts are set later by update_action_text(); visibility is
        toggled per-invocation by show_menu().
        """
        self.setContextMenuPolicy(Qt.CustomContextMenu)
        self.customContextMenuRequested.connect(self.show_menu)
        self._menu = QMenu(self)
        # Undo / redo are forwarded through the ps_undo signal (True = undo).
        self._action_undo = QAction(self)
        self._action_undo.triggered.connect((lambda : self.ps_undo.emit(True)))
        self._menu.addAction(self._action_undo)
        self._action_redo = QAction(self)
        self._action_redo.triggered.connect((lambda : self.ps_undo.emit(False)))
        self._menu.addAction(self._action_redo)
        self._action_paste = QAction(self)
        self._action_paste.triggered.connect(self.clipboard_in)
        self._menu.addAction(self._action_paste)
        # clipboard_cur(...) is a factory returning the actual slot (see the
        # closure fragment above), so it is CALLED here rather than referenced.
        self._action_copy_rgb = QAction(self)
        self._action_copy_rgb.triggered.connect(self.clipboard_cur('rgb'))
        self._menu.addAction(self._action_copy_rgb)
        self._action_copy_hsv = QAction(self)
        self._action_copy_hsv.triggered.connect(self.clipboard_cur('hsv'))
        self._menu.addAction(self._action_copy_hsv)
        self._action_copy_hec = QAction(self)
        self._action_copy_hec.triggered.connect(self.clipboard_cur('hec'))
        self._menu.addAction(self._action_copy_hec)
        self._action_zoom_in = QAction(self)
        self._action_zoom_in.triggered.connect((lambda : self.zoom(self._args.zoom_step)))
        self._menu.addAction(self._action_zoom_in)
        self._action_zoom_out = QAction(self)
        self._action_zoom_out.triggered.connect((lambda : self.zoom((1 / self._args.zoom_step))))
        self._menu.addAction(self._action_zoom_out)
        self._action_import = QAction(self)
        self._action_import.triggered.connect(self.import_set)
        self._menu.addAction(self._action_import)
        self._action_rev_import = QAction(self)
        self._action_rev_import.triggered.connect((lambda : self.import_set(rev_import=True)))
        self._menu.addAction(self._action_rev_import)
        self._action_export = QAction(self)
        self._action_export.triggered.connect(self.export_set)
        self._menu.addAction(self._action_export)
        # "Attach beside" inserts at the currently selected index; "append"
        # uses attach_set's default location (end of depot).
        self._action_attach_beside = QAction(self)
        self._action_attach_beside.triggered.connect((lambda : self.attach_set(location_idx=self._current_idx)))
        self._menu.addAction(self._action_attach_beside)
        self._action_attach_append = QAction(self)
        self._action_attach_append.triggered.connect(self.attach_set)
        self._menu.addAction(self._action_attach_append)
        self._action_delete = QAction(self)
        self._action_delete.triggered.connect(self.delete_set)
        self._menu.addAction(self._action_delete)
        # Link toggles: pass the negation of the current link flag.
        self._action_link = QAction(self)
        self._action_link.triggered.connect((lambda : self.link_set((not self._args.sys_link_colors[1]))))
        self._menu.addAction(self._action_link)
        self._action_detail = QAction(self)
        self._action_detail.triggered.connect(self.detail_set)
        self._menu.addAction(self._action_detail)

    def show_menu(self):
        """Pop up the context menu, hiding per-set actions when no valid set
        is selected (no selection, empty slot, or the trailing "add" slot)."""
        # NOTE(review): `== None` should idiomatically be `is None`; behavior
        # is unchanged here but worth normalizing in a code change.
        if ((self._current_idx == None) or (self._args.stab_ucells[self._current_idx] == None) or (self._current_idx >= (len(self._args.stab_ucells) - 1))):
            self._action_copy_rgb.setVisible(False)
            self._action_copy_hsv.setVisible(False)
            self._action_copy_hec.setVisible(False)
            self._action_import.setVisible(False)
            self._action_rev_import.setVisible(False)
            self._action_export.setVisible(False)
            self._action_delete.setVisible(False)
            self._action_detail.setVisible(False)
            self._action_attach_beside.setVisible(False)
            self._action_link.setVisible(False)
        else:
            self._action_copy_rgb.setVisible(True)
            self._action_copy_hsv.setVisible(True)
            self._action_copy_hec.setVisible(True)
            self._action_import.setVisible(True)
            self._action_rev_import.setVisible(True)
            self._action_export.setVisible(True)
            self._action_delete.setVisible(True)
            self._action_detail.setVisible(True)
            self._action_attach_beside.setVisible(True)
            self._action_link.setVisible(True)
        self._menu.exec_(QCursor.pos())

    def update_skey(self):
        """(Re)bind keyboard shortcuts from self._args.shortcut_keymaps.

        For each key sequence: if a QShortcut already exists in the
        self._connected_keymaps cache it is disconnected first (so each key
        keeps exactly one binding); otherwise a new QShortcut is created and
        cached.  Keymap group indices map to actions as follows:
        39 -> delete_set, 40 -> confirm_delete_set, 38 -> insert_set,
        41 -> detail_set, 20/21/22 -> copy rgb/hsv/hec, 45 -> copy hec,
        46 -> paste, 36 -> page_up, 37 -> page_down, 35 -> page_end.
        """
        for skey in self._args.shortcut_keymaps[39]:
            if (skey in self._connected_keymaps):
                shortcut = self._connected_keymaps[skey]
                shortcut.disconnect()
            else:
                shortcut = QShortcut(QKeySequence(skey), self)
                self._connected_keymaps[skey] = shortcut
            shortcut.activated.connect(self.delete_set)
        for skey in self._args.shortcut_keymaps[40]:
            if (skey in self._connected_keymaps):
                shortcut = self._connected_keymaps[skey]
                shortcut.disconnect()
            else:
                shortcut = QShortcut(QKeySequence(skey), self)
                self._connected_keymaps[skey] = shortcut
            shortcut.activated.connect(self.confirm_delete_set)
        for skey in self._args.shortcut_keymaps[38]:
            if (skey in self._connected_keymaps):
                shortcut = self._connected_keymaps[skey]
                shortcut.disconnect()
            else:
                shortcut = QShortcut(QKeySequence(skey), self)
                self._connected_keymaps[skey] = shortcut
            shortcut.activated.connect(self.insert_set)
        for skey in self._args.shortcut_keymaps[41]:
            if (skey in self._connected_keymaps):
                shortcut = self._connected_keymaps[skey]
                shortcut.disconnect()
            else:
                shortcut = QShortcut(QKeySequence(skey), self)
                self._connected_keymaps[skey] = shortcut
            shortcut.activated.connect(self.detail_set)
        for skey in self._args.shortcut_keymaps[20]:
            if (skey in self._connected_keymaps):
                shortcut = self._connected_keymaps[skey]
                shortcut.disconnect()
            else:
                shortcut = QShortcut(QKeySequence(skey), self)
                self._connected_keymaps[skey] = shortcut
            shortcut.activated.connect(self.clipboard_cur('rgb'))
        for skey in self._args.shortcut_keymaps[21]:
            if (skey in self._connected_keymaps):
                shortcut = self._connected_keymaps[skey]
                shortcut.disconnect()
            else:
                shortcut = QShortcut(QKeySequence(skey), self)
                self._connected_keymaps[skey] = shortcut
            shortcut.activated.connect(self.clipboard_cur('hsv'))
        for skey in self._args.shortcut_keymaps[22]:
            if (skey in self._connected_keymaps):
                shortcut = self._connected_keymaps[skey]
                shortcut.disconnect()
            else:
                shortcut = QShortcut(QKeySequence(skey), self)
                self._connected_keymaps[skey] = shortcut
            shortcut.activated.connect(self.clipboard_cur('hec'))
        # NOTE(review): group 45 binds the same clipboard_cur('hec') slot as
        # group 22 above — possibly intentional (an alias key), possibly a
        # copy-paste slip; confirm against the keymap table.
        for skey in self._args.shortcut_keymaps[45]:
            if (skey in self._connected_keymaps):
                shortcut = self._connected_keymaps[skey]
                shortcut.disconnect()
            else:
                shortcut = QShortcut(QKeySequence(skey), self)
                self._connected_keymaps[skey] = shortcut
            shortcut.activated.connect(self.clipboard_cur('hec'))
        for skey in self._args.shortcut_keymaps[46]:
            if (skey in self._connected_keymaps):
                shortcut = self._connected_keymaps[skey]
                shortcut.disconnect()
            else:
                shortcut = QShortcut(QKeySequence(skey), self)
                self._connected_keymaps[skey] = shortcut
            shortcut.activated.connect(self.clipboard_in)
        for skey in self._args.shortcut_keymaps[36]:
            if (skey in self._connected_keymaps):
                shortcut = self._connected_keymaps[skey]
                shortcut.disconnect()
            else:
                shortcut = QShortcut(QKeySequence(skey), self)
                self._connected_keymaps[skey] = shortcut
            shortcut.activated.connect(self.page_up)
        for skey in self._args.shortcut_keymaps[37]:
            if (skey in self._connected_keymaps):
                shortcut = self._connected_keymaps[skey]
                shortcut.disconnect()
            else:
                shortcut = QShortcut(QKeySequence(skey), self)
                self._connected_keymaps[skey] = shortcut
            shortcut.activated.connect(self.page_down)
        for skey in self._args.shortcut_keymaps[35]:
            if (skey in self._connected_keymaps):
                shortcut = self._connected_keymaps[skey]
                shortcut.disconnect()
            else:
                shortcut = QShortcut(QKeySequence(skey), self)
                self._connected_keymaps[skey] = shortcut
            shortcut.activated.connect(self.page_end)

    def update_action_text(self):
        """Push the translated strings from _func_tr_ onto the menu actions.

        The link action's text depends on the current link state
        (sys_link_colors[1]): linked shows "Un-Link", unlinked shows "Link".
        """
        self._action_undo.setText(self._action_descs[0])
        self._action_redo.setText(self._action_descs[1])
        self._action_copy_rgb.setText(self._action_descs[2])
        self._action_copy_hsv.setText(self._action_descs[3])
        self._action_copy_hec.setText(self._action_descs[4])
        self._action_paste.setText(self._action_descs[5])
        self._action_zoom_in.setText(self._action_descs[6])
        self._action_zoom_out.setText(self._action_descs[7])
        self._action_import.setText(self._action_descs[8])
        self._action_rev_import.setText(self._action_descs[16])
        self._action_export.setText(self._action_descs[9])
        self._action_attach_beside.setText(self._action_descs[10])
        self._action_attach_append.setText(self._action_descs[11])
        self._action_delete.setText(self._action_descs[12])
        self._action_detail.setText(self._action_descs[13])
        if self._args.sys_link_colors[1]:
            self._action_link.setText(self._action_descs[15])
        else:
            self._action_link.setText(self._action_descs[14])

    def update_text(self):
        """Re-apply translations to this widget, its info panel and all cells
        (called after a language change)."""
        self.update_action_text()
        self._info._func_tr_()
        self._info.update_text()
        for unit_cell in self._args.stab_ucells:
            if isinstance(unit_cell, UnitCell):
                unit_cell._func_tr_()
                unit_cell.update_text()

    def _func_tr_(self):
        """Populate the translatable string tables used by this widget.

        _action_descs indices (consumed by update_action_text): 0 undo,
        1 redo, 2-4 copy rgb/hsv/hex, 5 paste, 6-7 zoom, 8 replace (import),
        9 export, 10 insert, 11 append, 12 delete, 13 detail, 14 link,
        15 un-link, 16 rev-replace.  _operation_warns: 0 title, 1 OK,
        2 Cancel, 3 delete-confirmation message (used by prompt()).
        """
        _translate = QCoreApplication.translate
        self._action_descs = (
            _translate('Wheel', 'Undo'),
            _translate('Wheel', 'Redo'),
            _translate('Depot', 'Copy RGB'),
            _translate('Depot', 'Copy HSV'),
            _translate('Depot', 'Copy Hex Code'),
            _translate('Wheel', 'Paste'),
            _translate('Board', 'Zoom In'),
            _translate('Board', 'Zoom Out'),
            _translate('Depot', 'Replace Color (DK)'),
            _translate('Depot', 'Export Color Set'),
            _translate('Depot', 'Insert Color Set (Shift+DK)'),
            _translate('Depot', 'Append Color Set'),
            _translate('Depot', 'Delete Color Set'),
            _translate('Depot', 'Show Detail'),
            _translate('Depot', 'Link with Result (Ctrl+DK)'),
            _translate('Depot', 'Un-Link with Result (Ctrl+DK)'),
            _translate('Depot', 'Rev-Replace Color (Alt+DK)'))
        self._operation_warns = (
            _translate('Info', 'Warning'),
            _translate('Info', 'OK'),
            _translate('Info', 'Cancel'),
            _translate('Info', 'The selected color set will be removed from depot.'))