code
stringlengths
281
23.7M
class DirectoryList(Processor):
    # Processor that lists files matching a shell glob pattern and publishes
    # the matched basenames, joined by ``suffix_string``, as
    # ``found_filenames``.  (No class docstring on purpose: ``description``
    # below deliberately picks up the *module* docstring.)
    input_variables = {
        'pattern': {
            'description': 'Shell glob pattern to match files by',
            'required': True,
        },
        'find_method': {
            'description': 'Type of pattern to match. Currently only supported type is "glob" (also the default)',
            'default': 'glob',
            'required': False,
        },
        'remove_extension': {
            'description': 'Remove the extension at the end. Default to False.',
            'default': False,
            'required': False,
        },
        'suffix_string': {
            'description': "String to append to each found item name in dir. Defaults to ','",
            'default': ',',
            'required': False,
        },
    }
    output_variables = {'found_filenames': {'description': 'Found filename'}}
    description = __doc__

    def globfind(self, pattern):
        # Expand the glob; at least one match is required.
        matched_paths = sorted(glob(pattern))
        if not matched_paths:
            raise ProcessorError('No matching filename found')
        names = []
        for path in matched_paths:
            name = os.path.basename(path)
            if self.env['remove_extension']:
                # Strip the trailing extension when requested.
                name, _ = os.path.splitext(name)
            names.append(name)
        return names

    def main(self):
        # Only the 'glob' method is implemented.
        method = self.env.get('find_method')
        if method != 'glob':
            raise ProcessorError('Unsupported find_method: %s' % method)
        separator = '%s' % self.env['suffix_string']
        joined = separator.join(self.globfind(self.env.get('pattern')))
        self.env['found_filenames'] = joined.strip()
        self.output('Found matches: %s' % self.env['found_filenames'])
class TypeAbbrevA(ArgumentProcessor):
    # Resolves short precision strings (e.g. 'f32', 'i8') to type objects.
    _shorthand = {'R': T.R, 'f16': T.f16, 'f32': T.f32, 'f64': T.f64,
                  'i8': T.int8, 'ui8': T.uint8, 'ui16': T.uint16, 'i32': T.int32}

    def __call__(self, typ, all_args):
        """Return the type object for *typ*, or report the valid choices."""
        try:
            return TypeAbbrevA._shorthand[typ]
        except KeyError:
            precisions = ', '.join(TypeAbbrevA._shorthand)
            self.err(f'expected one of the following strings specifying precision: {precisions}', ValueError)
def test_get_id_range_for_partition_with_sparse_range():
    # Partitioning is driven by the id *range* (max - min + 1), not the row
    # count, so sparse ids must still map onto contiguous range partitions.
    min_id = 4
    max_id = 5999
    partition_size = 2000
    id_range_item_count = ((max_id - min_id) + 1)
    # Sparse ids scattered across [min_id, max_id].
    record_ids = {4, 5, 7, 99, 101, 120, 1998, 1999, 2000, 2001, 2002, 4444, 5999}
    etl_config = {'partition_size': partition_size}
    ctrl = PostgresElasticsearchIndexerController(etl_config)
    ctrl.min_id = min_id
    ctrl.max_id = max_id
    ctrl.record_count = len(record_ids)
    ctrl.config['partitions'] = ctrl.determine_partitions()
    # Partition count is ceil(range / size), independent of sparsity.
    assert (ctrl.config['partitions'] == ceil((id_range_item_count / partition_size)))
    partition_range = range(0, ctrl.config['partitions'])
    # First partition starts exactly at min_id.
    (lower_bound, upper_bound) = ctrl.get_id_range_for_partition(partition_range[0])
    assert (lower_bound == min_id)
    assert (upper_bound == (lower_bound + (partition_size - 1)))
    # Second partition follows contiguously.
    (lower_bound, upper_bound) = ctrl.get_id_range_for_partition(partition_range[1])
    assert (lower_bound == (min_id + partition_size))
    assert (upper_bound == (lower_bound + (partition_size - 1)))
    # Last partition is clamped to max_id.
    (lower_bound, upper_bound) = ctrl.get_id_range_for_partition(partition_range[(- 1)])
    assert (lower_bound == (min_id + (partition_size * partition_range[(- 1)])))
    assert (upper_bound == max_id)
    # Every record id must be covered by some partition's range.
    assert (_remove_seen_ids(ctrl, record_ids) == set({}))
class TextLength(FeatureDescriptor):
    """Descriptor for the text-length generated feature of a column."""

    def feature(self, column_name: str) -> GeneratedFeature:
        """Build the TextLength generated feature for *column_name*."""
        return text_length_feature.TextLength(column_name, self.display_name)

    def for_column(self, column_name: str):
        """Return the generated feature's output column name for *column_name*."""
        generated = text_length_feature.TextLength(column_name, self.display_name)
        return generated.feature_name()
def _create_plot_component():
    # Build a scatter plot wired with pan/zoom tools and a ScatterInspector
    # that prints hover/selection events to stdout.
    x = random.uniform(0.0, 10.0, 50)
    y = random.uniform(0.0, 5.0, 50)
    pd = ArrayPlotData(x=x, y=y)
    plot = Plot(pd, border_visible=True, overlay_border=True)
    scatter = plot.plot(('x', 'y'), type='scatter', color='lightblue')[0]
    plot.title = 'Scatter Inspector Demo'
    plot.padding = 50
    # Standard interaction tools.
    plot.tools.append(PanTool(plot))
    plot.overlays.append(ZoomTool(plot))
    # The inspector tracks hover/selection on individual points...
    inspector = ScatterInspector(scatter)
    scatter.tools.append(inspector)
    # ...and the overlay renders the corresponding visual feedback.
    overlay = ScatterInspectorOverlay(scatter, hover_color='red', hover_marker_size=6, selection_marker_size=6, selection_color='yellow', selection_outline_color='purple', selection_line_width=3)
    scatter.overlays.append(overlay)

    def echo(event):
        # Listener for inspector events: report type and point index.
        new = event.new
        print('{} event on element {}'.format(new.event_type, new.event_index))
    inspector.observe(echo, 'inspector_event')
    return plot
class Role(Base):
    # ORM model for the sys_role table.
    # NOTE(review): the comment= strings appear stripped of their original
    # (non-ASCII) text; they are left byte-identical on purpose.
    __tablename__ = 'sys_role'
    # Primary key; excluded from the generated constructor.
    id: Mapped[id_key] = mapped_column(init=False)
    # Unique role name.
    name: Mapped[str] = mapped_column(String(20), unique=True, comment='')
    # Data permission scope (default 2; exact scope semantics stripped — confirm).
    data_scope: Mapped[(int | None)] = mapped_column(default=2, comment='(1: 2:)')
    # Status flag (default 1; presumably 0 = disabled, 1 = enabled — confirm).
    status: Mapped[int] = mapped_column(default=1, comment='(0 1)')
    # Free-text remark.
    remark: Mapped[(str | None)] = mapped_column(LONGTEXT, default=None, comment='')
    # Many-to-many relationships via association tables.
    users: Mapped[list['User']] = relationship(init=False, secondary=sys_user_role, back_populates='roles')
    menus: Mapped[list['Menu']] = relationship(init=False, secondary=sys_role_menu, back_populates='roles')
import pytest


@pytest.mark.parametrize('c', ['\\', '?', '+', ':', '*'])
@pytest.mark.usefixtures('use_tmpdir')
def test_char_in_unquoted_is_allowed(c):
    # Regex/glob special characters must be accepted unquoted in RUNPATH.
    # (The leading-dot decorator fragments were mangled @pytest.mark.* lines.)
    test_config_file_name = 'test.ert'
    test_config_contents = dedent(f'''
        NUM_REALIZATIONS 1
        RUNPATH path{c}a/b
        ''')
    with open(test_config_file_name, 'w', encoding='utf-8') as fh:
        fh.write(test_config_contents)
    ert_config = ErtConfig.from_file(test_config_file_name)
    assert f'path{c}a/b' in ert_config.model_config.runpath_format_string
import pytest


@pytest.mark.django_db
def test_non_match_from_unintuitive_tas_from_agency(client, monkeypatch, elasticsearch_award_index, subaward_with_unintuitive_agency):
    # A subaward whose TAS belongs to an unintuitive agency must NOT match an
    # agency-path TAS filter.  (Leading-dot `.django_db` was a mangled
    # @pytest.mark.django_db decorator.)
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    resp = query_by_tas_subaward(client, {'require': [_agency_path(ATA_TAS)]})
    assert resp.json()['results'] == []
def main():
    # Clean WenetSpeech: decode each source file to 24 kHz mono PCM, isolate
    # vocals with Demucs, then export high-confidence transcript segments.
    meta_path = Path('dataset/tts/WenetSpeech/WenetSpeech.json')
    dataset_path = Path('dataset/tts/WenetSpeech')
    cleaned_path = Path('dataset/tts/WenetSpeech/cleaned')
    if (not cleaned_path.exists()):
        cleaned_path.mkdir(parents=True)
    demucs = init_model('htdemucs', device)
    print('Model loaded')
    with open(meta_path) as f:
        dataset = json.load(f)['audios']
    print(f'Dataset loaded, {len(dataset)} samples')
    # Shard the work across ranks with a strided split.
    dataset = dataset[rank::world_size]
    print(f'Dataset split, {len(dataset)} samples')
    for (data_idx, data) in enumerate(dataset):
        # A per-audio-id 'done' marker makes the job resumable.
        done_path = ((cleaned_path / data['aid']) / 'done')
        done_path.parent.mkdir(parents=True, exist_ok=True)
        if done_path.exists():
            continue
        print(f'Processing {data_idx}/{len(dataset)} at rank {rank}')
        try:
            with tempfile.NamedTemporaryFile(suffix='.wav') as f:
                # Decode/resample the source to 24 kHz 16-bit PCM via ffmpeg.
                subprocess.check_call(['ffmpeg', '-y', '-i', str((dataset_path / data['path'])), '-c:a', 'pcm_s16le', '-threads', '0', '-ar', '24000', str(f.name)], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                (raw_audio, sr) = librosa.load(f.name, sr=None, mono=True)
                raw_audio = torch.from_numpy(raw_audio[None]).to(device)
                # Demucs expects audio at its own sample rate; duplicate the
                # mono channel to feed it a stereo pair.
                audio = torchaudio.functional.resample(raw_audio, orig_freq=sr, new_freq=demucs.samplerate)
                audio = torch.cat([audio, audio], dim=0)
                tracks = separate_audio(demucs, audio, shifts=1, num_workers=0, progress=False)
                # Keep only the vocal stem, resampled back to 24 kHz.
                audio = merge_tracks(tracks, filter=['vocals'])[0]
                (vocals, sr) = (torchaudio.functional.resample(audio, orig_freq=demucs.samplerate, new_freq=24000), 24000)
                vocals = vocals.cpu().numpy()
                # Write one wav + txt pair per confident transcript segment.
                for (idx, segment) in enumerate(data['segments']):
                    if (segment['confidence'] <= 0.95):
                        continue
                    begin = int((segment['begin_time'] * sr))
                    end = int((segment['end_time'] * sr))
                    segment_audio = vocals[begin:end]
                    temp_path = ((cleaned_path / data['aid']) / f'S{idx:05d}.wav')
                    temp_path.parent.mkdir(parents=True, exist_ok=True)
                    sf.write(temp_path, segment_audio, samplerate=sr)
                    temp_path = temp_path.with_suffix('.txt')
                    temp_path.write_text(segment['text'])
            # Mark this audio id complete (only after all segments succeed).
            done_path.write_text('')
        except Exception as e:
            # Best-effort: log, back off briefly, continue with the next item.
            print(f'Error {e} on {data_idx}/{len(dataset)} at rank {rank}')
            time.sleep(10)
            continue
    print('Done')
class TestOOOXMLFilter(util.PluginTestCase):
    # Spell-check OOXML documents (docx/pptx/xlsx) through the ooxml filter;
    # each test expects the same three misspellings to be reported.

    def test_docx(self):
        # Word documents via the ooxml filter.
        config = self.dedent("\n matrix:\n - name: docx\n sources:\n - 'tests/**/*.docx'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.ooxml\n ").format(self.tempdir)
        self.mktemp('.docx.yml', config, 'utf-8')
        self.assert_spellcheck('.docx.yml', ['tihs', 'smoe', 'txet'])

    def test_pptx(self):
        # PowerPoint documents via the ooxml filter.
        config = self.dedent("\n matrix:\n - name: pptx\n sources:\n - 'tests/**/*.pptx'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.ooxml\n ").format(self.tempdir)
        self.mktemp('.pptx.yml', config, 'utf-8')
        self.assert_spellcheck('.pptx.yml', ['tihs', 'smoe', 'txet'])

    def test_xlsx(self):
        # Excel documents via the ooxml filter.
        config = self.dedent("\n matrix:\n - name: xlsx\n sources:\n - 'tests/**/*.xlsx'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.ooxml\n ").format(self.tempdir)
        self.mktemp('.xlsx.yml', config, 'utf-8')
        self.assert_spellcheck('.xlsx.yml', ['tihs', 'smoe', 'txet'])

    def test_docx_chained(self):
        # Same docx check but with the text filter chained before ooxml.
        config = self.dedent("\n matrix:\n - name: docx\n default_encoding: latin-1\n sources:\n - 'tests/**/*.docx'\n aspell:\n lang: en\n d: en_US\n hunspell:\n d: en_US\n pipeline:\n - pyspelling.filters.text\n - pyspelling.filters.ooxml\n ").format(self.tempdir)
        self.mktemp('.docx.yml', config, 'utf-8')
        self.assert_spellcheck('.docx.yml', ['tihs', 'smoe', 'txet'])
def get_reconciled_tree_zmasek(gtree, sptree, inplace=False):
    # Reconcile a gene tree against a species tree: each internal gene-tree
    # node is labelled speciation ('S') or duplication ('D') by mapping it to
    # the LCA of its children's species-tree mappings (Zmasek/Eddy style).

    def cleanup(tree):
        # Remove the temporary 'M' (species-tree mapping) property.
        for node in tree.traverse():
            node.del_prop('M')
    if (not inplace):
        gtree = gtree.copy('deepcopy')
    # Every species in the gene tree must exist in the species tree.
    missing_sp = (gtree.get_species() - sptree.get_species())
    if missing_sp:
        raise KeyError(('* The following species are not contained in the species tree: ' + ', '.join(missing_sp)))
    sp2node = dict()
    for node in sptree.leaves():
        sp2node[node.species] = node
    # NOTE(review): 'species' is unused below — candidate for removal.
    species = sptree.get_species()
    # Map each gene-tree leaf to its species-tree leaf.
    for node in gtree.leaves():
        node.add_prop('M', sp2node[node.species])
    for node in gtree.traverse(strategy='postorder'):
        if (len(node.children) == 0):
            continue
        if (len(node.children) != 2):
            cleanup(gtree)
            raise ValueError('Algorithm can only work with binary trees.')
        # Internal node maps to the LCA of its children's mappings.
        lca = node.children[0].M.get_common_ancestor(node.children[1].M)
        node.add_prop('M', lca)
        node.add_prop('evoltype', 'S')
        # If either child maps to the same species node, it is a duplication.
        if ((id(node.children[0].M) == id(node.M)) or (id(node.children[1].M) == id(node.M))):
            node.add_prop('evoltype', 'D')
    cleanup(gtree)
    return gtree
class Perm021FCCCRBiasTestCase(unittest.TestCase):
    # Validates the perm021fc_ccr_bias op (matmul over a 0-2-1 permuted input,
    # plus bias) against a PyTorch linear reference.

    def _test_perm021fc_ccr_bias(self, test_name='perm021fc_ccr_bias', dtype='float16'):
        B = 1024
        M = 128
        K = 742
        N = 64
        target = detect_target()
        # X is laid out (B, K, M); the op consumes it as (B, M, K).
        X = Tensor(shape=[B, K, M], dtype=dtype, name='input_0', is_input=True)
        W = Tensor(shape=[1, N, K], dtype=dtype, name='input_1', is_input=True)
        BIAS = Tensor(shape=[N], dtype=dtype, name='input_2', is_input=True)
        OP = ops.perm021fc_ccr_bias()
        Y = OP(X, W, BIAS)
        Y._attrs['name'] = 'output_0'
        Y._attrs['is_output'] = True
        module = compile_model(Y, target, './tmp', test_name)
        X_pt = get_random_torch_tensor([B, K, M], dtype=dtype)
        W_pt = get_random_torch_tensor([N, K], dtype=dtype)
        B_pt = (get_random_torch_tensor([N], dtype=dtype) * 0.5)
        # Reference: permute to (B, M, K), flatten, then a standard linear.
        XT = X_pt.permute(0, 2, 1)
        XT = torch.reshape(XT, ((- 1), K))
        Y_pt = torch.nn.functional.linear(XT, W_pt, bias=B_pt)
        Y_pt = torch.reshape(Y_pt, (B, M, N))
        y = torch.empty_like(Y_pt)
        module.run_with_tensors({'input_0': X_pt, 'input_1': W_pt.unsqueeze(0), 'input_2': B_pt}, [y])
        # Loose tolerances to accommodate half-precision accumulation.
        self.assertTrue(torch.allclose(Y_pt, y, atol=0.1, rtol=0.1))

    def test_perm021fc_ccr_bias_fp16(self):
        self._test_perm021fc_ccr_bias(test_name='perm021fc_ccr_bias_fp16', dtype='float16')

    def test_perm021fc_ccr_bias_float32_sm80(self):
        # NOTE(review): name says sm80 but no capability guard is visible here.
        self._test_perm021fc_ccr_bias(test_name='perm021fc_ccr_bias_fp32', dtype='float32')

    def test_perm021fc_ccr_bias_bf16(self):
        self._test_perm021fc_ccr_bias(test_name='perm021fc_ccr_bias_bf16', dtype='bfloat16')
def register_args(program: ArgumentParser) -> None:
    # Register the face-debugger CLI option on the given parser; accepts one
    # or more items from frame_processors_choices.face_debugger_items.
    program.add_argument('--face-debugger-items', help=wording.get('face_debugger_items_help').format(choices=', '.join(frame_processors_choices.face_debugger_items)), default=['kps', 'face-mask'], choices=frame_processors_choices.face_debugger_items, nargs='+', metavar='FACE_DEBUGGER_ITEMS')
class Admin(User):
    """A user who additionally carries a list of administrative privileges."""

    def __init__(self, first_name, last_name, username, email, location):
        """Initialize like a regular User, starting with no privileges."""
        super().__init__(first_name, last_name, username, email, location)
        self.privileges = []

    def show_privileges(self):
        """Print a header followed by one line per privilege."""
        print('\nPrivileges:')
        for item in self.privileges:
            print(f'- {item}')
def test_draw_affine():
    # Drawing through an affine transform must change the image signature,
    # and affine() must validate its argument list.
    with Image(width=100, height=100, background='skyblue') as img:
        was = img.signature
        img.format = 'png'
        with Drawing() as ctx:
            ctx.affine([1.5, 0.5, 0, 1.5, 45, 25])
            ctx.rectangle(top=5, left=5, width=25, height=25)
            ctx.draw(img)
        assert (was != img.signature)
    # Exactly six matrix coefficients are required...
    with raises(ValueError):
        with Drawing() as ctx:
            ctx.affine([1.0])
    # ...and they must be numeric.
    with raises(TypeError):
        with Drawing() as ctx:
            ctx.affine(['a', 'b', 'c', 'd', 'e', 'f'])
def extractPenjournalhappyWordpressCom(item):
    """Map a penjournalhappy.wordpress.com feed item to a release message.

    Returns None for previews/untagged chapters, a release message for a
    recognized series tag, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # (tag-in-feed, canonical series name, translation type)
    tagmap = [('Villain Reformation System', 'Pulling Together a Villain Reformation Strategy', 'translated'), ('Villain Reformation Strategy', 'Pulling Together a Villain Reformation Strategy', 'translated'), ('Want to Ascend? Then Fall in Love', 'Want to Ascend? Then Fall in Love', 'translated'), ('Golden Stage', 'Golden Stage', 'translated'), ('a tale of strategies for the throne', 'a tale of strategies for the throne', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    for tagname, name, tl_type in tagmap:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def match_to_str(ofmatch):
    """Render an OFPMatch as a plain dict keyed by legacy (dl_*/nw_*/tp_*) names.

    Unknown OXM fields keep their original name; when two fields map to the
    same legacy key (e.g. tcp_src and udp_src -> tp_src) the first one wins.
    """
    legacy_names = {'eth_src': 'dl_src', 'eth_dst': 'dl_dst', 'eth_type': 'dl_type', 'vlan_vid': 'dl_vlan', 'ipv4_src': 'nw_src', 'ipv4_dst': 'nw_dst', 'ip_proto': 'nw_proto', 'tcp_src': 'tp_src', 'tcp_dst': 'tp_dst', 'udp_src': 'tp_src', 'udp_dst': 'tp_dst'}
    result = {}
    for field in ofmatch.to_jsondict()['OFPMatch']['oxm_fields']:
        tlv = field['OXMTlv']
        key = legacy_names.get(tlv['field'], tlv['field'])
        mask = tlv['mask']
        value = tlv['value']
        if key == 'dl_vlan':
            # VLAN ids need the special present-bit aware rendering.
            value = match_vid_to_str(value, mask)
        elif key == 'in_port':
            value = UTIL.ofp_port_to_user(value)
        elif mask is not None:
            value = str(value) + '/' + str(mask)
        # First mapping for a key wins (mirrors dict.setdefault semantics).
        if key not in result:
            result[key] = value
    return result
class close_raw_session_result():
    # Thrift-generated result struct for close_raw_session; field 1 ('se')
    # carries a SessionException raised by the server, if any.
    # NOTE(review): generated code — oddities (no-self isUnion, __init__ = None,
    # 'THeaderProtocolAccelerate' spelling) are left exactly as emitted.
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    __init__ = None

    def isUnion():
        return False

    def read(self, iprot):
        # Fast path: C-accelerated decode for the binary protocol.
        if ((isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            self.checkRequired()
            return
        # Fast path: C-accelerated decode for the compact protocol.
        if ((isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocolAccelerate) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            self.checkRequired()
            return
        # Slow path: generic field-by-field decode.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if (ftype == TType.STOP):
                break
            if (fid == 1):
                if (ftype == TType.STRUCT):
                    self.se = SessionException()
                    self.se.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.checkRequired()

    def checkRequired(self):
        # No required fields on this struct.
        return

    def write(self, oprot):
        # Fast path: C-accelerated encode for the binary protocol.
        if ((isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        # Fast path: C-accelerated encode for the compact protocol.
        if ((isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocolAccelerate) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        # Slow path: generic field-by-field encode.
        oprot.writeStructBegin('close_raw_session_result')
        if (self.se != None):
            oprot.writeFieldBegin('se', TType.STRUCT, 1)
            self.se.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __repr__(self):
        L = []
        padding = (' ' * 4)
        if (self.se is not None):
            value = pprint.pformat(self.se, indent=0)
            value = padding.join(value.splitlines(True))
            L.append((' se=%s' % value))
        return ('%s(%s)' % (self.__class__.__name__, (('\n' + ',\n'.join(L)) if L else '')))

    def __eq__(self, other):
        if (not isinstance(other, self.__class__)):
            return False
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return (not (self == other))
    # Defining __eq__ disables inherited hashing on Python 3; restore it.
    if (not six.PY2):
        __hash__ = object.__hash__
# Fix: the bare register_type(...) call was a mangled class decorator — as a
# plain statement its return value was discarded and the class never registered.
@_PathAttribute.register_type(BGP_ATTR_TYPE_EXTENDED_COMMUNITIES)
class BGPPathAttributeExtendedCommunities(_PathAttribute):
    """BGP Extended Communities path attribute."""
    _ATTR_FLAGS = (BGP_ATTR_FLAG_OPTIONAL | BGP_ATTR_FLAG_TRANSITIVE)
    _class_prefixes = ['BGP']

    def __init__(self, communities, flags=0, type_=None, length=None):
        super(BGPPathAttributeExtendedCommunities, self).__init__(flags=flags, type_=type_, length=length)
        self.communities = communities

    # Fix: parse_value takes `cls` and is an alternate-construction hook, so it
    # must be a classmethod (the decorator was stripped in the dump).
    @classmethod
    def parse_value(cls, buf):
        # Consume the buffer as a sequence of extended communities.
        rest = buf
        communities = []
        while rest:
            (comm, rest) = _ExtendedCommunity.parse(rest)
            communities.append(comm)
        return {'communities': communities}

    def serialize_value(self):
        buf = bytearray()
        for comm in self.communities:
            buf += comm.serialize()
        return buf

    def _community_list(self, subtype):
        # Printable 'admin:local' forms of communities of the given subtype.
        _list = []
        for comm in (c for c in self.communities if (hasattr(c, 'subtype') and (c.subtype == subtype))):
            if ((comm.type == 0) or (comm.type == 2)):
                _list.append(('%d:%d' % (comm.as_number, comm.local_administrator)))
            elif (comm.type == 1):
                _list.append(('%s:%d' % (comm.ipv4_address, comm.local_administrator)))
        return _list

    def rt_list(self):
        """Route-target communities (subtype 2)."""
        return self._community_list(2)

    def soo_list(self):
        """Site-of-origin communities (subtype 3)."""
        return self._community_list(3)
def test_difference_same_mode_raises_exceptions_if_frame_1_frame_2_different_lengths():
    # 'same' mode requires two frames of equal length; a single frame,
    # mismatched lengths, or non-range frame arguments must all be rejected.
    with pytest.raises(scared.PreprocessError):
        scared.preprocesses.high_order.Difference(frame_1=range(60), mode='same')
    with pytest.raises(scared.PreprocessError):
        scared.preprocesses.high_order.Difference(frame_1=range(60), frame_2=range(10), mode='same')
    with pytest.raises(scared.PreprocessError):
        scared.preprocesses.high_order.Difference(frame_1=60, frame_2=range(10), mode='same')
    with pytest.raises(scared.PreprocessError):
        scared.preprocesses.high_order.Difference(frame_1=range(60), frame_2=10, mode='same')
class CLICommand(Command):
    """Collect Azure virtual-network CIDR blocks and report any overlaps."""

    # Fix: from_cli takes `cls` and returns cls(**vars(args)) but lacked
    # @classmethod, so it could not be called on the class as intended.
    @classmethod
    def from_cli(cls, parser, argv, cfg):
        """Alternate constructor: build the command from CLI arguments."""
        parser.add_argument('--exclude-block', action='append', dest='exclude_blocks', default=cfg('exclude_block', type=List(IPNet), default=[]), help='exclude CIDR blocks from check')
        args = parser.parse_args(argv)
        return cls(**vars(args))

    def __init__(self, exclude_blocks):
        super().__init__()
        # Normalize exclusions to ip_network objects for overlap checks.
        self.exclude_blocks = [ip_network(b) for b in exclude_blocks]
        self.cidrs = []

    def execute(self, session, acct):
        """Return (prefix, location, vnet_id) for every address prefix in the account."""
        nmc = NetworkManagementClient(session, acct)
        return [(c, vnet.location, vnet.id) for vnet in nmc.virtual_networks.list_all() for c in vnet.address_space.address_prefixes]

    def collect_results(self, acct, get_result):
        """Accumulate non-excluded CIDRs from one account's results."""
        for (block, location, vnet_id) in get_result():
            block = ip_network(block)
            if any((block.overlaps(e) for e in self.exclude_blocks)):
                continue
            cidr = _CIDR(acct, location, vnet_id, block)
            self.cidrs.append(cidr)
            print(f'Found CIDR {cidr}', flush=True)

    def post_hook(self):
        """After all accounts: report every overlapping pair of collected CIDRs."""
        overlap = []
        for i in range(0, len(self.cidrs)):
            c1 = self.cidrs[i]
            for j in range((i + 1), len(self.cidrs)):
                c2 = self.cidrs[j]
                if c1.overlaps(c2):
                    overlap.append(sorted((c1, c2)))
        for (c1, c2) in sorted(overlap, key=(lambda x: x[0])):
            print(f'OVERLAP! {c1} <<<>>> {c2}')
import pytest


# Fix: the leading-dot fragments were mangled @pytest.mark.* decorators and the
# signatures had a stripped annotation on test_client_api (original type lost —
# dropped rather than guessed; presumably an async HTTP client — confirm).
@pytest.mark.asyncio
@pytest.mark.workspace_host
class TestGetOAuthProvider:
    """API tests for GET /oauth-providers/{id}."""

    async def test_unauthorized(self, unauthorized_api_assertions: HTTPXResponseAssertion, test_client_api, test_data: TestData):
        # Without credentials the endpoint must reject the request.
        oauth_provider = test_data['oauth_providers']['google']
        response = await test_client_api.get(f'/oauth-providers/{oauth_provider.id}')
        unauthorized_api_assertions(response)

    @pytest.mark.authenticated_admin
    async def test_not_existing(self, test_client_api, not_existing_uuid: uuid.UUID):
        # Unknown provider ids yield 404 even for admins.
        response = await test_client_api.get(f'/oauth-providers/{not_existing_uuid}')
        assert response.status_code == status.HTTP_404_NOT_FOUND

    @pytest.mark.authenticated_admin
    async def test_valid(self, test_client_api, test_data: TestData):
        # An admin can fetch an existing provider.
        oauth_provider = test_data['oauth_providers']['google']
        response = await test_client_api.get(f'/oauth-providers/{oauth_provider.id}')
        assert response.status_code == status.HTTP_200_OK
class String(TraitType):
    # Trait for strings constrained by length bounds and/or a regex; picks the
    # cheapest validator covering the configured constraints.
    default_value_type = DefaultValue.constant

    def __init__(self, value='', minlen=0, maxlen=sys.maxsize, regex='', **metadata):
        super().__init__(value, **metadata)
        self.minlen = max(0, minlen)
        # maxlen can never be below minlen.
        self.maxlen = max(self.minlen, maxlen)
        self.regex = regex
        self._init()

    def _init(self):
        # Choose the validator. NOTE(review): nesting reconstructed from a
        # flattened dump — the inner 'if' narrows regex-only to validate_regex;
        # regex + length bounds keeps the validate_all default. Confirm against
        # upstream.
        self._validate = 'validate_all'
        if (self.regex != ''):
            self.match = re.compile(self.regex).match
            if ((self.minlen == 0) and (self.maxlen == sys.maxsize)):
                self._validate = 'validate_regex'
        elif ((self.minlen == 0) and (self.maxlen == sys.maxsize)):
            self._validate = 'validate_str'
        else:
            self._validate = 'validate_len'

    def validate(self, object, name, value):
        # Dispatch to the validator selected in _init().
        return getattr(self, self._validate)(object, name, value)

    def validate_all(self, object, name, value):
        # Both length bounds and regex apply.
        try:
            value = strx(value)
            if ((self.minlen <= len(value) <= self.maxlen) and (self.match(value) is not None)):
                return value
        except:
            pass
        self.error(object, name, value)

    def validate_str(self, object, name, value):
        # Only coercion to str applies.
        try:
            return strx(value)
        except:
            pass
        self.error(object, name, value)

    def validate_len(self, object, name, value):
        # Only the length bounds apply.
        try:
            value = strx(value)
            if (self.minlen <= len(value) <= self.maxlen):
                return value
        except:
            pass
        self.error(object, name, value)

    def validate_regex(self, object, name, value):
        # Only the regex applies.
        try:
            value = strx(value)
            if (self.match(value) is not None):
                return value
        except:
            pass
        self.error(object, name, value)

    def info(self):
        # Human-readable description of the constraints, used in error messages.
        msg = ''
        if ((self.minlen != 0) and (self.maxlen != sys.maxsize)):
            msg = (' between %d and %d characters long' % (self.minlen, self.maxlen))
        elif (self.maxlen != sys.maxsize):
            msg = (' <= %d characters long' % self.maxlen)
        elif (self.minlen != 0):
            msg = (' >= %d characters long' % self.minlen)
        if (self.regex != ''):
            if (msg != ''):
                msg += ' and'
            msg += (" matching the pattern '%s'" % self.regex)
        return ('a string' + msg)

    def create_editor(self):
        return default_text_editor(self)

    def __getstate__(self):
        # Compiled regex matchers are not picklable; drop them from state.
        result = self.__dict__.copy()
        for name in ['validate', 'match']:
            if (name in result):
                del result[name]
        return result

    def __setstate__(self, state):
        # Rebuild the dropped, unpicklable members.
        self.__dict__.update(state)
        self._init()
# Fix: `_view(['GET'])` was a mangled @api_view(['GET']) decorator (a call to
# an undefined name as written); restored. Assumes api_view is imported at the
# top of this file alongside Response/NotValid — confirm.
@api_view(['GET'])
def ghost_generics(request, format=None):
    """Spending on ghost-branded generics for an org, grouped as requested.

    group_by: 'presentation' (per-presentation rows), 'all' (single total),
    or 'practice' (per practice, expanding a CCG to its practices).
    """
    date = request.query_params.get('date')
    entity_code = request.query_params.get('entity_code')
    entity_type = request.query_params.get('entity_type').lower()
    group_by = request.query_params.get('group_by', 'practice')
    if not date:
        raise NotValid('You must supply a date')
    if group_by == 'presentation':
        results = get_ghost_branded_generic_spending(date, entity_type, [entity_code])
    elif group_by == 'all':
        total = get_total_ghost_branded_generic_spending(date, entity_type, entity_code)
        results = [{'possible_savings': total}]
    elif group_by == 'practice':
        if entity_type == 'practice':
            child_org_type = 'practice'
            child_org_ids = [entity_code]
        elif entity_type == 'ccg':
            # Expand a CCG into its member practices.
            child_org_type = 'practice'
            child_org_ids = _get_practice_codes_for_ccg(entity_code)
        else:
            # Fix: typo 'Unhanlded' -> 'Unhandled' in the error message.
            raise ValueError('Unhandled org_type: {}'.format(entity_type))
        results = get_ghost_branded_generic_spending(date, child_org_type, child_org_ids)
    else:
        raise ValueError(group_by)
    # Tag every row with the requesting entity for CSV/JSON consumers.
    for result in results:
        result[entity_type] = entity_code
    response = Response(results)
    if request.accepted_renderer.format == 'csv':
        filename = 'ghost-generics-%s-%s' % (entity_code, date)
        filename = '%s.csv' % filename
        response['content-disposition'] = 'attachment; filename=%s' % filename
    return response
def fortios_ftp_proxy(data, fos):
    """Apply the ftp-proxy explicit configuration and summarize the outcome.

    Returns (failed, changed, response, diff-placeholder).
    """
    fos.do_member_operation('ftp-proxy', 'explicit')
    if data['ftp_proxy_explicit']:
        resp = ftp_proxy_explicit(data, fos)
    else:
        # fail_json is expected to terminate the module run here.
        # NOTE(review): if it ever returned, `resp` below would be unbound —
        # preserved as-is from the original.
        fos._module.fail_json(msg=('missing task body: %s' % 'ftp_proxy_explicit'))
    succeeded = is_successful_status(resp)
    changed = succeeded and (resp['revision_changed'] if 'revision_changed' in resp else True)
    return (not succeeded, changed, resp, {})
import pytest


# Fix: the leading-dot `.skipif(...)` was a mangled @pytest.mark.skipif.
@pytest.mark.skipif(utils.complex_mode, reason='Not complex differentiable')
def test_coefficient_derivatives():
    # derivative(..., coefficient_derivatives=...) must apply dg/df = 1 when
    # differentiating phi w.r.t. f; without it the g-term is (wrongly) dropped.
    m = UnitSquareMesh(3, 3)
    x = SpatialCoordinate(m)
    V = FunctionSpace(m, 'CG', 1)
    f = Function(V)
    g = Function(V)
    f.interpolate(((1 + x[0]) + x[1]))
    g.assign((f + 1))
    cd = {g: 1}
    phi = ((f + (g ** 2)) * dx)
    v = TestFunction(V)
    # Hand-derived derivative with dg/df = 1 applied.
    manual = (((1 + (2 * g)) * v) * dx)
    wrong = derivative(phi, f)
    correct = derivative(phi, f, coefficient_derivatives=cd)
    assert np.allclose(assemble(wrong).dat.data_ro, assemble((v * dx)).dat.data_ro)
    assert np.allclose(assemble(manual).dat.data_ro, assemble(correct).dat.data_ro)
class OptionPlotoptionsSunburstSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    """Generated wrapper for sunburst sonification tremolo-speed mapping options.

    Fix: each pair of same-named defs was a property getter/setter whose
    @property/@x.setter decorators were stripped — as written each setter
    silently shadowed its getter, making reads impossible.
    """

    @property
    def mapFunction(self):
        """Mapping function for the value (framework default when unset)."""
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        """Data property to map the tremolo speed to."""
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        """Upper bound of the mapped range."""
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        """Lower bound of the mapped range."""
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        """Scope the mapping is computed within."""
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
import functools


# Fix: the bare `_ordering` token was a mangled @functools.total_ordering
# decorator (a NameError as written); the class defines __eq__ and __lt__ for
# it to complete.
@functools.total_ordering
class ASPathFilter(Filter):
    """Filter matching a BGP path's AS_PATH against a single AS number."""
    POLICY_TOP = 2
    POLICY_END = 3
    POLICY_INCLUDE = 4
    POLICY_NOT_INCLUDE = 5

    def __init__(self, as_number, policy):
        super(ASPathFilter, self).__init__(policy)
        self._as_number = as_number

    def __lt__(self, other):
        return self.as_number < other.as_number

    def __eq__(self, other):
        return self.as_number == other.as_number

    def __repr__(self):
        policy = 'TOP'
        if (self._policy == self.POLICY_INCLUDE):
            policy = 'INCLUDE'
        elif (self._policy == self.POLICY_NOT_INCLUDE):
            policy = 'NOT_INCLUDE'
        elif (self._policy == self.POLICY_END):
            policy = 'END'
        return ('ASPathFilter(as_number=%s,policy=%s)' % (self._as_number, policy))

    # Fix: as_number/policy are read as attributes throughout (e.g.
    # `self.policy == ASPathFilter.POLICY_TOP` in evaluate, and the
    # comparisons above), which only works if these are properties — the
    # @property decorators were stripped in the dump.
    @property
    def as_number(self):
        return self._as_number

    @property
    def policy(self):
        return self._policy

    def evaluate(self, path):
        """Return (policy, matched) for the given path's first AS_PATH segment."""
        path_aspath = path.pathattr_map.get(BGP_ATTR_TYPE_AS_PATH)
        path_seg_list = path_aspath.path_seg_list
        if path_seg_list:
            path_seg = path_seg_list[0]
        else:
            path_seg = []
        result = False
        LOG.debug('path_seg : %s', path_seg)
        if (self.policy == ASPathFilter.POLICY_TOP):
            # Match when our AS is the first (leftmost) AS.
            if ((len(path_seg) > 0) and (path_seg[0] == self._as_number)):
                result = True
        elif (self.policy == ASPathFilter.POLICY_INCLUDE):
            for aspath in path_seg:
                LOG.debug('POLICY_INCLUDE as_number : %s', aspath)
                if (aspath == self._as_number):
                    result = True
                    break
        elif (self.policy == ASPathFilter.POLICY_END):
            # Match when our AS is the last (origin) AS.
            if ((len(path_seg) > 0) and (path_seg[(- 1)] == self._as_number)):
                result = True
        elif (self.policy == ASPathFilter.POLICY_NOT_INCLUDE):
            if (self._as_number not in path_seg):
                result = True
        return (self.policy, result)

    def clone(self):
        """Return an independent copy with the same AS number and policy."""
        return self.__class__(self._as_number, policy=self._policy)
class ComposerHandler(object):
    # Consumes compose requests, fans each compose out to a content-type
    # specific composer thread, and aggregates their results.

    def __init__(self, db_factory: typing.Union[(transactional_session_maker, None)]=None, compose_dir: str=config.get('compose_dir')):
        if (not db_factory):
            self.db_factory = transactional_session_maker()
        else:
            self.db_factory = db_factory
        self.compose_dir = compose_dir
        # Caps how many composes may run concurrently.
        self.max_composes_sem = threading.BoundedSemaphore(config.get('max_concurrent_composes'))
        # Fail fast on misconfigured filesystem paths.
        for setting in ('pungi.cmd', 'compose_dir', 'compose_stage_dir'):
            try:
                validate_path(config[setting])
            except ValueError as e:
                raise ValueError('{} Check the {} setting.'.format(str(e), setting))

    def run(self, api_version: int, data: dict):
        # Execute all requested composes, one composer thread per compose,
        # then wait for completion and log a summary.
        resume = data.get('resume', False)
        agent = data.get('agent')
        notifications.publish(compose_schemas.ComposeStartV1.from_dict(dict(agent=agent)), force=True)
        results = []
        threads = []
        for compose in self._get_composes(api_version, data):
            log.info('Now starting composes')
            composer = get_composer(ContentType.from_string(compose['content_type']))
            if (not composer):
                log.error('Unsupported content type %s submitted for composing. SKIPPING', compose['content_type'])
                continue
            thread = composer(self.max_composes_sem, compose, agent, self.db_factory, self.compose_dir, resume)
            threads.append(thread)
            thread.start()
        log.info('All of the batches are running. Now waiting for the final results')
        for thread in threads:
            thread.join()
            for result in thread.results():
                results.append(result)
        # NOTE(review): this message was split across lines in the dump;
        # reassembled with a single space — confirm against upstream.
        log.info('Push complete! Summary follows:')
        for result in results:
            log.info(result)

    def _get_composes(self, api_version: int, data: dict):
        # Resolve the request payload into Compose rows still in the
        # 'requested' state, mark them pending, and return their JSON forms.
        with self.db_factory() as db:
            if (api_version == 2):
                try:
                    composes = [Compose.from_dict(db, c) for c in data['composes']]
                except sqlalchemy.orm.exc.NoResultFound:
                    log.info('Ignoring a compose task that references non-existing Composes')
                    return []
            else:
                raise ValueError('Unable to process request: {}'.format(data))
            composes = [c for c in composes if (c.state == ComposeState.requested)]
            for c in composes:
                c.state = ComposeState.pending
            return [c.__json__(composer=True) for c in composes]
def run_tasks(tasks: List[str]) -> None:
    """Run each task entry; a '{'-prefixed entry is a JSON mapping of task name to body."""
    from src.task import run_task

    def parse_task(raw: str) -> Union[(str, Dict[(str, Any)])]:
        # JSON-object entries carry explicit name -> body pairs.
        logging.debug('Parsing: %s', raw)
        if not raw.startswith('{'):
            return raw
        parsed = json.loads(raw)
        assert isinstance(parsed, dict)
        return parsed

    for entry in tasks:
        task = parse_task(entry)
        if not isinstance(task, dict):
            # Plain string: the entry is both the task name and its body.
            run_task(task, task)
            continue
        for name, body in task.items():
            run_task(name, body)
    return
class OptionSeriesTimelineSonificationDefaultspeechoptionsMapping(Options):
    """Generated wrapper for timeline sonification default speech mapping options.

    Fix: the duplicate `text` defs were a property getter/setter pair whose
    @property/@text.setter decorators were stripped — the setter shadowed the
    getter; the sub-option accessors are likewise restored as properties.
    """

    @property
    def pitch(self) -> 'OptionSeriesTimelineSonificationDefaultspeechoptionsMappingPitch':
        """Sub-options controlling speech pitch mapping."""
        return self._config_sub_data('pitch', OptionSeriesTimelineSonificationDefaultspeechoptionsMappingPitch)

    @property
    def playDelay(self) -> 'OptionSeriesTimelineSonificationDefaultspeechoptionsMappingPlaydelay':
        """Sub-options controlling the play delay."""
        return self._config_sub_data('playDelay', OptionSeriesTimelineSonificationDefaultspeechoptionsMappingPlaydelay)

    @property
    def rate(self) -> 'OptionSeriesTimelineSonificationDefaultspeechoptionsMappingRate':
        """Sub-options controlling speech rate mapping."""
        return self._config_sub_data('rate', OptionSeriesTimelineSonificationDefaultspeechoptionsMappingRate)

    @property
    def text(self):
        """Text to speak (framework default when unset)."""
        return self._config_get(None)

    @text.setter
    def text(self, text: str):
        self._config(text, js_type=False)

    @property
    def time(self) -> 'OptionSeriesTimelineSonificationDefaultspeechoptionsMappingTime':
        """Sub-options controlling time mapping."""
        return self._config_sub_data('time', OptionSeriesTimelineSonificationDefaultspeechoptionsMappingTime)

    @property
    def volume(self) -> 'OptionSeriesTimelineSonificationDefaultspeechoptionsMappingVolume':
        """Sub-options controlling volume mapping."""
        return self._config_sub_data('volume', OptionSeriesTimelineSonificationDefaultspeechoptionsMappingVolume)
def check_element(element, top=True):
    """Recursively verify that *element* and its sub-elements are supported.

    Raises NotImplementedError for unsupported families on hexahedral cells,
    and ValueError when a mixed-element modifier is not outermost.
    """
    if ((element.cell.cellname() == 'hexahedron') and (element.family() not in ['Q', 'DQ'])):
        # Fix: the original passed the family as a *second* exception argument
        # via a stray comma; format it into a single message instead.
        raise NotImplementedError(
            "Currently can only use 'Q' and/or 'DQ' elements on hexahedral "
            f"meshes, not {element.family()}")
    # Determine which sub-elements need recursive checking.
    if (type(element) in (finat.ufl.BrokenElement, finat.ufl.RestrictedElement, finat.ufl.HDivElement, finat.ufl.HCurlElement)):
        inner = (element._element,)
    elif (type(element) is finat.ufl.EnrichedElement):
        inner = element._elements
    elif (type(element) is finat.ufl.TensorProductElement):
        inner = element.sub_elements
    elif isinstance(element, finat.ufl.MixedElement):
        if (not top):
            # Mixed/vector modifiers may only appear at the outermost level.
            raise ValueError(f'{type(element).__name__} modifier must be outermost')
        else:
            inner = element.sub_elements
    else:
        inner = ()
    for e in inner:
        check_element(e, top=False)
class InventoryConfig(AbstractInventoryConfig):
    """Stub inventory configuration: every accessor raises NotImplementedError.

    NOTE(review): __init__ accepts gsuite/record/replay settings but discards
    them, forwarding only *args/**kwargs to the base class — confirm this is
    the intended placeholder behavior.
    """

    def __init__(self, organization_id, gsuite_sa_path, gsuite_admin_email, record_file=None, replay_file=None, *args, **kwargs):
        super(InventoryConfig, self).__init__(*args, **kwargs)

    def get_root_resource_id(self):
        # Not implemented in this stub.
        raise NotImplementedError()

    def get_gsuite_admin_email(self):
        # Not implemented in this stub.
        raise NotImplementedError()

    def get_service_config(self):
        # Not implemented in this stub.
        raise NotImplementedError()

    def set_service_config(self, service_config):
        # Not implemented in this stub.
        raise NotImplementedError()

    def get_replay_file(self):
        # Not implemented in this stub.
        raise NotImplementedError()

    def get_record_file(self):
        # Not implemented in this stub.
        raise NotImplementedError()
class FunctionGenerator(ABC):
    """Base class for objects that produce function-generator samples."""

    def set_channel_config(self, channel_config: FunctionChannelConfig) -> None:
        # Store the per-channel configuration for use by concrete generators.
        self.channel_config = channel_config

    def next_sample(self) -> FunctionGeneratorMessage:
        # Subclasses must override. Not marked @abstractmethod, so the class
        # is instantiable and this only fails when actually called.
        raise NotImplementedError()

    def next_n_samples(self, n: int=1) -> List[ndarray]:
        """Collect *n* successive results of next_sample() into a list.

        NOTE(review): return annotation says List[ndarray] while next_sample()
        is annotated FunctionGeneratorMessage — confirm which is correct.
        """
        samples = [self.next_sample() for _ in range(n)]
        return samples
class OptionPlotoptionsTimelineDatalabelsStyle(Options):
    """Generated config wrapper for plotOptions.timeline.dataLabels.style.

    NOTE(review): each same-named getter/setter pair below looks like a
    stripped @property/@<name>.setter pair; as written the later def
    overrides the earlier one.
    """

    def fontSize(self):
        # Getter; default '0.8em'.
        return self._config_get('0.8em')

    def fontSize(self, num: float):
        self._config(num, js_type=False)

    def fontWeight(self):
        # Getter; default 'normal'.
        return self._config_get('normal')

    def fontWeight(self, text: str):
        self._config(text, js_type=False)

    def textOutline(self):
        # Getter; default 'none'.
        return self._config_get('none')

    def textOutline(self, text: str):
        self._config(text, js_type=False)
def mark_task_logs_as_failed(year, month):
    """Mark the successful 'convert_hscic_prescribing' TaskLog for
    (year, month) — and every task downstream of it — as FAILED.

    The dependency DAG is rebuilt from tasks.json; an edge points from a
    dependency to the task that depends on it, so nx.descendants() yields
    everything that ran after (and because of) convert_hscic_prescribing.
    """
    print('mark_task_logs_as_failed')
    with open((settings.PIPELINE_METADATA_DIR + '/tasks.json')) as f:
        tasks = json.load(f)
    graph = nx.DiGraph()
    for (task_name, task_def) in tasks.items():
        for dependency_name in task_def.get('dependencies', []):
            # Edge direction: dependency runs before the dependent task.
            graph.add_edge(dependency_name, task_name)
    convert_task_log = TaskLog.objects.get(task_name='convert_hscic_prescribing', year=year, month=month, status=TaskLog.SUCCESSFUL)
    for task_name in nx.descendants(graph, 'convert_hscic_prescribing'):
        task_log = TaskLog.objects.get(task_name=task_name, year=year, month=month, status=TaskLog.SUCCESSFUL)
        # Sanity check: a dependent task must have started after its upstream.
        assert (task_log.started_at > convert_task_log.started_at)
        task_log.status = TaskLog.FAILED
        task_log.save()
    convert_task_log.status = TaskLog.FAILED
    convert_task_log.save()
class OptionPlotoptionsPackedbubbleSonificationDefaultinstrumentoptionsMappingTremolo(Options):
    """Generated config wrapper for
    plotOptions.packedbubble.sonification.defaultInstrumentOptions.mapping.tremolo."""

    def depth(self) -> 'OptionPlotoptionsPackedbubbleSonificationDefaultinstrumentoptionsMappingTremoloDepth':
        # Accessor for the nested tremolo 'depth' sub-configuration.
        return self._config_sub_data('depth', OptionPlotoptionsPackedbubbleSonificationDefaultinstrumentoptionsMappingTremoloDepth)

    def speed(self) -> 'OptionPlotoptionsPackedbubbleSonificationDefaultinstrumentoptionsMappingTremoloSpeed':
        # Accessor for the nested tremolo 'speed' sub-configuration.
        return self._config_sub_data('speed', OptionPlotoptionsPackedbubbleSonificationDefaultinstrumentoptionsMappingTremoloSpeed)
class bad_action_error_msg(error_msg):
    """OpenFlow (wire version 5) OFPET_BAD_ACTION error message.

    Generated-style message class: wire (de)serialization via struct,
    value equality, and pretty printing of the OFPBAC_* error code.
    """
    version = 5   # OpenFlow protocol version byte
    type = 1      # message type: error
    err_type = 2  # OFPET_BAD_ACTION

    def __init__(self, xid=None, code=None, data=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (code != None):
            self.code = code
        else:
            self.code = 0
        if (data != None):
            self.data = data
        else:
            self.data = ''
        return

    def pack(self):
        """Serialize to wire format; the 16-bit length at offset 2 is
        back-patched once the total size is known."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.err_type))
        packed.append(struct.pack('!H', self.code))
        packed.append(self.data)
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        # NOTE(review): takes `reader` as first parameter with no
        # @staticmethod decorator — in generated loxi code this is a
        # staticmethod; a decorator was likely lost. Confirm upstream.
        obj = bad_action_error_msg()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 1)
        _length = reader.read('!H')[0]
        orig_reader = reader
        # Restrict reading to this message's bytes (4 already consumed).
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _err_type = reader.read('!H')[0]
        assert (_err_type == 2)
        obj.code = reader.read('!H')[0]
        obj.data = str(reader.read_all())
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.code != other.code):
            return False
        if (self.data != other.data):
            return False
        return True

    def pretty_print(self, q):
        q.text('bad_action_error_msg {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('code = ')
                # Symbolic names for the OFPBAC_* code values.
                value_name_map = {0: 'OFPBAC_BAD_TYPE', 1: 'OFPBAC_BAD_LEN', 2: 'OFPBAC_BAD_EXPERIMENTER', 3: 'OFPBAC_BAD_EXPERIMENTER_TYPE', 4: 'OFPBAC_BAD_OUT_PORT', 5: 'OFPBAC_BAD_ARGUMENT', 6: 'OFPBAC_EPERM', 7: 'OFPBAC_TOO_MANY', 8: 'OFPBAC_BAD_QUEUE', 9: 'OFPBAC_BAD_OUT_GROUP', 10: 'OFPBAC_MATCH_INCONSISTENT', 11: 'OFPBAC_UNSUPPORTED_ORDER', 12: 'OFPBAC_BAD_TAG', 13: 'OFPBAC_BAD_SET_TYPE', 14: 'OFPBAC_BAD_SET_LEN', 15: 'OFPBAC_BAD_SET_ARGUMENT'}
                if (self.code in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.code], self.code)))
                else:
                    q.text(('%#x' % self.code))
                q.text(',')
                q.breakable()
                q.text('data = ')
                q.pp(self.data)
            q.breakable()
        q.text('}')
def index_to_coords(index: int, shape):
    """Convert a flat row-major *index* into a coordinate tuple for *shape*.

    The last axis varies fastest, matching C-order (numpy default) layout.
    """
    assert isinstance(index, int), (index, type(index))
    digits = []
    # Peel dimensions off from the innermost (last) axis outward.
    for extent in reversed(shape):
        index, remainder = divmod(index, extent)
        digits.append(remainder)
    result = tuple(reversed(digits))
    assert (len(result) == len(shape))
    return result
class OptionPlotoptionsLineLabelStyle(Options):
    """Generated config wrapper for plotOptions.line.label.style.

    NOTE(review): same-named getter/setter pairs suggest stripped
    @property/@<name>.setter decorators; later defs override earlier ones.
    """

    def fontSize(self):
        # Getter; default '0.8em'.
        return self._config_get('0.8em')

    def fontSize(self, num: float):
        self._config(num, js_type=False)

    def fontWeight(self):
        # Getter; default 'bold'.
        return self._config_get('bold')

    def fontWeight(self, text: str):
        self._config(text, js_type=False)
class ToolbarManager(GObject.Object):
    """Creates the top/left/right toolbar variants and switches between them
    when the bound 'toolbar_pos' GSettings-backed property changes."""

    # Current toolbar position name; bound to plugin GSettings below.
    toolbar_pos = GObject.property(type=str, default=TopToolbar.name)

    def __init__(self, plugin, main_box, viewmgr):
        super(ToolbarManager, self).__init__()
        self.plugin = plugin
        # Controllers are shared by all three toolbar variants.
        controllers = self._create_controllers(plugin, viewmgr)
        self._bars = {}
        self._bars[TopToolbar.name] = TopToolbar(plugin, main_box, controllers)
        self._bars[LeftToolbar.name] = LeftToolbar(plugin, main_box, controllers)
        self._bars[RightToolbar.name] = RightToolbar(plugin, main_box, controllers)
        self.last_toolbar_pos = None
        self._connect_signals()
        self._connect_properties()
        self._controllers = controllers
        # Track external toolbar visibility if alternative-toolbar is present.
        if hasattr(self.plugin.shell, 'alternative_toolbar'):
            self.plugin.shell.alternative_toolbar.connect('toolbar-visibility', self._visibility)

    def _visibility(self, altplugin, value):
        # Show or hide whichever bar is currently active.
        if value:
            self._bars[self.toolbar_pos].show()
        else:
            self._bars[self.toolbar_pos].hide()

    def set_enabled(self, enabled, toolbar_object=None):
        """Enable/disable one controller, or all when toolbar_object is None."""
        if toolbar_object:
            self._controllers[toolbar_object].enabled = enabled
        else:
            for controller in self._controllers:
                self._controllers[controller].enabled = enabled

    def _connect_signals(self):
        self.connect('notify::toolbar-pos', self._on_notify_toolbar_pos)

    def _connect_properties(self):
        # One-way bind: GSettings value -> toolbar_pos property.
        gs = GSetting()
        setting = gs.get_setting(gs.Path.PLUGIN)
        setting.bind(gs.PluginKey.TOOLBAR_POS, self, 'toolbar_pos', Gio.SettingsBindFlags.GET)

    def _create_controllers(self, plugin, viewmgr):
        """Build the controller for every toolbar widget slot."""
        controllers = {}
        album_model = viewmgr.source.album_manager.model
        controllers[ToolbarObject.PROPERTIES] = PropertiesMenuController(plugin, viewmgr.source)
        controllers[ToolbarObject.SORT_BY] = SortPopupController(plugin, viewmgr)
        controllers[ToolbarObject.SORT_ORDER] = SortOrderToggleController(plugin, viewmgr)
        controllers[ToolbarObject.SORT_BY_ARTIST] = ArtistSortPopupController(plugin, viewmgr)
        controllers[ToolbarObject.SORT_ORDER_ARTIST] = ArtistSortOrderToggleController(plugin, viewmgr)
        controllers[ToolbarObject.GENRE] = GenrePopupController(plugin, album_model)
        controllers[ToolbarObject.PLAYLIST] = PlaylistPopupController(plugin, album_model)
        controllers[ToolbarObject.DECADE] = DecadePopupController(plugin, album_model)
        controllers[ToolbarObject.SEARCH] = AlbumSearchEntryController(album_model)
        controllers[ToolbarObject.VIEW] = viewmgr.controller
        return controllers

    def _on_notify_toolbar_pos(self, *args):
        # Hide the previously shown bar (if any) before showing the new one.
        if self.last_toolbar_pos:
            self._bars[self.last_toolbar_pos].hide()
        self._bars[self.toolbar_pos].show()
        self.last_toolbar_pos = self.toolbar_pos
class OptionPlotoptionsWaterfallSonificationDefaultinstrumentoptionsMappingTremoloDepth(Options):
    """Generated config wrapper for
    plotOptions.waterfall.sonification.defaultInstrumentOptions.mapping.tremolo.depth.

    NOTE(review): same-named getter/setter pairs suggest stripped
    @property/@<name>.setter decorators; later defs override earlier ones.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def viewer_and_approver_user(db):
    """Yield a FidesUser holding the VIEWER_AND_APPROVER role (with an
    attached ClientDetail); the user is deleted on teardown.

    NOTE(review): generator-style setup/yield/teardown suggests a stripped
    @pytest.fixture decorator — confirm.
    """
    user = FidesUser.create(db=db, data={'username': 'test_fides_viewer_and_approver_user', 'password': '&%3Qe2fGo7'})
    client = ClientDetail(hashed_secret='thisisatest', salt='thisisstillatest', scopes=[], roles=[VIEWER_AND_APPROVER], user_id=user.id)
    FidesUserPermissions.create(db=db, data={'user_id': user.id, 'roles': [VIEWER_AND_APPROVER]})
    db.add(client)
    db.commit()
    db.refresh(client)
    (yield user)
    # Teardown: remove the test user.
    user.delete(db)
class CubeAxesActor2D(tvtk.CubeAxesActor2D):
    """TVTK CubeAxesActor2D subclass adding a `use_data_bounds` toggle and a
    Traits UI view for interactive configuration of the axes."""

    # When True the axes track the input data's bounds; when False the
    # explicit `bounds` trait is used instead (see the view's enabled_when).
    use_data_bounds = Bool(True)

    # This actor accepts any dataset/attribute combination in the pipeline.
    input_info = PipelineInfo(datasets=['any'], attribute_types=['any'], attributes=['any'])

    # Traits UI layout: visibility toggles, ranges, bounds, label settings
    # and miscellaneous display options.
    traits_view = View(Group(Group(Item('visibility'), HGroup(Item('x_axis_visibility', label='X axis'), Item('y_axis_visibility', label='Y axis'), Item('z_axis_visibility', label='Z axis')), show_border=True, label='Visibity'), Group(Item('use_ranges'), HGroup(Item('ranges', enabled_when='use_ranges')), show_border=True), Group(Item('use_data_bounds'), HGroup(Item('bounds', enabled_when='not use_data_bounds')), show_border=True), Group(Item('x_label'), Item('y_label'), Item('z_label'), Item('label_format'), Item('number_of_labels'), Item('font_factor'), show_border=True), HGroup(Item('show_actual_bounds', label='Use size bigger than screen', editor=BooleanEditor())), Item('fly_mode'), Item('corner_offset'), Item('layer_number'), springy=True), scrollable=True, resizable=True)
def monitor_generator(copr_dir, additional_fields):
    """Yield a {'name', 'chroots'} status dict for every package in *copr_dir*.

    Each chroot entry carries state/status/build_id, pkg_version, and —
    when requested via *additional_fields* — build/backend log URLs.

    NOTE(review): `anti_garbage_collector` appears to pin ORM objects so
    they are not collected while this lazy generator is consumed — confirm
    against the caller/session semantics.
    """
    anti_garbage_collector = set([copr_dir])
    packages = BuildsMonitorLogic.package_build_chroots(copr_dir)
    first = True
    for package in packages:
        if (first is True):
            # Timing checkpoint for the first DB round-trip.
            checkpoint('First package queried')
            first = False
        chroots = {}
        for bch in package['chroots']:
            chroot = chroots[bch.name] = {}
            for attr in ['state', 'status', 'build_id']:
                chroot[attr] = getattr(bch, attr)
            # Optional, relatively expensive URL fields.
            if ('url_build_log' in additional_fields):
                chroot['url_build_log'] = bch.rpm_live_log_url
            if ('url_backend_log' in additional_fields):
                chroot['url_backend_log'] = bch.rpm_backend_log_url
            anti_garbage_collector.add(bch.mock_chroot)
            anti_garbage_collector.add(bch.build.copr_dir)
            chroot['pkg_version'] = bch.build.pkg_version
        (yield {'name': package['name'], 'chroots': chroots})
    checkpoint('Last package queried')
()
# NOTE(review): the bare `()` above looks like the remnant of a stripped
# decorator call — most likely @frappe.whitelist(); confirm upstream.
def get_drugs_to_invoice(encounter):
    """Return billable Medication Request line items for the encounter's patient.

    Only submitted requests (docstatus 1) with billing_status Pending or
    Partly Invoiced are considered; the billable quantity is what remains
    after previous invoicing, capped by the total dispensable quantity when
    repeats are allowed. Returns None implicitly when the
    encounter/patient/customer chain is incomplete.
    """
    encounter = frappe.get_doc('Patient Encounter', encounter)
    if encounter:
        patient = frappe.get_doc('Patient', encounter.patient)
        if patient:
            if patient.customer:
                orders_to_invoice = []
                medication_requests = frappe.get_list('Medication Request', fields=['*'], filters={'patient': patient.name, 'order_group': encounter.name, 'billing_status': ['in', ['Pending', 'Partly Invoiced']], 'docstatus': 1})
                for medication_request in medication_requests:
                    is_billable = frappe.get_cached_value('Medication', medication_request.medication, ['is_billable'])
                    description = ''
                    if (medication_request.dosage and medication_request.period):
                        description = _('{0} for {1}').format(medication_request.dosage, medication_request.period)
                    # NOTE(review): 'medicaiton_item' is misspelled but may match
                    # the actual DocType fieldname — verify before renaming.
                    if (medication_request.medicaiton_item and is_billable):
                        # Remaining quantity after previous invoices.
                        billable_order_qty = (medication_request.get('quantity', 1) - medication_request.get('qty_invoiced', 0))
                        if medication_request.number_of_repeats_allowed:
                            if (medication_request.total_dispensable_quantity >= (medication_request.quantity + medication_request.qty_invoiced)):
                                billable_order_qty = medication_request.get('quantity', 1)
                            else:
                                # Cap at what can still be dispensed overall.
                                billable_order_qty = (medication_request.total_dispensable_quantity - medication_request.get('qty_invoiced', 0))
                        orders_to_invoice.append({'reference_type': 'Medication Request', 'reference_name': medication_request.name, 'drug_code': medication_request.medicaiton_item, 'quantity': billable_order_qty, 'description': description})
                return orders_to_invoice
def _generate_c_getopt_code(processed_args, getopt_string, opts, positionals, has_longopts):
    """Emit C source for a getopt/getopt_long based argument-parsing body.

    When *opts* is non-empty a getopt loop is generated (getopt_long when
    *has_longopts*), followed by optind-based positional parsing; with no
    options, positionals are read directly from argv[1..].

    NOTE(review): the f''' ... ''' literal contents below may have had their
    internal newlines/indentation flattened by whatever produced this file —
    compare the emitted C against a known-good run before relying on it.
    """
    ret = ''
    # strtol/strtod style conversions need an endptr temp in the C output.
    needs_endptr = False
    for arg in processed_args:
        if (arg.type in [ArgType.INT, ArgType.FLOAT]):
            needs_endptr = True
            break
    if needs_endptr:
        ret += '    char *endptr = NULL;\n'
    if opts:
        ret += '    int ch;\n\n'
    if opts:
        if has_longopts:
            ret += f''' while ((ch = getopt_long(argc, argv, "{getopt_string}", long_options, NULL)) != -1) '''
        else:
            ret += f''' while ((ch = getopt(argc, argv, "{getopt_string}")) != -1) '''
        ret += f''' {{ '''
        ret += f''' switch (ch) '''
        ret += f''' {{ '''
        for arg in opts:
            # One case label per short option character.
            ret += f''' case '{arg.opt[1]}': '''
            ret += f''' {{ '''
            ret += '\n'.join([(' ' + x) for x in _generate_c_opt_lines(arg)])
            ret += '\n'
            ret += f''' break; '''
            ret += f''' }} '''
        ret += f''' }} '''
        ret += f''' }} '''
        if positionals:
            # Remaining argv entries after options must cover all positionals.
            ret += f''' if (argc < (optind + {len(positionals)})) '''
            ret += f''' {{ '''
            ret += f''' printf("Missing positional arguments "); '''
            ret += f''' return -1; '''
            ret += f''' }} '''
        for i in range(len(positionals)):
            arg = positionals[i]
            desc = f'Positional argument #{(i + 1)} ({arg.var_name})'
            optarg = f'argv[optind]'
            ret += '\n'.join([(' ' + x) for x in _generate_c_opt_lines(arg, desc, optarg)])
            # Advance optind between positionals, but not after the last one.
            if (i < (len(positionals) - 1)):
                ret += '\n optind++;'
            ret += '\n\n'
        pass
    elif positionals:
        # No options at all: positionals start at argv[1].
        ret += f''' if (argc < {(len(positionals) + 1)}) '''
        ret += f''' {{ '''
        ret += f''' printf("Missing positional arguments "); '''
        ret += f''' return -1; '''
        ret += f''' }} '''
        for i in range(len(positionals)):
            arg = positionals[i]
            desc = f'Positional argument #{(i + 1)} ({arg.var_name})'
            optarg = f'argv[{(i + 1)}]'
            ret += '\n'.join([(' ' + x) for x in _generate_c_opt_lines(arg, desc, optarg)])
            ret += '\n\n'
    ret += f' return 0;'
    return ret
_pytree_node_class
# NOTE(review): the bare name above and the `_validator(...)` /
# `('field', always=True)` expressions inside the class body are remnants of
# stripped decorators (jax pytree registration and pydantic validators).
# As written this block will not execute; restore the decorators upstream.
class JaxCustomMedium(CustomMedium, AbstractJaxMedium):
    """Jax-differentiable CustomMedium: permittivity is carried as JaxDataArray
    components in `eps_dataset` so gradients can flow through it."""

    permittivity: Optional[JaxDataArray] = pd.Field(None, title='Permittivity', description='Spatial profile of relative permittivity.')
    conductivity: Optional[JaxDataArray] = pd.Field(None, title='Conductivity', description='Spatial profile Electric conductivity. Defined such that the imaginary part of the complex permittivity at angular frequency omega is given by conductivity/omega.')
    eps_dataset: Optional[JaxPermittivityDataset] = pd.Field(None, title='Permittivity Dataset', description='User-supplied dataset containing complex-valued permittivity as a function of space. Permittivity distribution over the Yee-grid will be interpolated based on the data nearest to the grid location.', jax_field=True)

    _validator(pre=True)
    def _pre_deprecation_dataset(cls, values):
        # 'permittivity' is rejected up front; only 'eps_dataset' is supported.
        if values.get('permittivity'):
            raise SetupError("'permittivity' is not yet supported in adjoint plugin. Please continue to use the 'eps_dataset' field to define the component of the permittivity tensor.")
        return values

    _validator(pre=True)
    def _deprecation_dataset(cls, values):
        # Intentional no-op override of the parent deprecation validator.
        return values

    ('eps_dataset', always=True)
    def _is_not_too_large(cls, val):
        # Enforce a cell-count cap per permittivity component.
        for field_dim in 'xyz':
            field_name = f'eps_{field_dim}{field_dim}'
            data_array = val.field_components[field_name]
            coord_lens = [len(data_array.coords[key]) for key in 'xyz']
            num_cells_dim = np.prod(coord_lens)
            if (num_cells_dim > MAX_NUM_CELLS_CUSTOM_MEDIUM):
                raise SetupError(f"For the adjoint plugin, each component of the 'JaxCustomMedium.eps_dataset' is restricted to have a maximum of {MAX_NUM_CELLS_CUSTOM_MEDIUM} cells. Detected {num_cells_dim} grid cells in the '{field_name}' component .")
        return val

    ('eps_dataset', always=True)
    def _eps_dataset_single_frequency(cls, val):
        # Parent check disabled for the jax variant.
        return val

    ('eps_dataset', always=True)
    def _eps_dataset_eps_inf_greater_no_less_than_one_sigma_positive(cls, val, values):
        # Parent check disabled for the jax variant.
        return val

    ('permittivity', always=True)
    def _eps_inf_greater_no_less_than_one(cls, val):
        # Parent check disabled for the jax variant.
        return val

    ('conductivity', always=True)
    def _conductivity_non_negative_correct_shape(cls, val, values):
        # Parent check disabled for the jax variant.
        return val

    def eps_dataarray_freq(self, frequency: float):
        """Delegate permittivity evaluation to the plain-tidy3d equivalent."""
        as_custom_medium = self.to_medium()
        return as_custom_medium.eps_dataarray_freq(frequency)

    def to_medium(self) -> CustomMedium:
        """Convert to a regular (non-jax) CustomMedium."""
        self_dict = self.dict(exclude={'type'})
        eps_field_components = {}
        for dim in 'xyz':
            field_name = f'eps_{dim}{dim}'
            data_array = self_dict['eps_dataset'][field_name]
            values = np.array(data_array['values'])
            coords = data_array['coords']
            scalar_field = ScalarFieldDataArray(values, coords=coords)
            eps_field_components[field_name] = scalar_field
        eps_dataset = PermittivityDataset(**eps_field_components)
        self_dict['eps_dataset'] = eps_dataset
        self_dict['permittivity'] = None
        self_dict['conductivity'] = None
        return CustomMedium.parse_obj(self_dict)

    def from_tidy3d(cls, tidy3d_obj: CustomMedium) -> JaxCustomMedium:
        # NOTE(review): first param is `cls` without a @classmethod decorator —
        # likely stripped alongside the validators; confirm.
        obj_dict = tidy3d_obj.dict(exclude={'type', 'eps_dataset', 'permittivity', 'conductivity'})
        eps_dataset = tidy3d_obj.eps_dataset
        field_components = {}
        for dim in 'xyz':
            field_name = f'eps_{dim}{dim}'
            data_array = eps_dataset.field_components[field_name]
            values = data_array.values.tolist()
            coords = {key: np.array(val).tolist() for (key, val) in data_array.coords.items()}
            field_components[field_name] = JaxDataArray(values=values, coords=coords)
        eps_dataset = JaxPermittivityDataset(**field_components)
        obj_dict['eps_dataset'] = eps_dataset
        obj_dict['permittivity'] = None
        obj_dict['conductivity'] = None
        return cls.parse_obj(obj_dict)

    def store_vjp(self, grad_data_fwd: FieldData, grad_data_adj: FieldData, sim_bounds: Bound, wvl_mat: float, inside_fn: Callable[([np.ndarray, np.ndarray, np.ndarray], np.ndarray)]) -> JaxMedium:
        """Compute the VJP w.r.t. each eps component by integrating
        forward x adjoint E-fields over the medium's grid cells."""
        mnt_bounds = grad_data_fwd.monitor.geometry.bounds
        bounds_intersect = Geometry.bounds_intersection(mnt_bounds, sim_bounds)
        grids = self.grids(bounds=bounds_intersect)
        vjp_field_components = {}
        for dim in 'xyz':
            eps_field_name = f'eps_{dim}{dim}'
            orig_data_array = self.eps_dataset.field_components[eps_field_name]
            coords = orig_data_array.coords
            grid = grids[eps_field_name]
            d_sizes = grid.sizes
            d_sizes = [d_sizes.x, d_sizes.y, d_sizes.z]
            interp_coords = {}
            sum_axes = []
            for (dim_index, dim_pt) in enumerate('xyz'):
                coord_dim = coords[dim_pt]
                if (len(np.array(coord_dim)) == 1):
                    # Singleton axis: integrate over it with a fine sampling
                    # (PTS_PER_WVL_INTEGRATION points per material wavelength).
                    (r_min_coords, r_max_coords) = grid.boundaries.to_list[dim_index]
                    (r_min_sim, r_max_sim) = np.array(sim_bounds).T[dim_index]
                    r_min = max(r_min_coords, r_min_sim)
                    r_max = min(r_max_coords, r_max_sim)
                    size = abs((r_max - r_min))
                    if (size > 0):
                        num_cells_dim = (int(((size * PTS_PER_WVL_INTEGRATION) / wvl_mat)) + 1)
                        d_len = (size / num_cells_dim)
                        coords_interp = np.linspace((r_min + (d_len / 2)), (r_max - (d_len / 2)), num_cells_dim)
                    else:
                        # Degenerate axis: single midpoint with unit weight.
                        d_len = 1.0
                        coords_interp = np.array([((r_min + r_max) / 2.0)])
                    d_sizes[dim_index] = np.array([d_len])
                    interp_coords[dim_pt] = coords_interp
                    sum_axes.append(dim_pt)
                else:
                    interp_coords[dim_pt] = coord_dim
            # Per-cell integration volumes as an outer product of axis sizes.
            d_vols = np.einsum('i, j, k -> ijk', *d_sizes)
            field_name = ('E' + dim)
            e_dotted = self.e_mult_volume(field=field_name, grad_data_fwd=grad_data_fwd, grad_data_adj=grad_data_adj, vol_coords=interp_coords, d_vol=d_vols, inside_fn=inside_fn).sum(sum_axes).sum(dim='f')
            vjp_shape = tuple((len(coord) for (_, coord) in coords.items()))
            dtype_orig = np.array(orig_data_array.values).dtype
            vjp_values = e_dotted.values.reshape(vjp_shape)
            # Real-valued original data gets a real-valued gradient.
            if (dtype_orig.kind == 'f'):
                vjp_values = vjp_values.real
            vjp_values = vjp_values.astype(dtype_orig)
            vjp_data_array = JaxDataArray(values=vjp_values, coords=coords)
            vjp_field_components[eps_field_name] = vjp_data_array
        vjp_eps_dataset = JaxPermittivityDataset(**vjp_field_components)
        return self.copy(update=dict(eps_dataset=vjp_eps_dataset))
_HELPER_REGISTRY.register()
# NOTE(review): the bare registry call above looks like a stripped
# @_HELPER_REGISTRY.register() decorator for the class below; confirm.
class TestHelper(BaseDistillationHelper):
    """Distillation helper used in tests: fixed pseudo-labeler, simple
    input shifts, and two layer losses (add on layer0, mul on layer1)."""

    def get_pseudo_labeler(self):
        return TestLabeler(self.teacher)

    def get_preprocess_student_input(self):
        # Student sees inputs shifted by +1.
        return (lambda x: (x + 1))

    def get_preprocess_teacher_input(self):
        # Teacher sees inputs shifted by +2.
        return (lambda x: (x + 2))

    def get_layer_losses(self, model=None):
        return [LayerLossMetadata(loss=SimpleAdd(), name='add', layer0='layer0', layer1='layer0'), LayerLossMetadata(loss=SimpleMul(), name='mul', layer0='layer1', layer1='layer1')]

    def get_combine_losses(self):
        # Reweight components: output x0.1, add x0.5, mul x10.
        return (lambda d: {'output': (d['output'] * 0.1), 'add': (d['add'] * 0.5), 'mul': (d['mul'] * 10.0)})
class TestSyntaxErrorReporting(util.TestCase):
    """Verify SelectorSyntaxError carries context text, line, and column."""

    def test_syntax_error_has_text_and_position(self):
        # Error position on a single-line selector.
        with self.assertRaises(sv.SelectorSyntaxError) as cm:
            sv.compile('input.field[type=42]')
        e = cm.exception
        self.assertEqual(e.context, 'input.field[type=42]\n           ^')
        self.assertEqual(e.line, 1)
        self.assertEqual(e.col, 12)

    def test_syntax_error_with_multiple_lines(self):
        # The offending line is marked with '-->' in the context.
        with self.assertRaises(sv.SelectorSyntaxError) as cm:
            sv.compile('input\n.field[type=42]')
        e = cm.exception
        self.assertEqual(e.context, '    input\n--> .field[type=42]\n          ^')
        self.assertEqual(e.line, 2)
        self.assertEqual(e.col, 7)

    def test_syntax_error_on_third_line(self):
        # Position tracking across several lines.
        with self.assertRaises(sv.SelectorSyntaxError) as cm:
            sv.compile('input:is(\n  [name=foo]\n  [type=42]\n)\n')
        e = cm.exception
        self.assertEqual(e.line, 3)
        self.assertEqual(e.col, 3)

    def test_simple_syntax_error(self):
        # An error raised without a pattern has no context/position.
        with self.assertRaises(sv.SelectorSyntaxError) as cm:
            raise sv.SelectorSyntaxError('Syntax Message')
        e = cm.exception
        self.assertEqual(e.context, None)
        self.assertEqual(e.line, None)
        self.assertEqual(e.col, None)
        self.assertEqual(str(e), 'Syntax Message')
def test_authenticate_updates_user_password_if_stalker_fails_but_ldap_successes(ldap_server, create_test_db, monkeypatch):
    """When the stored Stalker password is stale but LDAP accepts the
    credentials, authenticate() must update the local password to match."""
    from ldap3.extend import StandardExtendedOperations

    def mock_return(*arg, **kwargs):
        return 'pipeline'
    # who_am_i must report the test login for the LDAP path to succeed.
    monkeypatch.setattr(StandardExtendedOperations, 'who_am_i', mock_return)
    login = 'pipeline'
    ldap_password = 'password'
    stalker_password = 'different_password'
    from stalker import User
    from stalker.db.session import DBSession
    # Seed a local user whose password differs from the LDAP one.
    new_user = User(login=login, password=stalker_password, email='', name='Pipeline')
    DBSession.add(new_user)
    DBSession.commit()
    assert (new_user.check_password(ldap_password) is False)
    assert (new_user.check_password(stalker_password) is True)
    from anima.utils import authenticate
    result = authenticate(login, ldap_password)
    assert (result is True)
    pipeline_user = User.query.filter((User.login == login)).first()
    assert (pipeline_user is not None)
    # The local password must now be the LDAP one.
    assert (new_user.check_password(ldap_password) is True)
    assert (new_user.check_password(stalker_password) is False)
class HTMLPickledCorpusReader(CategorizedCorpusReader, CorpusReader):
    """NLTK corpus reader over pickled, pre-tagged HTML documents.

    Each pickle holds a dict with 'document' (paragraphs -> sentences ->
    (word, tag) tokens) and 'title'; iteration helpers drill down level
    by level.
    """

    def __init__(self, root, fileids=PKL_PATTERN, **kwargs):
        # Default the category pattern unless the caller supplied any cat_* kwarg.
        if (not any((key.startswith('cat_') for key in kwargs.keys()))):
            kwargs['cat_pattern'] = CAT_PATTERN
        CategorizedCorpusReader.__init__(self, kwargs)
        CorpusReader.__init__(self, root, fileids)

    def resolve(self, fileids, categories):
        """Map (fileids, categories) to a fileid list; both at once is an error."""
        if ((fileids is not None) and (categories is not None)):
            raise ValueError('Specify fileids or categories, not both')
        if (categories is not None):
            return self.fileids(categories)
        return fileids

    def text(self, fileids=None, categories=None):
        """Yield each raw unpickled document dict."""
        fileids = self.resolve(fileids, categories)
        for (path, enc, fileid) in self.abspaths(fileids, True, True):
            with open(path, 'rb') as f:
                (yield pickle.load(f))

    def docs(self, fileids=None, categories=None):
        """Yield the 'document' payload of each pickle."""
        for doc in self.text(fileids, categories):
            (yield doc['document'])

    def titles(self, fileids=None, categories=None):
        """Yield the 'title' of each pickle."""
        for doc in self.text(fileids, categories):
            (yield doc['title'])

    def paras(self, fileids=None, categories=None):
        """Yield each paragraph of each document."""
        for doc in self.docs(fileids, categories):
            for paragraph in doc:
                (yield paragraph)

    def sents(self, fileids=None, categories=None):
        """Yield each sentence of each paragraph."""
        for paragraph in self.paras(fileids, categories):
            for sentence in paragraph:
                (yield sentence)

    def tagged(self, fileids=None, categories=None):
        """Yield each (word, tag) token of each sentence."""
        for sent in self.sents(fileids, categories):
            for token in sent:
                (yield token)

    def words(self, fileids=None, categories=None):
        """Yield each bare word (token[0])."""
        for token in self.tagged(fileids, categories):
            (yield token[0])
def to_ifelse_block(node_id: str, cs: ConditionalSection) -> Tuple[(_core_wf.IfElseBlock, typing.List[Binding])]:
    """Lower a ConditionalSection into an IfElseBlock plus the merged
    promises referenced across all of its cases.

    Raises AssertionError for an empty section or a dangling `if` with no
    else branch.
    """
    if (len(cs.cases) == 0):
        raise AssertionError('Illegal Condition block, with no if-else cases')
    if (len(cs.cases) < 2):
        raise AssertionError('At least an if/else is required. Dangling If is not allowed')
    all_promises: typing.List[Promise] = []
    # First case is the primary `if`.
    (first_case, promises) = to_case_block(cs.cases[0])
    all_promises.extend(promises)
    other_cases: Optional[typing.List[_core_wf.IfBlock]] = None
    if (len(cs.cases) > 2):
        # Middle cases become elif blocks; first and last handled separately.
        other_cases = []
        for c in cs.cases[1:(- 1)]:
            (case, promises) = to_case_block(c)
            all_promises.extend(promises)
            other_cases.append(case)
    last_case = cs.cases[(- 1)]
    node = None
    err = None
    # The final case is either an else node or, lacking an output, an Error.
    if (last_case.output_promise is not None):
        node = last_case.output_node
    else:
        err = Error(failed_node_id=node_id, message=(last_case.err if last_case.err else 'Condition failed'))
    return (_core_wf.IfElseBlock(case=first_case, other=other_cases, else_node=node, error=err), merge_promises(*all_promises))
class DQMLP(nn.Module):
    """Dueling Q-network: a shared tanh trunk feeding a scalar value head
    and a mean-centered advantage head; Q(s, a) = V(s) + A(s, a)."""

    def __init__(self, n_observations, n_actions, n_hidden):
        super().__init__()
        # Shared trunk and the two dueling heads.
        self.linear = nn.Linear(n_observations, n_hidden)
        self.linear_adv = nn.Linear(n_hidden, n_actions)
        self.linear_value = nn.Linear(n_hidden, 1)
        self.n_actions = n_actions

    def forward_common(self, frame):
        """Hidden representation shared by both heads."""
        return torch.tanh(self.linear(frame))

    def forward_value(self, z):
        """State-value head V(s)."""
        return self.linear_value(z)

    def forward_advantage(self, z):
        """Advantage head, centered so advantages average to zero per state."""
        raw = self.linear_adv(z)
        mean_per_state = raw.mean(1).unsqueeze(-1).repeat(1, self.n_actions)
        return raw - mean_per_state

    def forward(self, state):
        """Combine value and centered advantage into Q-values."""
        hidden = self.forward_common(state)
        return self.forward_value(hidden) + self.forward_advantage(hidden)
class JsD3(JsPackage):
    """Python wrapper generating d3.js JavaScript fragments.

    NOTE(review): the bare `({'d3': '4.0.0'})` expressions before several
    methods are remnants of stripped version-constraint decorators; restore
    them upstream.
    """
    lib_alias = {'js': 'd3'}
    lib_selector = 'd3'

    def svg(self):
        return D3Svg(component=self.component, selector=('%s.svg' % self._selector), page=self.page)

    ({'d3': '4.0.0'})
    def csv(self, url):
        return D3File(component=self.component, filename=url, selector=('%s.csv' % self._selector), page=self.page)

    ({'d3': '4.0.0'})
    def tsv(self, url):
        return D3File(self.component, url, selector=('%s.tsv' % self._selector))

    ({'d3': '4.0.0'})
    def xml(self, url):
        # NOTE(review): selector uses '.tsv', not '.xml' — looks like a
        # copy/paste slip; confirm before changing.
        return D3File(self.component, url, selector=('%s.tsv' % self._selector))

    ({'d3': '4.0.0'})
    def json(self, url):
        return D3File(self.component, url, selector=('%s.json' % self._selector))

    def text(self, url):
        return D3File(self.component, url, selector=('%s.text' % self._selector))

    def blob(self, url):
        return D3File(self.component, url, selector=('%s.blob' % self._selector))

    def html(self, url):
        return D3File(self.component, url, selector=('%s.html' % self._selector))

    def image(self, url):
        return D3File(self.component, url, selector=('%s.image' % self._selector))

    def dsv(self, url):
        # NOTE(review): also emits '.tsv' — confirm intended for dsv.
        return D3File(self.component, url, selector=('%s.tsv' % self._selector))

    def min(self, dataset, js_funcs):
        # Emits d3.min(dataset, accessor) as a JS number expression.
        return JsNumber.JsNumber(('d3.min(%s, %s)' % (dataset, js_funcs)))

    def max(self, dataset, js_funcs):
        # Emits d3.max(dataset, accessor) as a JS number expression.
        return JsNumber.JsNumber(('d3.max(%s, %s)' % (dataset, js_funcs)))

    def select(self, tag, js_code: str=None):
        """d3.select(tag); when js_code is given the selection is stored
        in a JS variable of that name."""
        tag = JsUtils.jsConvertData(tag, None)
        if (js_code is not None):
            return D3Select(self.component, selector=('d3.select(%s)' % tag), js_code=js_code, set_var=True)
        return D3Select(self.component, selector=('d3.select(%s)' % tag), set_var=False)

    def selectAll(self, d3_type, set_var=False) -> D3Select:
        return D3Select(d3_type=d3_type, set_var=set_var)

    def scaleLinear(self, range) -> D3ScaleLinear:
        return D3ScaleLinear(range)

    def scaleBand(self, range=None) -> D3Band:
        if (range is None):
            self._js.append('scaleBand()')
        return D3Band(self.toStr())

    def forceSimulation(self, nodes=None):
        return D3ForceSimulation(nodes)

    def forceManyBody(self):
        return D3ForceManyBody()

    def scaleOrdinal(self, colors):
        # Not implemented yet.
        pass

    def forceX(self):
        # Not implemented yet.
        pass

    def forceY(self):
        # Not implemented yet.
        pass

    def drag(self):
        # Not implemented yet.
        pass

    def toStr(self):
        """Flush the accumulated method chain into a JS expression string.

        Side effect: clears the pending fragment buffer self._js.
        """
        if (self._selector is None):
            raise ValueError('Selector not defined, use this() or new() first')
        if (len(self._js) == 0):
            return self._selector
        data = ('%(jqId)s.%(items)s' % {'jqId': self._selector, 'items': '.'.join(self._js)})
        self._js = []
        return data
class ProbaMixin(object):
    """Mixin handling predict vs predict_proba bookkeeping for estimators.

    NOTE(review): the bare `_.setter` below and the undecorated classes_
    getter are remnants of stripped @property/@classes_.setter decorators;
    as written this class body raises NameError on `_` at import time —
    restore the decorators upstream.
    """

    def _setup_2_multiplier(self, X, y, job=None):
        # In probability mode, record the class count from the labels.
        if (self.proba and (y is not None)):
            self.classes_ = y

    def _get_multiplier(self, X, y, alt=1):
        # Output-width multiplier: class count for proba, else *alt*.
        if self.proba:
            multiplier = self.classes_
        else:
            multiplier = alt
        return multiplier

    def _predict_attr(self):
        # Name of the estimator method to invoke.
        return ('predict' if (not self.proba) else 'predict_proba')

    def classes_(self):
        return self._classes

    _.setter
    def classes_(self, y):
        # Stores the NUMBER of unique labels, not the labels themselves.
        self._classes = np.unique(y).shape[0]
class CodeDisplay(QPlainTextEdit):
    """Read-only code view that emits the word under the cursor on click
    (select_token) and double-click (double_click_word)."""

    double_click_word = Signal(str)
    select_token = Signal(str)

    def __init__(self, text: str, parent: QWidget):
        super().__init__(text, parent=parent)
        self.setReadOnly(True)
        self.resize(self.sizeHint())
        self.setLineWrapMode(QPlainTextEdit.LineWrapMode.NoWrap)
        self.set_font()

    def set_font(self):
        """Apply the font configured in gui.* options.

        NOTE(review): assigning to self.font shadows QWidget.font() for this
        instance — confirm that is intended.
        """
        self.options = Options.from_gui()
        font = self.options.getstring('gui.font', fallback='source code pro')
        font_size = self.options.getint('gui.font_size', fallback=16)
        is_font_bold = self.options.getboolean('gui.font_bold', fallback=False)
        is_font_italic = self.options.getboolean('gui.font_italic', fallback=False)
        self.font = QFont(font, font_size)
        if is_font_italic:
            self.font.setItalic(True)
        if is_font_bold:
            self.font.setBold(True)
        self.font.setStyleHint(QFont.Monospace)
        self.font.setFamily(font)
        self.setFont(self.font)

    def _get_word_under_cursor(self, e: QMouseEvent) -> str:
        # Select and return the whole word at the mouse position.
        cursor = self.cursorForPosition(e.pos())
        cursor.select(QTextCursor.WordUnderCursor)
        return cursor.selectedText()

    def mouseDoubleClickEvent(self, e: QMouseEvent):
        self.double_click_word.emit(self._get_word_under_cursor(e))

    def mousePressEvent(self, e: QMouseEvent):
        self.select_token.emit(self._get_word_under_cursor(e))

    def register_widget(cls, parent: QWidget):
        # NOTE(review): first param is `cls` without a @classmethod decorator —
        # likely stripped; confirm.
        code_display = cls('', parent)
        code_display.double_click_word.connect(parent.jump_to_symbol)
        code_display.select_token.connect(parent.on_select)
        return code_display
def get_printable_message_args(msg, buff=None, prefix=''):
    """Recursively flatten the slot names of *msg* (a genpy-style Message)
    into a single space-separated string, using dotted prefixes for nested
    message fields.

    *buff* and *prefix* are internal recursion state; callers normally pass
    only *msg*.
    """
    try:
        # Python 2 fast path; io.StringIO on Python 3.
        from cStringIO import StringIO
    except ImportError:
        from io import StringIO
    if (buff is None):
        buff = StringIO()
    for f in msg.__slots__:
        if isinstance(getattr(msg, f), Message):
            # Nested message: recurse with a dotted prefix.
            get_printable_message_args(getattr(msg, f), buff=buff, prefix=((prefix + f) + '.'))
        else:
            buff.write(((prefix + f) + ' '))
    return buff.getvalue().rstrip()
class TestYaml():
    """Round-trip test: dump a register map to YAML and read it back."""

    def test_dump(self, tmpdir):
        output_file = str(tmpdir.join('map_out.yaml'))
        print('output_file:', output_file)
        rmap = utils.create_template()
        # Extra metadata must survive the round trip.
        rmap[0].etc['the_answer'] = 42
        generators.Yaml(rmap, output_file).generate()
        rmap_test = RegisterMap()
        rmap_test.read_file(output_file)
        assert (rmap_test == rmap)
        assert (rmap[0].etc['the_answer'] == rmap_test[0].etc['the_answer'] == 42)
class MovieLayout(object):
    """Holds the four camera placements, font and geometry used to compose a
    combined multi-camera movie frame.

    NOTE(review): the bare `_order.setter` / `_left_right.setter` /
    `_front_rear.setter` statements and the duplicated method names
    (clip_order/font/scale/perspective/...) strongly suggest @property and
    @<name>.setter decorators were stripped from the original source —
    confirm before relying on plain attribute access.
    """

    def __init__(self):
        # One Camera wrapper per view position.
        self._cameras = {'front': Camera(layout=self, camera='front'), 'left': Camera(layout=self, camera='left'), 'right': Camera(layout=self, camera='right'), 'rear': Camera(layout=self, camera='rear')}
        self._clip_order = ['left', 'right', 'front', 'rear']
        self._font = Font(layout=self)
        self._swap_left_right = False
        self._swap_front_rear = False
        self._perspective = False
        self._font.halign = 'CENTER'
        self._font.valign = 'BOTTOM'

    def cameras(self, camera):
        # Return the named camera; an unknown name returns the whole dict.
        return self._cameras.get(camera, self._cameras)

    def clip_order(self) -> list:
        return self._clip_order

    _order.setter

    def clip_order(self, value: list):
        # Accept only known camera names, then append any missing ones so
        # the order always covers all four cameras exactly once.
        self._clip_order = []
        for camera in value:
            camera = camera.lower().strip()
            if (camera in ['front', 'left', 'right', 'rear']):
                self._clip_order.append(camera)
        if ('left' not in self._clip_order):
            self._clip_order.append('left')
        if ('right' not in self._clip_order):
            self._clip_order.append('right')
        if ('front' not in self._clip_order):
            self._clip_order.append('front')
        if ('rear' not in self._clip_order):
            self._clip_order.append('rear')

    def font(self):
        return self._font

    def font(self, value):
        self._font = value

    def swap_left_right(self):
        return self._swap_left_right

    _left_right.setter

    def swap_left_right(self, value):
        self._swap_left_right = value

    def swap_front_rear(self):
        return self._swap_front_rear

    _front_rear.setter

    def swap_front_rear(self, value):
        self._swap_front_rear = value

    def perspective(self):
        return self._perspective

    def perspective(self, new_perspective):
        # Enabling perspective installs ffmpeg pad+perspective filter chains
        # on the side cameras; disabling clears them.
        self._perspective = new_perspective
        if self._perspective:
            self.cameras('left').options = ', pad=iw+4:3/2*ih:-1:ih/8:0x, perspective=x0=0:y0=1*H/5:x1=W:y1=-3/44*H:x2=0:y2=6*H/5:x3=7/8*W:y3=5*H/6:sense=destination'
            self.cameras('right').options = ', pad=iw+4:3/2*ih:-1:ih/8:0x,perspective=x0=0:y1=1*H/5:x1=W:y0=-3/44*H:x2=1/8*W:y3=6*H/5:x3=W:y2=5*H/6:sense=destination'
        else:
            self.cameras('left').options = ''
            self.cameras('right').options = ''

    def scale(self):
        # Area-based scale relative to a 1280x960 reference frame.
        return ((self.video_height * self.video_width) / (1280 * 960))

    def scale(self, scale):
        # Apply one scale factor to all four cameras.
        self.cameras('front').scale = scale
        self.cameras('left').scale = scale
        self.cameras('right').scale = scale
        self.cameras('rear').scale = scale

    def video_width(self):
        # Overall width: right-most edge across all cameras.
        return int(max((self.cameras('front').xpos + self.cameras('front').width), (self.cameras('left').xpos + self.cameras('left').width), (self.cameras('right').xpos + self.cameras('right').width), (self.cameras('rear').xpos + self.cameras('rear').width)))

    def video_height(self):
        # Side-camera y positions grow by 3/2 when perspective padding is on.
        perspective_adjustement = ((3 / 2) if self.perspective else 1)
        return int(max((self.cameras('front').ypos + self.cameras('front').height), ((perspective_adjustement * self.cameras('left').ypos) + self.cameras('left').height), ((perspective_adjustement * self.cameras('right').ypos) + self.cameras('right').height), (self.cameras('rear').ypos + self.cameras('rear').height)))

    def center_xpos(self):
        return int((self.video_width / 2))

    def center_ypos(self):
        return int((self.video_height / 2))

    def _rear_xpos(self):
        # Rear clip starts where the front clip ends horizontally.
        return (self.cameras('front').xpos + self.cameras('front').width)

    def _left_ypos(self):
        # Side row sits below the taller of front/rear.
        return max((self.cameras('front').ypos + self.cameras('front').height), (self.cameras('rear').ypos + self.cameras('rear').height))

    def _right_xpos(self):
        return (self.cameras('left').xpos + self.cameras('left').width)

    def _right_ypos(self):
        return max((self.cameras('front').ypos + self.cameras('front').height), (self.cameras('rear').ypos + self.cameras('rear').height))
def start_pips(argList):
    """Run one Vivado fuzzer instance, then append its wire lists to the
    shared uphill/downhill output files under the global Lock.

    argList: (blockID, start, stop, total) tuple.

    Raises:
        subprocess.CalledProcessError: if the Vivado run fails.
    """
    (blockID, start, stop, total) = argList
    print(f'Running instance :{blockID} / {total}')
    # shell=True is required: the command relies on ${XRAY_VIVADO}/$FUZDIR
    # environment-variable expansion performed by the shell.
    subprocess.check_call(
        f'${{XRAY_VIVADO}} -mode batch -source $FUZDIR/job.tcl '
        f'-tclargs {blockID} {start} {stop}',
        shell=True)
    uphill_wires = 'wires/uphill_wires_{}.txt'.format(blockID)
    downhill_wires = 'wires/downhill_wires_{}.txt'.format(blockID)
    # Bug fix: hold the lock in try/finally so a failed copy cannot leave it
    # held forever and deadlock the remaining worker processes.
    Lock.acquire()
    try:
        _append_file(uphill_wires, 'uphill_wires.txt')
        _append_file(downhill_wires, 'downhill_wires.txt')
    finally:
        Lock.release()
    os.remove(uphill_wires)
    os.remove(downhill_wires)


def _append_file(src, dst):
    # Append the entire contents of *src* onto *dst*.
    with open(dst, 'a') as wfd, open(src, 'r') as fd:
        shutil.copyfileobj(fd, wfd)
class OptionsBackgroundColor(DataClass):
    """Background colour options backed by the `_attrs` dict.

    NOTE(review): the paired same-name getter/setter methods were almost
    certainly @property / @<name>.setter pairs whose decorators were lost
    from the original source; as written, each later definition shadows the
    earlier one.
    """

    def fill(self):
        # Fill colour.
        return self._attrs['fill']

    def fill(self, val):
        self._attrs['fill'] = val

    def stroke(self):
        # Stroke (outline) colour.
        return self._attrs['stroke']

    def stroke(self, val):
        self._attrs['stroke'] = val

    def strokeWidth(self):
        # Stroke width.
        return self._attrs['strokeWidth']

    def strokeWidth(self, val):
        self._attrs['strokeWidth'] = val
class OptionSeriesBoxplotDataAccessibility(Options):
    """Accessibility options for a boxplot data point.

    NOTE(review): the paired same-name getter/setter methods look like
    stripped @property / @<name>.setter decorators — confirm upstream.
    """

    def description(self):
        # Screen-reader description; unset by default.
        return self._config_get(None)

    def description(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Whether accessibility is enabled for this point; unset by default.
        return self._config_get(None)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
class CitationModel(Model):
    """Model wiring for the citation (bibliography) sequence model: exposes
    its data generator, semantic extractor and TEI training-data helpers."""

    def get_data_generator(self, document_features_context: DocumentFeaturesContext) -> CitationDataGenerator:
        """Build the feature/data generator for citation training."""
        return CitationDataGenerator(document_features_context=document_features_context)

    def get_semantic_extractor(self) -> CitationSemanticExtractor:
        """Build the extractor mapping model labels to semantic content."""
        return CitationSemanticExtractor()

    def get_tei_training_data_generator(self) -> CitationTeiTrainingDataGenerator:
        """Build the generator producing TEI training documents."""
        return CitationTeiTrainingDataGenerator()

    def get_training_tei_parser(self) -> CitationTrainingTeiParser:
        """Build the parser reading TEI training documents back in."""
        return CitationTrainingTeiParser()
class TagListPost(ResourceList):
    """POST-only endpoint creating Tags; rejects duplicate names per event."""

    # NOTE(review): no `self` parameter — the data-layer `before_post` hook
    # is called as a plain function; confirm a @staticmethod decorator was
    # not lost from the original source.
    def before_post(_args, _kwargs, data):
        # The tag must belong to an event.
        require_relationship(['event'], data)
        # A live (not soft-deleted) tag with the same name in this event
        # makes the request a conflict.
        if (get_count(db.session.query(Tag.id).filter_by(name=data.get('name'), event_id=int(data['event']), deleted_at=None)) > 0):
            raise ConflictError({'pointer': '/data/attributes/name'}, 'Name already exists')

    schema = TagSchema
    methods = ['POST']
    data_layer = {'session': db.session, 'model': Tag, 'methods': {'before_post': before_post}}
def compute_haplotype_edit_distance(signature1, signature2, reference, window_padding=100):
    """Edit distance between the two alternative haplotypes implied by two
    insertion signatures, evaluated over a shared padded reference window."""
    window_start = min(signature1.start, signature2.start) - window_padding
    window_end = max(signature1.start, signature2.start) + window_padding

    def build_haplotype(sig):
        # Reference up to the signature start, the inserted sequence, then
        # the reference through the end of the window (coordinates clamped
        # at zero for signatures near the contig start).
        left = reference.fetch(sig.contig, max(0, window_start), max(0, sig.start))
        right = reference.fetch(sig.contig, max(0, sig.start), max(0, window_end))
        return left.upper() + sig.sequence.upper() + right.upper()

    return align(build_haplotype(signature1), build_haplotype(signature2))['editDistance']
def extractHonquehonkWordpressCom(item):
    """Map a feed item to a release message.

    Returns None for non-chapter/preview items, False when no known tag
    matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Require at least a chapter or volume number, and skip previews.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Known tag -> (release name, translation type).
    tagmap = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tagmap.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class multi_model():
    """Wrapper that prefers a GPU keras model for fit/compile and delegates
    everything else to the CPU model."""

    def __init__(self, cpu_model, gpu_model=None):
        self.cpu_model = cpu_model
        self.gpu_model = gpu_model

    def fit(self, x, y, **kwargs):
        """Train on the GPU model when present, else on the CPU model."""
        if self.gpu_model is not None:
            return self.gpu_model.fit(x, y, **kwargs)
        return self.cpu_model.fit(x, y, **kwargs)

    def compile(self, **kwargs):
        """Compile the GPU model when present, else the CPU model."""
        if self.gpu_model is not None:
            return self.gpu_model.compile(**kwargs)
        return self.cpu_model.compile(**kwargs)

    def __getattr__(self, name, *args, **kwargs):
        """Delegate unknown attributes to the CPU model.

        Bug fix: the old implementation wrapped EVERY attribute in a
        call-through closure, so plain (non-callable) attributes were
        returned as functions.  Returning the attribute itself preserves
        method calls (bound methods are already callable) and fixes
        attribute reads.
        """
        try:
            return getattr(self.cpu_model, name)
        except AttributeError:
            raise AttributeError('Method/Attribute %s not found for keras model' % name) from None
def extractWwwMalevolencegameCom(item):
    """Map a feed item to a release message.

    Returns None for non-chapter/preview items, False when no known tag
    matches, otherwise the built release message.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Require at least a chapter or volume number, and skip previews.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Known tag -> (release name, translation type).
    tagmap = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in tagmap.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def mock_audit_events_for_discretionary_access_control_changes_are_collected_pass(self, cmd):
    """Mocked command runner returning canned DAC audit-rule listings.

    Commands containing 'auditctl' get the loaded-rule syntax; anything
    else gets the audit.rules file syntax.  Always succeeds (returncode 0).
    """
    auditctl_rules = [
        '-a always,exit -F arch=b64 -S chmod,fchmod,fchmodat -F auid>=1000 -F auid!=-1 -F key=perm_mod',
        '-a always,exit -F arch=b32 -S chmod,fchmod,fchmodat -F auid>=1000 -F auid!=-1 -F key=perm_mod',
        '-a always,exit -F arch=b64 -S chown,fchown,lchown,fchownat -F auid>=1000 -F auid!=-1 -F key=perm_mod',
        '-a always,exit -F arch=b32 -S lchown,fchown,chown,fchownat -F auid>=1000 -F auid!=-1 -F key=perm_mod',
        '-a always,exit -F arch=b64 -S setxattr,lsetxattr,fsetxattr,removexattr,lremovexattr,fremovexattr -F auid>=1000 -F auid!=-1 -F key=perm_mod',
        '-a always,exit -F arch=b32 -S setxattr,lsetxattr,fsetxattr,removexattr,lremovexattr,fremovexattr -F auid>=1000 -F auid!=-1 -F key=perm_mod',
    ]
    file_rules = [
        '-a always,exit -F arch=b64 -S chmod -S fchmod -S fchmodat -F auid>=1000 -F auid!= -k perm_mod',
        '-a always,exit -F arch=b32 -S chmod -S fchmod -S fchmodat -F auid>=1000 -F auid!= -k perm_mod',
        '-a always,exit -F arch=b64 -S chown -S fchown -S fchownat -S lchown -F auid>=1000 -F auid!= -k perm_mod',
        '-a always,exit -F arch=b32 -S chown -S fchown -S fchownat -S lchown -F auid>=1000 -F auid!= -k perm_mod',
        '-a always,exit -F arch=b64 -S setxattr -S lsetxattr -S fsetxattr -S removexattr -S lremovexattr -S fremovexattr -F auid>=1000 -F auid!= -k perm_mod',
        '-a always,exit -F arch=b32 -S setxattr -S lsetxattr -S fsetxattr -S removexattr -S lremovexattr -S fremovexattr -F auid>=1000 -F auid!= -k perm_mod',
    ]
    selected = auditctl_rules if 'auditctl' in cmd else file_rules
    return SimpleNamespace(returncode=0, stderr=[''], stdout=selected)
class Fuzzer():
    """Generates subdomain permutations by swapping environment/region
    tokens, bumping trailing digits, and truncating hyphenated labels."""

    # Groups of interchangeable tokens: a label part matching one member is
    # re-emitted with every member of the same group.
    swaps = (('dev', 'stg', 'stage', 'test', 'qa', 'uat', 'preprod'), ('in', 'out'), ('inbound', 'outbound'), ('ext', 'int'), ('extern', 'intern'), ('external', 'internal'), ('local', 'global'), ('east', 'west'), ('private', 'public'), ('priv', 'pub'), ('ap', 'eu', 'na', 'la', 'ca'), ('nam', 'emea', 'apac', 'latam'), ('en', 'de', 'fr', 'jp', 'uk'), ('a', 'b', 'c'), ('1a', '1b', '1c'), ('2a', '2b', '2c'))

    def __init__(self, domain='.'):
        self.subdomains = set()
        self.domain = domain

    def _fuzz(self, word):
        """Yield variants of one dash-separated label."""
        parts = [p for p in word.split('-') if p]
        # Every proper dash-prefix of the label is itself a candidate.
        for count in range(1, len(parts)):
            yield '-'.join(parts[:count])
        for idx, part in enumerate(parts):
            head, tail = parts[:idx], parts[idx + 1:]
            if part[-1].isdigit():
                # Bump a trailing digit upward (self plus up to 4 steps,
                # never past '9').
                start = ord(part[-1])
                for code in range(start, min(58, start + 5)):
                    yield '-'.join(head + [part[:-1] + chr(code)] + tail)
            for group in self.swaps:
                if part in group:
                    for replacement in group:
                        yield '-'.join(head + [replacement] + tail)

    def _prep(self, string):
        """Register every label-suffix chain and all fuzzed label variants."""
        labels = string.split('.')
        for idx, label in enumerate(labels):
            head, tail = labels[:idx], labels[idx + 1:]
            self.subdomains.add('.'.join(labels[idx:]))
            for variant in self._fuzz(label):
                self.subdomains.add('.'.join(head + [variant] + tail))

    def add(self, items):
        """Feed hostnames (with or without the base domain) into the fuzzer."""
        if not isinstance(items, (list, set)):
            raise ValueError('expected argument types: list, set')
        dotlen = len(self.domain) + 1
        for item in filter(None, items):
            # Names no longer than the base domain are ignored entirely.
            if dotlen < len(item):
                if item[-dotlen] == '.' and item.endswith(self.domain):
                    # Strip the trailing base domain before fuzzing.
                    self._prep(item[:-dotlen])
                else:
                    self._prep(item)

    def mutations(self):
        """Yield every candidate joined with the base domain."""
        for sub in self.subdomains:
            yield '.'.join([sub, self.domain])

    def wildations(self):
        """Yield wildcard forms: first label of each candidate replaced by *."""
        for wild in {'.'.join(['*'] + sub.split('.', 1)[1:]) for sub in self.subdomains}:
            yield wild + '.' + self.domain

    def count(self):
        return len(self.subdomains)
def sed(file: str, pattern: str, replace: str) -> None:
    """In-place substitute *pattern* -> *replace* in *file* via sed.

    Uses `#` as the sed delimiter, so neither pattern nor replacement may
    contain `#`.  Handles the GNU (Linux) vs BSD (macOS) `-i` difference.

    Raises:
        RuntimeError: on unsupported platforms or when sed fails.
    """
    try:
        if sys.platform in ['linux', 'linux2']:
            # GNU sed: -i takes no separate backup-suffix argument.
            check_run(['sed', '-i', '-e', f's#{pattern}#{replace}#g', file], capture_output=False)
        elif sys.platform == 'darwin':
            # BSD sed on macOS requires an (empty) backup suffix after -i.
            check_run(['sed', '-i', '', '-e', f's#{pattern}#{replace}#g', file], capture_output=False)
        else:
            raise RuntimeError(f'Current operating system not supported for release publishing: {sys.platform}')
    except RuntimeError:
        # Don't mask the unsupported-platform error as a file-processing one.
        raise
    except Exception as ex:
        # Bug fix: the original message ended with a dangling ': ' and never
        # included the underlying error detail.
        raise RuntimeError(f'Error processing updates to file {file}: {ex}') from ex
# NOTE(review): the bare call below looks like a stripped decorator —
# presumably `@_EXTRACTORS.register_module()`; confirm against upstream.
_EXTRACTORS.register_module()

class ChineseHubert(BaseFeatureExtractor):
    """Feature extractor wrapping TencentGameMate's Chinese HuBERT model."""

    def __init__(self, model='TencentGameMate/chinese-hubert-base'):
        super().__init__()
        # Hugging Face preprocessor and pretrained HuBERT weights.
        self.feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained(model)
        self.model = HubertModel.from_pretrained(model)

    # NOTE(review): `_grad()` looks like a stripped decorator — presumably
    # `@torch.no_grad()`; confirm against upstream.
    _grad()

    def forward(self, path_or_audio, sampling_rate=None):
        """Return HuBERT features: the last hidden state with its final two
        dims transposed (so features come before frames)."""
        audio = self.preprocess(path_or_audio, sampling_rate)
        # Extractor is fixed at 16 kHz input.
        input_values = self.feature_extractor(audio, sampling_rate=16000, return_tensors='pt').input_values
        input_values = input_values.to(self.model.device)
        features = self.model(input_values).last_hidden_state
        return features.transpose(1, 2)
def format_co_flags(co_flags):
    """Render a code-object flag word as hex plus the decoded CO_* names.

    Raises NotImplementedError when bits remain that CO_FLAGS does not name.
    """
    if not co_flags:
        return '0x00'
    matched = [name for name, flag in CO_FLAGS.items() if co_flags & flag]
    # Any value not accounted for by the matched flags is an error, not a guess.
    leftover = co_flags - sum(CO_FLAGS[name] for name in matched)
    if leftover:
        raise NotImplementedError(leftover)
    return f"0x{co_flags:08x} ({' | '.join(matched)})"
class OptionPlotoptionsHistogramSonificationTracksMappingLowpassResonance(Options):
    """Low-pass filter resonance mapping options for histogram sonification.

    NOTE(review): the paired same-name getter/setter methods look like
    stripped @property / @<name>.setter decorators — confirm upstream.
    """

    def mapFunction(self):
        # Mapping curve; unset by default.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Data property the resonance is mapped to; unset by default.
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Upper bound of the mapped range; unset by default.
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        # Lower bound of the mapped range; unset by default.
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        # Scope the mapping applies within; unset by default.
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def test_json_parser():
    """JsonParser extracts the dependency names from the sample package.json.

    NOTE(review): entries like '-navigation/native' and '/core' look like
    scoped npm packages ('@react-navigation/native', '@…/core') with the
    scope prefix stripped — confirm this is the parser's intended output.
    """
    parser = JsonParser()
    dependencies = parser.parse(package_json_file)
    # Expected names, in package.json order.
    assert (dependencies == ['-navigation/native', '-navigation/native-stack', 'expo', 'expo-clipboard', 'expo-status-bar', 'react', 'react-dom', 'react-native', 'react-native-safe-area-context', 'react-native-screens', 'react-native-web', 'react-uuid', '/core', '/react', '/react-native', 'typescript'])
class VideoRecordingSchema(JSONAPISchema):
    """JSON:API schema for a BigBlueButton video recording."""

    class Meta():
        # JSON:API resource wiring; attribute names are dasherized on output.
        type_ = 'video-recording'
        self_view = 'v1.video_recording_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize

    id = fields.Str(dump_only=True)
    bbb_record_id = fields.Str()
    participants = fields.Integer(required=True)
    url = fields.Url(required=True)
    # Naive datetimes (timezone=False); both endpoints are required.
    start_time = fields.DateTime(required=True, timezone=False)
    end_time = fields.DateTime(required=True, timezone=False)
    # Each recording belongs to exactly one video stream.
    video_stream = Relationship(self_view='v1.video_recording_stream', self_view_kwargs={'id': '<id>'}, related_view='v1.video_stream_detail', related_view_kwargs={'video_recording_id': '<id>'}, schema='VideoStreamSchema', type_='video-stream')
def print_to_file():
    """Write a plain-text crawl summary (crawled links, file links, external
    links, emails, directories, indexed directories) to
    `<domain>/crawler_<host>`.

    Returns 1 on success, -1 on KeyboardInterrupt, -2 on any other error,
    -15 when the output directory cannot be created (codes kept for callers).
    """
    global crawled
    global link_to_files
    global externals
    global allfiles
    global directories
    global emails
    global directories_with_indexing
    global host_name
    global main_domain
    try:
        # Derive the output directory from the main domain: strip the URL
        # scheme and leading 'www.', then drop any path or port suffix.
        try:
            # Bug fix: the original `main_domain.replace(' '')` was a mangled
            # one-argument replace() (TypeError at runtime) whose result was
            # discarded anyway; strip the scheme explicitly instead.
            output_directory = main_domain.replace('http://', '').replace('https://', '')
            output_directory = output_directory.replace('www.', '')
            try:
                output_directory = output_directory.split('/')[0]
            except Exception:
                pass
            try:
                output_directory = output_directory.split(':')[0]
            except Exception:
                pass
        except Exception:
            output_directory = main_domain
        if debug:
            print('Output directory has been set: {0}'.format(output_directory))
        try:
            if debug:
                print('Creating output directory...')
            os.mkdir(output_directory)
        except OSError as error:
            # Bug fix: `'File exists' in error` raised TypeError (membership
            # test on the exception object itself); test its string form.
            if 'File exists' in str(error):
                if debug:
                    print('\t\t> Output directory already exists! Overwriting content!')
            else:
                print('\t\t\t\t> Cannot create output directory! Not downloading files:')
                return (- 15)
        # Build the report file name from the host (scheme stripped and '/'
        # replaced so it is a valid file name).
        temp = host_name.replace('http://', '').replace('https://', '')
        temp = temp.replace('/', '_')
        if debug:
            print('Saving file as: {0}/crawler_{1}'.format(output_directory, temp))

        def write_section(f, title, items, total_label):
            # One titled bullet list plus its item count.
            count = 0
            f.writelines('\n+ ' + title + ':')
            for entry in items:
                f.writelines('\n\t- ' + entry)
                count = (count + 1)
            f.writelines('\n\t' + total_label + ': ' + str(count) + '\n')

        # `with` replaces the original manual open/close (no leak on error).
        with open((output_directory + '/crawler_') + temp, 'w') as f:
            f.writelines('\n')
            f.writelines(('Sumary information of crawling site ' + host_name) + '\n')
            f.writelines('\n')
            write_section(f, 'Links crawled', crawled, 'Total links crawled')
            write_section(f, 'Links to files found', link_to_files, 'Total links to files')
            write_section(f, 'Externals links found', externals, 'Total external links')
            write_section(f, 'Email addresses found', emails, 'Total email address found')
            write_section(f, 'Directories found', directories, 'Total directories')
            write_section(f, 'Directory indexing found', directories_with_indexing, 'Total directories with indexing')
        return 1
    except KeyboardInterrupt:
        print('Keyboard Interrupt while printing! Exiting.')
        return (- 1)
    except Exception:
        return (- 2)
def run_trace_projection(x, degree=1, family='DGT'):
    """L2 projection error of a smooth function onto a facet/trace space.

    Builds a 2^x by 2^x triangulated unit square, projects
    f = x(2-x)·y(2-y) onto the trace space (family, degree), and returns
    the facet-area-weighted L2 error against a degree-6 CG interpolant,
    summed over exterior (ds) and interior (dS) facets.
    """
    m = UnitSquareMesh((2 ** x), (2 ** x), quadrilateral=False)
    # NOTE: rebinds the parameter name `x` to the spatial coordinate.
    x = SpatialCoordinate(m)
    f = (((x[0] * (2 - x[0])) * x[1]) * (2 - x[1]))
    # High-order reference for measuring the projection error against.
    V_ho = FunctionSpace(m, 'CG', 6)
    ref = Function(V_ho).interpolate(f)
    T = FunctionSpace(m, family, degree)
    w = Function(T)
    # Direct LU solve for the projection system.
    w.project(f, solver_parameters={'ksp_type': 'preonly', 'pc_type': 'lu'})
    area = FacetArea(m)
    return sqrt(assemble((((area * inner((w - ref), (w - ref))) * ds) + ((area * inner((w('+') - ref('+')), (w('+') - ref('+')))) * dS))))
def propagate_INT_lr_bits(database, tiles_by_grid, tile_frames_map, verbose=False):
    """Propagate CLB_IO_CLK base addresses from solved INT_L/INT_R tiles to
    their horizontally paired partner tiles.

    An INT_L's partner INT_R sits one grid column to the right at
    baseaddr+128 (and vice versa).  Non-INT tiles, tiles without solved
    bits, and pairs falling off the grid edge are skipped.
    """
    (int_frames, int_words, _) = localutil.get_entry('INT', 'CLB_IO_CLK')
    (verbose and print(''))
    for tile in database:
        if (database[tile]['type'] not in ['INT_L', 'INT_R']):
            continue
        if (not database[tile]['bits']):
            # Nothing solved to propagate from.
            continue
        grid_x = database[tile]['grid_x']
        grid_y = database[tile]['grid_y']
        baseaddr = int(database[tile]['bits']['CLB_IO_CLK']['baseaddr'], 0)
        offset = database[tile]['bits']['CLB_IO_CLK']['offset']
        if (database[tile]['type'] == 'INT_L'):
            # Partner INT_R: one column right, base address +128.
            grid_x += 1
            baseaddr = (baseaddr + 128)
        elif (database[tile]['type'] == 'INT_R'):
            # Partner INT_L: one column left, base address -128.
            grid_x -= 1
            baseaddr = (baseaddr - 128)
        else:
            assert 0, database[tile]['type']
        if ((grid_x, grid_y) not in tiles_by_grid):
            (verbose and print(' Skip edge'))
            continue
        other_tile = tiles_by_grid[(grid_x, grid_y)]
        # Sanity: partners must be the opposite INT flavor.
        if (database[tile]['type'] == 'INT_L'):
            assert (database[other_tile]['type'] == 'INT_R')
        elif (database[tile]['type'] == 'INT_R'):
            assert (database[other_tile]['type'] == 'INT_L')
        else:
            assert 0
        localutil.add_tile_bits(other_tile, database[other_tile], baseaddr, offset, int_frames, int_words, tile_frames_map)
class OptionPlotoptionsLineSonificationContexttracksPointgrouping(Options):
    """Point-grouping options for line-series sonification context tracks.

    NOTE(review): the paired same-name getter/setter methods look like
    stripped @property / @<name>.setter decorators — confirm upstream.
    """

    def algorithm(self):
        # Grouping algorithm; defaults to 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Grouping is on by default.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Time window per group; defaults to 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Point property grouped on; defaults to 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
def test():
    """CLI entry point: run a plugin's self-test or dispatch a LUT call.

    argv[1] names either a plugin (instantiated and .test()ed) or an entry
    in CALL_LUT; extra args are forwarded as plugin/function names.  A JSON
    dispatcher result is written to test-out.json.
    """
    plugin_lut = get_plugin_lut()
    if len(sys.argv) < 2:
        print('ERROR:')
        print('You must specify a plugin to test!')
        debug_print(CALL_LUT, plugin_lut)
        return
    target = sys.argv[1]
    if target in plugin_lut:
        # Direct plugin self-test.
        instance = plugin_lut[target]()
        instance.test()
        print(instance)
        return
    if target not in CALL_LUT:
        print('Unknown arg!')
        debug_print(CALL_LUT, plugin_lut)
        return
    # Dispatcher call: needs at least a plugin and function name.
    if len(sys.argv) >= 4:
        plug_name, func_name = sys.argv[2], sys.argv[3]
        ret = CALL_LUT[target](plug_name, func_name, *sys.argv[4:])
    else:
        print('You need to specify at least the plugin + function to execute to test-dispatcher')
        print('Available calls:')
        ret = CALL_LUT[target]()
    if ret:
        with open('test-out.json', 'w') as fp:
            fp.write(json.dumps(ret, indent=4))
    else:
        print('No call response')
class GameObjectMeta(type):
    """Metaclass for game objects: freezes list/set class attributes into
    tuples, qualifies method names as 'Class.method' (patching co_name to
    match), and records each class plus its inheritance edges in the global
    registries `all_gameobjects` and `game_objects_hierarchy`."""

    def __new__(mcls, clsname, bases, kw):
        for (k, v) in kw.items():
            if isinstance(v, (list, set)):
                # Freeze mutable class attributes into tuples.
                kw[k] = tuple(v)
            elif isinstance(v, types.FunctionType):
                # Qualify the name so tracebacks/profilers show the class.
                v.__name__ = f'{clsname}.{v.__name__}'
                v.__code__ = v.__code__.replace(co_name=v.__name__)
        cls = super().__new__(mcls, clsname, bases, kw)
        all_gameobjects.add(cls)
        for b in bases:
            game_objects_hierarchy.add((b, cls))
        return cls

    # NOTE(review): the string below is dead (commented-out) code preserved
    # as a bare class-level string literal in the original source.
    '\n    def __getattribute__(cls, name):\n        value = type.__getattribute__(cls, name)\n        if isinstance(value, classmethod):\n            try:\n                rep_class = cls.rep_class(cls)\n                return lambda *a, **k: value.__get__(None, rep_class)\n            except Exception:\n                pass\n\n        return value\n    '


def _dump_gameobject_hierarchy():
    """Dump the recorded class hierarchy as Graphviz dot to /dev/shm/gomap.dot."""
    with open('/dev/shm/gomap.dot', 'w') as f:
        f.write('digraph {\nrankdir=LR;\n')
        f.write('\n'.join([('"%s" -> "%s";' % (a.__name__, b.__name__)) for (a, b) in game_objects_hierarchy]))
        f.write('}')
class PopCoordinates(ModelNormal):
    """Generated OpenAPI model: latitude/longitude of a point of presence.

    NOTE(review): the bare `_property` and `_js_args_to_python_args`
    statements look like stripped decorators (likely @cached_property and
    @convert_js_args_to_python_args, with @classmethod on
    _from_openapi_data); confirm against the generated upstream source.
    """
    allowed_values = {}
    validations = {}

    _property

    def additional_properties_type():
        # Types accepted for properties not listed in openapi_types.
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property

    def openapi_types():
        # Declared property name -> accepted type tuple.
        return {'latitude': (float,), 'longitude': (float,)}

    _property

    def discriminator():
        # No polymorphic discriminator for this model.
        return None

    attribute_map = {'latitude': 'latitude', 'longitude': 'longitude'}
    read_only_vars = {}
    _composed_schemas = {}

    _js_args_to_python_args

    def _from_openapi_data(cls, latitude, longitude, *args, **kwargs):
        """Alternate constructor used when deserializing API payloads
        (also accepts read-only attributes, unlike __init__)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.latitude = latitude
        self.longitude = longitude
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys per the client configuration.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    # Instance attributes that bypass the _data_store bookkeeping.
    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args

    def __init__(self, latitude, longitude, *args, **kwargs):
        """Construct from user code; read-only attributes are rejected."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        self.latitude = latitude
        self.longitude = longitude
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys per the client configuration.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class PEP621DependencyGetter(DependencyGetter):
    """Extracts dependencies declared via PEP 621 `[project]` metadata in
    pyproject.toml (both `dependencies` and `optional-dependencies`).

    NOTE(review): `_is_optional`, `_is_conditional` and
    `_find_dependency_name_in` take no `self`/`cls` — presumably
    @staticmethod decorators were lost from the original source.
    """

    def get(self) -> DependenciesExtract:
        """Return all regular plus optional dependencies (no dev group)."""
        dependencies = [*self._get_dependencies(), *itertools.chain(*self._get_optional_dependencies().values())]
        self._log_dependencies(dependencies)
        return DependenciesExtract(dependencies, [])

    def _get_dependencies(self) -> list[Dependency]:
        # [project].dependencies as PEP 508 requirement strings.
        pyproject_data = load_pyproject_toml(self.config)
        dependency_strings: list[str] = pyproject_data['project']['dependencies']
        return self._extract_pep_508_dependencies(dependency_strings, self.package_module_name_map)

    def _get_optional_dependencies(self) -> dict[(str, list[Dependency])]:
        # Group name -> parsed deps for [project.optional-dependencies].
        pyproject_data = load_pyproject_toml(self.config)
        return {group: self._extract_pep_508_dependencies(dependencies, self.package_module_name_map) for (group, dependencies) in pyproject_data['project'].get('optional-dependencies', {}).items()}

    def _extract_pep_508_dependencies(self, dependencies: list[str], package_module_name_map: Mapping[(str, Sequence[str])]) -> list[Dependency]:
        """Parse PEP 508 requirement strings into Dependency objects."""
        extracted_dependencies = []
        for spec in dependencies:
            name = self._find_dependency_name_in(spec)
            if name:
                extracted_dependencies.append(Dependency(name, self.config, conditional=self._is_conditional(spec), optional=self._is_optional(spec), module_names=package_module_name_map.get(name)))
        return extracted_dependencies

    def _is_optional(dependency_specification: str) -> bool:
        # An extras marker like `pkg[extra]` makes the spec optional.
        return bool(re.findall('\\[([a-zA-Z0-9-]+?)\\]', dependency_specification))

    def _is_conditional(dependency_specification: str) -> bool:
        # Environment markers follow a semicolon in PEP 508.
        return (';' in dependency_specification)

    def _find_dependency_name_in(spec: str) -> (str | None):
        # The first run of name characters is the distribution name.
        match = re.search('[a-zA-Z0-9-_]+', spec)
        if match:
            return match.group(0)
        return None
class cnLedPlugin(QtDesigner.QPyDesignerCustomWidgetPlugin):
    """Qt Designer plugin descriptor exposing the cnLed custom widget.

    Identity strings (name, group, tooltip, ...) come from module-level
    constants (NOMCLASSEWIDGET, GROUPEWIDGET, ...).
    """

    def __init__(self, parent=None):
        super().__init__(parent)
        self.initialized = False

    def initialize(self, core):
        # Designer calls this once; guard against double initialization.
        if self.initialized:
            return
        self.initialized = True

    def isInitialized(self):
        return self.initialized

    def createWidget(self, parent):
        # Instantiate the actual widget class for the form editor.
        return CLASSEWIDGET(parent)

    def name(self):
        return NOMCLASSEWIDGET

    def group(self):
        # Palette group the widget appears under in Designer.
        return GROUPEWIDGET

    def icon(self):
        return ICONEWIDGET

    def toolTip(self):
        return TEXTETOOLTIP

    def whatsThis(self):
        return TEXTEWHATSTHIS

    def isContainer(self):
        # The LED widget cannot host child widgets.
        return False

    def domXml(self):
        # Default XML inserted when the widget is dropped on a form; the
        # 'Couleur' property defaults to 'Rouge' (red).
        return '<widget class="{}" name="{}">\n <property name="toolTip" >\n <string>{}</string>\n </property>\n <property name="whatsThis" >\n <string>{}</string>\n </property>\n <property name="Couleur" >\n <string>Rouge</string>\n </property>\n</widget>\n'.format(NOMCLASSEWIDGET, NOMWIDGET, TEXTETOOLTIP, TEXTEWHATSTHIS)

    def includeFile(self):
        return FICHIERWIDGET
class ReactTest(unittest.TestCase):
    """Exercises the reactive-cell system: InputCell values, ComputeCell
    derivation/propagation, and callback add/remove/fire semantics."""

    def test_input_cells_have_a_value(self):
        input = InputCell(10)
        self.assertEqual(input.value, 10)

    def test_an_input_cell_s_value_can_be_set(self):
        input = InputCell(4)
        input.value = 20
        self.assertEqual(input.value, 20)

    def test_compute_cells_calculate_initial_value(self):
        input = InputCell(1)
        output = ComputeCell([input], (lambda inputs: (inputs[0] + 1)))
        self.assertEqual(output.value, 2)

    def test_compute_cells_take_inputs_in_the_right_order(self):
        one = InputCell(1)
        two = InputCell(2)
        output = ComputeCell([one, two], (lambda inputs: (inputs[0] + (inputs[1] * 10))))
        self.assertEqual(output.value, 21)

    def test_compute_cells_update_value_when_dependencies_are_changed(self):
        input = InputCell(1)
        output = ComputeCell([input], (lambda inputs: (inputs[0] + 1)))
        input.value = 3
        self.assertEqual(output.value, 4)

    def test_compute_cells_can_depend_on_other_compute_cells(self):
        input = InputCell(1)
        times_two = ComputeCell([input], (lambda inputs: (inputs[0] * 2)))
        times_thirty = ComputeCell([input], (lambda inputs: (inputs[0] * 30)))
        output = ComputeCell([times_two, times_thirty], (lambda inputs: (inputs[0] + inputs[1])))
        self.assertEqual(output.value, 32)
        input.value = 3
        self.assertEqual(output.value, 96)

    def test_compute_cells_fire_callbacks(self):
        input = InputCell(1)
        output = ComputeCell([input], (lambda inputs: (inputs[0] + 1)))
        cb1_observer = []
        callback1 = self.callback_factory(cb1_observer)
        output.add_callback(callback1)
        input.value = 3
        self.assertEqual(cb1_observer[(- 1)], 4)

    def test_callback_cells_only_fire_on_change(self):
        input = InputCell(1)
        output = ComputeCell([input], (lambda inputs: (111 if (inputs[0] < 3) else 222)))
        cb1_observer = []
        callback1 = self.callback_factory(cb1_observer)
        output.add_callback(callback1)
        # 1 -> 2 leaves the computed value at 111: no callback.
        input.value = 2
        self.assertEqual(cb1_observer, [])
        input.value = 4
        self.assertEqual(cb1_observer[(- 1)], 222)

    def test_callbacks_do_not_report_already_reported_values(self):
        input = InputCell(1)
        output = ComputeCell([input], (lambda inputs: (inputs[0] + 1)))
        cb1_observer = []
        callback1 = self.callback_factory(cb1_observer)
        output.add_callback(callback1)
        input.value = 2
        self.assertEqual(cb1_observer[(- 1)], 3)
        input.value = 3
        self.assertEqual(cb1_observer[(- 1)], 4)

    def test_callbacks_can_fire_from_multiple_cells(self):
        input = InputCell(1)
        plus_one = ComputeCell([input], (lambda inputs: (inputs[0] + 1)))
        minus_one = ComputeCell([input], (lambda inputs: (inputs[0] - 1)))
        cb1_observer = []
        cb2_observer = []
        callback1 = self.callback_factory(cb1_observer)
        callback2 = self.callback_factory(cb2_observer)
        plus_one.add_callback(callback1)
        minus_one.add_callback(callback2)
        input.value = 10
        self.assertEqual(cb1_observer[(- 1)], 11)
        self.assertEqual(cb2_observer[(- 1)], 9)

    def test_callbacks_can_be_added_and_removed(self):
        input = InputCell(11)
        output = ComputeCell([input], (lambda inputs: (inputs[0] + 1)))
        cb1_observer = []
        cb2_observer = []
        cb3_observer = []
        callback1 = self.callback_factory(cb1_observer)
        callback2 = self.callback_factory(cb2_observer)
        callback3 = self.callback_factory(cb3_observer)
        output.add_callback(callback1)
        output.add_callback(callback2)
        input.value = 31
        self.assertEqual(cb1_observer[(- 1)], 32)
        self.assertEqual(cb2_observer[(- 1)], 32)
        # Removed callbacks must stop firing; new ones must start.
        output.remove_callback(callback1)
        output.add_callback(callback3)
        input.value = 41
        self.assertEqual(len(cb1_observer), 1)
        self.assertEqual(cb2_observer[(- 1)], 42)
        self.assertEqual(cb3_observer[(- 1)], 42)

    def test_removing_a_callback_multiple_times_doesn_t_interfere_with_other_callbacks(self):
        input = InputCell(1)
        output = ComputeCell([input], (lambda inputs: (inputs[0] + 1)))
        cb1_observer = []
        cb2_observer = []
        callback1 = self.callback_factory(cb1_observer)
        callback2 = self.callback_factory(cb2_observer)
        output.add_callback(callback1)
        output.add_callback(callback2)
        # Redundant removals must be harmless.
        output.remove_callback(callback1)
        output.remove_callback(callback1)
        output.remove_callback(callback1)
        input.value = 2
        self.assertEqual(cb1_observer, [])
        self.assertEqual(cb2_observer[(- 1)], 3)

    def test_callbacks_should_only_be_called_once_even_if_multiple_dependencies_change(self):
        input = InputCell(1)
        plus_one = ComputeCell([input], (lambda inputs: (inputs[0] + 1)))
        minus_one1 = ComputeCell([input], (lambda inputs: (inputs[0] - 1)))
        minus_one2 = ComputeCell([minus_one1], (lambda inputs: (inputs[0] - 1)))
        output = ComputeCell([plus_one, minus_one2], (lambda inputs: (inputs[0] * inputs[1])))
        cb1_observer = []
        callback1 = self.callback_factory(cb1_observer)
        output.add_callback(callback1)
        input.value = 4
        self.assertEqual(cb1_observer[(- 1)], 10)

    def test_callbacks_should_not_be_called_if_dependencies_change_but_output_value_doesn_t_change(self):
        input = InputCell(1)
        plus_one = ComputeCell([input], (lambda inputs: (inputs[0] + 1)))
        minus_one = ComputeCell([input], (lambda inputs: (inputs[0] - 1)))
        # (x+1) - (x-1) is always 2, so the callback must never fire.
        always_two = ComputeCell([plus_one, minus_one], (lambda inputs: (inputs[0] - inputs[1])))
        cb1_observer = []
        callback1 = self.callback_factory(cb1_observer)
        always_two.add_callback(callback1)
        input.value = 2
        self.assertEqual(cb1_observer, [])
        input.value = 3
        self.assertEqual(cb1_observer, [])
        input.value = 4
        self.assertEqual(cb1_observer, [])
        input.value = 5
        self.assertEqual(cb1_observer, [])

    def callback_factory(self, observer):
        """Return a callback that appends each reported value to *observer*."""
        def callback(observer, value):
            observer.append(value)
        return partial(callback, observer)
class YamlConfigEntry(object):
    """Descriptor for a single YAML configuration switch.

    ``switch`` names the key looked up in the config file;
    ``config_value_type`` is the type the value is expected to carry.
    """

    switch: str
    config_value_type: typing.Type = str

    def read_from_file(self, cfg: ConfigFile, transform: typing.Optional[typing.Callable]=None) -> typing.Optional[typing.Any]:
        """Look this entry up in *cfg*, optionally applying *transform*.

        Returns ``None`` when *cfg* is falsy, the value is absent, or any
        error occurs during lookup/transformation (best-effort read).
        """
        if not cfg:
            return None
        try:
            value = cfg.get(self)
            if _exists(value):
                return transform(value) if transform else value
        except Exception:
            # Best-effort: any lookup/transform failure degrades to "not set".
            pass
        return None
class OptionPlotoptionsStreamgraphSonificationDefaultinstrumentoptionsMappingLowpass(Options):
    """Lowpass-filter mapping options for streamgraph sonification defaults."""

    def frequency(self) -> 'OptionPlotoptionsStreamgraphSonificationDefaultinstrumentoptionsMappingLowpassFrequency':
        """Return the nested ``frequency`` sub-options object."""
        sub_cls = OptionPlotoptionsStreamgraphSonificationDefaultinstrumentoptionsMappingLowpassFrequency
        return self._config_sub_data('frequency', sub_cls)

    def resonance(self) -> 'OptionPlotoptionsStreamgraphSonificationDefaultinstrumentoptionsMappingLowpassResonance':
        """Return the nested ``resonance`` sub-options object."""
        sub_cls = OptionPlotoptionsStreamgraphSonificationDefaultinstrumentoptionsMappingLowpassResonance
        return self._config_sub_data('resonance', sub_cls)
def upgrade_apt():
    """Run ``apt upgrade``, streaming its output into ``Settings.UpdateString``.

    Progress is published through the module-global ``Settings.UpdateString``:
    a leading ``'!'`` marks an update in progress, a leading ``'='`` a finished
    (or failed) run.

    Returns:
        bool: True on success; False when another update is already running
        or the upgrade command raised.
    """
    # A leading '!' means another update is still running -- refuse to start.
    if len(Settings.UpdateString) > 0 and Settings.UpdateString[0] == '!':
        misc.addLog(rpieGlobals.LOG_LEVEL_INFO, 'Update in progress')
        return False
    ustr = 'Upgrading APT packages<br>Please do not interrupt!'
    misc.addLog(rpieGlobals.LOG_LEVEL_INFO, ustr)
    ustr += "<p style='font-weight:normal;font-size:12px;text-align:left'>"
    Settings.UpdateString = ('!' + ustr)
    try:
        # FIX: the os.popen stream was never closed, leaking a file descriptor
        # per call; os.popen supports the context-manager protocol, so close it.
        with os.popen(OS.cmdline_rootcorrect('sudo apt upgrade -y')) as output:
            for line in output:
                # Keep the progress buffer bounded; restart it when too large.
                if len(Settings.UpdateString) > 2000:
                    Settings.UpdateString = ('!' + ustr)
                Settings.UpdateString += (line + '<br>')
    except Exception as e:
        ustr = ('APT upgrade failed ' + str(e))
        Settings.UpdateString = ('=' + ustr)
        misc.addLog(rpieGlobals.LOG_LEVEL_ERROR, ustr)
        return False
    ustr = 'APT upgrade ended'
    Settings.UpdateString = ('=' + ustr)
    misc.addLog(rpieGlobals.LOG_LEVEL_INFO, ustr)
    return True
((detect_target().name() == 'rocm'), 'Not supported by ROCM.')
# NOTE(review): the tuple expression above (and its repetitions inside the
# class) looks like the argument of a stripped ``@unittest.skipIf(...)``
# decorator -- confirm against the original source.


class ParallelGemmCatFusionTestCase(unittest.TestCase):
    """Tests for the parallel-gemm + concatenate fusion graph pass.

    Each helper builds a split -> N x gemm_rcr_bias -> concatenate graph,
    compiles it, and checks (a) which perm102_bmm_* op the fused graph
    contains and (b) numerical agreement against an eager PyTorch reference.
    """

    def __init__(self, *args, **kwargs):
        super(ParallelGemmCatFusionTestCase, self).__init__(*args, **kwargs)
        # Monotonic counter used to give each compiled module a unique dll name.
        self._test_id = 0

    def _fuse_2_split_parallel_gemm_cat(self, b: int, ms: Sequence[int], n: int, k: int, dtype: str='float16'):
        """Two split sources feeding one cat: fusion must NOT kick in."""
        _LOGGER.info(f'_fuse_2_split_parallel_gemm_cat, b: {b}, ms: {ms}, n: {n}, k: {k}')
        X1 = Tensor(shape=[IntVar(ms, 'input_batch'), IntImm((b * k))], dtype=dtype, name='X1', is_input=True)
        X2 = Tensor(shape=[IntVar(ms, 'input_batch'), IntImm((b * k))], dtype=dtype, name='X2', is_input=True)
        Ws = []
        Bs = []
        for i in range((2 * b)):
            W = Tensor(shape=[IntImm(n), IntImm(k)], dtype=dtype, name=f'W{i}', is_input=True)
            Ws.append(W)
            B = Tensor(shape=[IntImm(n)], dtype=dtype, name=f'B{i}', is_input=True)
            Bs.append(B)
        X3 = ops.split()(X1, k, dim=(- 1))
        X4 = ops.split()(X2, k, dim=(- 1))
        cat_inputs = []
        gemm_inputs = (X3 + X4)
        for i in range((2 * b)):
            X5 = ops.gemm_rcr_bias()(gemm_inputs[i], Ws[i], Bs[i])
            cat_inputs.append(X5)
        cat_output = ops.concatenate()(cat_inputs, dim=(- 1))
        cat_output._attrs['name'] = 'output0'
        cat_output._attrs['is_output'] = True
        sorted_graph = toposort(cat_output)
        new_sorted_graph = fuse_parallel_gemms(sorted_graph)
        sorted_ops = graph_utils.get_sorted_ops(new_sorted_graph)
        # With two distinct split sources the pass must leave the graph alone.
        assert (not has_op(sorted_ops, 'perm102_bmm_rrr_bias')), 'the final graph should not have op perm102_bmm_rrr_bias'
        assert (not has_op(sorted_ops, 'perm102_bmm_rcr_bias')), 'the final graph should not have op perm102_bmm_rcr_bias'

    def _fuse_parallel_gemm_cat(self, b: int, ms: Sequence[int], n: int, k: int, perm102_bmm_op: str, has_tanh: bool=True, reshape_weight: bool=False, dtype: str='float16'):
        """Single split source: fusion should produce *perm102_bmm_op*."""
        _LOGGER.info(f'_fuse_parallel_gemm_cat, b: {b}, ms: {ms}, n: {n}, k: {k}')
        X = Tensor(shape=[IntVar(ms, 'input_batch'), IntImm((b * k))], dtype=dtype, name='X', is_input=True)
        Ws = []
        Bs = []
        for i in range(b):
            W = Tensor(shape=[IntImm(n), IntImm(k)], dtype=dtype, name=f'W{i}')
            if reshape_weight:
                W = ops.reshape()(W, [n, k])
            Ws.append(W)
            B = Tensor(shape=[IntImm(n)], dtype=dtype, name=f'B{i}')
            Bs.append(B)
        X1 = ops.split()(X, k, dim=(- 1))
        cat_inputs = []
        for i in range(b):
            X2 = (ops.elementwise(FuncEnum.TANH)(X1[i]) if has_tanh else X1[i])
            X3 = ops.gemm_rcr_bias()(X2, Ws[i], Bs[i])
            cat_inputs.append(X3)
        cat_output = ops.concatenate()(cat_inputs, dim=(- 1))
        cat_output._attrs['name'] = 'output0'
        cat_output._attrs['is_output'] = True
        constants = {}
        for i in range(b):
            constants[f'W{i}'] = get_random_torch_tensor([n, k], dtype)
            constants[f'B{i}'] = get_random_torch_tensor([n], dtype)
        target = detect_target()
        with compile_model([cat_output], target, './tmp', f'fuse_parallel_gemm_cat_{dtype}', dll_name=f'test_{self._test_id}.so', constants=constants) as module:
            self._test_id += 1
            sorted_graph = module.debug_sorted_graph
            sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
            assert has_op(sorted_ops, perm102_bmm_op), f'the final graph does not have op {perm102_bmm_op}'
            if (not has_tanh):
                # Without the tanh the split itself should be fused away too.
                assert (not has_op(sorted_ops, 'split')), 'the final graph has split op, but it should not'
            # Numerical check against an eager PyTorch reference for each batch size.
            for m in ms:
                x_pt = get_random_torch_tensor([m, (b * k)], dtype)
                x1_pt = torch.split(x_pt, k, dim=(- 1))
                cat_inputs_pt = []
                for i in range(b):
                    x2_pt = (x1_pt[i].tanh() if has_tanh else x1_pt[i])
                    x3_pt = torch.nn.functional.linear(x2_pt, constants[f'W{i}'], constants[f'B{i}'])
                    cat_inputs_pt.append(x3_pt)
                cat_output_pt = torch.cat(cat_inputs_pt, dim=(- 1))
                out = get_torch_empty_tensor([m, (b * n)], dtype)
                module.run_with_tensors([x_pt], [out])
                self.assertTrue(torch.allclose(out, cat_output_pt, atol=0.05, rtol=0.05))

    def test_fuse_parallel_gemm_cat_fp16(self):
        """fp16 coverage across shapes, rrr/rcr layouts, tanh on/off, reshaped W."""
        self._fuse_parallel_gemm_cat(b=4, ms=[256, 512], n=128, k=64, perm102_bmm_op='perm102_bmm_rrr_bias')
        self._fuse_parallel_gemm_cat(b=4, ms=[256, 512], n=128, k=100, perm102_bmm_op='perm102_bmm_rrr_bias')
        self._fuse_parallel_gemm_cat(b=4, ms=[128, 256], n=100, k=32, perm102_bmm_op='perm102_bmm_rcr_bias')
        self._fuse_parallel_gemm_cat(b=16, ms=[15, 31], n=7, k=5, perm102_bmm_op='perm102_bmm_rrr_bias')
        self._fuse_parallel_gemm_cat(b=4, ms=[128, 256], n=100, k=32, perm102_bmm_op='perm102_bmm_rcr_bias', reshape_weight=True)
        self._fuse_parallel_gemm_cat(b=4, ms=[256, 512], n=128, k=64, perm102_bmm_op='perm102_bmm_rrr_bias', has_tanh=False)
        self._fuse_parallel_gemm_cat(b=4, ms=[128, 256], n=100, k=32, perm102_bmm_op='perm102_bmm_rcr_bias', has_tanh=False)
        self._fuse_parallel_gemm_cat(b=16, ms=[15, 31], n=7, k=5, perm102_bmm_op='perm102_bmm_rrr_bias', has_tanh=False)
        self._fuse_parallel_gemm_cat(b=16, ms=[1024, 2048], n=100, k=128, perm102_bmm_op='perm102_bmm_rcr_bias', has_tanh=False)
        self._fuse_2_split_parallel_gemm_cat(b=4, ms=[256, 512], n=128, k=64)

    # NOTE(review): likely a stripped @unittest.skipIf decorator -- see above.
    ((detect_target().name() == 'rocm'), 'Not supported by ROCM.')

    def test_fuse_parallel_gemm_cat_fp32_sm80(self):
        """fp32 variants of the fusion coverage (SM80 targets)."""
        self._fuse_parallel_gemm_cat(b=4, ms=[256, 512], n=128, k=64, perm102_bmm_op='perm102_bmm_rrr_bias', dtype='float32')
        self._fuse_parallel_gemm_cat(b=4, ms=[128, 256], n=10, k=32, perm102_bmm_op='perm102_bmm_rcr_bias', dtype='float32')
        self._fuse_parallel_gemm_cat(b=4, ms=[128, 256], n=10, k=32, perm102_bmm_op='perm102_bmm_rcr_bias', reshape_weight=True, dtype='float32')
        self._fuse_parallel_gemm_cat(b=4, ms=[256, 512], n=32, k=64, perm102_bmm_op='perm102_bmm_rrr_bias', has_tanh=False, dtype='float32')
        self._fuse_parallel_gemm_cat(b=4, ms=[128, 256], n=10, k=32, perm102_bmm_op='perm102_bmm_rcr_bias', has_tanh=False, dtype='float32')
        self._fuse_2_split_parallel_gemm_cat(b=4, ms=[256, 512], n=128, k=64, dtype='float32')

    def _test_fuse_parallel_gemm_cat_partial(self, b1: int, b2: int, ms: Sequence[int], n: int, k: int, has_tanh: bool=True, dtype: str='float16'):
        """Mixed graph: fusible gemms plus pre-existing perm102_bmm_* branches."""
        _LOGGER.info(f'_fuse_parallel_gemm_cat_partial, b1: {b1}, b2: {b2}, ms: {ms}, n: {n}, k: {k}')
        batch_dim = IntVar(ms, 'input_batch')
        b = (b1 + b2)
        X1 = Tensor(shape=[batch_dim, IntImm((b1 * k))], dtype=dtype, name='X1', is_input=True)
        X2 = Tensor(shape=[batch_dim, IntImm((b2 * k))], dtype=dtype, name='X2', is_input=True)
        Ws = []
        Bs = []
        for i in range(b):
            W = Tensor(shape=[IntImm(n), IntImm(k)], dtype=dtype, name=f'W{i}')
            Ws.append(W)
            B = Tensor(shape=[IntImm(n)], dtype=dtype, name=f'B{i}')
            Bs.append(B)
        cat_inputs = []
        # First group of b1 gemms from splitting X1.
        X3 = ops.split()(X1, k, dim=(- 1))
        for i in range(b1):
            X5 = (ops.elementwise(FuncEnum.TANH)(X3[i]) if has_tanh else X3[i])
            X6 = ops.gemm_rcr_bias()(X5, Ws[i], Bs[i])
            cat_inputs.append(X6)
        # Four pre-fused perm102_bmm_* branches built directly from X1.
        X7 = ops.reshape()(X1, [(- 1), b1, k])
        W = Tensor(shape=[IntImm(b1), IntImm(n), IntImm(k)], dtype=dtype, name='W')
        B = Tensor(shape=[IntImm(b1), IntImm(n)], dtype=dtype, name='B')
        WT = ops.permute021()(W)
        X8 = ops.perm102_bmm_rcr()(X7, W)
        X9 = ops.reshape()(X8, [batch_dim, (- 1)])
        cat_inputs.append(X9)
        X10 = ops.perm102_bmm_rcr_bias()(X7, W, B)
        X11 = ops.reshape()(X10, [batch_dim, (- 1)])
        cat_inputs.append(X11)
        X12 = ops.perm102_bmm_rrr()(X7, WT)
        X13 = ops.reshape()(X12, [batch_dim, (- 1)])
        cat_inputs.append(X13)
        X14 = ops.perm102_bmm_rrr_bias()(X7, WT, B)
        X15 = ops.reshape()(X14, [batch_dim, (- 1)])
        cat_inputs.append(X15)
        # Second group of b2 gemms from splitting X2.
        X4 = ops.split()(X2, k, dim=(- 1))
        for i in range(b2):
            X5 = (ops.elementwise(FuncEnum.TANH)(X4[i]) if has_tanh else X4[i])
            X6 = ops.gemm_rcr_bias()(X5, Ws[(i + b1)], Bs[(i + b1)])
            cat_inputs.append(X6)
        cat_output = ops.concatenate()(cat_inputs, dim=(- 1))
        cat_output._attrs['name'] = 'output0'
        cat_output._attrs['is_output'] = True
        constants = {}
        for i in range(b):
            constants[f'W{i}'] = get_random_torch_tensor([n, k], dtype)
            constants[f'B{i}'] = get_random_torch_tensor([n], dtype)
        constants['W'] = get_random_torch_tensor([b1, n, k], dtype)
        constants['B'] = get_random_torch_tensor([b1, n], dtype)
        target = detect_target()
        with compile_model([cat_output], target, './tmp', f'fuse_parallel_gemm_cat_{dtype}', dll_name=f'test_{self._test_id}.so', constants=constants) as module:
            self._test_id += 1
            sorted_graph = module.debug_sorted_graph
            sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
            # All standalone gemms must have been fused away.
            assert (not has_op(sorted_ops, 'gemm_rcr_bias')), 'the final graph still has op gemm_rcr_bias'
            if (not has_tanh):
                assert (not has_op(sorted_ops, 'split')), 'the final graph has split op, but it should not'
            for m in ms:
                x_pt = get_random_torch_tensor([m, (b1 * k)], dtype)
                x1_pt = torch.split(x_pt, k, dim=(- 1))
                cat_inputs_pt = []
                for i in range(b1):
                    x3_pt = (x1_pt[i].tanh() if has_tanh else x1_pt[i])
                    x4_pt = torch.nn.functional.linear(x3_pt, constants[f'W{i}'], constants[f'B{i}'])
                    cat_inputs_pt.append(x4_pt)
                # Reference for the four perm102_bmm branches.
                x5_pt = x_pt.reshape(m, b1, k).permute([1, 0, 2])
                x6_pt = torch.bmm(x5_pt, constants['W'].permute([0, 2, 1]))
                x7_pt = x6_pt.permute([1, 0, 2])
                x8_pt = x7_pt.reshape([m, (- 1)])
                cat_inputs_pt.append(x8_pt)
                x9_pt = (x7_pt + constants['B']).reshape([m, (- 1)])
                cat_inputs_pt.append(x9_pt)
                cat_inputs_pt.append(x8_pt)
                cat_inputs_pt.append(x9_pt)
                xx_pt = get_random_torch_tensor([m, (b2 * k)], dtype)
                x2_pt = torch.split(xx_pt, k, dim=(- 1))
                for i in range(b2):
                    x3_pt = (x2_pt[i].tanh() if has_tanh else x2_pt[i])
                    x4_pt = torch.nn.functional.linear(x3_pt, constants[f'W{(i + b1)}'], constants[f'B{(i + b1)}'])
                    cat_inputs_pt.append(x4_pt)
                cat_output_pt = torch.cat(cat_inputs_pt, dim=(- 1))
                out = get_torch_empty_tensor(cat_output_pt.size(), dtype)
                module.run_with_tensors({'X1': x_pt, 'X2': xx_pt}, {'output0': out})
                self.assertTrue(torch.allclose(out, cat_output_pt, atol=0.05, rtol=0.05))

    def test_fuse_parallel_gemm_cat_partial_fp16(self):
        """fp16 partial-fusion coverage."""
        self._test_fuse_parallel_gemm_cat_partial(4, 4, [128, 256], 32, 64, True)
        self._test_fuse_parallel_gemm_cat_partial(4, 4, [128, 256], 32, 64, False)
        self._test_fuse_parallel_gemm_cat_partial(3, 3, [128, 256], 30, 66, True)
        self._test_fuse_parallel_gemm_cat_partial(2, 2, [128, 256], 33, 55, True)

    # NOTE(review): likely a stripped @unittest.skipIf decorator -- see above.
    ((detect_target().name() == 'rocm'), 'Not supported by ROCM.')

    def test_fuse_parallel_gemm_cat_partial_fp32_sm80(self):
        """fp32 partial-fusion coverage (SM80 targets)."""
        self._test_fuse_parallel_gemm_cat_partial(4, 4, [128, 256], 32, 64, True, dtype='float32')
        self._test_fuse_parallel_gemm_cat_partial(4, 4, [128, 256], 32, 64, False, dtype='float32')

    def _test_multi_parallel_gemm_cat_groups(self, m, nk_groups, num_unfused_ops=0, dtype='float16'):
        """Several (n, k) groups; *num_unfused_ops* gemms are expected to remain."""
        (inputs, constants) = _prepare_inputs_and_constants(m, nk_groups, dtype)
        (outputs, module) = _prepare_ait_module(m, nk_groups, constants, dtype, test_idx=self._test_id)
        self._test_id += 1
        with module:
            sorted_graph = module.debug_sorted_graph
            sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
            actual_unfused_ops = count_ops(sorted_ops, 'gemm_rcr_bias')
            assert (actual_unfused_ops == num_unfused_ops), f'Expecting {num_unfused_ops} unfused gemm_rcr_bias ops, found {actual_unfused_ops}'
            ys = []
            for (i, input) in enumerate(inputs):
                tanh = input.tanh()
                y = torch.nn.functional.linear(tanh, constants[f'w_{i}'], constants[f'b_{i}'])
                ys.append(y)
            pt_y = torch.cat(ys, dim=(- 1))
            module.run_with_tensors(inputs, outputs)
            self.assertTrue(torch.allclose(pt_y, outputs[0], atol=0.05, rtol=0.05))

    def test_multi_parallel_gemm_cat_groups_fp16(self):
        """fp16 multi-group coverage; second case expects 2 unfused gemms."""
        self._test_multi_parallel_gemm_cat_groups(256, (((([[128, 64]] * 2) + ([[128, 120]] * 4)) + ([[128, 72]] * 2)) + ([[128, 64]] * 2)))
        self._test_multi_parallel_gemm_cat_groups(256, (((([[128, 64]] * 2) + [[128, 120]]) + ([[128, 72]] * 2)) + [[128, 64]]), 2)

    # NOTE(review): likely a stripped @unittest.skipIf decorator -- see above.
    ((detect_target().name() == 'rocm'), 'Not supported by ROCM.')

    def test_multi_parallel_gemm_cat_groups_fp32_sm80(self):
        """fp32 multi-group coverage (SM80 targets)."""
        self._test_multi_parallel_gemm_cat_groups(256, (((([[128, 64]] * 2) + ([[128, 120]] * 4)) + ([[128, 72]] * 2)) + ([[128, 64]] * 2)), dtype='float32')

    def _skip_fuse_parallel_gemm_output_cat(self, b: int, ms: Sequence[int], n: int, k: int, perm102_bmm_op: str, dtype: str='float16'):
        """Gemm outputs are also graph outputs: fusion must be skipped."""
        _LOGGER.info(f'_skip_fuse_parallel_gemm_cat, b: {b}, ms: {ms}, n: {n}, k: {k}')
        X = Tensor(shape=[IntVar(ms, 'input_batch'), IntImm((b * k))], dtype=dtype, name='X', is_input=True)
        Ws = []
        Bs = []
        for i in range(b):
            W = Tensor(shape=[IntImm(n), IntImm(k)], dtype=dtype, name=f'W{i}')
            Ws.append(W)
            B = Tensor(shape=[IntImm(n)], dtype=dtype, name=f'B{i}')
            Bs.append(B)
        X1 = ops.split()(X, k, dim=(- 1))
        cat_inputs = []
        for i in range(b):
            X2 = X1[i]
            X3 = ops.gemm_rcr_bias()(X2, Ws[i], Bs[i])
            cat_inputs.append(X3)
            # Marking each gemm result as an output is what blocks the fusion.
            X3._attrs['name'] = f'output{(i + 1)}'
            X3._attrs['is_output'] = True
        cat_output = ops.concatenate()(cat_inputs, dim=(- 1))
        cat_output._attrs['name'] = 'output0'
        cat_output._attrs['is_output'] = True
        constants = {}
        for i in range(b):
            constants[f'W{i}'] = get_random_torch_tensor([n, k], dtype)
            constants[f'B{i}'] = get_random_torch_tensor([n], dtype)
        target = detect_target()
        with compile_model([cat_output, *cat_inputs], target, './tmp', f'fuse_parallel_gemm_cat_{dtype}', dll_name=f'test_{self._test_id}.so', constants=constants) as module:
            self._test_id += 1
            sorted_graph = module.debug_sorted_graph
            sorted_ops = graph_utils.get_sorted_ops(sorted_graph)
            assert (not has_op(sorted_ops, perm102_bmm_op)), f'the final graph has op {perm102_bmm_op}'
            assert has_op(sorted_ops, 'gemm_rcr_bias'), 'the final graph does not have op gemm_rcr_bias'
            for m in ms:
                x_pt = get_random_torch_tensor([m, (b * k)], dtype)
                x1_pt = torch.split(x_pt, k, dim=(- 1))
                cat_inputs_pt = []
                for i in range(b):
                    x2_pt = x1_pt[i]
                    x3_pt = torch.nn.functional.linear(x2_pt, constants[f'W{i}'], constants[f'B{i}'])
                    cat_inputs_pt.append(x3_pt)
                cat_output_pt = (torch.cat(cat_inputs_pt, dim=(- 1)), *cat_inputs_pt)
                cat_out = get_torch_empty_tensor([m, (b * n)], dtype)
                out_other = [get_torch_empty_tensor(x.shape, dtype) for x in cat_inputs_pt]
                out = [cat_out, *out_other]
                module.run_with_tensors([x_pt], out)
                for (out_ait, out_pt) in zip(out, cat_output_pt):
                    self.assertTrue(torch.allclose(out_ait, out_pt, atol=0.05, rtol=0.05))

    def test_skip_parallel_gemm_cat_groups(self):
        """Fusion is skipped when intermediate gemm outputs are model outputs."""
        self._skip_fuse_parallel_gemm_output_cat(b=4, ms=[256, 512], n=128, k=64, perm102_bmm_op='perm102_bmm_rrr_bias')
def subagency_award(db, agencies_with_subagencies):
    """Fixture data: one FABS award/transaction wired to subtier agencies SA5/SA6."""
    baker.make('search.AwardSearch', award_id=2, latest_transaction_id=2)
    transaction_fields = {
        'transaction_id': 2,
        'award_id': 2,
        'awarding_agency_code': '003',
        'awarding_toptier_agency_name': 'Awarding Toptier Agency 3',
        'awarding_toptier_agency_abbreviation': 'TA3',
        'funding_agency_code': '002',
        'funding_toptier_agency_name': 'Funding Toptier Agency 2',
        'funding_toptier_agency_abbreviation': 'TA2',
        'awarding_sub_tier_agency_c': '1005',
        'awarding_subtier_agency_name': 'Awarding Subtier Agency 5',
        'awarding_subtier_agency_abbreviation': 'SA5',
        'funding_sub_tier_agency_co': '1006',
        'funding_subtier_agency_name': 'Funding Subtier Agency 6',
        'funding_subtier_agency_abbreviation': 'SA6',
        'federal_action_obligation': 10,
        'generated_pragmatic_obligation': 10,
        'action_date': '2020-01-02',
        'fiscal_action_date': '2020-04-02',
        'award_date_signed': '2020-01-14',
        'is_fpds': False,
        'awarding_agency_id': 1003,
        'funding_agency_id': 1002,
        'awarding_toptier_agency_id': 1003,
        'funding_toptier_agency_id': 1002,
    }
    baker.make('search.TransactionSearch', **transaction_fields)
class ActorT(ServiceT, Generic[_T]):
    """Abstract interface for an agent actor service.

    NOTE(review): the method bodies are intentionally ``...`` -- this reads
    as an interface/stub declaration; the concrete implementation lives
    elsewhere. Attribute semantics below are inferred from names/types and
    should be confirmed against the implementing class.
    """

    # Owning agent (forward reference; presumably the agent this actor runs for).
    agent: 'AgentT'
    # Stream the actor consumes.
    stream: StreamT
    # Iterator/async-iterable driving the actor.
    it: _T
    # asyncio task executing the actor, when started.
    actor_task: Optional[asyncio.Task]
    # Topic-partitions this actor is restricted to, if isolated.
    active_partitions: Optional[Set[TP]]
    # Position of this actor among concurrent instances, if any.
    index: Optional[int] = None

    def __init__(self, agent: 'AgentT', stream: StreamT, it: _T, active_partitions: Optional[Set[TP]]=None, **kwargs: Any) -> None:
        ...

    def cancel(self) -> None:
        ...

    async def on_isolated_partition_revoked(self, tp: TP) -> None:
        ...

    async def on_isolated_partition_assigned(self, tp: TP) -> None:
        ...

    def traceback(self) -> str:
        ...
def safe_join(base, *paths):
    """Join *paths* onto *base* (posix-style), refusing to escape *base*.

    Components are normalized with ``posixpath.normpath``; a trailing slash on
    a component is preserved.  Raises ``ValueError`` when the normalized
    result would land outside the base path component.  The returned path has
    no leading '/'.
    """
    root = force_text(base).rstrip('/')
    joined = root + '/'
    for part in [force_text(p) for p in paths]:
        candidate = posixpath.normpath(posixpath.join(joined, part))
        # normpath drops trailing slashes; restore one when the component
        # asked for a directory, or when normalization was a no-op.
        if part.endswith('/') or (candidate + '/') == joined:
            candidate += '/'
        joined = candidate
    if joined == root:
        joined += '/'
    # The result must be the base itself followed by a '/'-separated suffix.
    if not joined.startswith(root) or joined[len(root)] != '/':
        raise ValueError('the joined path is located outside of the base path component')
    return joined.lstrip('/')
def _model_dynamic_factory() -> List[Type]:
    """Build the list of parameter classes used to parse model options.

    Always places ``ModelWorkerParameters`` first, followed by whatever the
    dynamic model parser discovers; falls back to ``ModelParameters`` when
    the parser finds nothing.

    FIX: the return annotation was ``Callable[([None], List[Type])]``, but
    the function plainly returns a list of classes, so it is ``List[Type]``.
    """
    # Imported lazily to avoid pulling in the model adapter at module load.
    from dbgpt.model.adapter.model_adapter import _dynamic_model_parser

    param_class = _dynamic_model_parser()
    if not param_class:
        param_class = [ModelParameters]
    return [ModelWorkerParameters] + list(param_class)
_register_parser
_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
# NOTE(review): the two expressions above look like decorator expressions
# (@_register_parser / @_set_msg_type(...)) whose '@' was lost in extraction;
# confirm against the original source.


class OFPMultipartReply(MsgBase):
    """OpenFlow multipart (stats) reply message.

    Concrete reply types register themselves via ``register_stats_type`` and
    are dispatched on the multipart ``type`` field during parsing.
    """

    # Maps stats-type constant -> registered reply subclass.
    _STATS_MSG_TYPES = {}

    def register_stats_type(body_single_struct=False):
        # Class decorator factory registering a stats reply subclass.
        # NOTE(review): takes no self/cls -- presumably a @staticmethod whose
        # decorator was stripped in extraction.
        def _register_stats_type(cls):
            assert (cls.cls_stats_type is not None)
            assert (cls.cls_stats_type not in OFPMultipartReply._STATS_MSG_TYPES)
            assert (cls.cls_stats_body_cls is not None)
            cls.cls_body_single_struct = body_single_struct
            OFPMultipartReply._STATS_MSG_TYPES[cls.cls_stats_type] = cls
            return cls
        return _register_stats_type

    def __init__(self, datapath, body=None, flags=None):
        super(OFPMultipartReply, self).__init__(datapath)
        self.body = body
        self.flags = flags

    def parser_stats_body(cls, buf, msg_len, offset):
        # Parse the repeated body entries following the multipart header.
        # NOTE(review): first parameter is ``cls`` -- presumably a
        # @classmethod whose decorator was stripped.
        body_cls = cls.cls_stats_body_cls
        body = []
        # Each parsed entry reports its own length, advancing the offset.
        while (offset < msg_len):
            entry = body_cls.parser(buf, offset)
            body.append(entry)
            offset += entry.length
        if cls.cls_body_single_struct:
            return body[0]
        return body

    def parser_stats(cls, datapath, version, msg_type, msg_len, xid, buf):
        # Parse header via MsgBase, then the stats body after it.
        # NOTE(review): presumably a @classmethod (decorator stripped).
        msg = MsgBase.parser.__func__(cls, datapath, version, msg_type, msg_len, xid, buf)
        msg.body = msg.parser_stats_body(msg.buf, msg.msg_len, ofproto.OFP_MULTIPART_REPLY_SIZE)
        return msg

    def parser(cls, datapath, version, msg_type, msg_len, xid, buf):
        # Dispatch to the registered subclass based on the stats type field.
        # NOTE(review): presumably a @classmethod (decorator stripped).
        (type_, flags) = struct.unpack_from(ofproto.OFP_MULTIPART_REPLY_PACK_STR, six.binary_type(buf), ofproto.OFP_HEADER_SIZE)
        stats_type_cls = cls._STATS_MSG_TYPES.get(type_)
        msg = super(OFPMultipartReply, stats_type_cls).parser(datapath, version, msg_type, msg_len, xid, buf)
        msg.type = type_
        msg.flags = flags
        if (stats_type_cls is not None):
            offset = ofproto.OFP_MULTIPART_REPLY_SIZE
            body = []
            while (offset < msg_len):
                b = stats_type_cls.cls_stats_body_cls.parser(msg.buf, offset)
                body.append(b)
                # Entry structs expose their size as either .length or .len.
                offset += (b.length if hasattr(b, 'length') else b.len)
            if stats_type_cls.cls_body_single_struct:
                msg.body = body[0]
            else:
                msg.body = body
        return msg
class TestRPNHeads(unittest.TestCase):
    """Shape smoke-tests for every RPN head registered in the registry."""

    def test_build_rpn_heads(self):
        """Each registered head must emit per-level logits/bbox-deltas of the expected shape."""
        self.assertGreater(len(rpn.RPN_HEAD_REGISTRY._obj_map), 0)
        for (name, builder) in rpn.RPN_HEAD_REGISTRY._obj_map.items():
            logger.info('Testing {}...'.format(name))
            cfg = GeneralizedRCNNRunner.get_default_cfg()
            # Some heads need a dedicated config file to build.
            if (name in RPN_CFGS):
                cfg.merge_from_file(RPN_CFGS[name])
            backbone = build_backbone(cfg)
            backbone_shape = backbone.output_shape()
            rpn_input_shape = [backbone_shape[x] for x in cfg.MODEL.RPN.IN_FEATURES]
            rpn_head = builder(cfg, rpn_input_shape)
            in_channels = list(backbone_shape.values())[0].channels
            num_anchors = build_anchor_generator(cfg, rpn_input_shape).num_cell_anchors[0]
            (N, C_in, H, W) = (2, in_channels, 24, 32)
            input = torch.rand([N, C_in, H, W], dtype=torch.float32)
            LAYERS = len(cfg.MODEL.RPN.IN_FEATURES)
            out = rpn_head(([input] * LAYERS))
            self.assertEqual(len(out), 2)
            (logits, bbox_reg) = out
            for idx in range(LAYERS):
                # Objectness logits: one channel per cell anchor.
                self.assertEqual(logits[idx].shape, torch.Size([input.shape[0], num_anchors, input.shape[2], input.shape[3]]))
                # Box regression: 4 deltas per anchor.
                self.assertEqual(bbox_reg[idx].shape, torch.Size([logits[idx].shape[0], (num_anchors * 4), logits[idx].shape[2], logits[idx].shape[3]]))

    def test_build_rpn_heads_with_rotated_anchor_generator(self):
        """Same check with rotated anchors, where box_dim deltas per anchor apply."""
        self.assertGreater(len(rpn.RPN_HEAD_REGISTRY._obj_map), 0)
        for (name, builder) in rpn.RPN_HEAD_REGISTRY._obj_map.items():
            logger.info('Testing {}...'.format(name))
            cfg = GeneralizedRCNNRunner.get_default_cfg()
            if (name in RPN_CFGS):
                cfg.merge_from_file(RPN_CFGS[name])
            # Switch to rotated anchors: box_dim regression channels per anchor.
            cfg.MODEL.ANCHOR_GENERATOR.NAME = 'RotatedAnchorGenerator'
            backbone = build_backbone(cfg)
            backbone_shape = backbone.output_shape()
            rpn_input_shape = [backbone_shape[x] for x in cfg.MODEL.RPN.IN_FEATURES]
            rpn_head = builder(cfg, rpn_input_shape)
            in_channels = list(backbone_shape.values())[0].channels
            anchor_generator = build_anchor_generator(cfg, rpn_input_shape)
            num_anchors = anchor_generator.num_cell_anchors[0]
            box_dim = anchor_generator.box_dim
            (N, C_in, H, W) = (2, in_channels, 24, 32)
            input = torch.rand([N, C_in, H, W], dtype=torch.float32)
            LAYERS = len(cfg.MODEL.RPN.IN_FEATURES)
            out = rpn_head(([input] * LAYERS))
            self.assertEqual(len(out), 2)
            (logits, bbox_reg) = out
            for idx in range(LAYERS):
                self.assertEqual(logits[idx].shape, torch.Size([input.shape[0], num_anchors, input.shape[2], input.shape[3]]))
                self.assertEqual(bbox_reg[idx].shape, torch.Size([logits[idx].shape[0], (num_anchors * box_dim), logits[idx].shape[2], logits[idx].shape[3]]))
class ValveTestConfigRevertBootstrap(ValveTestBases.ValveTestNetwork):
    """Test that a bad bootstrap config is replaced once a good one loads."""

    # Deliberately unparseable config used to bootstrap with a load error.
    BAD_CONFIG = '\n *** busted ***\n'
    GOOD_CONFIG = "\ndps:\n s1:\n dp_id: 0x1\n hardware: 'GenericTFM'\n interfaces:\n p1:\n number: 1\n native_vlan: 0x100\n"
    CONFIG_AUTO_REVERT = True

    def setUp(self):
        """Bootstrap with the broken config; exactly one load error expected."""
        self.setup_valves(self.BAD_CONFIG, error_expected=1)

    def test_config_revert(self):
        """Loading a good config clears the config-load-error gauge."""
        self.assertEqual(self.get_prom('faucet_config_load_error', bare=True), 1)
        self.update_config((self.GOOD_CONFIG + '\n'), reload_expected=False, error_expected=0)
        self.assertEqual(self.get_prom('faucet_config_load_error', bare=True), 0)
_group.command('update-lock-versions')
('rule-ids', nargs=(- 1), required=False)
# NOTE(review): the two lines above look like stripped click decorators
# (@_group.command(...) / @click.argument('rule-ids', ...)); confirm upstream.


def update_lock_versions(rule_ids):
    """Refresh version-lock hashes for rules without bumping their versions.

    With no rule ids supplied, operates on all production rules; otherwise
    only on the requested ids.  Prompts for confirmation before saving.
    Returns the changed rule ids, or None when the user aborts.
    """
    rules = RuleCollection.default()
    if rule_ids:
        rules = rules.filter((lambda r: (r.id in rule_ids)))
    else:
        # Default scope: production rules only.
        rules = rules.filter(production_filter)
    if (not click.confirm(f'Are you sure you want to update hashes for {len(rules)} rules without a version bump?')):
        return
    # exclude_version_update leaves rule versions untouched while hashes refresh.
    (changed, new, _) = default_version_lock.manage_versions(rules, exclude_version_update=True, save_changes=True)
    if (not changed):
        click.echo('No hashes updated')
    return changed
_runner
# NOTE(review): '_runner' above looks like a stripped decorator (@_runner);
# confirm against the original source.


def append(c, runner, filename, text, partial=False, escape=True):
    """Append each line of *text* to remote *filename* unless already present.

    ``text`` may be a single string or a list of lines.  With ``partial`` the
    containment regex is anchored only at the start of the line (no trailing
    ``$``).  With ``escape`` embedded single quotes are shell-escaped before
    echoing.
    """
    if isinstance(text, six.string_types):
        text = [text]
    for line in text:
        # Anchor at start of line; anchor the end too unless a partial match.
        regex = (('^' + _escape_for_regex(line)) + ('' if partial else '$'))
        # Skip lines that already exist in the file (only if the file exists).
        if (line and exists(c, filename, runner=runner) and contains(c, filename, regex, escape=False, runner=runner)):
            continue
        # Escape embedded single quotes for the sh single-quoted echo below.
        line = (line.replace("'", "'\\\\''") if escape else line)
        runner("echo '{}' >> {}".format(line, filename))