code
stringlengths
281
23.7M
def update_privacy_declaration_data_uses(bind: Connection, data_use_map: Dict[(str, str)]):
    """Rewrite ``privacydeclaration.data_use`` values according to a mapping.

    Every privacy declaration whose ``data_use`` appears as a key in
    ``data_use_map`` has the column rewritten to the mapped replacement;
    rows whose data use is not in the map are left untouched.
    """
    declarations: ResultProxy = bind.execute(text('SELECT id, data_use FROM privacydeclaration;'))
    # Prepared once; executed per row that needs rewriting.
    update_stmt: TextClause = text('UPDATE privacydeclaration SET data_use = :updated_use WHERE id= :declaration_id')
    for record in declarations:
        replacement: Optional[str] = data_use_map.get(record['data_use'], None)
        if replacement:
            bind.execute(update_stmt, {'declaration_id': record['id'], 'updated_use': replacement})
def test_highlight_arguments():
    """The configured highlighter receives the fence language and attrs."""

    def highlight_func(str_, lang, attrs):
        # Fence info '``` a b c d' splits into lang='a' and attrs='b c d'.
        assert lang == 'a'
        assert attrs == 'b c d'
        return '<pre><code>==' + str_ + '==</code></pre>'

    conf = presets.commonmark.make()
    conf['options']['highlight'] = highlight_func
    md = MarkdownIt(config=conf)
    rendered = md.render('``` a b c d \nhl\n```')
    assert rendered == '<pre><code>==hl\n==</code></pre>\n'
def example():
    """Infinite-scroll demo: appends ten more rows when nearing the bottom."""
    import threading

    class State():
        i = 0

    s = State()
    sem = threading.Semaphore()

    async def on_scroll(e: ft.OnScrollEvent):
        # Only load more once the user is within 100px of the bottom.
        if e.pixels < e.max_scroll_extent - 100:
            return
        # Non-blocking acquire so overlapping scroll events don't double-load.
        if not sem.acquire(blocking=False):
            return
        try:
            for _ in range(10):
                cl.controls.append(ft.Text(f'Text line {s.i}', key=str(s.i)))
                s.i += 1
            await cl.update_async()
        finally:
            sem.release()

    cl = ft.Column(spacing=10, height=200, width=200, scroll=ft.ScrollMode.ALWAYS, on_scroll_interval=0, on_scroll=on_scroll)
    # Seed the column with an initial batch of rows.
    for _ in range(50):
        cl.controls.append(ft.Text(f'Text line {s.i}', key=str(s.i)))
        s.i += 1
    return ft.Container(cl, border=ft.border.all(1))
def test_fast():
    """Fast serialization puts the pyflyte-fast-execute entrypoint in the container args."""

    def t1(a: int) -> typing.NamedTuple('OutputsBC', t1_int_output=int, c=str):
        return a + 2, 'world'

    def t2(a: str, b: str) -> str:
        return b + a

    fast_settings = (
        serialization_settings.new_builder()
        .with_fast_serialization_settings(FastSerializationSettings(enabled=True))
        .build()
    )
    task_spec = get_serializable(OrderedDict(), fast_settings, t1)
    assert 'pyflyte-fast-execute' in task_spec.template.container.args
def group_queries_pfams(queries_pfams_tuples):
    """Group query names that share exactly the same set of pfams.

    Args:
        queries_pfams_tuples: iterable of ``(query_name, pfams)`` pairs where
            ``pfams`` is an iterable of pfam identifiers. Multiple pairs for
            the same query name are merged into one pfam set.

    Returns:
        A list of ``(queries, pfams)`` tuples — one per distinct pfam set —
        where ``queries`` is the set of query names sharing that pfam set.
    """
    # Accumulate every pfam observed for each query.
    queries_pfams = defaultdict(set)
    for (query_name, pfams) in queries_pfams_tuples:
        queries_pfams[query_name].update(pfams)

    # Group on the pfam set itself (frozenset) rather than ','.join of an
    # unsorted set: the joined-string key relied on set iteration order and
    # could collide for different sets whose members contain commas
    # (e.g. {'a,b'} vs {'a', 'b'} both join to 'a,b').
    groups = {}
    for (query, pfams) in queries_pfams.items():
        key = frozenset(pfams)
        if key in groups:
            groups[key]['queries'].add(query)
        else:
            groups[key] = {'queries': {query}, 'pfams': pfams}
    return [(g['queries'], g['pfams']) for g in groups.values()]
class TestTorchFracdiff():
    """Checks the torch ``fdiff``/``Fracdiff`` implementations against the
    reference numpy implementation in the ``fracdiff`` package.

    NOTE(review): the source dump contained bare ``.parametrize`` lines
    (stripped decorators); restored here as ``@pytest.mark.parametrize``,
    the conventional pytest form.
    """

    @pytest.mark.parametrize('d', [0.1, 0.5, 1])
    @pytest.mark.parametrize('mode', ['same', 'valid'])
    def test_torch_fdiff(self, d, mode):
        torch.manual_seed(42)
        input = torch.randn(10, 100)
        # Functional API matches the numpy reference.
        result = fdiff(input, d, mode=mode)
        expect = torch.from_numpy(fracdiff.fdiff(input, d, mode=mode))
        assert_close(result, expect, check_stride=False)
        # Module API matches as well.
        result = Fracdiff(d, mode=mode)(input)
        expect = torch.from_numpy(fracdiff.fdiff(input, d, mode=mode))
        assert_close(result, expect, check_stride=False)

    @pytest.mark.parametrize('d', [0.1, 0.5, 1])
    @pytest.mark.parametrize('mode', ['same', 'valid'])
    def test_torch_fdiff_int(self, d, mode):
        # Integer inputs: dtype may differ from the numpy path, hence
        # check_dtype=False.
        torch.manual_seed(42)
        input = torch.randint(5, size=(10, 100))
        result = fdiff(input, d, mode=mode)
        expect = torch.from_numpy(fracdiff.fdiff(np.array(input), d, mode=mode))
        assert_close(result, expect, check_stride=False, check_dtype=False)
        result = Fracdiff(d, mode=mode)(input)
        expect = torch.from_numpy(fracdiff.fdiff(np.array(input), d, mode=mode))
        assert_close(result, expect, check_stride=False, check_dtype=False)

    @pytest.mark.parametrize('d', [0.1, 0.5, 1])
    @pytest.mark.parametrize('mode', ['same', 'valid'])
    def test_torch_prepend_append(self, d, mode):
        torch.manual_seed(42)
        input = torch.randn(10, 100)
        prepend = torch.randn(10, 50)
        append = torch.randn(10, 50)
        expect = torch.from_numpy(fracdiff.fdiff(input, d, mode=mode, prepend=prepend, append=append))
        result = fdiff(input, d, mode=mode, prepend=prepend, append=append)
        assert_close(result, expect, check_stride=False)
        expect = torch.from_numpy(fracdiff.fdiff(input, d, mode=mode, prepend=prepend, append=append))
        result = Fracdiff(d, mode=mode)(input, prepend=prepend, append=append)
        assert_close(result, expect, check_stride=False)

    @pytest.mark.parametrize('d', [0.1, 0.5, 1])
    @pytest.mark.parametrize('mode', ['same', 'valid'])
    def test_torch_prepend_append_dim0(self, d, mode):
        # Scalar-like (broadcast) prepend/append values.
        torch.manual_seed(42)
        input = torch.randn(10, 100)
        prepend = torch.tensor([[1]]).expand(10, 1)
        append = torch.tensor([[2]]).expand(10, 1)
        expect = torch.from_numpy(fracdiff.fdiff(input, d, mode=mode, prepend=prepend, append=append))
        result = fdiff(input, d, mode=mode, prepend=prepend, append=append)
        assert_close(result, expect, check_stride=False, check_dtype=False)
        result = Fracdiff(d, mode=mode)(input, prepend=prepend, append=append)
        assert_close(result, expect, check_stride=False, check_dtype=False)

    def test_repr(self):
        m = Fracdiff(0.1, dim=(- 1), window=10, mode='same')
        result = repr(m)
        expect = "Fracdiff(0.1, dim=-1, window=10, mode='same')"
        assert (result == expect)

    def test_invalid_mode(self):
        with pytest.raises(ValueError):
            input = torch.empty(10, 100)
            _ = fdiff(input, 0.5, mode='invalid')
def _create_local_model_manager(worker_params: ModelWorkerParameters) -> LocalWorkerManager:
    """Build a LocalWorkerManager, wiring controller register/deregister/heartbeat
    callbacks when a controller address is configured."""
    from dbgpt.util.net_utils import _get_ip_address

    host = worker_params.worker_register_host if worker_params.worker_register_host else _get_ip_address()
    port = worker_params.port

    # Standalone mode: no controller registration requested/possible.
    if (not worker_params.register) or (not worker_params.controller_addr):
        logger.info(f'Not register current to controller, register: {worker_params.register}, controller_addr: {worker_params.controller_addr}')
        return LocalWorkerManager(host=host, port=port)

    from dbgpt.model.cluster.controller.controller import ModelRegistryClient

    client = ModelRegistryClient(worker_params.controller_addr)

    def _make_instance(worker_run_data: WorkerRunData):
        # All controller calls describe the same worker instance.
        return ModelInstance(model_name=worker_run_data.worker_key, host=host, port=port)

    async def register_func(worker_run_data: WorkerRunData):
        return await client.register_instance(_make_instance(worker_run_data))

    async def deregister_func(worker_run_data: WorkerRunData):
        return await client.deregister_instance(_make_instance(worker_run_data))

    async def send_heartbeat_func(worker_run_data: WorkerRunData):
        return await client.send_heartbeat(_make_instance(worker_run_data))

    return LocalWorkerManager(register_func=register_func, deregister_func=deregister_func, send_heartbeat_func=send_heartbeat_func, host=host, port=port)
def fetch_production(zone_key: str='AU-TAS-KI', session: (Session | None)=None, target_datetime=None, logger: Logger=getLogger(__name__)) -> dict:
    """Fetch the real-time production mix for King Island (AU-TAS-KI).

    Raises:
        NotImplementedError: if a historical ``target_datetime`` is requested.
        KeyError: if ``zone_key`` is not configured in ZONE_PARAMS.
    """
    if (target_datetime is not None):
        raise NotImplementedError('The datasource currently implemented is only real time')
    try:
        (hub, dashboard, tz, source) = (ZONE_PARAMS[zone_key]['hub'], ZONE_PARAMS[zone_key]['method'], ZONE_PARAMS[zone_key]['tz'], ZONE_PARAMS[zone_key]['source'])
    except KeyError:
        raise KeyError((('The zone ' + zone_key) + " isn't implemented"))
    # NOTE(review): this statement was corrupted in the source dump
    # ("SignalR(' dashboard)" — an unterminated string literal). Reconstructed
    # as a SignalR fetch of the zone's dashboard method on its hub; confirm the
    # service URL and call shape against the original parser before merging.
    payload = SignalR('https://data.ajenti.com.au/live/signalr').get_value(hub, dashboard)
    technologies_parsed = parse_payload(logger, payload)
    storage_techs = sum_storage_techs(technologies_parsed)
    # Clamp tiny negative wind readings (sensor noise in (-0.1, 0)) to zero.
    wind = technologies_parsed['wind']
    if (wind < 0) and (wind > (- 0.1)):
        wind = 0
    return {
        'zoneKey': zone_key,
        'datetime': datetime.now(tz=ZoneInfo(tz)),
        'production': {
            'biomass': technologies_parsed['biomass'],
            'coal': technologies_parsed['coal'],
            'gas': technologies_parsed['gas'],
            'hydro': technologies_parsed['hydro'],
            'nuclear': technologies_parsed['nuclear'],
            'oil': technologies_parsed['oil'],
            'solar': technologies_parsed['solar'],
            'wind': wind,
            'geothermal': technologies_parsed['geothermal'],
            'unknown': technologies_parsed['unknown'],
        },
        # Positive storage means charging, so negate the (discharge-positive) sum.
        'storage': {'battery': (storage_techs * (- 1))},
        'source': source,
    }
# NOTE(review): the dump showed a bare '.django_db' marker here; restored as
# the conventional '@pytest.mark.django_db' decorator.
@pytest.mark.django_db
def test_agency_level_exclude_non_match(client, monkeypatch, elasticsearch_award_index, subaward_with_tas):
    """Excluding the agency-level path of the subaward's own TAS filters it out."""
    _setup_es(client, monkeypatch, elasticsearch_award_index)
    resp = query_by_tas_subaward(client, {'exclude': [_agency_path(BASIC_TAS)]})
    assert (resp.json()['results'] == [])
def exit_FileNotWritable(exception, path):
    """Abort on a file-write failure.

    PermissionError propagates unchanged; other IOError/OSError failures exit
    with a short message naming ``path``; anything else exits with the
    exception text plus a full traceback.
    """
    # PermissionError is a subclass of IOError/OSError, so test it first.
    if isinstance(exception, PermissionError):
        raise exception
    print('')
    if isinstance(exception, IOError):
        sys.exit(f'Error: could not write to file {path}.')
    sys.exit(f'''Error: {exception}. {traceback.format_exc()}''')
def _get_rank_1_approximation(atf_type, target_psd_matrix, noise_psd_matrix, **atf_kwargs): if (atf_type == 'rank1_pca'): return get_pca_rank_one_estimate(target_psd_matrix, **atf_kwargs) elif (atf_type == 'rank1_gev'): return get_gev_rank_one_estimate(target_psd_matrix, noise_psd_matrix, **atf_kwargs) else: raise ValueError(atf_type, 'use either rank1_pca or rank1_gev')
class op(bpy.types.Operator):
    """Bake operator: validates and bakes the configured TexTools bake sets
    for the currently selected bake mode."""
    bl_idname = 'uv.textools_bake'
    bl_label = 'Bake'
    bl_description = 'Bake selected objects'

    # NOTE(review): bpy operator 'poll' hooks are conventionally declared with
    # @classmethod; the 'cls' first parameter suggests the decorator was lost
    # in the source dump — confirm against the original file.
    def poll(cls, context):
        # No bake sets configured: nothing to bake.
        if (len(settings.sets) == 0):
            settings.bake_error = ''
            return False
        bake_mode = utilities_ui.get_bake_mode()
        # Unknown bake mode.
        if (bake_mode not in modes):
            settings.bake_error = ''
            return False
        # These modes need no material / vertex-color preconditions.
        if (bake_mode in {'ao', 'normal_tangent', 'normal_object', 'curvature', 'environment', 'uv', 'shadow'}):
            settings.bake_error = ''
            return True
        if (bake_mode == 'combined'):
            # 'combined' needs at least one of the direct/indirect/emit passes.
            if ((not bpy.context.scene.render.bake.use_pass_direct) and (not bpy.context.scene.render.bake.use_pass_indirect) and (not bpy.context.scene.render.bake.use_pass_emit)):
                settings.bake_error = 'Lighting or Emit needed'
                return False
            settings.bake_error = ''
            return True
        # Modes that write vertex colors or don't supply their own material
        # require the source objects' materials / VC layers to be usable.
        if (modes[bake_mode].setVColor or (not modes[bake_mode].material)):

            def is_bakeable(obj):
                # Object must carry at least one real, node-based material.
                if (len(obj.data.materials) <= 0):
                    settings.bake_error = 'Materials needed'
                    return False
                elif (not any(obj.data.materials)):
                    settings.bake_error = 'Materials needed'
                    return False
                else:
                    for slot in obj.material_slots:
                        if (slot.material is not None):
                            if (slot.material.use_nodes == False):
                                settings.bake_error = 'Nodal materials needed'
                                return False
                            # Look for a Principled BSDF node in the material.
                            bsdf_node = None
                            for n in slot.material.node_tree.nodes:
                                if (n.bl_idname == 'ShaderNodeBsdfPrincipled'):
                                    bsdf_node = n
                            if (not bsdf_node):
                                # Without a Principled BSDF, modes/preferences that
                                # must relink inputs cannot bake this material.
                                bool_alpha_ignore = bpy.context.preferences.addons[__package__].preferences.bool_alpha_ignore
                                bool_clean_transmission = bpy.context.preferences.addons[__package__].preferences.bool_clean_transmission
                                builtin_modes_material = {'diffuse', 'emission', 'roughness', 'glossiness', 'transmission'}
                                if (modes[bake_mode].relink['needed'] or (bool_clean_transmission and (bake_mode == 'transmission')) or (bool_alpha_ignore and (bake_mode not in builtin_modes_material))):
                                    settings.bake_error = 'BSDF nodes needed'
                                    return False
                settings.bake_error = ''
                return True

            def is_vc_ready(obj):
                # Blender caps vertex-color layers at 8; baking needs a free slot.
                if (len(obj.data.vertex_colors) > 7):
                    settings.bake_error = 'An empty VC layer needed'
                    return False
                settings.bake_error = ''
                return True

            for bset in settings.sets:
                # Low-only sets are validated on the low objects; otherwise the
                # high/float source objects are validated.
                if ((len(bset.objects_high) + len(bset.objects_float)) == 0):
                    if (not modes[bake_mode].material):
                        for obj in bset.objects_low:
                            if (not is_bakeable(obj)):
                                return False
                    if modes[bake_mode].setVColor:
                        for obj in bset.objects_low:
                            if (not is_vc_ready(obj)):
                                return False
                else:
                    if (not modes[bake_mode].material):
                        for obj in (bset.objects_high + bset.objects_float):
                            if (not is_bakeable(obj)):
                                return False
                    if modes[bake_mode].setVColor:
                        for obj in (bset.objects_high + bset.objects_float):
                            if (not is_vc_ready(obj)):
                                return False
        settings.bake_error = ''
        return True

    def execute(self, context):
        """Run the bake for the current mode and report warnings/elapsed time."""
        startTime = time.monotonic()
        preferences = bpy.context.preferences.addons[__package__].preferences
        # Single-element lists used as mutable out-parameters for bake().
        circular_report = [False]
        color_report = [False]
        # Transmission mode is remapped depending on the clean-transmission pref.
        if preferences.bool_clean_transmission:
            modes['transmission'] = ub.BakeMode('', type='ROUGHNESS', color=(0, 0, 0, 1), relink={'needed': True, 'b': 7, 'n': 15})
        else:
            modes['transmission'] = ub.BakeMode('', type='TRANSMISSION')
        bake_mode = utilities_ui.get_bake_mode()
        if (bake_mode not in modes):
            self.report({'ERROR_INVALID_INPUT'}, "Unknown mode '{}' only available: '{}'".format(bake_mode, ', '.join(modes.keys())))
            return {'CANCELLED'}
        # Remember selection/mode so the bake can restore them afterwards.
        selected_objects = [obj for obj in bpy.context.selected_objects]
        active_object = bpy.context.view_layer.objects.active
        pre_selection_mode = None
        if active_object:
            pre_selection_mode = bpy.context.active_object.mode
        ub.store_bake_settings()
        if (preferences.bake_device != 'DEFAULT'):
            bpy.context.scene.cycles.device = preferences.bake_device
        bpy.context.scene.render.engine = modes[bake_mode].engine
        # Blender-version-dependent render settings.
        if (settings.bversion < 3):
            bpy.context.scene.cycles.use_progressive_refine = False
        if (settings.bversion >= 2.92):
            bpy.context.scene.render.bake.target = 'IMAGE_TEXTURES'
        if (settings.bversion >= 3):
            bpy.context.scene.cycles.use_denoising = False
        bake(self=self, mode=bake_mode, size=bpy.context.scene.texToolsSettings.size, bake_single=bpy.context.scene.texToolsSettings.bake_force_single, sampling_scale=int(bpy.context.scene.texToolsSettings.bake_sampling), samples=bpy.context.scene.texToolsSettings.bake_samples, cage_extrusion=bpy.context.scene.texToolsSettings.bake_cage_extrusion, ray_distance=bpy.context.scene.texToolsSettings.bake_ray_distance, circular_report=circular_report, color_report=color_report, selected=selected_objects, active=active_object, pre_selection_mode=pre_selection_mode)
        elapsed = round((time.monotonic() - startTime), 2)
        # Surface any circular-dependency / color warnings collected by bake().
        if circular_report[0]:
            if color_report[0]:
                self.report({'WARNING'}, (((('Possible Circular Dependency: a previously baked image may have affected the new bake; ' + color_report[0]) + 'Baking finished in ') + str(elapsed)) + 's.'))
            else:
                self.report({'WARNING'}, (('Possible Circular Dependency: a previously baked image may have affected the new bake. Baking finished in ' + str(elapsed)) + 's.'))
        elif color_report[0]:
            self.report({'WARNING'}, (((color_report[0] + '. Baking finished in ') + str(elapsed)) + 's.'))
        else:
            self.report({'INFO'}, (('Baking finished in ' + str(elapsed)) + 's.'))
        return {'FINISHED'}
def generate_oura_activity_header_chart(date, days=7, summary=False, resample='D'):
    """Build a plotly bar chart of Oura activity intensity (low/medium/high).

    Args:
        date: lower bound for summary_date when not in summary mode.
        days: number of leading rows to keep when not in summary mode.
        summary: True renders the full-history summary variant (stacked bars,
            range selector/slider); False renders the compact weekly header.
        resample: pandas resample rule applied to the daily data (e.g. 'D').

    Returns:
        Tuple of (figure dict, clickData dict seeded to the latest date).
    """
    height = (chartHeight if (not summary) else 300)
    # Summary mode loads everything; header mode loads rows after `date`,
    # truncated to the first `days` rows.
    if summary:
        df = pd.read_sql(sql=app.session.query(ouraActivitySummary).statement, con=engine, index_col='summary_date')
    else:
        df = pd.read_sql(sql=app.session.query(ouraActivitySummary).filter((ouraActivitySummary.summary_date > date)).statement, con=engine, index_col='summary_date')[:days]
    # Release the scoped session now that the data is in memory.
    app.session.remove()
    df = df.set_index(pd.to_datetime(df.index))
    df = df.resample(resample).mean()
    # NOTE(review): `range` shadows the builtin and `tickformat` is unused
    # below — confirm against the original before renaming.
    (buttons, range, tickformat) = modal_range_buttons(df=df, resample=resample)
    # Minute totals rendered as 'Xh Ym' hover strings.
    df['high_tooltip'] = ['<b>High</b>: {:.0f}h {:.0f}m'.format(x, y) for (x, y) in zip((df['high'] // 60), (df['high'] % 60))]
    df['medium_tooltip'] = ['<b>Medium</b>: {:.0f}h {:.0f}m'.format(x, y) for (x, y) in zip((df['medium'] // 60), (df['medium'] % 60))]
    df['low_tooltip'] = ['<b>Low</b>: {:.0f}h {:.0f}m'.format(x, y) for (x, y) in zip((df['low'] // 60), (df['low'] % 60))]
    barmode = 'group'
    if summary:
        # Summary variant: stacked bars with a date range selector + slider.
        data = [go.Bar(name='Low', x=df.index, y=round(df['low'], 1), text=df['low_tooltip'], hoverinfo='text+x', marker={'color': light_blue}), go.Bar(name='Medium', x=df.index, y=round(df['medium'], 1), text=df['medium_tooltip'], hoverinfo='text+x', marker={'color': teal}), go.Bar(name='High', x=df.index, y=round(df['high'], 1), text=df['high_tooltip'], hoverinfo='text+x', marker={'color': white})]
        layout = go.Layout(height=height, transition=dict(duration=transition), font=dict(color=white, size=10), xaxis=dict(showline=True, color=white, showgrid=False, showticklabels=True, tickvals=df.index, tickformat='%b %d', range=range, rangeselector=dict(borderwidth=0.5, buttons=buttons, xanchor='center', x=0.5, y=0.97), rangeslider=dict(visible=True)), yaxis=dict(showgrid=False, showticklabels=False, gridcolor='rgb(73, 73, 73)', gridwidth=0.5), margin={'l': 0, 'b': 20, 't': 0, 'r': 0}, showlegend=False, legend=dict(x=0.5, y=(- 0.2), xanchor='center', orientation='h', font=dict(size=10, color=white)), hovermode='x', barmode='stack')
    else:
        # Compact header variant: grouped bars, weekday tick labels.
        data = [go.Bar(name='Low', x=df.index, y=round(df['low'], 1), text=df['low_tooltip'], hoverinfo='text', marker={'color': light_blue}), go.Bar(name='Medium', x=df.index, y=round(df['medium'], 1), text=df['medium_tooltip'], hoverinfo='text', marker={'color': teal}), go.Bar(name='High', x=df.index, y=round(df['high'], 1), text=df['high_tooltip'], hoverinfo='text', marker={'color': white})]
        layout = go.Layout(height=height, transition=dict(duration=transition), font=dict(size=10, color=white), xaxis=dict(showline=True, color=white, showgrid=False, showticklabels=True, tickvals=df.index, tickformat='%a'), yaxis=dict(showgrid=False, showticklabels=False, gridcolor='rgb(73, 73, 73)', gridwidth=0.5), margin={'l': 0, 'b': 20, 't': 0, 'r': 0}, showlegend=False, legend=dict(x=0.5, y=(- 0.2), xanchor='center', orientation='h', font=dict(size=10, color=white)), hovermode='x', barmode=barmode)
    # Seed the click state at the most recent date so dependent charts render.
    clickData = {'points': [{'x': df.index.max(), 'y': df['low'].max()}, {'y': df['medium'].max()}, {'y': df['high'].max()}]}
    figure = {'data': data, 'layout': layout}
    return (figure, clickData)
class ThresholdStalenessWeight(StalenessWeight):
    """Staleness weight that is 1.0 up to a cutoff and a fixed value beyond it.

    Returns full weight while ``staleness <= cfg.cutoff`` and
    ``cfg.value_after_cutoff`` for anything staler.
    """

    def __init__(self, **kwargs):
        init_self_cfg(self, component_class=__class__, config_class=ThresholdStalenessWeightConfig, **kwargs)
        super().__init__(**kwargs)

    # NOTE(review): restored '@classmethod' — the source dump appears to have
    # stripped decorators, and the 'cls' first parameter plus the
    # init_self_cfg convention indicate this hook is a classmethod. Confirm
    # against the original file.
    @classmethod
    def _set_defaults_in_cfg(cls, cfg):
        # No extra defaults beyond the config class's own.
        pass

    def _raw_weight(self, staleness: int) -> float:
        # Full weight until the cutoff, constant penalty weight after it.
        if (staleness <= self.cfg.cutoff):
            return 1.0
        else:
            return self.cfg.value_after_cutoff
# NOTE(review): this bare '()' is almost certainly the residue of a stripped
# decorator (e.g. '@pytest.fixture()') — confirm against the original file.
()
def construct_graph_dead_code(variable_u, variable_v, aliased_variables_x, aliased_variables_y, aliased_variables_z) -> Tuple[(List[BasicBlock], ControlFlowGraph)]:
    """Build a 7-block SSA control-flow graph fixture containing dead code.

    Args:
        variable_u, variable_v: indexable collections of SSA versions of the
            scalar variables u and v.
        aliased_variables_x/y/z: indexable collections of SSA versions of the
            aliased (address-taken) variables x, y and z.

    Returns:
        Tuple of (list of the 7 BasicBlocks, the populated ControlFlowGraph).
    """
    instructions = [
        Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant()])),
        # scanf writes through the addresses of x, y and z.
        Assignment(ListOperation([]), Call(imp_function_symbol('scanf'), [Constant(), UnaryOperation(OperationType.address, [aliased_variables_x[1]]), UnaryOperation(OperationType.address, [aliased_variables_y[1]]), UnaryOperation(OperationType.address, [aliased_variables_z[1]])])),
        Assignment(aliased_variables_x[2], aliased_variables_x[1]),
        Branch(Condition(OperationType.less_or_equal, [aliased_variables_x[2], aliased_variables_y[1]])),
        Assignment(variable_u[1], aliased_variables_z[1]),
        Branch(Condition(OperationType.greater, [aliased_variables_x[2], variable_u[1]])),
        Phi(variable_u[2], [aliased_variables_y[1], variable_u[1]]),
        Assignment(ListOperation([variable_v[3], variable_v[4]]), Call(function_symbol('compute'), [aliased_variables_x[2], variable_u[2]])),
        Assignment(variable_u[3], aliased_variables_y[1]),
        Branch(Condition(OperationType.greater_or_equal, [aliased_variables_z[1], variable_u[3]])),
        Assignment(variable_u[4], aliased_variables_z[1]),
        Assignment(variable_u[5], variable_u[3]),
        Phi(variable_u[6], [variable_u[4], variable_u[5]]),
        Phi(variable_u[7], [variable_u[4], aliased_variables_z[1]]),
        Phi(variable_u[8], [aliased_variables_x[2], variable_u[6]]),
        Phi(variable_u[9], [aliased_variables_y[1], variable_u[3]]),
        Phi(variable_u[10], [variable_u[1], variable_u[7]]),
        Assignment(ListOperation([]), Call(imp_function_symbol('printf'), [Constant(), aliased_variables_x[2], variable_u[9], variable_u[10], variable_u[8]])),
        Return([Constant(0)])
    ]
    # Partition the instructions across 7 basic blocks.
    # NOTE(review): instructions[9] (the greater_or_equal Branch) is never
    # assigned to a block — presumably intentional in this dead-code fixture;
    # confirm against the original test.
    nodes = [BasicBlock(i) for i in range(7)]
    nodes[0].instructions = instructions[0:4]
    nodes[1].instructions = instructions[4:6]
    nodes[2].instructions = instructions[6:9]
    nodes[3].instructions = [instructions[10]]
    nodes[4].instructions = [instructions[11]]
    nodes[5].instructions = instructions[12:14]
    nodes[6].instructions = instructions[14:]
    # Record, for each Phi, which predecessor block each operand flows from.
    instructions[6]._origin_block = {nodes[0]: aliased_variables_y[1], nodes[1]: variable_u[1]}
    instructions[12]._origin_block = {nodes[3]: variable_u[4], nodes[4]: variable_u[5]}
    instructions[13]._origin_block = {nodes[3]: variable_u[4], nodes[4]: aliased_variables_z[1]}
    instructions[14]._origin_block = {nodes[1]: aliased_variables_x[2], nodes[5]: variable_u[6]}
    instructions[15]._origin_block = {nodes[1]: aliased_variables_y[1], nodes[5]: variable_u[3]}
    instructions[16]._origin_block = {nodes[1]: variable_u[1], nodes[5]: variable_u[7]}
    cfg = ControlFlowGraph()
    cfg.add_edges_from([UnconditionalEdge(nodes[0], nodes[1]), UnconditionalEdge(nodes[0], nodes[2]), UnconditionalEdge(nodes[1], nodes[2]), UnconditionalEdge(nodes[1], nodes[6]), UnconditionalEdge(nodes[2], nodes[3]), UnconditionalEdge(nodes[2], nodes[4]), UnconditionalEdge(nodes[3], nodes[5]), UnconditionalEdge(nodes[4], nodes[5]), UnconditionalEdge(nodes[5], nodes[6])])
    return (nodes, cfg)
def filter_disk_id(vm, query_filter, data, default=None):
    """Validate ``data['disk_id']`` and add the matching disk filter.

    With a vm, filters on the resolved ``disk_id``; without one, filters on
    the zero-based ``vm_disk_id``. Any failure (non-integer, non-positive, or
    a resolution error) is reported as InvalidInput.
    """
    disk_id = data.get('disk_id', default)
    if disk_id is None:
        return query_filter
    try:
        disk_id = int(disk_id)
        if disk_id <= 0:
            raise ValueError
        if vm:
            query_filter['disk_id'] = Snapshot.get_disk_id(vm, disk_id)
        else:
            # API disk IDs are 1-based; the stored vm_disk_id is 0-based.
            query_filter['vm_disk_id'] = disk_id - 1
    except Exception:
        raise InvalidInput('Invalid disk_id')
    return query_filter
def capture_responder_args(req, resp, resource, params):
    """Falcon hook: stash the request, response and kwargs on the resource,
    plus (when the matching header is present) the raw body or parsed media."""
    resource.captured_req_media = None
    resource.captured_req_body = None
    resource.captured_req = req
    resource.captured_resp = resp
    resource.captured_kwargs = params
    body_byte_count = req.get_header('capture-req-body-bytes')
    if body_byte_count:
        # Read exactly the requested number of raw body bytes.
        resource.captured_req_body = req.stream.read(int(body_byte_count))
    elif req.get_header('capture-req-media'):
        resource.captured_req_media = req.get_media()
class TestOptions(BaseOptions):
    """Command-line options for the test/inference phase (isTrain=False)."""

    def initialize(self):
        BaseOptions.initialize(self)
        # Shorthand; argument registration order is preserved exactly.
        add = self.parser.add_argument
        add('--lambda_kl', type=float, default=0.01, help='weight for KL loss')
        add('--ntest', type=int, default=float('inf'), help='# of test examples.')
        add('--dataset_param_file', type=str, default='./dataset/<classname>/', help='file with dataset specific label mapping.')
        add('--results_dir', type=str, default='./results/', help='saves results here.')
        add('--load_feat_dir', type=str, default='./results/', help='load pre-computed features from here.')
        add('--aspect_ratio', type=float, default=1.0, help='aspect ratio of result images')
        add('--phase', type=str, default='test', help='train, val, test, etc')
        add('--which_epoch', type=str, default='latest', help='which epoch to load? set to latest to use latest cached model')
        add('--n_samples', type=int, default=5, help='#samples')
        add('--how_many', type=int, default=50, help='how many test images to run')
        add('--cluster_path', type=str, default='features_clustered_010.npy', help='the path for clustered results of encoded features')
        add('--export_onnx', type=str, help='export ONNX model to a given file')
        add('--engine', type=str, help='run serialized TRT engine')
        add('--onnx', type=str, help='run ONNX model via TRT')
        add('--reference_idx', type=int, help='reference image index where we want a piece from it')
        add('--condition_idx', type=int, help='conditioned image index where we base our outfit on')
        add('--swap_piece', type=int, help='which piece we want to swap out from condiontion_img to reference_img')
        self.isTrain = False
# NOTE(review): the dump showed bare '.unit' and '.parametrize' markers;
# restored as '@pytest.mark.*' decorators ('unit' appears to be a
# project-defined custom marker — confirm against the project's pytest config).
@pytest.mark.unit
class TestPhoneNumber():
    """Validation tests for PhoneNumber against known-bad and known-good numbers."""

    @pytest.mark.parametrize('phone_number', INVALID_PHONE_NUMBER_LIST)
    def test_invalid_phone_numbers(self, phone_number: str) -> None:
        with pytest.raises(ValueError):
            PhoneNumber.validate(phone_number)

    @pytest.mark.parametrize('phone_number', VALID_PHONE_NUMBER_LIST)
    def test_valid_phone_numbers(self, phone_number: str) -> None:
        validated_number = PhoneNumber.validate(phone_number)
        assert (validated_number == phone_number)
class SilhouetteCameo():
    """Driver for Graphtec/Silhouette USB cutting plotters.

    Locates the device over USB (pyusb on Linux/Windows, a libusb1 context on
    macOS), then exposes low-level read/write plus higher-level setup() and
    plot() operations.  Supports a dry-run mode and a command dump file.

    NOTE(review): this block was reconstructed from whitespace-mangled
    source; statement nesting was inferred from context and should be
    confirmed against the upstream project.
    """

    def __init__(self, log=sys.stderr, cmdfile=None, inc_queries=False, dry_run=False, progress_cb=None, force_hardware=None):
        """Find and claim a supported device.

        log: writable stream for diagnostics.  cmdfile: optional file to
        mirror all commands into.  inc_queries: also mirror query traffic.
        dry_run: never talk to hardware.  progress_cb: callable(done, total,
        msg) for write progress.  force_hardware: override detected model by
        its DEVICE name.
        Raises ValueError when no device is found and dry_run is off.
        """
        self.leftaligned = False  # flipped later for printers with known width
        self.log = log
        self.commands = cmdfile
        self.inc_queries = inc_queries
        self.dry_run = dry_run
        self.progress_cb = progress_cb
        dev = None
        self.margins_printed = None
        if self.dry_run:
            print('Dry run specified; no commands will be sent to cutter.', file=self.log)
        # First pass: try each known model's exact (vendor, product) pair.
        for hardware in DEVICE:
            try:
                if sys_platform.startswith('win'):
                    print('device lookup under windows not tested. Help adding code!', file=self.log)
                    dev = usb.core.find(idVendor=hardware['vendor_id'], idProduct=hardware['product_id'])
                elif sys_platform.startswith('darwin'):
                    # macOS path uses a libusb1 context instead of pyusb's find().
                    dev = usb1ctx.openByVendorIDAndProductID(hardware['vendor_id'], hardware['product_id'])
                else:
                    dev = usb.core.find(idVendor=hardware['vendor_id'], idProduct=hardware['product_id'])
            except usb.core.NoBackendError:
                dev = None
            if dev:
                self.hardware = hardware
                break
        if (dev is None):
            # Fallback: any Graphtec vendor-id device, model unknown.
            try:
                if sys_platform.startswith('win'):
                    print('device fallback under windows not tested. Help adding code!', file=self.log)
                    dev = usb.core.find(idVendor=VENDOR_ID_GRAPHTEC)
                    self.hardware = {'name': 'Unknown Graphtec device'}
                    if dev:
                        self.hardware['name'] += (' 0x%04x' % dev.idProduct)
                        self.hardware['product_id'] = dev.idProduct
                        self.hardware['vendor_id'] = dev.idVendor
                elif sys_platform.startswith('darwin'):
                    print('device fallback under macosx not implemented. Help adding code!', file=self.log)
                else:
                    dev = usb.core.find(idVendor=VENDOR_ID_GRAPHTEC)
                    self.hardware = {'name': 'Unknown Graphtec device '}
                    if dev:
                        self.hardware['name'] += (' 0x%04x' % dev.idProduct)
                        self.hardware['product_id'] = dev.idProduct
                        self.hardware['vendor_id'] = dev.idVendor
            except usb.core.NoBackendError:
                dev = None
        if (dev is None):
            if dry_run:
                # Dry run may proceed without hardware; reads are mocked.
                print('No device detected; continuing dry run with dummy device', file=self.log)
                self.hardware = dict(name='Crashtest Dummy Device')
            else:
                msg = ''
                try:
                    # List whatever IS on the bus to help the user diagnose.
                    for dev in usb.core.find(find_all=True):
                        msg += ('(%04x,%04x) ' % (dev.idVendor, dev.idProduct))
                except NameError:
                    msg += 'unable to list devices on OS X'
                raise ValueError(('No Graphtec Silhouette devices found.\nCheck USB and Power.\nDevices: ' + msg))
        # bus/address are pyusb attributes; absent on the macOS handle.
        try:
            dev_bus = dev.bus
        except:
            dev_bus = (- 1)
        try:
            dev_addr = dev.address
        except:
            dev_addr = (- 1)
        print(('%s found on usb bus=%d addr=%d' % (self.hardware['name'], dev_bus, dev_addr)), file=self.log)
        if (dev is not None):
            if sys_platform.startswith('win'):
                print('device init under windows not implemented. Help adding code!', file=self.log)
            elif sys_platform.startswith('darwin'):
                dev.claimInterface(0)
            else:
                # Linux: detach the kernel usblp driver so we can claim the device.
                try:
                    if dev.is_kernel_driver_active(0):
                        print('is_kernel_driver_active(0) returned nonzero', file=self.log)
                        if dev.detach_kernel_driver(0):
                            print('detach_kernel_driver(0) returned nonzero', file=self.log)
                except usb.core.USBError as e:
                    print('usb.core.USBError:', e, file=self.log)
                    if (e.errno == 13):
                        # EACCES: almost always a udev permission problem.
                        msg = ('\nIf you are not running as root, this might be a udev issue.\nTry a file /etc/udev/rules.d/99-graphtec-silhouette.rules\nwith the following example syntax:\nSUBSYSTEM=="usb", ATTR{idVendor}=="%04x", ATTR{idProduct}=="%04x", MODE="666"\n\nThen run \'sudo udevadm trigger\' to load this file.\n\nAlternatively, you can add yourself to group \'lp\' and logout/login.' % (self.hardware['vendor_id'], self.hardware['product_id']))
                        print(msg, file=self.log)
                        print(msg, file=sys.stderr)
                        sys.exit(0)
            if usb_reset_needed:
                # Some devices need a reset; retry up to 5 times.
                for i in range(5):
                    try:
                        dev.reset()
                        break
                    except usb.core.USBError as e:
                        print('reset failed: ', e, file=self.log)
                        print('retrying reset in 5 sec', file=self.log)
                        time.sleep(5)
            try:
                dev.set_configuration()
                dev.set_interface_altsetting()
            except usb.core.USBError:
                pass
        # Optional manual model override (matched by DEVICE entry name).
        for hardware in DEVICE:
            if (hardware['name'] == force_hardware):
                print('NOTE: Overriding device from', self.hardware.get('name', 'None'), 'to', hardware['name'], file=self.log)
                self.hardware = hardware
                break
        self.dev = dev
        self.need_interface = False  # some backends want interface=0 on I/O calls
        self.regmark = False
        if ((self.dev is None) or ('width_mm' in self.hardware)):
            self.leftaligned = True
        self.enable_sw_clipping = True
        self.clip_fuzz = 0.05  # mm tolerance used by clip_point()
        self.mock_response = None

    def __del__(self, *args):
        # Flush/close the command dump file, if any.
        if self.commands:
            self.commands.close()

    # Canned replies used when no device is attached (dry run): query -> response.
    mock_responses = {(CMD_ESC + CMD_ENQ): (RESP_READY + CMD_ETX), (QUERY_FIRMWARE_VERSION + CMD_ETX): (b'None ' + CMD_ETX)}

    def product_id(self):
        """USB product id of the detected device, or None if unknown."""
        return (self.hardware['product_id'] if ('product_id' in self.hardware) else None)

    def write(self, data, is_query=False, timeout=10000):
        """Send raw bytes to the device in 4 KiB chunks, with retry on timeout.

        Mirrors data to the command dump file (queries only when inc_queries).
        With no device, records a mock response instead.  Raises ValueError
        when the transfer stalls, TypeError on backend API mismatches.
        """
        data = to_bytes(data)
        if (self.commands and ((not is_query) or self.inc_queries)):
            self.commands.write(data)
        if (self.dev is None):
            # No hardware: arm the canned reply for the next read(), if known.
            if (data in SilhouetteCameo.mock_responses):
                self.mock_response = SilhouetteCameo.mock_responses[data]
            return None
        if (self.dry_run and (not is_query)):
            return None
        # Drain any stale response left over from a previous command.
        try:
            resp = self.read(timeout=10)
            if resp:
                print(("response before write('%s'): '%s'" % (data, resp)), file=self.log)
        except:
            pass
        endpoint = 1
        chunksz = 4096
        r = 0
        o = 0       # bytes successfully written so far
        msg = ''    # progress annotation: 't' = timeout retry, 'r' = zero-byte retry
        retry = 0
        while (o < len(data)):
            if o:
                if self.progress_cb:
                    self.progress_cb(o, len(data), msg)
                elif self.log:
                    self.log.write((' %d%% %s\r' % (((100.0 * o) / len(data)), msg)))
                    self.log.flush()
            chunk = data[o:(o + chunksz)]
            try:
                # pyusb exposes .write(); libusb1 handles expose .bulkWrite().
                if self.need_interface:
                    try:
                        r = self.dev.write(endpoint, chunk, interface=0, timeout=timeout)
                    except AttributeError:
                        r = self.dev.bulkWrite(endpoint, chunk, interface=0, timeout=timeout)
                else:
                    try:
                        r = self.dev.write(endpoint, chunk, timeout=timeout)
                    except AttributeError:
                        r = self.dev.bulkWrite(endpoint, chunk, timeout=timeout)
            except TypeError as te:
                raise TypeError(('Write Exception: %s, %s dev=%s' % (type(te), te, type(self.dev))))
            except AttributeError as ae:
                raise TypeError(('Write Exception: %s, %s dev=%s' % (type(ae), ae, type(self.dev))))
            except Exception as e:
                import errno
                try:
                    if (e.errno == errno.ETIMEDOUT):
                        # Device busy: wait and resend the same chunk.
                        time.sleep(1)
                        msg += 't'
                        continue
                except Exception as ee:
                    msg += 's.dev.write Error: {}'.format(ee)
            else:
                if len(msg):
                    msg = ''
                    self.log.write('\n')
                if ((r == 0) and (retry < 5)):
                    # Zero bytes accepted: back off and retry a few times.
                    time.sleep(1)
                    retry += 1
                    msg += 'r'
                elif (r <= 0):
                    raise ValueError(('write %d bytes failed: r=%d' % (len(chunk), r)))
                else:
                    retry = 0
                    o += r
        if (o != len(data)):
            raise ValueError(('write all %d bytes failed: o=%d' % (len(data), o)))

    def safe_write(self, data):
        """Write data in <=1 KiB slices, each ending on a full command (ETX),
        waiting for device-ready between slices to avoid buffer overruns."""
        data = to_bytes(data)
        safemaxchunksz = 1024
        so = 0
        while (so < len(data)):
            safechunksz = min(safemaxchunksz, (len(data) - so))
            candidate = data[so:(so + safechunksz)]
            # Trim to the last complete command so we never split mid-command.
            safechunk = candidate[0:(candidate.rfind(CMD_ETX) + 1)]
            self.write(data=safechunk, is_query=False)
            self.wait_for_ready(timeout=120, poll_interval=0.05)
            so += len(safechunk)

    def send_command(self, cmd, is_query=False, timeout=10000):
        """Send one command or a list of commands, ETX-delimited."""
        self.write(delimit_commands(cmd), is_query=is_query, timeout=timeout)

    def safe_send_command(self, cmd):
        """Like send_command() but flow-controlled via safe_write()."""
        data = delimit_commands(cmd)
        if (len(data) == 0):
            return
        self.safe_write(data)

    def send_escape(self, esc, is_query=False):
        """Send a single ESC-prefixed control byte."""
        self.write((CMD_ESC + esc), is_query=is_query)

    def read(self, size=64, timeout=5000):
        """Read up to size bytes from the bulk-in endpoint; always returns bytes.

        Without a device, returns (and clears) the armed mock response.
        Raises ValueError when a real read yields nothing.
        """
        endpoint = 130  # 0x82: bulk-in
        data = None
        if (self.dev is None):
            data = self.mock_response
            self.mock_response = None  # one-shot
            if (data is None):
                return None
        elif self.need_interface:
            try:
                data = self.dev.read(endpoint, size, timeout=timeout, interface=0)
            except AttributeError:
                data = self.dev.bulkRead(endpoint, size, timeout=timeout, interface=0)
        else:
            try:
                data = self.dev.read(endpoint, size, timeout=timeout)
            except AttributeError:
                data = self.dev.bulkRead(endpoint, size, timeout=timeout)
        if (data is None):
            raise ValueError('read failed: none')
        # Normalize backend-specific return types (array, str, bytes) to bytes.
        if isinstance(data, (bytes, bytearray)):
            return data
        elif isinstance(data, str):
            return data.encode()
        else:
            try:
                return data.tobytes()
            except:
                return data.tostring().encode()

    def try_read(self, size=64, timeout=1000):
        """read() that swallows all errors and returns None on failure."""
        ret = None
        try:
            ret = self.read(size=size, timeout=timeout)
            print(("try_read got: '%s'" % ret))
        except:
            pass
        return ret

    def send_receive_command(self, cmd, tx_timeout=10000, rx_timeout=1000):
        """Send a query command and return its decoded reply (sans trailing
        ETX), or None on any failure."""
        self.send_command(cmd, is_query=True, timeout=tx_timeout)
        try:
            resp = self.read(timeout=rx_timeout)
            if (len(resp) > 1):
                return resp[:(- 1)].decode()
        except:
            pass
        return None

    def status(self):
        """Query device status (ESC ENQ) and map the reply via RESP_DECODING.

        Raises ValueError when the reply is not ETX-terminated.
        """
        self.send_escape(CMD_ENQ, is_query=True)
        resp = b'None\x03'  # default when the read itself fails
        try:
            resp = self.read(timeout=5000)
        except usb.core.USBError as e:
            print('usb.core.USBError:', e, file=self.log)
            pass
        if (resp[(- 1)] != CMD_ETX[0]):
            raise ValueError(('status response not terminated with 0x03: %s' % resp[(- 1)]))
        return RESP_DECODING.get(bytes(resp[:(- 1)]), bytes(resp[:(- 1)]))

    def get_tool_setup(self):
        """Return the Cameo4-family tool-holder report, or 'none' for
        older models or on any read failure."""
        if (self.product_id() not in PRODUCT_LINE_CAMEO4):
            return 'none'
        self.send_escape(CMD_NAK, is_query=True)
        try:
            resp = self.read(timeout=1000)
            if (len(resp) > 1):
                return resp[:(- 1)].decode()
        except:
            pass
        return 'none'

    def wait_for_ready(self, timeout=30, poll_interval=2.0, verbose=False):
        """Poll status() until 'ready' or timeout elapses; returns last state.

        Raises NotImplementedError when polling a non-existent device.
        """
        state = self.status()
        if self.dry_run:
            return state
        npolls = int((timeout / poll_interval))
        for i in range(1, npolls):
            if (state == 'ready'):
                break
            if (state == 'None'):
                raise NotImplementedError('Waiting for ready but no device exists.')
            if verbose:
                print((' %d/%d: status=%s\r' % (i, npolls, state)), end='', file=sys.stderr)
            if (verbose == False):
                # Quiet mode: only nag about missing media, or after 1/3 of the
                # polls have elapsed.
                if (state == 'unloaded'):
                    print((' %d/%d: please load media ...\r' % (i, npolls)), end='', file=sys.stderr)
                elif (i > (npolls / 3)):
                    print((' %d/%d: status=%s\r' % (i, npolls, state)), end='', file=sys.stderr)
            time.sleep(poll_interval)
            state = self.status()
        if verbose:
            print('', file=sys.stderr)
        return state

    def initialize(self):
        """Send the init handshake (ESC EOT), print firmware version, and issue
        model-specific probe queries.  Raises ValueError when the very first
        write fails (typically a permissions problem)."""
        try:
            self.send_escape(CMD_EOT)
        except Exception as e:
            raise ValueError(('Write Exception: %s, %s errno=%s\n\nFailed to write the first 3 bytes. Permissions? inf-wizard?' % (type(e), e, e.errno)))
        print(("Device Version: '%s'" % self.get_version()), file=self.log)
        if (self.product_id() in PRODUCT_LINE_CAMEO3_ON):
            resp = self.send_receive_command('TB71')
            if resp:
                print(("TB71: '%s'" % resp), file=self.log)
            resp = self.send_receive_command('FA')
            if resp:
                print(("FA: '%s'" % resp), file=self.log)
            if (self.product_id() == PRODUCT_ID_SILHOUETTE_CAMEO3):
                resp = self.send_receive_command('TC')
                if resp:
                    print(("TC: '%s'" % resp), file=self.log)

    def get_version(self):
        """Return the firmware version string (long rx timeout)."""
        return self.send_receive_command(QUERY_FIRMWARE_VERSION, rx_timeout=10000)

    def set_boundary(self, top, left, bottom, right):
        """Set plot area corners (device units): '\\' = upper-left, 'Z' = lower-right."""
        self.send_command([('\\%d,%d' % (top, left)), ('Z%d,%d' % (bottom, right))])

    def set_cutting_mat(self, cuttingmat, mediawidth, mediaheight):
        """Select a cutting mat (Cameo3+ only) and set the matching boundary.

        Falls back to hardware/media dimensions when the mat has no fixed size.
        """
        if (self.product_id() not in PRODUCT_LINE_CAMEO3_ON):
            return
        mat_command = 'TG'
        matparms = CAMEO_MATS.get(cuttingmat, ('0', False, False))
        self.send_command((mat_command + matparms[0]))
        self.send_command(['FN0', 'TB50,0'])
        if matparms[1]:
            # Known mat: boundary from its fixed inch dimensions.
            self.set_boundary(0, 0, _inch_2_SU(matparms[1]), _inch_2_SU(matparms[2]))
        else:
            bottom = _mm_2_SU((self.hardware['length_mm'] if ('length_mm' in self.hardware) else mediaheight))
            right = _mm_2_SU((self.hardware['width_mm'] if ('width_mm' in self.hardware) else mediawidth))
            self.set_boundary(0, 0, bottom, right)

    def setup(self, media=132, speed=None, pressure=None, toolholder=None, pen=None, cuttingmat=None, sharpencorners=False, sharpencorners_start=0.1, sharpencorners_end=0.1, autoblade=False, depth=None, sw_clipping=True, clip_fuzz=0.05, trackenhancing=False, bladediameter=0.9, landscape=False, leftaligned=None, mediawidth=210.0, mediaheight=297.0):
        """Configure the cutter for a job: media, speed, pressure, tool, mat.

        Unspecified speed/pressure/depth are defaulted from the MEDIA table.
        media==113 implies pen mode unless pen is given.  Values are clamped
        to device limits.  Command sets differ per product generation
        (Cameo4 family vs. Cameo3 vs. older).
        """
        if (leftaligned is not None):
            self.leftaligned = leftaligned
        self.initialize()
        self.set_cutting_mat(cuttingmat, mediawidth, mediaheight)
        if (media is not None):
            if ((media < 100) or (media > 300)):
                media = 300
            if (self.product_id() not in PRODUCT_LINE_CAMEO3_ON):
                self.send_command(('FW%d' % media))
            if (pen is None):
                # Media code 113 is the pen medium.
                if (media == 113):
                    pen = True
                else:
                    pen = False
            # Fill unset parameters from the media table row.
            for i in MEDIA:
                if (i[0] == media):
                    print(("Media=%d, cap='%s', name='%s'" % (media, i[4], i[5])), file=self.log)
                    if (pressure is None):
                        pressure = i[1]
                    if (speed is None):
                        speed = i[2]
                    if (depth is None):
                        depth = i[3]
                    break
        tool = SilhouetteCameoTool(toolholder)
        if (toolholder is None):
            toolholder = 1
        tool_setup = self.get_tool_setup()
        if (tool_setup == 'none'):
            current_tool = None
        else:
            # Report is a comma-separated list indexed by holder number (1-based).
            current_tool = int(tool_setup.split(',')[(toolholder - 1)])
        if (self.product_id() in PRODUCTS_WITH_TWO_TOOLS):
            self.send_command(tool.select())
        print(('toolholder: %d' % toolholder), file=self.log)
        if (self.product_id() in PRODUCT_LINE_CAMEO4):
            # Cameo4 family: tool-addressed commands.
            if (pressure is not None):
                if (pressure < 1): pressure = 1
                if (pressure > 33): pressure = 33
                self.send_command(tool.pressure(pressure))
                print(('pressure: %d' % pressure), file=self.log)
            self.send_command(self.acceleration_cmd(0))
            if (speed is not None):
                if (speed < 1): speed = 1
                if (speed > 30): speed = 30
                self.send_command(tool.speed(speed))
                print(('speed: %d' % speed), file=self.log)
            self.send_command(tool.cutter_offset(0, 0.05))
            self.send_command(tool.lift(sharpencorners))
            if pen:
                self.send_command(tool.sharpen_corners(0, 0))
            else:
                # Convert mm to the device's 0.1 mm units, rounding.
                sharpencorners_start = int(((sharpencorners_start + 0.05) * 10.0))
                sharpencorners_end = int(((sharpencorners_end + 0.05) * 10.0))
                self.send_command(tool.sharpen_corners(sharpencorners_start, sharpencorners_end))
            if (pressure is not None):
                if (pressure < 1): pressure = 1
                if (pressure > 33): pressure = 33
                self.send_command(tool.pressure(pressure))
                print(('pressure: %d' % pressure), file=self.log)
            self.send_command(self.acceleration_cmd(3))
            if pen:
                self.send_command(tool.cutter_offset(0, 0.05))
            else:
                self.send_command(tool.cutter_offset(bladediameter, 0.05))
        else:
            # Cameo3 and older: mixed tool-addressed and legacy commands.
            if (speed is not None):
                if (speed < 1): speed = 1
                if (speed > 10): speed = 10
                if (self.product_id() == PRODUCT_ID_SILHOUETTE_CAMEO3):
                    self.send_command(tool.speed(speed))
                else:
                    self.send_command(('!%d' % speed))
                print(('speed: %d' % speed), file=self.log)
            if (pressure is not None):
                if (pressure < 1): pressure = 1
                if (pressure > 33): pressure = 33
                if (self.product_id() == PRODUCT_ID_SILHOUETTE_CAMEO3):
                    self.send_command(tool.pressure(pressure))
                else:
                    self.send_command(('FX%d' % pressure))
                print(('pressure: %d' % pressure), file=self.log)
            if (self.product_id() == PRODUCT_ID_SILHOUETTE_CAMEO3):
                if pen:
                    self.send_command(tool.cutter_offset(0, 0.05))
            if self.leftaligned:
                print('Loaded media is expected left-aligned.', file=self.log)
            else:
                print('Loaded media is expected right-aligned.', file=self.log)
            if (self.product_id() == PRODUCT_ID_SILHOUETTE_CAMEO3):
                self.send_command(tool.lift(sharpencorners))
                if pen:
                    self.send_command(tool.sharpen_corners(0, 0))
                else:
                    sharpencorners_start = int(((sharpencorners_start + 0.05) * 10.0))
                    sharpencorners_end = int(((sharpencorners_end + 0.05) * 10.0))
                    self.send_command(tool.sharpen_corners(sharpencorners_start, sharpencorners_end))
            # NOTE(review): elif/else nesting below inferred from mangled
            # source — legacy models use FC, Cameo3 uses cutter_offset.
            if (self.product_id() == PRODUCT_ID_SILHOUETTE_CAMEO3):
                if (not pen):
                    self.send_command([tool.cutter_offset(0, 0.05), tool.cutter_offset(bladediameter, 0.05)])
            elif pen:
                self.send_command('FC0')
            else:
                self.send_command(('FC%d' % _mm_2_SU(bladediameter)))
        if (self.product_id() in PRODUCT_LINE_CAMEO3_ON):
            # AutoBlade depth only applies to holder 1 with an AutoBlade fitted.
            if (autoblade and (depth is not None)):
                if (current_tool not in (None, SILHOUETTE_CAMEO4_TOOL_AUTOBLADE, SILHOUETTE_CAMEO4_TOOL_EMPTY)):
                    print(('Expected the tool to be an AutoBlade, found %s. Not setting depth.' % (current_tool,)), file=self.log)
                elif (toolholder != 1):
                    print(('AutoBlade depth can only be set for tool holder 1, not %s' % (toolholder,)), file=self.log)
                else:
                    if (depth < 0): depth = 0
                    if (depth > 10): depth = 10
                    self.send_command(tool.depth(depth))
                    print(('depth: %d' % depth), file=self.log)
        self.enable_sw_clipping = sw_clipping
        self.clip_fuzz = clip_fuzz
        if (trackenhancing is not None):
            if trackenhancing:
                self.send_command('FY0')
            elif (self.product_id() in PRODUCT_LINE_CAMEO3_ON):
                pass
            else:
                self.send_command('FY1')
        if (self.product_id() in PRODUCT_LINE_CAMEO3_ON):
            pass
        else:
            if (landscape is not None):
                if landscape:
                    self.send_command(['FN0', 'TB50,1'])
                else:
                    self.send_command(['FN0', 'TB50,0'])
            self.send_command('FE0,0')

    def find_bbox(self, cut):
        """Bounding box of all points in a list of paths, as a dict with
        llx/lly/urx/ury keys (filled by _bbox_extend)."""
        bb = {}
        for path in cut:
            for pt in path:
                _bbox_extend(bb, pt[0], pt[1])
        return bb

    def flip_cut(self, cut):
        """Return a copy of cut mirrored vertically within its own bbox."""
        bb = self.find_bbox(cut)
        new_cut = []
        for path in cut:
            new_path = []
            for pt in path:
                new_path.append((pt[0], ((bb['lly'] + bb['ury']) - pt[1])))
            new_cut.append(new_path)
        return new_cut

    def mirror_cut(self, cut):
        """Return a copy of cut mirrored horizontally within its own bbox."""
        bb = self.find_bbox(cut)
        new_cut = []
        for path in cut:
            new_path = []
            for pt in path:
                new_path.append((((bb['llx'] + bb['urx']) - pt[0]), pt[1]))
            new_cut.append(new_path)
        return new_cut

    # --- Plotter command builders: mm in, device command string out. ---
    def acceleration_cmd(self, acceleration):
        return ('TJ%d' % acceleration)

    def move_mm_cmd(self, mmy, mmx):
        # Pen-up move; note device takes (y, x) order.
        return ('M%d,%d' % (_mm_2_SU(mmy), _mm_2_SU(mmx)))

    def draw_mm_cmd(self, mmy, mmx):
        # Pen-down draw; (y, x) order.
        return ('D%d,%d' % (_mm_2_SU(mmy), _mm_2_SU(mmx)))

    def upper_left_mm_cmd(self, mmy, mmx):
        return ('\\%d,%d' % (_mm_2_SU(mmy), _mm_2_SU(mmx)))

    def lower_right_mm_cmd(self, mmy, mmx):
        return ('Z%d,%d' % (_mm_2_SU(mmy), _mm_2_SU(mmx)))

    def automatic_regmark_test_mm_cmd(self, height, width, top, left):
        # TB123: search registration marks automatically in the given window.
        return ('TB123,%d,%d,%d,%d' % (_mm_2_SU(height), _mm_2_SU(width), _mm_2_SU(top), _mm_2_SU(left)))

    def manual_regmark_mm_cmd(self, height, width):
        # TB23: operator positions the head on the marks manually.
        return ('TB23,%d,%d' % (_mm_2_SU(height), _mm_2_SU(width)))

    def clip_point(self, x, y, bbox):
        """Clamp (x, y) into bbox['clip'] with clip_fuzz tolerance.

        Returns (x, y, inside); increments bbox['clip']['count'] on clipping.
        """
        inside = True
        if ('clip' not in bbox):
            return (x, y, inside)
        if ('count' not in bbox['clip']):
            bbox['clip']['count'] = 0
        if ((bbox['clip']['llx'] - x) > self.clip_fuzz):
            x = bbox['clip']['llx']
            inside = False
        if ((x - bbox['clip']['urx']) > self.clip_fuzz):
            x = bbox['clip']['urx']
            inside = False
        if ((bbox['clip']['ury'] - y) > self.clip_fuzz):
            y = bbox['clip']['ury']
            inside = False
        if ((y - bbox['clip']['lly']) > self.clip_fuzz):
            y = bbox['clip']['lly']
            inside = False
        if (not inside):
            bbox['clip']['count'] += 1
        return (x, y, inside)

    def plot_cmds(self, plist, bbox, x_off, y_off):
        """Translate a list of point paths into move/draw commands.

        Updates bbox (extent + point count) as a side effect.  With
        bbox['only'] set, computes the bbox but emits no commands.  When
        software clipping is enabled, segments leaving the clip region are
        emitted as moves instead of draws.
        """
        if (bbox is None):
            bbox = {}
        bbox['count'] = 0
        if (not ('only' in bbox)):
            bbox['only'] = False
        if (('clip' in bbox) and ('urx' in bbox['clip'])):
            flipwidth = bbox['clip']['urx']
        if (('clip' in bbox) and ('llx' in bbox['clip'])):
            x_off += bbox['clip']['llx']
        if (('clip' in bbox) and ('ury' in bbox['clip'])):
            y_off += bbox['clip']['ury']
        last_inside = True
        plotcmds = []
        for path in plist:
            if (len(path) < 2):
                continue  # a single point cannot be drawn
            x = (path[0][0] + x_off)
            y = (path[0][1] + y_off)
            _bbox_extend(bbox, x, y)
            bbox['count'] += 1
            (x, y, last_inside) = self.clip_point(x, y, bbox)
            if (bbox['only'] is False):
                plotcmds.append(self.move_mm_cmd(y, x))
            for j in range(1, len(path)):
                x = (path[j][0] + x_off)
                y = (path[j][1] + y_off)
                _bbox_extend(bbox, x, y)
                bbox['count'] += 1
                (x, y, inside) = self.clip_point(x, y, bbox)
                if (bbox['only'] is False):
                    # Draw only when both endpoints are inside the clip region.
                    if ((not self.enable_sw_clipping) or (inside and last_inside)):
                        plotcmds.append(self.draw_mm_cmd(y, x))
                    else:
                        plotcmds.append(self.move_mm_cmd(y, x))
                last_inside = inside
        return plotcmds

    def plot(self, mediawidth=210.0, mediaheight=297.0, margintop=None, marginleft=None, pathlist=None, offset=None, bboxonly=False, end_paper_offset=0, endposition='below', regmark=False, regsearch=False, regwidth=180, reglength=230, regoriginx=15.0, regoriginy=20.0):
        """Plot pathlist (mm coordinates) onto the loaded media.

        offset: scalar or (x, y) shift.  bboxonly=True draws only the
        bounding rectangle.  regmark enables registration-mark alignment
        (regsearch: let the device find the marks).  endposition 'start'
        returns the head home; otherwise advances past the cut.
        Returns {'bbox', 'unit', 'trailer'}.  Raises ValueError when
        registration marks cannot be found.
        """
        bbox = {}
        # Default margins from the hardware table when not given.
        if ((margintop is None) and ('margin_top_mm' in self.hardware)):
            margintop = self.hardware['margin_top_mm']
        if ((marginleft is None) and ('margin_left_mm' in self.hardware)):
            marginleft = self.hardware['margin_left_mm']
        if (margintop is None):
            margintop = 0
        if (marginleft is None):
            marginleft = 0
        if (self.leftaligned and ('width_mm' in self.hardware)):
            mediawidth = self.hardware['width_mm']
        print(('mediabox: (%g,%g)-(%g,%g)' % (marginleft, margintop, mediawidth, mediaheight)), file=self.log)
        width = mediawidth
        height = mediaheight
        top = margintop
        left = marginleft
        if (width < left):
            width = left
        if (height < top):
            height = top
        x_off = left
        y_off = top
        if (offset is None):
            offset = (0, 0)
        elif ((type(offset) != type([])) and (type(offset) != type(()))):
            # Scalar offset means x-only.
            offset = (offset, 0)
        if regmark:
            print(('bb regoriginx=%g regoriginy=%g' % (regoriginx, regoriginy)), file=self.log)
            # Coordinates become relative to the registration-mark origin.
            offset = ((offset[0] - regoriginx), (offset[1] - regoriginy))
            height = reglength
            width = regwidth
            self.send_command('TB50,0')
            self.send_command('TB99')
            self.send_command('TB52,2')
            self.send_command('TB51,400')
            self.send_command('TB53,10')
            self.send_command('TB55,1')
            if regsearch:
                self.send_command(self.automatic_regmark_test_mm_cmd(reglength, regwidth, (regoriginy - 10), (regoriginx - 10)))
            else:
                self.send_command(self.manual_regmark_mm_cmd(reglength, regwidth))
            # Mark search may take a long time; ' 0' ETX signals success.
            resp = self.read(timeout=40000)
            if (resp != b' 0\x03'):
                raise ValueError(("Couldn't find registration marks. %s" % str(resp)))
        if (self.product_id() not in PRODUCT_LINE_CAMEO3_ON):
            self.send_command([self.upper_left_mm_cmd(0, 0), self.lower_right_mm_cmd(height, width), 'L0', 'FE0,0', 'FF0,0,0'])
        bbox['clip'] = {'urx': width, 'ury': top, 'llx': left, 'lly': height}
        bbox['only'] = bboxonly
        cmd_list = self.plot_cmds(pathlist, bbox, offset[0], offset[1])
        print(('Final bounding box and point counts: ' + str(bbox)), file=self.log)
        if (bboxonly == True):
            # Replace the job with a rectangle tracing the computed bbox.
            cmd_list = [self.move_mm_cmd(bbox['ury'], bbox['llx']), self.draw_mm_cmd(bbox['ury'], bbox['urx']), self.draw_mm_cmd(bbox['lly'], bbox['urx']), self.draw_mm_cmd(bbox['lly'], bbox['llx']), self.draw_mm_cmd(bbox['ury'], bbox['llx'])]
        self.safe_send_command(cmd_list)
        # Guarantee all four bbox keys exist for the trailer computation.
        if (not ('llx' in bbox)): bbox['llx'] = 0
        if (not ('lly' in bbox)): bbox['lly'] = 0
        if (not ('urx' in bbox)): bbox['urx'] = 0
        if (not ('ury' in bbox)): bbox['ury'] = 0
        if (endposition == 'start'):
            if (self.product_id() in PRODUCT_LINE_CAMEO3_ON):
                new_home = ['L0', self.upper_left_mm_cmd(0, 0), self.move_mm_cmd(0, 0), 'J0', 'FN0', 'TB50,0']
            else:
                new_home = 'H'
        else:
            # Feed the media past the cut by end_paper_offset mm.
            new_home = [self.move_mm_cmd((bbox['lly'] + end_paper_offset), 0), 'SO0']
        self.send_command(new_home)
        return {'bbox': bbox, 'unit': 1, 'trailer': new_home}

    def move_origin(self, feed_mm):
        """Feed media by feed_mm and set a new origin there."""
        self.wait_for_ready()
        self.send_command([self.move_mm_cmd(feed_mm, 0), 'SO0', 'FN0'])
        self.wait_for_ready()

    def load_dumpfile(self, file):
        """Load a path list from a previously dumped command file.

        Returns the parsed list, or None for SVG input (with an error hint).
        NOTE(review): `exec` of an assignment does not rebind a local in
        Python 3, so data1234 may stay None here — legacy Python 2 idiom;
        confirm against callers before relying on the return value.
        """
        data1234 = None
        for line in open(file, 'r').readlines():
            if re.match('\\s*\\[', line):
                exec(('data1234=' + line))
                break
            elif re.match('\\s*<\\s*svg', line):
                print(line)
                print('Error: xml/svg file. Please load into inkscape. Use extensions -> export -> sendto silhouette, [x] dump to file')
                return None
            else:
                print(line, end='')
        return data1234
class Grid(object):
    """FPGA tile grid: maps tile names <-> grid coordinates and holds
    per-tile configuration-bit and clock-region metadata parsed from the
    tilegrid database dict."""

    def __init__(self, db, tilegrid):
        """Build the coordinate and metadata indices from the raw tilegrid.

        db: database handle used later for segbits lookups.
        tilegrid: dict of tile name -> raw tile record.
        Asserts that grid locations are unique and clock-region names parse.
        """
        self.db = db
        self.tilegrid = tilegrid
        self.loc = {}       # GridLoc -> tile name
        self.tileinfo = {}  # tile name -> GridInfo
        clock_regions = {}  # interned ClockRegion objects, shared by name
        for tile in self.tilegrid:
            tileinfo = self.tilegrid[tile]
            grid_loc = GridLoc(tileinfo['grid_x'], tileinfo['grid_y'])
            assert (grid_loc not in self.loc)
            self.loc[grid_loc] = tile
            bits = {}
            if ('bits' in tileinfo):
                # One Bits record per block type (e.g. CLB_IO_CLK, BLOCK_RAM).
                for k in tileinfo['bits']:
                    segment_type = BlockType(k)
                    # base 0: accepts hex strings like "0x00020800".
                    base_address = int(tileinfo['bits'][k]['baseaddr'], 0)
                    alias = None
                    if ('alias' in tileinfo['bits'][k]):
                        alias = BitAlias(tile_type=tileinfo['bits'][k]['alias']['type'], start_offset=tileinfo['bits'][k]['alias']['start_offset'], sites=tileinfo['bits'][k]['alias']['sites'])
                    bits[segment_type] = Bits(base_address=base_address, frames=tileinfo['bits'][k]['frames'], offset=tileinfo['bits'][k]['offset'], words=tileinfo['bits'][k]['words'], alias=alias)
            clock_region = None
            if ('clock_region' in tileinfo):
                if (tileinfo['clock_region'] is not None):
                    if (tileinfo['clock_region'] not in clock_regions):
                        # Parse "X#Y#" coordinates out of the region name.
                        m = CLOCK_REGION_RE.fullmatch(tileinfo['clock_region'])
                        assert (m is not None), tileinfo['clock_region']
                        clock_regions[tileinfo['clock_region']] = ClockRegion(name=tileinfo['clock_region'], x=int(m.group(1)), y=int(m.group(2)))
                    clock_region = clock_regions[tileinfo['clock_region']]
            self.tileinfo[tile] = GridInfo(bits=bits, sites=tileinfo['sites'], prohibited_sites=tileinfo['prohibited_sites'], tile_type=tileinfo['type'], pin_functions=tileinfo.get('pin_functions', {}), clock_region=clock_region)
        # Cache the grid extents: (min_x, max_x, min_y, max_y).
        (x, y) = zip(*self.loc.keys())
        self._dims = (min(x), max(x), min(y), max(y))

    def tiles(self):
        """All tile names."""
        return self.tileinfo.keys()

    def tile_locations(self):
        """All occupied GridLoc coordinates."""
        return self.loc.keys()

    def dims(self):
        """Grid extents as (min_x, max_x, min_y, max_y)."""
        return self._dims

    def is_populated(self, grid_loc):
        """True when a tile exists at grid_loc."""
        return (grid_loc in self.loc)

    def loc_of_tilename(self, tilename):
        """GridLoc of the named tile."""
        tileinfo = self.tilegrid[tilename]
        return GridLoc(tileinfo['grid_x'], tileinfo['grid_y'])

    def tilename_at_loc(self, grid_loc):
        """Tile name at grid_loc (KeyError when unpopulated)."""
        return self.loc[grid_loc]

    def gridinfo_at_loc(self, grid_loc):
        """GridInfo for the tile at grid_loc."""
        return self.tileinfo[self.loc[grid_loc]]

    def gridinfo_at_tilename(self, tilename):
        """GridInfo for the named tile."""
        return self.tileinfo[tilename]

    def iter_all_frames(self):
        """Yield a BitsInfo for every (tile, block type) bits record."""
        for (tile, tileinfo) in self.tileinfo.items():
            for (block_type, bits) in tileinfo.bits.items():
                (yield BitsInfo(block_type=block_type, tile=tile, bits=bits))

    def get_segment_map(self):
        """Build the frame/segment map over this grid."""
        return segment_map.SegmentMap(self)

    def tile_key(self, tilename):
        """Sort key (tile_type, x, -y): groups by type, then column,
        then rows top-to-bottom (y negated)."""
        gridinfo = self.gridinfo_at_tilename(tilename)
        loc = self.loc_of_tilename(tilename)
        tile_type = gridinfo.tile_type
        return (tile_type, loc.grid_x, (- loc.grid_y))

    def get_tile_segbits_at_tilename(self, tilename):
        """Segbits for a tile; uses the alias-aware variant when any of the
        tile's bits records carries a BitAlias."""
        gridinfo = self.gridinfo_at_tilename(tilename)
        any_alias = False
        for (block_type, bits) in gridinfo.bits.items():
            if (bits.alias is not None):
                any_alias = True
        if any_alias:
            return TileSegbitsAlias(self.db, gridinfo.tile_type, gridinfo.bits)
        else:
            return self.db.get_tile_segbits(gridinfo.tile_type)
@_scopes.system_entry_point
def _execute_map_task(inputs, output_prefix, raw_output_data_prefix, max_concurrency, test, resolver: str, resolver_args: List[str], checkpoint_path: Optional[str] = None, prev_checkpoint: Optional[str] = None, dynamic_addl_distro: Optional[str] = None, dynamic_dest_dir: Optional[str] = None, experimental: Optional[bool] = False):
    """Entry point for one shard of a map task.

    Loads the mapped task via the resolver, derives this shard's index from
    the environment, namespaces the output prefix by that index, and runs
    the task.  With ``test`` set, only logs what would run and returns.

    NOTE(review): the ``@`` of the ``_scopes.system_entry_point`` decorator
    was lost in this copy (bare expression before ``def`` is a syntax
    error); restored here.

    Raises:
        Exception: when ``resolver_args`` is empty — the resolver cannot
            reconstruct the task without them.
    """
    if len(resolver_args) < 1:
        raise Exception(f'Resolver args cannot be <1, got {resolver_args}')
    with setup_execution(raw_output_data_prefix, checkpoint_path, prev_checkpoint, dynamic_addl_distro, dynamic_dest_dir) as ctx:
        # Which element of the mapped collection this container processes.
        task_index = _compute_array_job_index()
        if experimental:
            mtr = ArrayNodeMapTaskResolver()
        else:
            mtr = MapTaskResolver()
        # Each shard writes outputs under its own index subdirectory.
        output_prefix = os.path.join(output_prefix, str(task_index))
        map_task = mtr.load_task(loader_args=resolver_args, max_concurrency=max_concurrency)
        if test:
            logger.info(f'Test detected, returning. Inputs: {inputs} Computed task index: {task_index} New output prefix: {output_prefix} Raw output path: {raw_output_data_prefix} Resolver and args: {resolver} {resolver_args}')
            return
        _handle_annotated_task(ctx, map_task, inputs, output_prefix)
def test_simple_list_pager():
    """With simple_list_pager enabled, get_list must skip the count query
    entirely and report the total row count as None."""
    app, db, admin = setup()
    with app.app_context():
        Model1, _ = create_models(db)

        class TestModelView(CustomModelView):
            # Opt into the pager mode that never issues a COUNT query.
            simple_list_pager = True

            def get_count_query(self):
                # Must never be reached when simple_list_pager is on.
                assert False

        model_view = TestModelView(Model1, db.session)
        admin.add_view(model_view)
        total, _rows = model_view.get_list(0, None, None, None, None)
        assert total is None
def test_compound_singles():
    """All three source formats (CSV file, JSON file, in-memory object) must
    adapt to the exact same record sequence."""
    adapter = GenericInputAdapter(input_type='compound', input_shape='single')
    csv_source = os.path.abspath(os.path.join(inputs_path, 'compound_singles.csv'))
    json_source = os.path.abspath(os.path.join(inputs_path, 'compound_singles.json'))
    sources = (csv_source, json_source, compound_singles_input)
    adapted = [list(adapter.adapt_one_by_one(source)) for source in sources]
    assert adapted[0] == adapted[1] == adapted[2]
class ReportingAgencyOverview(models.Model):
    """Per-agency, per-fiscal-period reporting rollup.

    One row per (toptier_code, fiscal_year, fiscal_period), holding GTAS
    obligation/budgetary totals and counts of linked vs. unlinked award
    records for both procurement and assistance (C and D file sources).
    """

    reporting_agency_overview_id = models.AutoField(primary_key=True)
    # Toptier agency identifier this row aggregates.
    toptier_code = models.TextField()
    fiscal_year = models.IntegerField()
    fiscal_period = models.IntegerField()
    # Dollar totals; max_digits=23 allows values into the trillions with cents.
    total_dollars_obligated_gtas = models.DecimalField(max_digits=23, decimal_places=2, null=True)
    total_budgetary_resources = models.DecimalField(max_digits=23, decimal_places=2, null=True)
    total_diff_approp_ocpa_obligated_amounts = models.DecimalField(max_digits=23, decimal_places=2, null=True)
    # Linkage counts: _c_/_d_ presumably refer to File C / File D sources — TODO confirm.
    unlinked_procurement_c_awards = models.IntegerField(null=True)
    unlinked_assistance_c_awards = models.IntegerField(null=True)
    unlinked_procurement_d_awards = models.IntegerField(null=True)
    unlinked_assistance_d_awards = models.IntegerField(null=True)
    linked_procurement_awards = models.IntegerField(null=True)
    linked_assistance_awards = models.IntegerField(null=True)

    class Meta():
        db_table = 'reporting_agency_overview'
        # Composite index supporting lookups by period within an agency.
        indexes = [models.Index(fields=['fiscal_year', 'fiscal_period', 'toptier_code'], name='reporting_agency_ovr_group_idx')]
class Return_Statement(Simple_Statement):
    """AST node for a bare MATLAB `return` statement."""

    def __init__(self, t_kw):
        """Wrap the `return` keyword token and link it back to this node."""
        super().__init__()
        assert isinstance(t_kw, MATLAB_Token)
        assert t_kw.kind == 'KEYWORD' and t_kw.value == 'return'
        self.t_kw = t_kw
        # Token keeps a back-reference to its AST node.
        self.t_kw.set_ast(self)

    def loc(self):
        """Source location of the `return` keyword."""
        return self.t_kw.location
class Command(BaseCommand):
    """`flet pack`: bundle a Flet script into a standalone executable via PyInstaller."""

    def add_arguments(self, parser: argparse.ArgumentParser) -> None:
        """Register all `flet pack` CLI options."""
        parser.add_argument('script', type=str, help='path to a Python script')
        parser.add_argument('-i', '--icon', dest='icon', help='path to an icon file (.ico, .png, .icns)')
        parser.add_argument('-n', '--name', dest='name', help='name for the generated executable (Windows) or app bundle (macOS)')
        parser.add_argument('-D', '--onedir', dest='onedir', action='store_true', default=False, help='create a one-folder bundle containing an executable (Windows)')
        parser.add_argument('--distpath', dest='distpath', help='where to put the bundled app (default: ./dist)')
        parser.add_argument('--add-data', dest='add_data', action='append', nargs='*', help='additional non-binary files or folders to be added to the executable')
        parser.add_argument('--add-binary', dest='add_binary', action='append', nargs='*', help='additional binary files to be added to the executable')
        parser.add_argument('--hidden-import', dest='hidden_import', action='append', nargs='*', help='add an import not visible in the code of the script(s)')
        parser.add_argument('--product-name', dest='product_name', help='executable product name (Windows) or bundle name (macOS)')
        parser.add_argument('--file-description', dest='file_description', help='executable file description (Windows)')
        parser.add_argument('--product-version', dest='product_version', help='executable product version (Windows) or bundle version (macOS)')
        parser.add_argument('--file-version', dest='file_version', help='executable file version, n.n.n.n (Windows)')
        parser.add_argument('--company-name', dest='company_name', help='executable company name (Windows)')
        parser.add_argument('--copyright', dest='copyright', help='executable (Windows) or bundle (macOS) copyright')
        parser.add_argument('--codesign-identity', dest='codesign_identity', help='Code signing identity (macOS)')
        parser.add_argument('--bundle-id', dest='bundle_id', help='bundle identifier (macOS)')
        parser.add_argument('--debug-console', dest='debug_console', help='Show python console (Ensure correct DEBUG level)')
        parser.add_argument('--uac-admin', dest='uac_admin', default=False, action='store_true', help='Using this option creates a Manifest that will request elevation upon application start.(Windows)')

    def handle(self, options: argparse.Namespace) -> None:
        """Clean previous build output, assemble PyInstaller arguments, patch the
        bundled Flet runtime (icon/version metadata), then run PyInstaller."""
        # Remove stale ./build and ./dist left by previous runs.
        build_dir = os.path.join(os.getcwd(), 'build')
        if os.path.exists(build_dir):
            shutil.rmtree(build_dir, ignore_errors=True)
        dist_dir = os.path.join(os.getcwd(), 'dist')
        if os.path.exists(dist_dir):
            shutil.rmtree(dist_dir, ignore_errors=True)
        try:
            # Imported lazily so a missing PyInstaller produces the friendly
            # error in the except block below rather than an import-time crash.
            import PyInstaller.__main__
            from flet.__pyinstaller.utils import copy_flet_bin
            pyi_args = [options.script, '--noconfirm']
            if (not options.debug_console):
                pyi_args.extend(['--noconsole'])
            if options.icon:
                pyi_args.extend(['--icon', options.icon])
            if options.name:
                pyi_args.extend(['--name', options.name])
            if options.distpath:
                pyi_args.extend(['--distpath', options.distpath])
            # add_data / add_binary / hidden_import come in as lists of lists
            # (action='append' + nargs='*'); flatten each into repeated flags.
            if options.add_data:
                for add_data_arr in options.add_data:
                    for add_data_item in add_data_arr:
                        pyi_args.extend(['--add-data', add_data_item])
            if options.add_binary:
                for add_binary_arr in options.add_binary:
                    for add_binary_item in add_binary_arr:
                        pyi_args.extend(['--add-binary', add_binary_item])
            if options.hidden_import:
                for hidden_import_arr in options.hidden_import:
                    for hidden_import_item in hidden_import_arr:
                        pyi_args.extend(['--hidden-import', hidden_import_item])
            if options.codesign_identity:
                pyi_args.extend(['--codesign-identity', options.codesign_identity])
            if options.bundle_id:
                pyi_args.extend(['--osx-bundle-identifier', options.bundle_id])
            # Windows-only options: reject them on macOS rather than ignore.
            if options.uac_admin:
                if is_macos():
                    print('--uac-admin options is not supported on macOS.')
                    sys.exit(1)
                pyi_args.append('--uac-admin')
            if options.onedir:
                if is_macos():
                    print('--onedir options is not supported on macOS.')
                    sys.exit(1)
                pyi_args.append('--onedir')
            else:
                pyi_args.append('--onefile')
            # Copy the bundled Flet runtime to a temp dir so it can be patched
            # before PyInstaller picks it up.
            hook_config.temp_bin_dir = copy_flet_bin()
            if (hook_config.temp_bin_dir is not None):
                # The fletd server binary is not needed in a packed app.
                fletd_path = os.path.join(hook_config.temp_bin_dir, ('fletd.exe' if is_windows() else 'fletd'))
                if os.path.exists(fletd_path):
                    os.remove(fletd_path)
                if is_windows():
                    from flet.__pyinstaller.win_utils import update_flet_view_icon, update_flet_view_version_info
                    exe_path = os.path.join(hook_config.temp_bin_dir, 'flet', 'flet.exe')
                    if os.path.exists(exe_path):
                        # Patch icon and VERSIONINFO resources of flet.exe.
                        if options.icon:
                            icon_path = options.icon
                            if (not Path(icon_path).is_absolute()):
                                icon_path = str(Path(os.getcwd()).joinpath(icon_path))
                            update_flet_view_icon(exe_path, icon_path)
                        version_info_path = update_flet_view_version_info(exe_path=exe_path, product_name=options.product_name, file_description=options.file_description, product_version=options.product_version, file_version=options.file_version, company_name=options.company_name, copyright=options.copyright)
                        pyi_args.extend(['--version-file', version_info_path])
                elif is_macos():
                    from flet.__pyinstaller.macos_utils import assemble_app_bundle, unpack_app_bundle, update_flet_view_icon, update_flet_view_version_info
                    tar_path = os.path.join(hook_config.temp_bin_dir, 'flet-macos-amd64.tar.gz')
                    if os.path.exists(tar_path):
                        # Unpack the .app bundle, patch icon/Info.plist, repack.
                        app_path = unpack_app_bundle(tar_path)
                        if options.icon:
                            icon_path = options.icon
                            if (not Path(icon_path).is_absolute()):
                                icon_path = str(Path(os.getcwd()).joinpath(icon_path))
                            update_flet_view_icon(app_path, icon_path)
                        app_path = update_flet_view_version_info(app_path=app_path, bundle_id=options.bundle_id, product_name=options.product_name, product_version=options.product_version, copyright=options.copyright)
                        assemble_app_bundle(app_path, tar_path)
            print('Running PyInstaller:', pyi_args)
            PyInstaller.__main__.run(pyi_args)
            # Clean up the patched runtime copy.
            if ((hook_config.temp_bin_dir is not None) and os.path.exists(hook_config.temp_bin_dir)):
                print('Deleting temp directory:', hook_config.temp_bin_dir)
                shutil.rmtree(hook_config.temp_bin_dir, ignore_errors=True)
        except ImportError as e:
            print('Please install PyInstaller module to use flet pack command:', e)
            sys.exit(1)
def extractUglytransWordpressCom(item):
    """Map a feed item from uglytrans.wordpress.com to a release message.

    Returns None for items without chapter/volume info (or previews),
    a release message for recognised tags, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _max_intra_day_drawdown(high, low): running_max = pd.Series(high).expanding(min_periods=1).max() cur_dd = (((low - running_max) / running_max) * 100) dd_max = min(0, cur_dd.min()) idx = cur_dd.idxmin() dd = pd.Series(dtype='object') dd['max'] = dd_max dd['peak'] = running_max[idx] dd['trough'] = low[idx] dd['peak_date'] = high[(high == dd['peak'])].index[0].strftime('%Y-%m-%d') dd['trough_date'] = idx.strftime('%Y-%m-%d') high = high[(high.index > idx)] rd_mask = (high > dd['peak']) if rd_mask.any(): dd['recovery_date'] = high[rd_mask].index[0].strftime('%Y-%m-%d') else: dd['recovery_date'] = 'Not Recovered Yet' return dd
def test_spancat_serde(noop_config):
    """Learned label dicts must round-trip through nlp.to_bytes/from_bytes."""
    config = Config().from_str(noop_config)
    del config['components']['llm']['task']['labels']
    source_nlp = assemble_from_config(config)
    target_nlp = assemble_from_config(config)
    labels = {'loc': 'LOC', 'per': 'PER'}
    source_task: SpanCatTask = source_nlp.get_pipe('llm')._task
    target_task: SpanCatTask = target_nlp.get_pipe('llm')._task
    source_task._label_dict = labels
    assert source_task._label_dict == labels
    assert target_task._label_dict == {}
    # Serialize the populated pipeline into the empty one.
    target_nlp.from_bytes(source_nlp.to_bytes())
    assert source_task._label_dict == target_task._label_dict == labels
class SearchMixin(object):
    """Mixin adding watson full-text search behaviour to a Django view."""

    context_object_name = 'search_results'
    # Name of the GET parameter carrying the search string.
    query_param = 'q'
    # Models to restrict the search to / exclude from it.
    models = ()
    exclude = ()
    # Optional redirect target used when the query string is empty.
    empty_query_redirect = None
    # Extra values (or zero-arg callables producing values) merged into the context.
    extra_context = {}

    def get_query_param(self):
        return self.query_param

    def get_models(self):
        return self.models

    def get_exclude(self):
        return self.exclude

    def get_queryset(self):
        return watson.search(self.query, models=self.get_models(), exclude=self.get_exclude())

    def get_query(self, request):
        """Extract and normalise the search string from the request."""
        return request.GET.get(self.get_query_param(), '').strip()

    def get_empty_query_redirect(self):
        return self.empty_query_redirect

    def get_extra_context(self):
        return self.extra_context

    def get_context_data(self, **kwargs):
        context = super(SearchMixin, self).get_context_data(**kwargs)
        context['query'] = self.query
        for key, value in self.get_extra_context().items():
            context[key] = value() if callable(value) else value
        return context

    def get(self, request, *args, **kwargs):
        self.query = self.get_query(request)
        if not self.query:
            target = self.get_empty_query_redirect()
            if target:
                return redirect(target)
        return super(SearchMixin, self).get(request, *args, **kwargs)
class ChartJsOptScale(DataAttrs):
    """Chart.js scale options wrapper.

    NOTE(review): these accessors look like they were @property getters in
    the original project; decorators may have been stripped during
    extraction -- confirm against upstream.
    """

    def ticks(self) -> ChartJsOptTicks:
        # Lazily create and cache the ticks sub-options object.
        if ('ticks' not in self._attrs):
            self._attrs['ticks'] = ChartJsOptTicks(self.page)
        return self._attrs['ticks']

    def gridLines(self) -> ChartJsOptGridLines:
        # Lazily create and cache the gridLines sub-options object.
        if ('gridLines' not in self._attrs):
            self._attrs['gridLines'] = ChartJsOptGridLines(self.page)
        return self._attrs['gridLines']

    def stacked(self, flag: Union[(bool, primitives.JsDataModel)]=False):
        # TODO(review): no-op -- the `stacked` flag is accepted but never stored.
        pass
def test_unversioned_files_are_deleted(_compressed_manifest_storage):
    """After post-processing only the hashed variant of styles.css may remain."""
    name = 'styles.css'
    versioned_name = basename(staticfiles_storage.url(name))
    # Matches both 'styles.css' and 'styles.<hash>.css'.
    pattern = re.compile('^' + name.replace('.', '\\.([0-9a-f]+\\.)?') + '$')
    leftovers = [entry for entry in os.listdir(settings.STATIC_ROOT) if pattern.match(entry)]
    assert leftovers == [versioned_name]
def ac_connect(reversi, dom, id):
    """Initialise the board UI when a client connects (two-player or single-player)."""
    if reversi.layoutFlag:
        dom.disableElement('EnhancedLayout')
    dom.inner('', open('Main.html').read())
    # A full game falls back to single-player mode.
    if id and not lock(id):
        dom.alert('Game has already two players!\nReverting to single player game.')
        id = ''
    if id:
        # Joining an existing game: colour depends on whose turn it is.
        bw = core.EMPTY if get_turn(id) == core.EMPTY else core.WHITE
        reversi.init(token=id, board=get_board(id), bw=bw)
        if bw == core.EMPTY:
            message = "Play or wait for the opponent's move."
        else:
            message = "It's your turn!"
        set_status(dom, message, 'green')
        dom.disableElement('HideHHStatusSection')
    else:
        # Single-player game against the engine at the selected level.
        reversi.init(level=int(dom.getValue('level')), bw=core.BLACK)
    draw_board(reversi, dom)
def test_select_rarity(monkeypatch: MonkeyPatch):
    """Choosing rarity option '1' must select all normal-rarity cat ids."""
    responses = iter(['1'])
    monkeypatch.setattr('builtins.input', lambda: next(responses))
    save_stats = {'version': 'en'}
    selected = cat_id_selector.select_cats_rarity(save_stats)
    assert selected == [0, 1, 2, 3, 4, 5, 6, 7, 8, 643]
def test_encode_examples() -> None:
    """encode_examples must apply the encoder to outputs and the formatter to inputs."""
    examples = [('input', 'output'), ('input2', 'output2')]
    # JSON encoder with tags wraps each output in <json> markers.
    assert encode_examples(examples, JSONEncoder(use_tags=True), None) == [
        ('input', '<json>"output"</json>'),
        ('input2', '<json>"output2"</json>'),
    ]
    # No-op encoder leaves outputs untouched.
    assert encode_examples(examples, NoOpEncoder(), input_formatter=None) == [
        ('input', 'output'),
        ('input2', 'output2'),
    ]
    # Input formatters rewrite only the input side.
    assert encode_examples(examples, NoOpEncoder(), input_formatter='text_prefix') == [
        ('Text: """\ninput\n"""', 'output'),
        ('Text: """\ninput2\n"""', 'output2'),
    ]
    assert encode_examples(examples, NoOpEncoder(), input_formatter='triple_quotes') == [
        ('"""\ninput\n"""', 'output'),
        ('"""\ninput2\n"""', 'output2'),
    ]
class TestRuleExtractorUtil(unittest.TestCase):
    """Tests for workflow rule-extraction helpers."""

    @staticmethod
    def build_workflow_dict_1():
        """Build three workflows sharing 'event_1'/'event_4' and carrying
        per-workflow 'event_2_i'/'event_3_i' keys.

        Fixes: the original defined this without ``self`` yet called it as
        ``self.build_workflow_dict_1()`` (TypeError) -- it is now a
        staticmethod; and ``'workflow_'.format(i + 1)`` ignored its argument,
        so every workflow got the same name -- the ``{}`` placeholder is
        restored.
        """
        workflow_dict = {}
        for i in range(3):
            with Workflow(name='workflow_{}'.format(i + 1), namespace='namespace') as workflow:
                o1 = Operator(name='op_1')
                o2 = Operator(name='op_2')
                o1.action_on_condition(action=TaskAction.START, condition=Condition(expect_event_keys=['event_1', 'event_2_{}'.format(i)]))
                o2.action_on_condition(action=TaskAction.START, condition=Condition(expect_event_keys=['event_3_{}'.format(i), 'event_4']))
                workflow_dict[i + 1] = workflow
        return workflow_dict

    def test_parse_expect_keys(self):
        """Duplicate expect keys must be de-duplicated."""
        with Workflow(name='workflow') as workflow:
            o1 = Operator(name='op')
            o1.action_on_condition(action=TaskAction.START, condition=Condition(expect_event_keys=['event_1', 'event_2', 'event_2']))
        expect_keys = workflow_expect_event_tuples(workflow=workflow)
        self.assertEqual(2, len(expect_keys))

    def test_build_task_rule_index(self):
        """Shared keys index all three workflows; per-workflow keys index one."""
        workflow_dict = self.build_workflow_dict_1()
        task_rule_index = build_task_rule_index(workflow_dict=workflow_dict)
        self.assertEqual(1, len(task_rule_index[('namespace', 'event_2_0')]))
        self.assertEqual(3, len(task_rule_index[('namespace', 'event_1')]))
        self.assertEqual(3, len(task_rule_index[('namespace', 'event_4')]))
class GroupAddInvalidID(GroupTest):
    """Adding a group with the reserved id OFPG_ALL must be rejected with
    a group_mod_failed error carrying OFPGMFC_INVALID_GROUP."""

    def runTest(self):
        port1, = openflow_ports(1)
        msg = ofp.message.group_add(
            group_type=ofp.OFPGT_ALL,
            group_id=ofp.OFPG_ALL,  # reserved group id -- invalid for group_add
            buckets=[create_bucket(actions=[ofp.action.output(port1)])])
        response, _ = self.controller.transact(msg)
        self.assertIsInstance(response, ofp.message.group_mod_failed_error_msg)
        # Fix: assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(response.code, ofp.OFPGMFC_INVALID_GROUP)
def get_data_files():
    """Return (target_dir, [files]) pairs installed under /usr/local/fb-FioSynthFlash."""
    base_dir = '/usr/local/fb-FioSynthFlash'
    top_level_docs = ['CODE_OF_CONDUCT.md', 'CONTRIBUTING.md', 'LICENSE',
                      'README.md', 'README-READHAMMER.txt', 'Release_Notes.txt']
    # Each documentation file is shipped as its own (dir, [file]) entry.
    data_files = [(base_dir, [doc]) for doc in top_level_docs]
    # Whole directories are globbed relative to the source tree.
    for subdir in ('jobfiles', 'wkldsuites', 'fiosynth_lib'):
        data_files.append((os.path.join(base_dir, subdir), glob.glob(subdir + '/*')))
    return data_files
def change_ext(path: str, old_ext: Optional[str], new_ext: str) -> str:
    """Replace *path*'s extension with *new_ext*.

    When *old_ext* is None it is inferred from the path; a trailing '.gz'
    is treated as the outer half of a double extension (e.g. '.tar.gz').
    If the path does not end in *old_ext*, *new_ext* is simply appended.
    """
    if old_ext is None:
        old_ext = os.path.splitext(path)[1]
        if old_ext == '.gz':
            # Peel off the '.gz' and look at the extension underneath.
            path = path[:-len(old_ext)]
            old_ext = os.path.splitext(path)[1]
    if old_ext and path.endswith(old_ext):
        path = path[:-len(old_ext)]
    return path + new_ext
class MirrorConnection():
    """Couples a download *source* with a *mirror* location.

    Subclasses override ``resource`` (locate an existing copy) and
    ``create_copy`` (materialise one); this base class implements the
    lookup-then-build policy in ``get_file``.
    """

    def __init__(self, mirror, source):
        self.mirror = mirror
        self.source = source

    def get_file(self, create, args):
        """Return an existing mirrored copy, or build one when prefetch is enabled.

        Fix: the original called ``self.resource()`` up to three times per
        invocation (each call logs in the base implementation); the result
        is now computed once and reused.
        """
        resource = self.resource()
        if resource:
            LOG.debug(f'Found a copy of {self.source} in mirror {self.mirror}: {resource}.')
            return resource
        if not self.mirror._prefetch:
            LOG.debug(f'No copy of {self.source} into {self.mirror}: prefetch=False.')
            return None
        LOG.info(f'Building mirror for {self.source} in mirror {self.mirror}.')
        return self.create_copy(create, args)

    def resource(self):
        """Locate an existing copy; base implementation finds nothing."""
        LOG.info(f'Not implemented. {self.source} not in mirror {self.mirror}.')
        return None

    def create_copy(self, create, args):
        """Create a copy in the mirror; base implementation does nothing."""
        LOG.info(f'Not implemented. Not creating anything for {self.source} in mirror {self.mirror}.')
        return None
def test_cli_call_change_reload_run() -> None:
    """`--reload` must bind the socket and hand off to ChangeReload.run."""
    runner = CliRunner()
    with mock.patch.object(Config, 'bind_socket') as bind_socket_mock:
        with mock.patch.object(ChangeReload, 'run') as run_mock:
            result = runner.invoke(cli, ['tests.test_cli:App', '--reload'])
            assert result.exit_code == 0
            bind_socket_mock.assert_called_once()
            run_mock.assert_called_once()
# NOTE(review): the leading `.parametrize(...)` looks like a
# `@pytest.mark.parametrize` decorator whose `@pytest.mark` prefix was lost
# during extraction -- restore before running.
.parametrize('examples_path', [str((EXAMPLES_DIR / 'lemma.json')), str((EXAMPLES_DIR / 'lemma.yml')), str((EXAMPLES_DIR / 'lemma.jsonl'))])
def test_jinja_template_rendering_with_examples(examples_path):
    """Few-shot examples loaded from json/yml/jsonl must all render the same lemma prompt."""
    nlp = spacy.blank('en')
    text = 'Alice and Bob went to the supermarket.'
    doc = nlp.make_doc(text)
    lemma_task = make_lemma_task(examples=fewshot_reader(examples_path))
    prompt = list(lemma_task.generate_prompts([doc]))[0]
    # Expected prompt: task instructions, the few-shot examples, then the input text.
    assert (prompt.strip() == f"""
You are an expert lemmatization system. Your task is to accept Text as input and identify the lemma for every token in the Text.
Consider that contractions represent multiple words. Each word in a contraction should be annotated with its lemma separately.
Output each original word on a new line, followed by a colon and the word's lemma - like this:
'''
Word1: Lemma of Word1
Word2: Lemma of Word2
'''
Include the final punctuation token in this list.
Prefix with your output with "Lemmatized text".
Below are some examples (only use these as a guide):

Text:
'''
The arc of the moral universe is long, but it bends toward justice.
'''
Lemmas:
'''
The: The
arc: arc
of: of
the: the
moral: moral
universe: universe
is: be
long: long
,: ,
but: but
it: it
bends: bend
toward: toward
justice: justice
.: .
'''

Text:
'''
Life can only be understood backwards; but it must be lived forwards.
'''
Lemmas:
'''
Life: Life
can: can
only: only
be: be
understood: understand
backwards: backwards
;: ;
but: but
it: it
must: must
be: be
lived: lived
forwards: forwards
.: .
'''

Text:
'''
I'm buying ice cream.
'''
Lemmas:
'''
I: I
'm: be
buying: buy
ice: ice
cream: cream
.: .
'''

Here is the text that needs to be lemmatized:
'''
{text}
'''
""".strip())
class CellRendererToggleImage(Gtk.CellRendererToggle):
    """Toggle cell renderer that draws a themed icon instead of a check box.

    Keeps three cached pixbuf variants: normal (active), prelit (hover,
    optional) and insensitive (active but disabled).
    """

    __gproperties__ = {'icon-name': (GObject.TYPE_STRING, 'icon name', 'The name of the themed icon to display. This property only has an effect if not overridden the "pixbuf" property.', '', GObject.ParamFlags.READWRITE), 'pixbuf': (GdkPixbuf.Pixbuf, 'pixbuf', 'The pixbuf to render.', GObject.ParamFlags.READWRITE), 'icon-size': (GObject.TYPE_UINT, 'icon size', 'The size of the rendered icon.', 0, 65535, Gtk.IconSize.SMALL_TOOLBAR, GObject.ParamFlags.READWRITE), 'render-prelit': (GObject.TYPE_BOOLEAN, 'render prelit', 'Whether to render prelit states or not', True, GObject.ParamFlags.READWRITE)}

    def __init__(self):
        Gtk.CellRendererToggle.__init__(self)
        self.__icon_name = ''
        # Cached pixbuf variants; (re)built lazily by __render_pixbufs().
        self.__pixbuf = None
        self.__insensitive_pixbuf = None
        self.__prelit_pixbuf = None
        self.__pixbuf_width = 0
        self.__pixbuf_height = 0
        self.__icon_size = Gtk.IconSize.SMALL_TOOLBAR
        self.__render_prelit = True
        self.__render_widget = Gtk.Button()
        self.__icon_theme = Gtk.IconTheme.get_default()
        self.set_property('activatable', True)

    def do_get_property(self, property):
        # GObject property getter dispatch.
        if (property.name == 'icon-name'):
            return self.__icon_name
        elif (property.name == 'icon-size'):
            return self.__icon_size
        elif (property.name == 'pixbuf'):
            return self.__pixbuf
        elif (property.name == 'render-prelit'):
            return self.__render_prelit
        else:
            raise AttributeError(('unknown property %s' % property.name))

    def do_set_property(self, property, value):
        # GObject property setter dispatch; any change rebuilds the cached pixbufs.
        if (property.name == 'icon-name'):
            self.__icon_name = value
        elif (property.name == 'icon-size'):
            self.__icon_size = value
        elif (property.name == 'pixbuf'):
            self.__pixbuf = value
        elif (property.name == 'render-prelit'):
            self.__render_prelit = value
        else:
            raise AttributeError(('unknown property %s' % property.name))
        self.__render_pixbufs()

    def __render_pixbufs(self):
        """Resolve the icon (when no explicit pixbuf was set) and derive the
        insensitive and, optionally, the prelit variants."""
        if (self.__pixbuf is None):
            pixbuf = icons.MANAGER.pixbuf_from_icon_name(self.__icon_name, self.__icon_size)
            if (pixbuf is None):
                # Icon lookup failed; keep previous state and retry later.
                return
            self.__pixbuf = pixbuf
        self.__pixbuf_height = self.__pixbuf.get_height()
        self.__pixbuf_width = self.__pixbuf.get_width()
        # NOTE(review): the saturation/pixelate arguments for the insensitive
        # (saturation=1, pixelate=True) vs prelit (saturation=0, pixelate=False)
        # variants look unusual -- confirm intended appearance against upstream.
        self.__insensitive_pixbuf = self.__pixbuf.copy()
        self.__pixbuf.saturate_and_pixelate(dest=self.__insensitive_pixbuf, saturation=1, pixelate=True)
        if self.__render_prelit:
            self.__prelit_pixbuf = self.__pixbuf.copy()
            self.__pixbuf.saturate_and_pixelate(dest=self.__prelit_pixbuf, saturation=0, pixelate=False)

    def do_render(self, cairo_context, widget, background_area, cell_area, flags):
        """Paint the variant matching the cell state, centered per xalign/yalign."""
        if (self.__pixbuf is None):
            self.__render_pixbufs()
        pixbuf = None
        prelit = (flags & Gtk.CellRendererState.PRELIT)
        if self.props.sensitive:
            if self.props.active:
                pixbuf = self.__pixbuf
            elif (self.__render_prelit and prelit):
                pixbuf = self.__prelit_pixbuf
        elif self.props.active:
            # Active but disabled: draw the desaturated/pixelated variant.
            pixbuf = self.__insensitive_pixbuf
        if (pixbuf is not None):
            (area_x, area_y, area_width, area_height) = (cell_area.x, cell_area.y, cell_area.width, cell_area.height)
            x = ((area_x + (area_width * self.props.xalign)) - (self.__pixbuf_width // 2))
            y = ((area_y + (area_height * self.props.yalign)) - (self.__pixbuf_height // 2))
            Gdk.cairo_set_source_pixbuf(cairo_context, pixbuf, x, y)
            cairo_context.paint()
class AcquiredLotCandidates():
    """Iterable view over the acquired lots an accounting method may consume,
    tracking per-lot partially-used amounts."""

    def __init__(self, accounting_method: 'AbstractAccountingMethod', acquired_lot_list: List[InTransaction], acquired_lot_2_partial_amount: Dict[(InTransaction, RP2Decimal)], up_to_index: int) -> None:
        self.__accounting_method: AbstractAccountingMethod = accounting_method
        self.__acquired_lot_list = acquired_lot_list
        self.__acquired_lot_2_partial_amount = acquired_lot_2_partial_amount
        # Lots past this index are not yet eligible for consumption.
        self.__up_to_index = up_to_index

    def has_partial_amount(self, acquired_lot: InTransaction) -> bool:
        """True when a partial amount has been recorded for this lot."""
        return acquired_lot in self.__acquired_lot_2_partial_amount

    def get_partial_amount(self, acquired_lot: InTransaction) -> RP2Decimal:
        """Return the recorded partial amount; a missing lot is an internal error."""
        if not self.has_partial_amount(acquired_lot):
            raise RP2RuntimeError(f'Internal error: acquired lot has no partial amount: {acquired_lot}')
        return self.__acquired_lot_2_partial_amount[acquired_lot]

    def set_partial_amount(self, acquired_lot: InTransaction, amount: RP2Decimal) -> None:
        self.__acquired_lot_2_partial_amount[acquired_lot] = amount

    def clear_partial_amount(self, acquired_lot: InTransaction) -> None:
        """Reset the lot's partial amount to zero."""
        self.set_partial_amount(acquired_lot, ZERO)

    def __iter__(self) -> 'AccountingMethodIterator':
        # Iteration order is delegated to the accounting method's policy.
        return AccountingMethodIterator(self.__acquired_lot_list, self.__up_to_index, self.__accounting_method.lot_candidates_order())
# NOTE(review): the bare `_whoosheer` token below looks like the tail of a
# stripped decorator (registration with the whooshee extension); likewise
# the methods take `cls` but show no @classmethod decorators -- confirm
# against the upstream source before running.
_whoosheer
class CoprWhoosheer(AbstractWhoosheer):
    """Whoosh full-text index covering Copr projects and their packages."""

    schema = whoosh.fields.Schema(copr_id=whoosh.fields.NUMERIC(stored=True, unique=True), user_id=whoosh.fields.NUMERIC(stored=True), group_id=whoosh.fields.NUMERIC(stored=True), ownername=whoosh.fields.TEXT(analyzer=whoosh.analysis.StandardAnalyzer(expression='?\\w+(-\\.?\\w+)*'), field_boost=2), coprname=whoosh.fields.TEXT(analyzer=whoosh.analysis.StandardAnalyzer(expression='\\w+(-\\.?\\w+)*'), field_boost=3), chroots=whoosh.fields.TEXT(field_boost=2), packages=whoosh.fields.IDLIST(field_boost=2), description=whoosh.fields.TEXT(), instructions=whoosh.fields.TEXT())
    # Models whose commits are relevant to this index (see on_commit).
    models = [models.Copr, models.Package]
    # Index updates are driven explicitly, not on every commit.
    auto_update = False

    def update_copr(cls, writer, copr):
        # Reindex an entire copr document.
        writer.update_document(copr_id=copr.id, user_id=copr.user.id, group_id=(copr.group.id if copr.group else None), ownername=copr.owner_name, coprname=copr.name, chroots=cls.get_chroot_info(copr), packages=cls.get_package_names(copr), description=copr.description, instructions=copr.instructions)

    def update_package(cls, writer, package):
        # Refresh only the package-name list of the owning copr's document.
        writer.update_document(copr_id=package.copr.id, packages=cls.get_package_names(package.copr))

    def insert_copr(cls, writer, copr):
        # NOTE(review): here `packages` is joined into one string, while
        # update_copr passes the raw list -- confirm which form the IDLIST
        # field expects.
        writer.add_document(copr_id=copr.id, user_id=copr.user.id, group_id=(copr.group.id if copr.group else None), ownername=copr.owner_name, coprname=copr.name, chroots=cls.get_chroot_info(copr), packages=' '.join(cls.get_package_names(copr)), description=copr.description, instructions=copr.instructions)

    def insert_package(cls, writer, package):
        writer.update_document(copr_id=package.copr.id, packages=cls.get_package_names(package.copr))

    def delete_copr(cls, writer, copr):
        writer.delete_by_term('copr_id', copr.id)

    def delete_package(cls, writer, package):
        # Re-list the remaining packages of the copr after a deletion.
        writer.update_document(copr_id=package.copr.id, packages=cls.get_package_names(package.copr))

    def get_chroot_info(cls, copr):
        """Return the copr's chroots as 'os-version-arch' strings."""
        # copr.id is an integer primary key, so the .format() interpolation
        # here is not attacker-controlled.
        result = db.engine.execute('\n SELECT os_release, os_version, arch\n FROM mock_chroot\n JOIN copr_chroot ON copr_chroot.mock_chroot_id=mock_chroot.id\n WHERE copr_chroot.copr_id={0}\n '.format(copr.id))
        return ['{}-{}-{}'.format(t[0], t[1], t[2]) for t in result.fetchall()]

    def get_package_names(cls, copr):
        """Return the names of all packages belonging to the copr."""
        result = db.engine.execute('\n SELECT name\n FROM package\n WHERE copr_id={0}\n '.format(copr.id))
        return [row[0] for row in result.fetchall()]

    def on_commit(cls, app, changes):
        """Instead of reindexing inline, stamp the affected copr rows so a
        separate job can detect stale index documents."""
        for change in changes:
            if (change[0].__class__ in cls.models):
                copr_id = change[0].get_search_related_copr_id()
                db.engine.execute('\n UPDATE copr SET latest_indexed_data_update = {0}\n WHERE copr.id = {1}\n '.format(int(time.time()), copr_id))
def run(split: str, experiment: str, cfg: Optional[DictConfig]=None, sequential: bool=False, thresholds: Tuple[int]=(1, 3, 5), **kwargs):
    """Evaluate *experiment* on the KITTI *split* and log recall at *thresholds*.

    Returns the metrics dict produced by ``evaluate``.
    """
    cfg = cfg or {}
    if isinstance(cfg, dict):
        cfg = OmegaConf.create(cfg)
    # Layer the user config over the mode-appropriate defaults.
    base = default_cfg_sequential if sequential else default_cfg_single
    cfg = OmegaConf.merge(base, cfg)
    dataset = KittiDataModule(cfg.get('data', {}))
    metrics = evaluate(experiment, cfg, dataset, split=split, sequential=sequential,
                       viz_kwargs=dict(show_dir_error=True, show_masked_prob=False), **kwargs)
    metric_keys = ['directional_error', 'yaw_max_error']
    if sequential:
        metric_keys += ['directional_seq_error', 'yaw_seq_error']
    for key in metric_keys:
        recall = metrics[key].recall(thresholds).double().numpy().round(2).tolist()
        logger.info('Recall %s: %s at %s m/', key, recall, thresholds)
    return metrics
class _Parser(object):
    """Recursive-descent parser for a Newick-like tree grammar, driven by a
    lexer and reporting structure events to a handler."""

    def __init__(self, lexer, handler):
        self.lexer = lexer
        self.handler = handler

    def parse(self):
        """Parse one tree (parenthesised node or bare leaf); reject trailing garbage."""
        if self.lexer.peek_token(tokens.LParen):
            result = self.parse_node()
        else:
            result = self.parse_leaf()
        remaining = self.lexer.remaining()
        if remaining != '' and not self.lexer.peek_token(tokens.SemiColon):
            raise ParserError('Unexpected token following tree: ' + self.lexer.remaining())
        return result

    def parse_node(self):
        """Parse '(' edge-list ')' and emit tree begin/end events."""
        self.lexer.read_token(tokens.LParen)
        self.handler.new_tree_begin()
        self.parse_edge_list()
        self.handler.new_tree_end()
        self.lexer.read_token(tokens.RParen)

    def parse_leaf(self):
        """Consume one leaf: empty, numeric id, anonymous '_' or named."""
        # An immediately following ',' or ')' means an unnamed leaf.
        if self.lexer.peek_token(tokens.Comma) or self.lexer.peek_token(tokens.RParen):
            self.handler.new_leaf('')
            return
        if self.lexer.peek_token(tokens.Number):
            number = self.lexer.read_token(tokens.Number).get_number()
            self.handler.new_leaf(str(int(number)))
            return
        name = self.lexer.read_token(tokens.ID).get_name()
        # '_' denotes an anonymous leaf.
        self.handler.new_leaf('' if name == '_' else name)

    def parse_edge_list(self):
        """Parse one or more comma-separated edges."""
        while True:
            self.parse_edge()
            if not self.lexer.peek_token(tokens.Comma):
                break
            self.lexer.read_token(tokens.Comma)

    def parse_edge(self):
        """Parse a child (subtree or leaf) with optional bootstrap value and
        optional ':'-prefixed branch length."""
        if self.lexer.peek_token(tokens.LParen):
            self.parse_node()
        else:
            self.parse_leaf()
        bootstrap = None
        if self.lexer.peek_token(tokens.Number):
            bootstrap = self.lexer.read_token(tokens.Number).get_number()
        length = None
        if self.lexer.peek_token(tokens.Colon):
            self.lexer.read_token(tokens.Colon)
            length = self.lexer.read_token(tokens.Number).get_number()
        self.handler.new_edge(bootstrap, length)
def world_domination_check():
    """Run analyze_domain over every gTLD and every TLD x country-code pair,
    updating the module-level output_directory/output_file globals per domain.

    Fix: the original exception handler executed ``(x, y) = inst`` -- an
    unpack that itself raises for almost every exception type, masking the
    real error.  The handler now only reports the caught exception.
    """
    global debug
    global output_directory
    global output_file
    gtld_domains = ['biz', 'info', 'net', 'com', 'org', 'edu', 'gov', 'me', 'tv', 'name']
    tld_domains = ['edu', 'gov', 'mil', 'net', 'org', 'ag', 'co', 'go']
    cc_domains = ['.AC', '.AD', '.AE', '.AERO', '.AF', '.AG', '.AI', '.AL', '.AM', '.AN', '.AO', '.AQ', '.AR', '.ARPA', '.AS', '.ASIA', '.AT', '.AU', '.au', '.AW', '.AX', '.AZ', '.BA', '.BB', '.BD', '.BE', '.BF', '.BG', '.BH', '.BI', '.BIZ', '.BJ', '.BL', '.BM', '.BN', '.BO', '.BQ', '.BR', '.BS', '.BT', '.BV', '.BW', '.BY', '.BZ', '.CA', '.CAT', '.CC', '.CD', '.CF', '.CG', '.CH', '.CI', '.CK', '.CL', '.CM', '.CN', '.CO', '.CO', '.COM', '.COOP', '.CR', '.CU', '.CV', '.CW', '.CX', '.CY', '.CZ', '.DE', '.DJ', '.DK', '.DM', '.DO', '.DZ', '.EC', '.EDU', '.EE', '.EG', '.EH', '.ER', '.ES', '.ET', '.EU', '.FE', '.FI', '.FJ', '.FK', '.FM', '.FO', '.FR', '.GA', '.GB', '.GD', '.GE', '.GF', '.GG', '.GH', '.GI', '.GL', '.GM', '.GN', '.GP', '.GQ', '.GR', '.GS', '.GT', '.GU', '.GW', '.GY', '.HK', '.HM', '.HN', '.HR', '.HT', '.HU', '.ID', '.IE', '.IL', '.IM', '.IN', '.INFO', '.INT', '.IO', '.IQ', '.IR', '.IS', '.IT', '.JE', '.JM', '.JO', '.JOBS', '.JP', '.KE', '.KG', '.KH', '.KI', '.KM', '.KN', '.KP', '.KR', '.KW', '.KY', '.KZ', '.LA', '.LB', '.LC', '.LI', '.LK', '.LR', '.LS', '.LT', '.LU', '.LV', '.LY', '.MA', '.MC', '.MD', '.ME', '.MF', '.MG', '.MH', '.MIL', '.MK', '.ML', '.MM', '.MN', '.MO', '.MOBI', '.MP', '.MQ', '.MR', '.MS', '.MT', '.MU', '.MUSEUM', '.MV', '.MW', '.MX', '.MY', '.MZ', '.NA', '.NAME', '.NC', '.NE', '.NET', '.NF', '.NG', '.NI', '.NL', '.NO', '.NP', '.NR', '.NU', '.NZ', '.OM', '.ORG', '.PA', '.PE', '.PF', '.PG', '.PH', '.PK', '.PL', '.PM', '.PN', '.PR', '.PRO', '.PS', '.PT', '.PW', '.PY', '.QA', '.RE', '.RO', '.RS', '.RU', '.RW', '.SA', '.SB', '.SC', '.SD', '.SE', '.SG', '.SH', '.SI', '.SJ', '.SK', '.SL', '.SM', '.SN', '.SO', '.SR', '.ST', '.SU', '.SV', '.SX', '.SY', '.SZ', '.TC', '.TD', '.TEL', '.TF', '.TG', '.TH', '.TJ', '.TK', '.TL', '.TM', '.TN', '.TO', '.TP', '.TR', '.TRAVEL', '.TT', '.TV', '.TW', '.TZ', '.UA', '.UG', '.UK', '.UM', '.US', '.UY', '.UZ', '.VA', '.VC', '.VE', '.VG', '.VI', '.VN', '.VU', '.WF', '.WS', '.YE', '.YT', '.ZA', '.ZM', '.ZW']
    try:
        for tld in tld_domains:
            for cc in cc_domains:
                domain = tld + cc
                output_directory = domain
                output_file = domain + '.txt'
                analyze_domain(domain)
        for gtld in gtld_domains:
            output_directory = gtld
            output_file = gtld + '.txt'
            analyze_domain(gtld)
    except Exception as inst:
        # Report and swallow, matching the original best-effort behaviour.
        print(type(inst))
        print(inst.args)
        print(inst)
def extractEnlightendragonsCom(item):
    """Classify a feed item from enlightendragons.com into a release message.

    Matches first by tag, then by title substring; returns None for
    previews/untitled chapters and False when nothing matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = (
        ('Drezo Regalia', 'Drezo Regalia', 'oel'),
        ('Black Moon', 'Black Moon', 'oel'),
        ('Mind Linkers', 'Mind Linkers', 'oel'),
    )
    for tagname, name, tl_type in known_tags:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    title_lower = item['title'].lower()
    for titlecomponent, name, tl_type in (('Ophidian Aspect Chapter ', 'Ophidian Aspect', 'oel'),):
        if titlecomponent.lower() in title_lower:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def test_set_default_connection_and_routing():
    """default_connection/default_routing must only accept declared dependencies."""
    builder = AEABuilder()
    builder._package_dependency_manager = Mock()

    def component(kind, spec):
        return ComponentId(kind, PublicId.from_str(spec))

    good_connection = component('connection', 'good/connection:0.1.0')
    bad_connection = component('connection', 'bad/connection:0.1.0')
    good_protocol = component('protocol', 'good/protocol:0.1.0')
    bad_protocol = component('protocol', 'bad/protocol:0.1.0')
    builder._package_dependency_manager.connections = [good_connection]
    builder._package_dependency_manager.protocols = [good_protocol]

    # Declared dependency: accepted.
    builder.set_default_connection(public_id=good_connection.public_id)
    # Undeclared connection: rejected.
    with pytest.raises(ValueError, match='Connection bad/connection:0.1.0 specified as `default_connection` is not a project dependency!'):
        builder.set_default_connection(public_id=bad_connection.public_id)

    builder.set_default_routing({good_protocol.public_id: good_connection.public_id})
    with pytest.raises(ValueError, match='Connection bad/connection:0.1.0 specified in `default_routing` is not a project dependency!'):
        builder.set_default_routing({good_protocol.public_id: bad_connection.public_id})
    with pytest.raises(ValueError, match='Protocol bad/protocol:0.1.0 specified in `default_routing` is not a project dependency!'):
        builder.set_default_routing({bad_protocol.public_id: good_connection.public_id})
# NOTE(review): the four leading lines look like decorator bodies whose
# `@pytest.mark` / `@settings` / `@given` prefixes were lost during
# extraction -- restore before running.
.parametrize('ops', ALL_OPS)
.parametrize('dtype', FLOAT_TYPES)
(max_examples=MAX_EXAMPLES, deadline=None)
(X=strategies.arrays_BOP())
def test_maxout(ops, dtype, X):
    """maxout must return the per-row maximum and indices that select it."""
    X = ops.asarray(X, dtype=dtype)
    expected_best = X.max(axis=(- 1)).astype(dtype)
    (predicted_best, which) = ops.maxout(X)
    assert (predicted_best.dtype == dtype)
    ops.xp.testing.assert_allclose(expected_best, predicted_best, rtol=0.001, atol=0.001)
    # The returned indices, gathered along the last axis, must reproduce
    # exactly the maximum values.
    ops.xp.testing.assert_allclose(ops.xp.take_along_axis(X, ops.xp.expand_dims(which, (- 1)), axis=(- 1)), ops.xp.expand_dims(expected_best, (- 1)), atol=1e-10)
def get_cs(lab: Vector, lms_to_rgb: Matrix, ok_coeff: List[List[Vector]]) -> Vector:
    """Compute the three chroma landmarks [C_0, C_mid, C_max] for an Oklab color."""
    l, a, b = lab

    # C_max: chroma where the (a, b) direction leaves the RGB gamut.
    cusp = find_cusp(a, b, lms_to_rgb, ok_coeff)
    c_max = find_gamut_intersection(a, b, l, 1, l, lms_to_rgb, ok_coeff, cusp)
    st_max = to_st(cusp)
    k = c_max / min(l * st_max[0], (1 - l) * st_max[1])

    # C_mid: soft minimum (4-norm) of the two triangle-edge chromas, scaled by 0.9 * k.
    st_mid = get_st_mid(a, b)
    c_a = l * st_mid[0]
    c_b = (1.0 - l) * st_mid[1]
    c_mid = 0.9 * k * math.sqrt(math.sqrt(1.0 / (1.0 / c_a ** 4 + 1.0 / c_b ** 4)))

    # C_0: soft minimum (2-norm) of a conservative inner triangle.
    c_a = l * 0.4
    c_b = (1.0 - l) * 0.8
    c_0 = math.sqrt(1.0 / (1.0 / c_a ** 2 + 1.0 / c_b ** 2))

    return [c_0, c_mid, c_max]
class OptionPlotoptionsArearangeSonificationContexttracksMappingHighpassFrequency(Options):
    """Highcharts `plotOptions.arearange.sonification.contextTracks.mapping.highpassFrequency`
    options wrapper.

    NOTE(review): each same-named method pair is a getter/setter pair that
    was almost certainly decorated with @property / @<name>.setter in the
    original generated source; as written the second definition shadows the
    first -- confirm upstream before use.
    """

    def mapFunction(self):
        # Getter: current value (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: store raw value without JS wrapping.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def type_and_remember():
    """Translate the accumulated morse buffer into a key combo and send it."""
    global current_morse, last_typed
    combo_parts = []
    # Active modifiers first.
    if command_on:
        combo_parts.append('command')
    if control_on:
        combo_parts.append('control')
    if shift_on:
        combo_parts.append('shift')
    # Decode the buffered morse sequence (empty string when unknown).
    decoded = morse.get(current_morse, '')
    if decoded:
        combo_parts.append(decoded)
    current_morse = ''
    combo = '+'.join(combo_parts)
    if combo:
        print('keys:', combo)
        keyboard.press_and_release(combo)
        last_typed = combo
def extractAlternatefantasyBlogspotCom(item):
    """Map a parsed feed item to a release message; None to skip, False if no tag matched."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip previews and items with neither a chapter nor a volume number.
    if 'preview' in item['title'].lower() or not (chp or vol):
        return None
    known_groups = (('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'))
    for tag, group_name, tl_type in known_groups:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, group_name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def add_complete_system_status_for_consent_reporting(db: Session, privacy_request: PrivacyRequest, connection_config: ConnectionConfig) -> None:
    """Mark every pending consent preference for this system as complete."""
    system_key = connection_config.system_key
    for preference in privacy_request.privacy_preferences:
        status_map = preference.affected_system_status
        # Only flip preferences currently recorded as pending for this system.
        if status_map and status_map.get(system_key) == ExecutionLogStatus.pending.value:
            preference.cache_system_status(db, system_key, ExecutionLogStatus.complete)
def create_mock_token(aud: str, expires_in: timedelta = None):
    """Build a signed HS256 test JWT.

    Args:
        aud: audience claim.
        expires_in: token lifetime relative to now.  Defaults to one hour when
            omitted -- the original crashed with ``TypeError`` because it added
            ``None`` to ``datetime.utcnow()``.

    Returns:
        The encoded JWT string/bytes as produced by ``jwt.encode``.
    """
    if expires_in is None:
        expires_in = timedelta(hours=1)
    exp = datetime.utcnow() + expires_in
    jti = 'test_token' + str(uuid.uuid4())  # unique token id per call
    payload = {'exp': exp, 'aud': aud, 'jti': jti}
    secret = 'your-secret-key'
    algorithm = 'HS256'
    return jwt.encode(payload, secret, algorithm=algorithm)
class SizedPanel(wx.Panel):
    """A wx.Panel that owns its sizer and delegates Fit/Layout to it."""

    def __init__(self, parent, wxid, sizer, **kw):
        wx.Panel.__init__(self, parent, wxid, **kw)
        self.sizer = sizer
        self.SetSizer(sizer)
        self.SetAutoLayout(True)

    def Fit(self):
        # Size the panel to the sizer's computed minimum.
        self.sizer.Fit(self)

    def Layout(self):
        # Re-run the sizer's layout pass over the panel's children.
        self.sizer.Layout()
def iter_latest_block(w3: 'Web3', to_block: Optional[Union[(BlockNumber, LatestBlockParam)]]=None) -> Iterable[BlockNumber]:
    """Yield the latest block number each time the chain head advances.

    Yields ``None`` as a "no new block" sentinel when either the head has not
    moved since the previous yield, or the head has passed a bounded
    ``to_block``.  Callers are expected to treat ``None`` as "wait and poll
    again" -- this generator never terminates on its own.
    """
    _last = None  # last block number actually yielded
    # ``to_block`` of None or the string 'latest' means follow the head forever.
    is_bounded_range = ((to_block is not None) and (to_block != 'latest'))
    while True:
        latest_block = w3.eth.block_number
        if (is_bounded_range and (latest_block > to_block)):
            (yield None)
        # Head unchanged since the last poll -> signal "nothing new".
        if ((_last is not None) and (_last == latest_block)):
            (yield None)
        else:
            (yield latest_block)
            _last = latest_block
def test_not_unpackable_file():
    """An empty input file must yield error markers from both unpackers."""
    not_an_archive = TEST_DATA_DIR / 'empty'
    with TemporaryDirectory(prefix='fact_test_') as extraction_dir:
        result = unpack_function(not_an_archive, extraction_dir)
        # Both squashfs unpacker variants should have reported a failure.
        assert 'sasquatch - error' in result
        assert 'unsquashfs4-avm-be - error' in result
class LiteSATAStriping(Module):
    """Stripe one data stream across several SATA controllers (RAID-0 style).

    TX fans the module's sink out over the controllers' sinks; RX merges the
    controllers' sources back into the module's single source.
    """

    def __init__(self, controllers):
        # n: number of controllers; dw: data width of one controller port.
        n = len(controllers)
        dw = len(controllers[0].sink.data)
        self.submodules.tx = LiteSATAStripingTX(n, dw)
        self.submodules.rx = LiteSATAStripingRX(n, dw)
        # Wire striped lane i to controller i in both directions.
        for i in range(n):
            self.comb += [self.tx.sources[i].connect(controllers[i].sink), controllers[i].source.connect(self.rx.sinks[i])]
        # Expose the striped endpoints as this module's own sink/source.
        (self.sink, self.source) = (self.tx.sink, self.rx.source)
_meta(equipment.WearEquipmentAction)
class WearEquipmentAction():
    # UI metadata for the "wear equipment" game action.
    # NOTE(review): `_meta(...)` above looks like a stripped decorator (its
    # original prefix was lost during extraction), and the prompt/button labels
    # below appear to have lost their original (likely non-ASCII) text --
    # confirm against the original source.
    choose_option_prompt = '?'
    choose_option_buttons = (('', True), ('', False))

    def effect_string(self, act):
        # Only the "wear" action produces a display string.
        if (act.action == 'wear'):
            c = act.associated_card
            return f'{N.char(act.target)}{N.card(c)}'
def type_to_hci(type):
    """Return the one-byte HCI packet-type indicator for a capture record type."""
    indicator_by_type = {
        TYPE_OUT_CMD: b'\x01',
        TYPE_IN_ACL: b'\x02',
        TYPE_OUT_ACL: b'\x02',
        TYPE_IN_SCO: b'\x03',
        TYPE_OUT_SCO: b'\x03',
        TYPE_IN_EVT: b'\x04',
        TYPE_IN_ISO: b'\x05',
        TYPE_OUT_ISO: b'\x05',
    }
    indicator = indicator_by_type.get(type)
    if indicator is None:
        raise RuntimeError('type_to_hci: unknown type (0x{:02x})'.format(type))
    return indicator
class Vpnv6Path(VpnPath):
    """VPN path for IPv6 VPN NLRI."""
    ROUTE_FAMILY = RF_IPv6_VPN
    VRF_PATH_CLASS = None  # bound lazily in __init__ to avoid a circular import
    NLRI_CLASS = IP6AddrPrefix

    def __init__(self, *args, **kwargs):
        super(Vpnv6Path, self).__init__(*args, **kwargs)
        # Deferred import: vrf6 imports this module, so a top-level import
        # would create a cycle.
        from ryu.services.protocols.bgp.info_base.vrf6 import Vrf6Path
        self.VRF_PATH_CLASS = Vrf6Path
def link_loads(topology, traffic_matrix, routing_matrix=None, ecmp=False):
    """Compute per-link utilization of *topology* under *traffic_matrix*.

    Args:
        topology: topology whose edges carry 'capacity' and whose graph dict
            declares 'capacity_unit'.
        traffic_matrix: object exposing ``attrib['volume_unit']``,
            ``od_pairs()`` and ``flow[o][d]``.
        routing_matrix: optional ``{origin: {dest: path}}`` (or, with *ecmp*,
            a list of equal-cost paths per OD pair); defaults to shortest
            paths computed with Dijkstra on edge 'weight'.
        ecmp: when True, flow of each OD pair is split evenly across its paths.

    Returns:
        dict mapping each directed edge ``(u, v)`` to its load/capacity ratio.

    Raises:
        ValueError: if an OD pair has no route in *routing_matrix*.
    """
    # Work on a directed copy so loads accumulate per direction.
    topology = topology.copy() if topology.is_directed() else topology.to_directed()
    capacity_unit = capacity_units[topology.graph['capacity_unit']]
    volume_unit = capacity_units[traffic_matrix.attrib['volume_unit']]
    norm_factor = float(volume_unit) / float(capacity_unit)
    if routing_matrix is None:  # fixed: was the non-idiomatic `== None`
        routing_matrix = dict(nx.all_pairs_dijkstra_path(topology, weight='weight'))
    for u, v in topology.edges():
        topology.adj[u][v]['load'] = 0

    def _accumulate(path, o, d, number_of_paths=1):
        # Add this OD pair's (possibly ECMP-split) flow to every link of *path*.
        if len(path) <= 1:
            return
        for u, v in zip(path[:-1], path[1:]):
            topology.adj[u][v]['load'] += traffic_matrix.flow[o][d] / float(number_of_paths)

    for o, d in traffic_matrix.od_pairs():
        try:
            path = routing_matrix[o][d]
        except KeyError:
            # Fixed message: original read "no routefrom node".
            raise ValueError('Cannot calculate link loads. There is no route '
                             'from node %s to node %s' % (str(o), str(d)))
        if not ecmp:
            _accumulate(path, o, d)
        else:
            for p in path:
                _accumulate(p, o, d, len(path))
    return {(u, v): (norm_factor * float(topology.adj[u][v]['load'])) / float(topology.adj[u][v]['capacity']) for (u, v) in topology.edges()}
class MyList(MutableSequence):
    """A list-backed MutableSequence wrapper."""

    def __init__(self, list_):
        # Copy so later mutation of the argument does not affect us.
        self._d = list(list_)

    def __getitem__(self, pos):
        return self._d[pos]

    def __setitem__(self, pos, val):
        self._d[pos] = val

    def __delitem__(self, pos):
        del self._d[pos]

    def __len__(self):
        return len(self._d)

    def insert(self, pos, val):
        # BUG FIX: the original called self._d.index(pos, val), which *searches*
        # for `pos` (raising ValueError for most inputs) instead of inserting.
        # `insert` also backs the MutableSequence `append`/`extend` mixins.
        self._d.insert(pos, val)

    def __repr__(self):
        return '{}'.format(self._d)
class Session(object):
    """Client-side session driving the connect/handshake/invite exchange.

    The peer classes (Hello, Ack, Accept, Invite, InviteResponse,
    InviteClientData, Header) implement the wire format; this class only
    sequences the frames over a blocking TCP socket and logs each one.
    """

    def __init__(self, ipv4s: list, ipv6s: list) -> None:
        self.sock = None         # active TCP socket, created by connect()
        self.server = None       # (host, port) we are connected to
        self.server_peer = None  # peer id announced by the server's Hello
        self.client_peer = None  # peer id we announced in our Hello
        # Addresses advertised to the server during send_client_data().
        self.ipv4s = ipv4s
        self.ipv6s = ipv6s

    def connect(self, server: Tuple[(str, int)]) -> None:
        """Open (or reopen) a TCP connection to *server*."""
        if (self.sock is not None):
            self.sock.close()
        self.sock = socket(AF_INET, SOCK_STREAM)
        self.sock.connect(server)
        self.server = server
        print(('connected to %s:%d ...' % (self.server[0], self.server[1])))

    def handshake(self, peer_id: str=None, peer_name: str=None) -> None:
        """Run the Hello/Ack/Hello/Ack/Accept sequence and record both peer ids."""
        print('initiating handshake sequence')
        # client Hello
        cli_hello = Hello.build(peer_id, peer_name)
        print('> client.', end='')
        cli_hello.print()
        self.sock.sendall(cli_hello.to_raw_data())
        self.client_peer = cli_hello.peer_id
        # server Ack, then server Hello
        ack = Ack.from_reader(self.sock)
        print('< server.', end='')
        ack.print()
        srv_hello = Hello.from_reader(self.sock)
        print('< server.', end='')
        srv_hello.print()
        self.server_peer = srv_hello.peer_id
        self.server_peer.print()
        # client Ack + Accept complete the handshake
        ack = Ack.build(65536)
        print('> client.', end='')
        ack.print()
        self.sock.sendall(ack.to_raw_data())
        accept = Accept.build(0)
        print('> client.', end='')
        accept.print()
        self.sock.sendall(accept.to_raw_data())

    def invite(self) -> None:
        """Send an Invite frame (server peer -> client peer) and read its Ack."""
        print('sending invitation')
        invite = Invite.build(self.server_peer, self.client_peer)
        print('> client.', end='')
        invite.print()
        self.sock.sendall(invite.to_raw_data())
        ack = Ack.from_reader(self.sock)
        print('< server.', end='')
        ack.print()

    def wait_invitation_response(self) -> InviteResponse:
        """Block until an InviteResponse arrives; plain Acks in between are only logged."""
        print('waiting invitation response ...')
        while True:
            what = Header.from_reader(self.sock)
            if (not what.has_payload()):
                # Payload-less frame: just an Ack, keep waiting.
                ack = Ack(what)
                print('< server.', end='')
                ack.print()
            else:
                response = InviteResponse.from_reader(what, self.sock)
                print('< server.', end='')
                response.print()
                # Acknowledge the response before returning it.
                ack = Ack.build_with_signature(InviteResponse.SIGNATURE, 0)
                self.sock.sendall(ack.to_raw_data())
                print('> client.', end='')
                ack.print()
                return response

    def send_client_data(self, server_response: InviteResponse):
        """Send our IPv4/IPv6 data, then hexdump whatever the server streams back."""
        # NOTE(review): the string literal below contains a literal line break
        # that looks like extraction garbling -- confirm the original literal.
        print(('sending client data 
for [%s] / [%s]' % (self.ipv4s, self.ipv6s)))
        client_response = InviteClientData.from_server_response(server_response, self.ipv4s, self.ipv6s)
        self.sock.sendall(client_response.to_raw_data())
        print('> client.', end='')
        client_response.print()
        # Two Acks are expected in reply.
        ack = Ack.from_reader(self.sock)
        print('< server.', end='')
        ack.print()
        ack = Ack.from_reader(self.sock)
        print('< server.', end='')
        ack.print()
        # Then dump any further raw payload indefinitely.
        while True:
            data = self.sock.recv(1024)
            if (len(data) > 0):
                print('< server.', end='')
                hexdump(data)
def test_state_encode_decode():
    """A State must survive an encode/decode round-trip unchanged."""

    class StateProtobufObject():
        # Minimal stand-in for the protobuf message State.encode writes into.
        state_bytes = b''

    original = State('some_ledger', {'state': 'v'})
    State.encode(StateProtobufObject, original)
    recovered = State.decode(StateProtobufObject)
    assert original == recovered
class TestSession(unittest.TestCase):
    """Tests for the session-management helpers.

    NOTE(review): ``test_providered_session`` looks garbled -- the bare
    ``_session`` / ``session`` expression statements and the undecorated inner
    ``session_op`` (called with no argument) suggest decorators such as
    ``@provide_session`` were stripped during extraction.  Confirm against the
    original source.
    """

    def setUp(self):
        pass

    def tearDown(self):
        # Dispose of any engine/session a test created.
        clear_engine_and_session()

    def test_create_session(self):
        # Create a throwaway sqlite DB, insert one row, and read it back
        # through a second session.
        with TemporaryDirectory(prefix='test_config') as tmp_dir:
            db_uri = 'sqlite:///{}/aiflow.db'.format(tmp_dir)
            prepare_session(db_uri=db_uri)
            TestBase.metadata.create_all(create_sqlalchemy_engine(db_uri))
            with create_session() as session:
                session.add(TestTable('name1'))
            with create_session() as session:
                self.assertEqual(1, len(session.query(TestTable).all()))

    def test_providered_session(self):
        _session

        def session_op(session):
            session
            self.assertIsNotNone(session)
            self.assertEqual(str(session.bind.url), METADATA_BACKEND_URI)
        session_op()
class Match(object):
    """Translates firewall REST rule parameters to/from OpenFlow match fields.

    NOTE(review): ``to_openflow`` / ``to_rest`` / ``to_mod_openflow`` take no
    ``self`` -- they are presumably ``@staticmethod``s whose decorators were
    stripped during extraction; confirm against the original source.
    """

    # REST symbolic values -> OpenFlow numeric constants.
    _CONVERT = {REST_DL_TYPE: {REST_DL_TYPE_ARP: ether.ETH_TYPE_ARP, REST_DL_TYPE_IPV4: ether.ETH_TYPE_IP, REST_DL_TYPE_IPV6: ether.ETH_TYPE_IPV6}, REST_NW_PROTO: {REST_NW_PROTO_TCP: inet.IPPROTO_TCP, REST_NW_PROTO_UDP: inet.IPPROTO_UDP, REST_NW_PROTO_ICMP: inet.IPPROTO_ICMP, REST_NW_PROTO_ICMPV6: inet.IPPROTO_ICMPV6}}

    def to_openflow(rest):
        """Validate a REST rule dict and convert it to an OpenFlow match dict.

        Rejects contradictory parameter combinations (e.g. an ARP dl_type with
        IPv6 addresses) and infers a missing dl_type from the other fields.
        Raises ValueError on any invalid combination or unknown value.
        """

        # Helpers that raise ValueError naming the offending combination.
        def __inv_combi(msg):
            raise ValueError(('Invalid combination: [%s]' % msg))

        def __inv_2and1(*args):
            __inv_combi(('%s=%s and %s' % (args[0], args[1], args[2])))

        def __inv_2and2(*args):
            __inv_combi(('%s=%s and %s=%s' % (args[0], args[1], args[2], args[3])))

        def __inv_1and1(*args):
            __inv_combi(('%s and %s' % (args[0], args[1])))

        def __inv_1and2(*args):
            __inv_combi(('%s and %s=%s' % (args[0], args[1], args[2])))
        match = {}
        dl_type = rest.get(REST_DL_TYPE)
        nw_proto = rest.get(REST_NW_PROTO)
        if (dl_type is not None):
            # Explicit dl_type: check the other fields are consistent with it.
            if (dl_type == REST_DL_TYPE_ARP):
                if (REST_SRC_IPV6 in rest):
                    __inv_2and1(REST_DL_TYPE, REST_DL_TYPE_ARP, REST_SRC_IPV6)
                if (REST_DST_IPV6 in rest):
                    __inv_2and1(REST_DL_TYPE, REST_DL_TYPE_ARP, REST_DST_IPV6)
                if (REST_DSCP in rest):
                    __inv_2and1(REST_DL_TYPE, REST_DL_TYPE_ARP, REST_DSCP)
                if nw_proto:
                    __inv_2and1(REST_DL_TYPE, REST_DL_TYPE_ARP, REST_NW_PROTO)
            elif (dl_type == REST_DL_TYPE_IPV4):
                if (REST_SRC_IPV6 in rest):
                    __inv_2and1(REST_DL_TYPE, REST_DL_TYPE_IPV4, REST_SRC_IPV6)
                if (REST_DST_IPV6 in rest):
                    __inv_2and1(REST_DL_TYPE, REST_DL_TYPE_IPV4, REST_DST_IPV6)
                if (nw_proto == REST_NW_PROTO_ICMPV6):
                    __inv_2and2(REST_DL_TYPE, REST_DL_TYPE_IPV4, REST_NW_PROTO, REST_NW_PROTO_ICMPV6)
            elif (dl_type == REST_DL_TYPE_IPV6):
                if (REST_SRC_IP in rest):
                    __inv_2and1(REST_DL_TYPE, REST_DL_TYPE_IPV6, REST_SRC_IP)
                if (REST_DST_IP in rest):
                    __inv_2and1(REST_DL_TYPE, REST_DL_TYPE_IPV6, REST_DST_IP)
                if (nw_proto == REST_NW_PROTO_ICMP):
                    __inv_2and2(REST_DL_TYPE, REST_DL_TYPE_IPV6, REST_NW_PROTO, REST_NW_PROTO_ICMP)
            else:
                raise ValueError(('Unknown dl_type : %s' % dl_type))
        elif (REST_SRC_IP in rest):
            # No dl_type given: infer IPv4/IPv6 from the address/proto fields.
            if (REST_SRC_IPV6 
in rest):
                __inv_1and1(REST_SRC_IP, REST_SRC_IPV6)
            if (REST_DST_IPV6 in rest):
                __inv_1and1(REST_SRC_IP, REST_DST_IPV6)
            if (nw_proto == REST_NW_PROTO_ICMPV6):
                __inv_1and2(REST_SRC_IP, REST_NW_PROTO, REST_NW_PROTO_ICMPV6)
            rest[REST_DL_TYPE] = REST_DL_TYPE_IPV4
        elif (REST_DST_IP in rest):
            if (REST_SRC_IPV6 in rest):
                __inv_1and1(REST_DST_IP, REST_SRC_IPV6)
            if (REST_DST_IPV6 in rest):
                __inv_1and1(REST_DST_IP, REST_DST_IPV6)
            if (nw_proto == REST_NW_PROTO_ICMPV6):
                __inv_1and2(REST_DST_IP, REST_NW_PROTO, REST_NW_PROTO_ICMPV6)
            rest[REST_DL_TYPE] = REST_DL_TYPE_IPV4
        elif (REST_SRC_IPV6 in rest):
            if (nw_proto == REST_NW_PROTO_ICMP):
                __inv_1and2(REST_SRC_IPV6, REST_NW_PROTO, REST_NW_PROTO_ICMP)
            rest[REST_DL_TYPE] = REST_DL_TYPE_IPV6
        elif (REST_DST_IPV6 in rest):
            if (nw_proto == REST_NW_PROTO_ICMP):
                __inv_1and2(REST_DST_IPV6, REST_NW_PROTO, REST_NW_PROTO_ICMP)
            rest[REST_DL_TYPE] = REST_DL_TYPE_IPV6
        elif (REST_DSCP in rest):
            rest[REST_DL_TYPE] = REST_DL_TYPE_IPV4
        elif (nw_proto == REST_NW_PROTO_ICMP):
            rest[REST_DL_TYPE] = REST_DL_TYPE_IPV4
        elif (nw_proto == REST_NW_PROTO_ICMPV6):
            rest[REST_DL_TYPE] = REST_DL_TYPE_IPV6
        elif ((nw_proto == REST_NW_PROTO_TCP) or (nw_proto == REST_NW_PROTO_UDP)):
            raise ValueError('no dl_type was specified')
        else:
            raise ValueError(('Unknown nw_proto: %s' % nw_proto))
        # Translate symbolic values through _CONVERT; pass others through as-is.
        for (key, value) in rest.items():
            if (key in Match._CONVERT):
                if (value in Match._CONVERT[key]):
                    match.setdefault(key, Match._CONVERT[key][value])
                else:
                    # NOTE(review): this literal contains a line break that looks
                    # like extraction garbling -- confirm the original message.
                    raise ValueError(('Invalid rule parameter. 
: key=%s' % key))
            else:
                match.setdefault(key, value)
        return match

    def to_rest(openflow):
        """Convert an OpenFlow match dict back to REST symbolic values,
        dropping wildcard ("don't care") fields."""
        of_match = openflow[REST_MATCH]
        mac_dontcare = mac.haddr_to_str(mac.DONTCARE)
        ip_dontcare = '0.0.0.0'
        ipv6_dontcare = '::'
        match = {}
        for (key, value) in of_match.items():
            # Skip wildcard values per field family.
            if ((key == REST_SRC_MAC) or (key == REST_DST_MAC)):
                if (value == mac_dontcare):
                    continue
            elif ((key == REST_SRC_IP) or (key == REST_DST_IP)):
                if (value == ip_dontcare):
                    continue
            elif ((key == REST_SRC_IPV6) or (key == REST_DST_IPV6)):
                if (value == ipv6_dontcare):
                    continue
            elif (value == 0):
                continue
            if (key in Match._CONVERT):
                # Invert the numeric->symbolic mapping for this field.
                conv = Match._CONVERT[key]
                conv = dict(((value, key) for (key, value) in conv.items()))
                match.setdefault(key, conv[value])
            else:
                match.setdefault(key, value)
        return match

    def to_mod_openflow(of_match):
        """Strip wildcard fields from an OpenFlow match dict (no symbolic conversion)."""
        mac_dontcare = mac.haddr_to_str(mac.DONTCARE)
        ip_dontcare = '0.0.0.0'
        ipv6_dontcare = '::'
        match = {}
        for (key, value) in of_match.items():
            if ((key == REST_SRC_MAC) or (key == REST_DST_MAC)):
                if (value == mac_dontcare):
                    continue
            elif ((key == REST_SRC_IP) or (key == REST_DST_IP)):
                if (value == ip_dontcare):
                    continue
            elif ((key == REST_SRC_IPV6) or (key == REST_DST_IPV6)):
                if (value == ipv6_dontcare):
                    continue
            elif (value == 0):
                continue
            match.setdefault(key, value)
        return match
def adjust_faces_to_tranformations(img, mainRect, n2i, n2f, tree_layers):
    """Compensate face items for the tree image's mode/orientation transforms."""
    if img.mode == 'c':
        # Circular layout: faces on the inverted half need rotating back.
        rotate_inverted_faces(n2i, n2f, img)
        return
    if img.mode == 'r' and img.orientation == 1:
        # Mirrored rectangular layout: mirror every layer horizontally ...
        for layer in tree_layers:
            layer.setTransform(QTransform().translate(0, 0).scale(-1, 1).translate(0, 0))
            layer.moveBy(mainRect.width(), 0)
        # ... and flip each face block so its content still reads correctly.
        for faceblock in n2f.values():
            for fb in faceblock.values():
                fb.flip_hz()
def add_depot_tools_to_path():
    """Locate depot_tools, add it to sys.path, and return its path (None if not found)."""
    # 1) Already on sys.path?
    for entry in sys.path:
        if entry.rstrip(os.sep).endswith('depot_tools') and IsRealDepotTools(entry):
            return entry
    # 2) Somewhere on $PATH?
    for entry in os.environ['PATH'].split(os.pathsep):
        if IsRealDepotTools(entry):
            sys.path.append(entry.rstrip(os.sep))
            return entry
    # 3) Walk up from this file looking for a sibling checkout.
    current = os.path.dirname(os.path.abspath(__file__))
    previous = os.path.abspath(__file__)
    while current and current != previous:
        candidate = os.path.join(current, 'depot_tools')
        if IsRealDepotTools(candidate):
            sys.path.append(candidate)
            return candidate
        previous = current
        current = os.path.dirname(current)
    print('Failed to find depot_tools', file=sys.stderr)
    return None
def main():
    """Demo: simulate a shot, then show a copy and a disk round-trip of it."""
    viewer = pt.ShotViewer()

    table = pt.Table.default()
    shot = pt.System(table=table, balls=pt.get_nine_ball_rack(table), cue=pt.Cue(cue_ball_id='cue'))

    # Aim at the one-ball, strike, and run the simulation in place.
    shot.aim_at_ball(ball_id='1')
    shot.strike(V0=8)
    pt.simulate(shot, inplace=True)
    viewer.show(shot, title='Original system state')

    clone = shot.copy()
    viewer.show(clone, title='A deep-ish copy of the original')

    # Round-trip the copy through JSON on disk.
    with tempfile.TemporaryDirectory() as tmp_dir:
        json_path = Path(tmp_dir) / 'shot.json'
        clone.save(json_path)
        reloaded = pt.System.load(json_path)
        viewer.show(reloaded, title='A copy of the original, loaded from the disk space')
class FusedElementwiseWithStridedOutputsTestCase(unittest.TestCase):
    """Tests fused elementwise ops whose outputs are strided slices of a concat.

    NOTE(review): the `@unittest.skipIf` decorators on the fp32 tests were
    reconstructed -- the recovered source had only the bare condition tuples
    `((detect_target().name() == 'rocm'), 'Not supported by ROCM.')` where
    decorators evidently used to be.  Confirm against the original file.
    """

    def __init__(self, *args, **kwargs):
        super(FusedElementwiseWithStridedOutputsTestCase, self).__init__(*args, **kwargs)
        # Monotonic counter so every compiled module gets a unique work dir.
        self._test_id = 0

    def _test_fused_elementwise_with_strided_outputs(self, batch0_sizes: List[int], batch1_sizes: List[int], m1: int, m2: int, k: int, test_name: str='fused_elementwise_with_strided_outputs', dtype: str='float16'):
        """Compile concat(tanh(X1 + 3), tanh(X3)) on dim 2 and compare against
        eager PyTorch for every (batch0, batch1) size combination."""
        batch0_dim = shape_utils.gen_int_var_min_max(batch0_sizes, 'batch0')
        batch1_dim = shape_utils.gen_int_var_min_max(batch1_sizes, 'batch1')
        X1 = Tensor(shape=[batch0_dim, batch1_dim, IntImm(m1), IntImm(k)], dtype=dtype, name='input0', is_input=True)
        X2 = Tensor(shape=[], dtype=dtype, name='X2', value=3.0)  # scalar constant
        X3 = Tensor(shape=[batch0_dim, batch1_dim, IntImm(m2), IntImm(k)], dtype=dtype, name='input1', is_input=True)
        X4 = ops.elementwise(FuncEnum.ADD)(X1, X2)
        X5 = ops.elementwise(FuncEnum.TANH)(X4)
        X6 = ops.elementwise(FuncEnum.TANH)(X3)
        X7 = ops.concatenate()([X5, X6], dim=2)
        X7._attrs['name'] = 'output0'
        X7._attrs['is_output'] = True
        target = detect_target()
        with compile_model([X7], target, './tmp', f'{test_name}_{self._test_id}') as module:
            self._test_id += 1
            for batch0_size in batch0_sizes:
                for batch1_size in batch1_sizes:
                    x1_pt = get_random_torch_tensor([batch0_size, batch1_size, m1, k], dtype=dtype)
                    x3_pt = get_random_torch_tensor([batch0_size, batch1_size, m2, k], dtype=dtype)
                    # Eager reference computation.
                    x5_pt = torch.tanh(x1_pt + 3.0)
                    x6_pt = torch.tanh(x3_pt)
                    x7_pt = torch.cat([x5_pt, x6_pt], dim=2)
                    inputs = [0, 0]
                    name_to_index_map = module.get_input_name_to_index_map()
                    inputs[name_to_index_map['input0']] = x1_pt
                    inputs[name_to_index_map['input1']] = x3_pt
                    x7 = torch.empty_like(x7_pt)
                    module.run_with_tensors(inputs, [x7])
                    self.assertTrue(torch.allclose(x7, x7_pt, atol=0.01, rtol=0.01))

    def test_all_aligned_fp16(self):
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[1], batch1_sizes=[2, 4, 5], m1=8, m2=8, k=1, test_name='fused_elementwise_with_strided_outputs_all_aligned_fp16', dtype='float16')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[1, 99, 1024], batch1_sizes=[8], m1=8, m2=16, k=1, test_name='fused_elementwise_with_strided_outputs_all_aligned_fp16', dtype='float16')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[3, 5, 1024], batch1_sizes=[2, 5], m1=4, m2=4, k=2, test_name='fused_elementwise_with_strided_outputs_all_aligned_fp16', dtype='float16')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[1024], batch1_sizes=[2], m1=4, m2=2, k=4, test_name='fused_elementwise_with_strided_outputs_all_aligned_fp16', dtype='float16')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[1024], batch1_sizes=[2], m1=16, m2=64, k=32, test_name='fused_elementwise_with_strided_outputs_all_aligned_fp16', dtype='float16')

    @unittest.skipIf(detect_target().name() == 'rocm', 'Not supported by ROCM.')
    def test_all_aligned_fp32(self):
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[1], batch1_sizes=[2, 4, 5], m1=8, m2=8, k=1, test_name='fused_elementwise_with_strided_outputs_all_aligned_fp32', dtype='float32')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[1, 99, 1024], batch1_sizes=[8], m1=8, m2=16, k=1, test_name='fused_elementwise_with_strided_outputs_all_aligned_fp32', dtype='float32')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[3, 5, 1024], batch1_sizes=[2, 5], m1=4, m2=4, k=2, test_name='fused_elementwise_with_strided_outputs_all_aligned_fp32', dtype='float32')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[1024], batch1_sizes=[2], m1=4, m2=2, k=4, test_name='fused_elementwise_with_strided_outputs_all_aligned_fp32', dtype='float32')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[1024], batch1_sizes=[2], m1=16, m2=64, k=32, test_name='fused_elementwise_with_strided_outputs_all_aligned_fp32', dtype='float32')

    def test_not_aligned_fp16(self):
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[8], batch1_sizes=[23, 88, 100], m1=1, m2=1, k=1, test_name='fused_elementwise_with_strided_outputs_not_aligned_fp16', dtype='float16')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[88, 100, 234], batch1_sizes=[40], m1=4, m2=2, k=1, test_name='fused_elementwise_with_strided_outputs_not_aligned_fp16', dtype='float16')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[23, 56, 93], batch1_sizes=[12, 34, 55], m1=1, m2=2, k=2, test_name='fused_elementwise_with_strided_outputs_not_aligned_fp16', dtype='float16')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[2], batch1_sizes=[1024], m1=8, m2=2, k=1, test_name='fused_elementwise_with_strided_outputs_not_aligned_fp16', dtype='float16')

    @unittest.skipIf(detect_target().name() == 'rocm', 'Not supported by ROCM.')
    def test_not_aligned_fp32(self):
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[8], batch1_sizes=[23, 88, 100], m1=1, m2=1, k=1, test_name='fused_elementwise_with_strided_outputs_not_aligned_fp32', dtype='float32')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[88, 100, 234], batch1_sizes=[40], m1=4, m2=2, k=1, test_name='fused_elementwise_with_strided_outputs_not_aligned_fp32', dtype='float32')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[23, 56, 93], batch1_sizes=[12, 34, 55], m1=1, m2=2, k=2, test_name='fused_elementwise_with_strided_outputs_not_aligned_fp32', dtype='float32')
        self._test_fused_elementwise_with_strided_outputs(batch0_sizes=[2], batch1_sizes=[1024], m1=8, m2=2, k=1, test_name='fused_elementwise_with_strided_outputs_not_aligned_fp32', dtype='float32')
def extractMtlcbgcTumblrCom(item):
    """Map a parsed feed item to a release message; None to skip, False if no tag matched."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    is_skippable = not (chp or vol) or 'preview' in item['title'].lower()
    if is_skippable:
        return None
    for tagname, name, tl_type in [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]:
        if tagname not in item['tags']:
            continue
        return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
@pytest.mark.parametrize(['condition', 'expected_set'], [({'name': 'axis', 'minimum': 0.5}, {'axis': Range(minimum=0.5, maximum=math.inf)}), ({'name': 'axis', 'maximum': 0.5}, {'axis': Range(minimum=(- math.inf), maximum=0.5)}), ({'name': 'axis', 'minimum': 0.5, 'maximum': None}, {'axis': Range(minimum=0.5, maximum=math.inf)}), ({'name': 'axis', 'minimum': None, 'maximum': 0.5}, {'axis': Range(minimum=(- math.inf), maximum=0.5)})])
def test_optional_min_max_internal(condition, expected_set: ConditionSet):
    """Omitted or None-valued bounds must default to -inf / +inf.

    NOTE(review): the `@pytest.mark` prefix on the decorator was reconstructed;
    the recovered source had a bare `.parametrize(...)`, which is a syntax
    error.  Confirm against the original file.
    """
    assert (_conditionSetFrom([condition]) == expected_set)
class OptionPlotoptionsPictorialSonificationDefaultinstrumentoptionsMappingTremoloSpeed(Options):
    # Accessors for the Highcharts sonification mapping options
    # `mapFunction` / `mapTo` / `max` / `min` / `within`.
    # NOTE(review): each getter is immediately shadowed by a setter of the same
    # name, so as written only the setters survive class creation.  The original
    # presumably decorated these pairs with @property / @<name>.setter, stripped
    # during extraction -- confirm against upstream.

    def mapFunction(self):
        # Current `mapFunction` option (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Set the `mapFunction` option.
        self._config(value, js_type=False)

    def mapTo(self):
        # Current `mapTo` option (None when unset).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Set the `mapTo` option.
        self._config(text, js_type=False)

    def max(self):
        # Current `max` option (None when unset).
        return self._config_get(None)

    def max(self, num: float):
        # Set the `max` option.
        self._config(num, js_type=False)

    def min(self):
        # Current `min` option (None when unset).
        return self._config_get(None)

    def min(self, num: float):
        # Set the `min` option.
        self._config(num, js_type=False)

    def within(self):
        # Current `within` option (None when unset).
        return self._config_get(None)

    def within(self, value: Any):
        # Set the `within` option.
        self._config(value, js_type=False)
def test_datarow_color_multiple_material_states():
    """Per-state DataRow colors must serialize into one JSON `color` attribute."""
    row = ft.DataRow(color={ft.MaterialState.SELECTED: 'red', ft.MaterialState.HOVERED: 'blue', ft.MaterialState.DEFAULT: 'yellow'})
    assert isinstance(row, ft.Control)
    expected = [Command(indent=0, name=None, values=['r'], attrs={'color': '{"selected":"red","hovered":"blue","":"yellow"}'}, commands=[])]
    assert row._build_add_commands() == expected, 'Test failed'
def pwned_recon(email):
    """Query Have I Been Pwned for breaches affecting *email*.

    Returns a result dict, or None for a missing email, an unknown account
    (404), or a server-side error.
    """
    if not email:
        return None
    results = pypwned.getAllBreachesForAccount(email=email)
    # NOTE(review): the URL literal was truncated in the recovered source
    # (`url = '` ran into the next statement); this reconstruction points at
    # the HIBP account page -- confirm against the original.
    url = 'https://haveibeenpwned.com/account/%s' % email
    if '404' in results:
        return None
    if 'A server error' in results:
        return None
    return {'site': 'Have I been pwned.', 'url': url, 'results': results}
def match(AC: ahocorasick.Automaton, tokens: List[str]) -> List[Tuple[(str, int, int, Set[str])]]:
    """Run the Aho-Corasick automaton over *tokens*, keeping token-aligned hits.

    Tokens are joined with single spaces; ``smap``/``emap`` translate character
    offsets in the joined text back to token indices, so any match that does
    not start and end exactly on token boundaries is discarded.

    Returns:
        List of ``(surface, start_token, end_token_exclusive, values)`` tuples.
    """
    (smap, emap, idx) = (dict(), dict(), 0)
    # Record the character offset of each token's first char (smap) and the
    # offset just past its last char (emap).
    for (i, token) in enumerate(tokens):
        smap[idx] = i
        idx += len(token)
        emap[idx] = i
        idx += 1  # account for the joining space
    text = ' '.join(tokens)
    spans = []
    for (eidx, t) in AC.iter(text):
        eidx += 1  # make the character end offset exclusive
        sidx = (eidx - len(t.span))
        # Map character offsets back to token indices; None = not on a boundary.
        sidx = smap.get(sidx, None)
        eidx = emap.get(eidx, None)
        if ((sidx is None) or (eidx is None)):
            continue
        spans.append((t.span, sidx, (eidx + 1), t.values))
    return spans
class LayoutLine():
    """A line of layout tokens.

    NOTE(review): several decorators look stripped by extraction -- the bare
    ``tokens`` annotation suggests ``@dataclass``, ``text`` reads like a
    ``@property``, and ``for_text`` takes no ``self`` (presumably a
    ``@staticmethod``).  Confirm against the original source.
    """
    tokens: List[LayoutToken]

    def text(self) -> str:
        # Concatenated text of all tokens on this line.
        return join_layout_tokens(self.tokens)

    def for_text(text: str, **kwargs) -> 'LayoutLine':
        # Alternate constructor: tokenize *text* into a new LayoutLine.
        return LayoutLine(tokens=get_layout_tokens_for_text(text, **kwargs))

    def flat_map_layout_tokens(self, fn: T_FlatMapLayoutTokensFn) -> 'LayoutLine':
        # New line whose tokens are fn(token) results expanded in order.
        return LayoutLine(tokens=[tokenized_token for token in self.tokens for tokenized_token in fn(token)])
def test_receive_contract_with_fallback_function(receive_function_contract, call):
    """Invoking the fallback must change the stored text from '' to 'receive'."""
    before = call(contract=receive_function_contract, contract_function='getText')
    assert before == ''
    receive_function_contract.fallback.transact()
    after = call(contract=receive_function_contract, contract_function='getText')
    assert after == 'receive'
class OptionSeriesLineSonificationContexttracksMappingRate(Options):
    # Accessors for the Highcharts sonification mapping options
    # `mapFunction` / `mapTo` / `max` / `min` / `within`.
    # NOTE(review): each getter is immediately shadowed by a setter of the same
    # name, so as written only the setters survive class creation.  The original
    # presumably decorated these pairs with @property / @<name>.setter, stripped
    # during extraction -- confirm against upstream.

    def mapFunction(self):
        # Current `mapFunction` option (None when unset).
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Set the `mapFunction` option.
        self._config(value, js_type=False)

    def mapTo(self):
        # Current `mapTo` option (None when unset).
        return self._config_get(None)

    def mapTo(self, text: str):
        # Set the `mapTo` option.
        self._config(text, js_type=False)

    def max(self):
        # Current `max` option (None when unset).
        return self._config_get(None)

    def max(self, num: float):
        # Set the `max` option.
        self._config(num, js_type=False)

    def min(self):
        # Current `min` option (None when unset).
        return self._config_get(None)

    def min(self, num: float):
        # Set the `min` option.
        self._config(num, js_type=False)

    def within(self):
        # Current `within` option (None when unset).
        return self._config_get(None)

    def within(self, value: Any):
        # Set the `within` option.
        self._config(value, js_type=False)
class DoubleCritic(hk.Module):
    """Twin Q-networks sharing a layer-normalized latent embedding."""

    def __init__(self, latent_size: int=50, hidden_sizes=(256, 256), name=None):
        super().__init__(name)
        self.latent_size = latent_size
        self.hidden_sizes = hidden_sizes

    def __call__(self, observation, action):
        # Shared trunk: linear projection -> LayerNorm -> tanh.
        latent = hk.Linear(self.latent_size)(observation)
        latent = hk.LayerNorm(axis=-1, create_scale=True, create_offset=True)(latent)
        latent = jnp.tanh(latent)
        # Two independently-parameterized critics over the same latent.
        critic1 = Critic(self.hidden_sizes, name='critic1')
        critic2 = Critic(self.hidden_sizes, name='critic2')
        return (critic1(latent, action), critic2(latent, action))
def test_base_event_generator_node_meta(mocker: Any) -> None:
    """publish_events metadata must advertise the node's declared topic."""
    generator_node = MyBaseEventGeneratorNode()
    meta = get_method_metadata(generator_node.publish_events)
    first_topic = meta.published_topics[0]
    assert first_topic.name == generator_node.MY_TOPIC.name
    assert first_topic.message_type == generator_node.MY_TOPIC.message_type
class ParallelImageEncoder(IterDataPipe):
    """Datapipe stage that encodes batches of PIL images into feature arrays.

    NOTE(review): the bare ``_mode()`` / ``_grad()`` calls before ``__iter__``
    look like decorators stripped during extraction (plausibly
    ``@torch.inference_mode()`` / ``@torch.no_grad()``) -- confirm against the
    original source.
    """

    def __init__(self, dp: IterDataPipe[Tuple[(Image.Image, List[str])]], vision_backbone: str):
        super().__init__()
        self.dp = dp
        # Prefer GPU when available.
        self.device = ('cuda' if torch.cuda.is_available() else 'cpu')
        (self.model, self.preprocessor) = load_vision_backbone(vision_backbone, device=self.device)
    _mode()
    _grad()

    def __iter__(self) -> Generator[(Tuple[(np.ndarray, List[str])], None, None)]:
        for batch in self.dp:
            captions = [caps for (_, caps) in batch]
            # Preprocess every image in the batch and stack into one tensor.
            images = torch.stack([self.preprocessor(image.convert('RGB')) for (image, _) in batch]).to(self.device)
            image_features = encode_image_tensor(images, self.model)
            # Back to CPU numpy, one row per image (extra length-1 axis kept).
            image_features_np = image_features.unsqueeze(1).cpu().numpy()
            (yield [(feats, caps) for (feats, caps) in zip(image_features_np, captions)])
def sample_slice(sdf, w=1024, h=1024, x=None, y=None, z=None, bounds=None):
    """Sample a 2D axis-aligned slice of *sdf* on a w-by-h grid.

    Exactly one of x, y, z fixes the slicing plane.  *bounds* defaults to an
    estimate of the SDF's extent.

    Returns:
        (values, extent, axes): the sampled w-by-h array, the plot extent
        tuple, and a two-letter axes label.

    Raises:
        ValueError: if none of x, y, z is given.
    """
    if (bounds is None):
        bounds = _estimate_bounds(sdf)
    ((x0, y0, z0), (x1, y1, z1)) = bounds
    if (x is not None):
        X = np.array([x])
        Y = np.linspace(y0, y1, w)
        Z = np.linspace(z0, z1, h)
        extent = (Z[0], Z[(- 1)], Y[0], Y[(- 1)])
        axes = 'ZY'
    elif (y is not None):
        Y = np.array([y])
        X = np.linspace(x0, x1, w)
        Z = np.linspace(z0, z1, h)
        extent = (Z[0], Z[(- 1)], X[0], X[(- 1)])
        axes = 'ZX'
    elif (z is not None):
        Z = np.array([z])
        X = np.linspace(x0, x1, w)
        Y = np.linspace(y0, y1, h)
        extent = (Y[0], Y[(- 1)], X[0], X[(- 1)])
        axes = 'YX'
    else:
        # Was a bare `raise Exception(...)`; ValueError is the right type for a
        # bad argument and stays catchable by existing `except Exception`.
        raise ValueError('x, y, or z position must be specified')
    P = _cartesian_product(X, Y, Z)
    return (sdf(P).reshape((w, h)), extent, axes)
class PlotExample(HasTraits):
    """Chaco demo: scatter + line plot of sin(1/x) backed by function data sources.

    The X range's low/high bounds can each be a fixed value or "track" the
    opposite bound; the Traits UI exposes these settings next to the plot.
    """
    plot = Instance(Component)
    numpoints = Int(500)  # number of samples regenerated on every range change
    low_mode = Enum('value', 'track')
    high_mode = Enum('value', 'track')
    # Traits UI layout: the plot on top, X/Y range controls below.
    traits_view = View(Group(Item('plot', editor=ComponentEditor(), show_label=False), HGroup(HGroup(Item('object.plot.x_mapper.range.low_setting', label='Low', editor=TextEditor(), visible_when='object.low_mode == "value" '), Item('low_mode', label='Low Mode'), Item('object.plot.x_mapper.range.high_setting', label='High', editor=TextEditor(), visible_when='object.high_mode == "value" '), Item('high_mode', label='High Mode'), Item('object.plot.x_mapper.range.tracking_amount', label='Tracking Amount', editor=TextEditor(read_only=True), visible_when='object.high_mode == "track" or object.low_mode == "track"'), label='X', show_border=True), HGroup(Item('object.plot.y_mapper.range.low_setting', label='Low', editor=TextEditor()), Item('object.plot.y_mapper.range.high_setting', label='High', editor=TextEditor()), label='Y', show_border=True)), orientation='vertical'), resizable=True, title='Function Plot', width=900, height=600)

    def xfunc(self, low, high):
        # Snap the sample grid to multiples of dx so the sampled points stay
        # stable while the visible range pans.
        dx = ((high - low) / self.numpoints)
        real_low = (ceil((low / dx)) * dx)
        real_high = (ceil((high / dx)) * dx)
        return linspace(real_low, real_high, self.numpoints)

    def yfunc(self, low, high):
        # Curve being plotted: sin(1/x) over the current x window.
        x = self.xfunc(low, high)
        return sin((1.0 / x))

    def _low_mode_changed(self, newvalue):
        # Switch the low bound between a fixed value and tracking mode.
        if (newvalue != 'value'):
            self.plot.x_mapper.range.low_setting = newvalue

    def _high_mode_changed(self, newvalue):
        # Switch the high bound between a fixed value and tracking mode.
        if (newvalue != 'value'):
            self.plot.x_mapper.range.high_setting = newvalue

    def _plot_default(self):
        # Build the DataView with function-backed x/y sources and two renderers.
        container = DataView()
        xds = FunctionDataSource(func=self.xfunc)
        yds = FunctionDataSource(func=self.yfunc)
        xmapper = container.x_mapper
        ymapper = container.y_mapper
        # Both data sources are driven by the X range.
        xds.data_range = xmapper.range
        yds.data_range = xmapper.range
        xmapper.range.set_bounds((- 5), 10)
        ymapper.range.set_bounds((- 1), 1.2)
        plot = ScatterPlot(index=xds, value=yds, index_mapper=xmapper, value_mapper=ymapper, color='green', marker='circle', marker_size=3, line_width=0)
        plot2 = LinePlot(index=xds, value=yds, index_mapper=xmapper, value_mapper=ymapper, color='lightgray')
        container.add(plot2, plot)
        # Pan constrained to x; zoom acts on the index axis as a range tool.
        plot.tools.append(PanTool(plot, constrain_direction='x', constrain=True))
        plot.tools.append(ZoomTool(plot, axis='index', tool_mode='range'))
        return container
def pack_msg_kexinit_for_server(kex, salg, enc, mac, cmpv):
    """Assemble a KEXINIT-style payload from pre-encoded algorithm name-lists.

    Each argument is a bytes name-list.  The enc/mac/cmpv lists are each sent
    twice (client-to-server and server-to-client directions), followed by
    13 zero bytes for the remaining fields.  Framing/padding is delegated to
    ``_pack_packet``.

    The original also computed ``size``/``padding_len`` locals that were never
    used; that dead code has been removed.
    """
    kex_fmt = f'!I{len(kex)}s'
    sal_fmt = f'!I{len(salg)}s'
    enc_fmt = f'!I{len(enc)}s'
    mac_fmt = f'!I{len(mac)}s'
    cmp_fmt = f'!I{len(cmpv)}s'
    # Length-prefix every name-list: uint32 big-endian length + raw bytes.
    kex = struct.pack(kex_fmt, len(kex), kex)
    sal = struct.pack(sal_fmt, len(salg), salg)
    enc = struct.pack(enc_fmt, len(enc), enc)
    mac = struct.pack(mac_fmt, len(mac), mac)
    cmpv = struct.pack(cmp_fmt, len(cmpv), cmpv)
    remain = b'\x00\x00\x00\x00'
    # Message code byte 0x20 (a space).  NOTE(review): RFC 4253 defines
    # SSH_MSG_KEXINIT as 20 decimal (0x14); confirm 0x20 here is intentional.
    packet = b' '
    packet += token_bytes(16)  # random cookie
    packet += kex
    packet += sal
    # encryption / MAC / compression name-lists, once per direction
    packet += enc
    packet += enc
    packet += mac
    packet += mac
    packet += cmpv
    packet += cmpv
    packet += b'\x00'
    packet += remain
    packet += (b'\x00' * 8)
    return _pack_packet(packet)
def amazon_invoice_parser_formatter(pages: List[dict]) -> InvoiceParserDataClass:
    """Convert raw Amazon Textract expense-analysis pages into the
    provider-agnostic InvoiceParserDataClass.

    Args:
        pages: raw Textract response pages; each may carry a 'JobStatus'
            and a list of 'ExpenseDocuments'.

    Returns:
        InvoiceParserDataClass with one InfosInvoiceParserDataClass per
        expense document found across all pages.

    Raises:
        ProviderException: if any page reports JobStatus == 'FAILED'.
    """
    extracted_data = []
    for page in pages:
        if (page.get('JobStatus') == 'FAILED'):
            raise ProviderException(page.get('StatusMessage', 'Amazon returned a job status: FAILED'))
        for invoice in (page.get('ExpenseDocuments') or []):
            # summary: field-type -> detected text; currencies: code -> count
            # of summary fields carrying that currency (used to pick the
            # dominant invoice currency below).
            summary = {}
            currencies = {}
            for field in invoice['SummaryFields']:
                field_type = field['Type']['Text']
                summary[field_type] = field['ValueDetection']['Text']
                field_currency = field.get('Currency', {}).get('Code')
                if (field_currency is not None):
                    if (field_currency not in currencies):
                        currencies[field_currency] = 1
                    else:
                        currencies[field_currency] += 1
            # Flatten every line item of every group into ItemLinesInvoice.
            item_lines = []
            for line_item_group in invoice['LineItemGroups']:
                for fields in line_item_group['LineItems']:
                    parsed_items = {item['Type']['Text']: item['ValueDetection']['Text'] for item in fields['LineItemExpenseFields']}
                    item_lines.append(ItemLinesInvoice(
                        description=parsed_items.get('ITEM'),
                        quantity=convert_string_to_number(parsed_items.get('QUANTITY'), float),
                        amount=convert_string_to_number(parsed_items.get('PRICE'), float),
                        unit_price=convert_string_to_number(parsed_items.get('UNIT_PRICE'), float),
                        discount=None,
                        product_code=parsed_items.get('PRODUCT_CODE'),
                        date_item=None,
                        tax_item=None))
            # RECEIVER_* fields take precedence over the generic NAME/ADDRESS.
            customer = CustomerInformationInvoice(
                customer_name=summary.get('RECEIVER_NAME', summary.get('NAME')),
                customer_address=summary.get('RECEIVER_ADDRESS', summary.get('ADDRESS')),
                customer_email=None,
                customer_id=None,
                customer_number=summary.get('CUSTOMER_NUMBER'),
                customer_tax_id=None,
                customer_mailing_address=None,
                customer_billing_address=None,
                customer_shipping_address=None,
                customer_service_address=None,
                customer_remittance_address=None,
                abn_number=None,
                gst_number=None,
                pan_number=None,
                vat_number=None)
            merchant = MerchantInformationInvoice(
                merchant_name=summary.get('VENDOR_NAME'),
                merchant_address=summary.get('VENDOR_ADDRESS'),
                merchant_phone=summary.get('VENDOR_PHONE'),
                merchant_email=None,
                merchant_fax=None,
                merchant_website=summary.get('VENDOR_URL'),
                merchant_tax_id=summary.get('TAX_PAYER_ID'),
                merchant_siret=None,
                merchant_siren=None,
                abn_number=None,
                gst_number=None,
                pan_number=None,
                vat_number=None)
            # Pick the invoice currency: the single code if unambiguous,
            # otherwise the most frequently seen one.
            invoice_currency = None
            if (len(currencies) == 1):
                invoice_currency = list(currencies.keys())[0]
            elif (len(currencies) > 1):
                invoice_currency = max(currencies, key=currencies.get)
            locale = LocaleInvoice(currency=invoice_currency, language=None)
            taxes = [TaxesInvoice(value=convert_string_to_number(summary.get('TAX'), float), rate=None)]
            invoice_infos = InfosInvoiceParserDataClass(
                customer_information=customer,
                merchant_information=merchant,
                invoice_number=summary.get('INVOICE_RECEIPT_ID'),
                invoice_total=convert_string_to_number(summary.get('TOTAL'), float),
                invoice_subtotal=convert_string_to_number(summary.get('SUBTOTAL'), float),
                amount_due=convert_string_to_number(summary.get('AMOUNT_DUE'), float),
                previous_unpaid_balance=convert_string_to_number(summary.get('PRIOR_BALANCE'), float),
                discount=convert_string_to_number(summary.get('DISCOUNT'), float),
                taxes=taxes,
                payment_term=summary.get('PAYMENT_TERMS'),
                purchase_order=None,
                date=summary.get('ORDER_DATE', summary.get('INVOICE_RECEIPT_DATE')),
                due_date=summary.get('DUE_DATE'),
                service_date=None,
                service_due_date=None,
                locale=locale,
                bank_information=None,
                item_lines=item_lines,
                po_number=summary.get('PO_NUMBER'))
            extracted_data.append(invoice_infos)
    return InvoiceParserDataClass(extracted_data=extracted_data)
class ProgressParallelPreprocessor(ParallelPreprocessor): def on_result(self, pbar): def inner(result): pbar.update(1) self.results.append(result) return inner def transform(self, fileids=None, categories=None): if (not os.path.exists(self.target)): os.makedirs(self.target) self.replicate(self.corpus.root) self.results = [] fileids = self.fileids(fileids, categories) with tqdm(total=len(fileids), unit='Docs') as pbar: pool = mp.Pool(processes=self.tasks) tasks = [pool.apply_async(self.process, (idx, fileid), callback=self.on_result) for (idx, fileid) in enumerate(self.fileids(fileids, categories))] pool.close() pool.join() return self.results
class TestChromiumPost(unittest.TestCase): def setUp(self): self.cr = ChromeController.TabPooledChromium('google-chrome', additional_options=['--no-sandbox', '--disable-setuid-sandbox']) (self.mock_server_port, self.mock_server, self.mock_server_thread) = testing_server.start_server(self, {}) def tearDown(self): self.mock_server.shutdown() del self.cr def test_xhr_post_1(self): intermediate_url = ' target_url = ' with ChromeController.ChromeContext(CHROME_BINARY_NAME, additional_options=['--no-sandbox', '--disable-setuid-sandbox']) as cr: first_nav = cr.blocking_navigate_and_get_source(intermediate_url) ret = cr.xhr_fetch(target_url, post_data='test_post_1', post_type='application/json') pprint.pprint(ret) self.assertEqual(ret['response'], '{"oh" : "hai"}') self.assertEqual(ret['code'], 200) self.assertEqual(ret['mimetype'], 'application/json') def test_xhr_post_2(self): intermediate_url = ' target_url = ' with ChromeController.ChromeContext(CHROME_BINARY_NAME, additional_options=['--no-sandbox', '--disable-setuid-sandbox']) as cr: first_nav = cr.blocking_navigate_and_get_source(intermediate_url) data = urllib.parse.urlencode({'test': 1, 'moar': 'two'}) ret = cr.xhr_fetch(target_url, post_data=data, post_type='application/x-www-form-urlencoded') pprint.pprint(ret) self.assertEqual(ret['response'], '{"oh" : "hai"}') self.assertEqual(ret['code'], 200) self.assertEqual(ret['mimetype'], 'application/json')
def test(): assert (('doc1.similarity(doc2)' in __solution__) or ('doc2.similarity(doc1)' in __solution__)), 'Vergleichst du die Ahnlichkeit der zwei Docs?' assert (0 <= float(similarity) <= 1), 'Der Ahnlichkeitswert muss eine Zahl zwischen 0 und 1 sein. Hast du ihn korrekt berechnet?' __msg__.good('Gut gemacht!')
class TestTurnBattleMagicCmd(BaseEvenniaCommandTest): def test_turnbattlemagiccmd(self): self.call(tb_magic.CmdStatus(), '', 'You have 100 / 100 HP and 20 / 20 MP.') self.call(tb_magic.CmdLearnSpell(), 'test spell', 'There is no spell with that name.') self.call(tb_magic.CmdCast(), '', 'Usage: cast <spell name> = <target>, <target2>') self.call(tb_magic.CmdFight(), '', "There's nobody here to fight!") self.call(tb_magic.CmdAttack(), '', 'You can only do that in combat. (see: help fight)') self.call(tb_magic.CmdPass(), '', 'You can only do that in combat. (see: help fight)') self.call(tb_magic.CmdDisengage(), '', 'You can only do that in combat. (see: help fight)') self.call(tb_magic.CmdRest(), '', 'Char rests to recover HP and MP.')
class OptionPlotoptionsBulletSonificationContexttracksMappingNoteduration(Options): def mapFunction(self): return self._config_get(None) def mapFunction(self, value: Any): self._config(value, js_type=False) def mapTo(self): return self._config_get(None) def mapTo(self, text: str): self._config(text, js_type=False) def max(self): return self._config_get(None) def max(self, num: float): self._config(num, js_type=False) def min(self): return self._config_get(None) def min(self, num: float): self._config(num, js_type=False) def within(self): return self._config_get(None) def within(self, value: Any): self._config(value, js_type=False)
class TestRun(unittest.TestCase):
    """End-to-end CLI tests for sendto_silhouette.py, each run in a
    subprocess.

    Pattern used throughout: on CalledProcessError the captured output is
    printed, assertEqual(e.returncode, 0) reports the real exit code, and
    ``assert False`` guarantees the test fails even under odd conditions.
    """

    def test_00import_inkex(self):
        """The extension module and inkex must be importable."""
        try:
            result = subprocess.check_output([sys.executable, '-c', 'import sendto_silhouette;import inkex;print(inkex)'], stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            print(e.output.decode())
            print(e)
            self.assertEqual(e.returncode, 0)
            assert False

    def test_01help(self):
        """--help prints a usage banner mentioning the script and --help."""
        try:
            result = subprocess.check_output([sys.executable, 'sendto_silhouette.py', '--help'], stderr=subprocess.STDOUT)
            # 'sage:' matches both 'Usage:' and 'usage:'.
            self.assertIn('sage: sendto_silhouette.py', str(result))
            self.assertIn('--help', str(result))
        except subprocess.CalledProcessError as e:
            print(e.output.decode())
            print(e)
            self.assertEqual(e.returncode, 0)
            assert False

    def test_02version(self):
        """--version reports a 1.x version string."""
        try:
            result = subprocess.check_output([sys.executable, 'sendto_silhouette.py', '--version'], stderr=subprocess.STDOUT)
            self.assertIn('1.', str(result))
        except subprocess.CalledProcessError as e:
            print(e.output.decode())
            print(e)
            self.assertEqual(e.returncode, 0)
            assert False

    def test_03run(self):
        """Bare invocation (no input file) must still exit cleanly."""
        try:
            result = subprocess.check_output([sys.executable, 'sendto_silhouette.py'], stderr=subprocess.STDOUT)
            print(result.decode())
        except subprocess.CalledProcessError as e:
            print(e.output.decode())
            print(e)
            self.assertEqual(e.returncode, 0)
            assert False

    def test_04dry_run(self):
        """Dry run on a sample SVG writes a log containing 'driver version'."""
        try:
            result = subprocess.check_output([sys.executable, 'sendto_silhouette.py', '--dry_run=True', '--preview=False', '--log_paths=True', '--logfile=silhouette.log', 'examples/testcut_square_triangle.svg'], stderr=subprocess.STDOUT)
            print(result.decode())
            filehandle = open('silhouette.log', 'r')
            log = filehandle.read()
            filehandle.close()
            self.assertIn('driver version', log)
        except subprocess.CalledProcessError as e:
            print(e.output.decode())
            print(e)
            self.assertEqual(e.returncode, 0)
            assert False

    def test_05dry_run(self):
        """Same as test_04 with the '_o' variant of the sample SVG."""
        try:
            result = subprocess.check_output([sys.executable, 'sendto_silhouette.py', '--dry_run=True', '--preview=False', '--log_paths=True', '--logfile=silhouette.log', 'examples/testcut_square_triangle_o.svg'], stderr=subprocess.STDOUT)
            print(result.decode())
            filehandle = open('silhouette.log', 'r')
            log = filehandle.read()
            filehandle.close()
            self.assertIn('driver version', log)
        except subprocess.CalledProcessError as e:
            print(e.output.decode())
            print(e)
            self.assertEqual(e.returncode, 0)
            assert False

    def test_06dry_run(self):
        """Dry run with --output on a sharp-turns sample."""
        try:
            result = subprocess.check_output([sys.executable, 'sendto_silhouette.py', '--dry_run=True', '--preview=False', '--log_paths=True', '--logfile=silhouette.log', '--output=silhouette.svg', 'examples/sharp_turns.svg'], stderr=subprocess.STDOUT)
            print(result.decode())
            filehandle = open('silhouette.log', 'r')
            log = filehandle.read()
            filehandle.close()
            self.assertIn('driver version', log)
        except subprocess.CalledProcessError as e:
            print(e.output.decode())
            print(e)
            self.assertEqual(e.returncode, 0)
            assert False

    def test_07dry_run(self):
        """Dry run with --dashes on a dashed-line sample."""
        try:
            result = subprocess.check_output([sys.executable, 'sendto_silhouette.py', '--dashes=True', '--preview=False', '--dry_run=True', '--log_paths=True', '--logfile=silhouette.log', '--output=silhouette.svg', 'examples/dashline.svg'], stderr=subprocess.STDOUT)
            print(result.decode())
            filehandle = open('silhouette.log', 'r')
            log = filehandle.read()
            filehandle.close()
            self.assertIn('driver version', log)
        except subprocess.CalledProcessError as e:
            print(e.output.decode())
            print(e)
            self.assertEqual(e.returncode, 0)
            assert False

    def test_08cmd_file(self):
        """--cmdfile output must match the checked-in reference command file."""
        try:
            result = subprocess.check_output([sys.executable, 'sendto_silhouette.py', '--dry_run=True', '--preview=False', '--cmdfile=testcut_square_triangle_o.cmd', '--force_hardware=Silhouette SD 1', 'examples/testcut_square_triangle_o.svg'], stderr=subprocess.STDOUT)
            print(result.decode())
            filehandle = open('examples/testcut_square_triangle_o.cmd', 'r')
            cmdref = filehandle.read()
            filehandle.close()
            filehandle = open('testcut_square_triangle_o.cmd', 'r')
            cmd = filehandle.read()
            filehandle.close()
            self.assertEqual(cmdref, cmd)
        except subprocess.CalledProcessError as e:
            print(e.output.decode())
            print(e)
            self.assertEqual(e.returncode, 0)
            assert False

    def test_09dry_run(self):
        """Matfree strategy with an explicit --logfile logs 'matfree'."""
        try:
            result = subprocess.check_output([sys.executable, 'sendto_silhouette.py', '--dry_run=True', '--preview=False', '--strategy=matfree', '--logfile=silhouette.log', 'examples/testcut_matfree.svg'], stderr=subprocess.STDOUT)
            print(result.decode())
            filehandle = open('silhouette.log', 'r')
            log = filehandle.read()
            filehandle.close()
            self.assertIn('matfree', log)
        except subprocess.CalledProcessError as e:
            print(e.output.decode())
            print(e)
            self.assertEqual(e.returncode, 0)
            assert False

    def test_10dry_run(self):
        """Without --logfile, the log defaults to the temp directory."""
        try:
            result = subprocess.check_output([sys.executable, 'sendto_silhouette.py', '--dry_run=True', '--preview=False', '--strategy=matfree', 'examples/testcut_matfree.svg'], stderr=subprocess.STDOUT)
            print(result.decode())
            filehandle = open(os.path.join(gettempdir(), 'silhouette.log'), 'r')
            log = filehandle.read()
            filehandle.close()
            self.assertIn('matfree', log)
        except subprocess.CalledProcessError as e:
            print(e.output.decode())
            print(e)
            self.assertEqual(e.returncode, 0)
            assert False
def test_gap_forwarding(golden): def p(): x: f32 if True: x = 1.0 if True: x = 2.0 if1 = p.find('x = _ #0').parent() if2 = p.find('x = _ #1').parent() x_alloc = p.find('x: _') p = fuse(p, if1, if2) p = insert_pass(p, if1.body()[0].after()) p = insert_pass(p, if2.body()[0].after()) p = insert_pass(p, x_alloc.after()) assert (str(p) == golden)
def test_fi_tagger_return_char(NLP): text = 'hi Aaron,\r\n\r\nHow is your schedule today, I was wondering if you had time for a phone\r\ncall this afternoon?\r\n\r\n\r\n' doc = NLP(text) for token in doc: if token.is_space: assert (token.pos == SPACE) assert (doc[3].text == '\r\n\r\n') assert doc[3].is_space assert (doc[3].pos == SPACE)
class UltrasonicSensor(Sensor): SYSTEM_CLASS_NAME = Sensor.SYSTEM_CLASS_NAME SYSTEM_DEVICE_NAME_CONVENTION = Sensor.SYSTEM_DEVICE_NAME_CONVENTION MODE_US_DIST_CM = 'US-DIST-CM' MODE_US_DIST_IN = 'US-DIST-IN' MODE_US_LISTEN = 'US-LISTEN' MODE_US_SI_CM = 'US-SI-CM' MODE_US_SI_IN = 'US-SI-IN' MODES = (MODE_US_DIST_CM, MODE_US_DIST_IN, MODE_US_LISTEN, MODE_US_SI_CM, MODE_US_SI_IN) def __init__(self, address=None, name_pattern=SYSTEM_DEVICE_NAME_CONVENTION, name_exact=False, **kwargs): super(UltrasonicSensor, self).__init__(address, name_pattern, name_exact, driver_name=['lego-ev3-us', 'lego-nxt-us'], **kwargs) def distance_centimeters_continuous(self): self._ensure_mode(self.MODE_US_DIST_CM) return (self.value(0) * self._scale('US_DIST_CM')) def distance_centimeters_ping(self): self.mode = self.MODE_US_SI_CM return (self.value(0) * self._scale('US_DIST_CM')) def distance_centimeters(self): return self.distance_centimeters_continuous def distance_inches_continuous(self): self._ensure_mode(self.MODE_US_DIST_IN) return (self.value(0) * self._scale('US_DIST_IN')) def distance_inches_ping(self): self.mode = self.MODE_US_SI_IN return (self.value(0) * self._scale('US_DIST_IN')) def distance_inches(self): return self.distance_inches_continuous def other_sensor_present(self): self._ensure_mode(self.MODE_US_LISTEN) return bool(self.value(0))