code
stringlengths
281
23.7M
def _render_admin_add(request, context):
    """Populate *context* with the server-add forms, pre-filled from the
    datacenter's default settings, and render the VM add page."""
    dc_settings = request.dc.settings
    # Start from the DC's internal-metadata defaults, then overlay per-DC values.
    initial = dc_settings.VMS_VM_JSON_DEFAULTS['internal_metadata'].copy()
    initial.update({
        'node': request.GET.get('node', None),
        'domain': dc_settings.VMS_VM_DOMAIN_DEFAULT,
        # Monitoring defaults on only when zabbix integration, VM sync and the
        # per-VM default all allow it.
        'monitored': (dc_settings.MON_ZABBIX_ENABLED and dc_settings.MON_ZABBIX_VM_SYNC
                      and dc_settings.VMS_VM_MONITORED_DEFAULT),
        'cpu_shares': dc_settings.VMS_VM_CPU_SHARES_DEFAULT,
        'zfs_io_priority': dc_settings.VMS_VM_ZFS_IO_PRIORITY_DEFAULT,
        'zpool': dc_settings.VMS_STORAGE_DEFAULT,
        'ostype': dc_settings.VMS_VM_OSTYPE_DEFAULT,
        'hvm_type': dc_settings.VMS_VM_HVM_TYPE_DEFAULT,
        'snapshot_limit_manual': dc_settings.VMS_VM_SNAPSHOT_LIMIT_MANUAL_DEFAULT,
        'snapshot_size_percent_limit': dc_settings.VMS_VM_SNAPSHOT_SIZE_PERCENT_LIMIT_DEFAULT,
        'snapshot_size_limit': dc_settings.VMS_VM_SNAPSHOT_SIZE_LIMIT_DEFAULT,
        'bootrom': dc_settings.VMS_BHYVE_BOOTROM_DEFAULT,
        'owner': request.user.username,
        'mdata': dc_settings.VMS_VM_MDATA_DEFAULT,
    })
    context['settingsform'] = AdminServerSettingsForm(request, None, prefix='opt', initial=initial)
    context['importform'] = UploadFileForm()
    return render(request, 'gui/vm/add.html', context)
class NetModuleTest:
    """Sanity checks for the ``w3.net`` module: each accessor must return a
    value of the expected type."""

    def test_net_version(self, w3: 'Web3') -> None:
        # The network version is a numeric string (e.g. "1").
        version = w3.net.version
        assert is_string(version)
        assert version.isdigit()

    def test_net_listening(self, w3: 'Web3') -> None:
        assert is_boolean(w3.net.listening)

    def test_net_peer_count(self, w3: 'Web3') -> None:
        assert is_integer(w3.net.peer_count)
class FaucetUntaggedLogRotateTest(FaucetUntaggedTest):
    """Verify FAUCET recreates its log file after the log is rotated away."""

    def test_untagged(self):
        log_path = self.env[self.faucet_controllers[0].name]['FAUCET_LOG']
        rotated = log_path + '.old'
        self.assertTrue(os.path.exists(log_path))
        # Simulate logrotate: move the live log aside.
        os.rename(log_path, rotated)
        self.assertTrue(os.path.exists(rotated))
        # Port flaps force FAUCET to emit new log lines, recreating the file.
        self.flap_all_switch_ports()
        self.assertTrue(os.path.exists(log_path))
def find(search, data):
    """Recursively collect ``{key: value}`` pairs from *data* whose key is in *search*.

    When a matched key's value is a dict containing ``'lo'``, the ``'lo'``
    entry is used as the value.  Non-matching keys (and list elements) are
    searched recursively.  Returns a list of single-item dicts, or ``None``
    when *data* is a scalar leaf (callers rely on truthiness, so ``None`` and
    ``[]`` behave the same).
    """
    matches = []
    if isinstance(data, dict):
        for key, value in data.items():
            if key in search:
                # NOTE(review): a matched key with a non-dict value is silently
                # ignored, mirroring the original control flow -- confirm intended.
                if isinstance(value, dict):
                    if 'lo' in value:
                        # Prefer the 'lo' sub-entry when present.
                        matches.append({key: value['lo']})
                    else:
                        matches.append({key: value})
            else:
                dive = find(search, value)
                if dive:
                    matches.extend(dive)
    elif isinstance(data, list):
        # The original enumerated here but never used the index.
        for item in data:
            found = find(search, item)
            if found:
                matches.extend(found)
    else:
        # Scalar leaf: nothing to search.
        return None
    return matches
class BaseImputer(BaseEstimator, TransformerMixin, GetFeatureNamesOutMixin):
    """Shared base for imputer transformers: input validation plus a
    ``fillna``-based transform driven by the fitted ``imputer_dict_``."""

    def _transform(self, X: pd.DataFrame) -> pd.DataFrame:
        """Validate *X* against the fitted state and reorder its columns."""
        # Fitted attributes (imputer_dict_, feature_names_in_, ...) must exist.
        check_is_fitted(self)
        X = check_X(X)
        # The column count must match what fit() saw.
        _check_X_matches_training_df(X, self.n_features_in_)
        # Reorder columns to the training-time order.
        X = X[self.feature_names_in_]
        return X

    def transform(self, X: pd.DataFrame) -> pd.DataFrame:
        """Replace missing values using the per-column ``imputer_dict_``."""
        X = self._transform(X)
        return X.fillna(value=self.imputer_dict_)

    def _get_feature_names_in(self, X):
        """Record the training columns and their count on the estimator."""
        self.feature_names_in_ = X.columns.to_list()
        self.n_features_in_ = X.shape[1]
        return self

    def _more_tags(self):
        # sklearn estimator tags: NaNs are expected input for an imputer.
        tags_dict = _return_tags()
        tags_dict['allow_nan'] = True
        tags_dict['variables'] = 'numerical'
        return tags_dict
class MPMoveTool(MoveTool):
    """MoveTool variant driven by multi-pointer 'blob' events.

    A single blob id may own the drag at a time; blob down/up events are
    translated into the base class's left-mouse handlers.
    """

    # Id of the blob currently dragging; -1 means no blob owns the drag.
    cur_bid = Int(-1)

    def normal_blob_down(self, event):
        # Claim the drag only when no other blob already owns it.
        if self.cur_bid == -1:
            self.cur_bid = event.bid
            self.normal_left_down(event)

    def dragging_blob_up(self, event):
        # Only the owning blob may end the drag.
        if event.bid == self.cur_bid:
            self.cur_bid = -1
            self.normal_left_up(event)
class OptionSeriesGaugeSonificationDefaultinstrumentoptionsMappingPlaydelay(Options):
    """Highcharts gauge sonification ``mapping.playDelay`` options (generated).

    The flattened source had lost the ``@property`` / ``@<name>.setter``
    decorators, so each setter def silently shadowed its getter; restored here.
    """

    @property
    def mapFunction(self): return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any): self._config(value, js_type=False)
    @property
    def mapTo(self): return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str): self._config(text, js_type=False)
    @property
    def max(self): return self._config_get(None)
    @max.setter
    def max(self, num: float): self._config(num, js_type=False)
    @property
    def min(self): return self._config_get(None)
    @min.setter
    def min(self, num: float): self._config(num, js_type=False)
    @property
    def within(self): return self._config_get(None)
    @within.setter
    def within(self, value: Any): self._config(value, js_type=False)
# NOTE(review): the '@pytest.mark.' prefixes were stripped by extraction
# (only '.skipif'/'.parametrize' fragments remained); restored here.
@pytest.mark.skipif('pandas' not in sys.modules, reason='Pandas is not installed.')
@pytest.mark.parametrize('d', [
    dict(a=1, b=2, c=3),
    dict(x=dict(a=1, b=2, c=3)),
    dict(xs=[dict(a=1, b=2, c=3), dict(y=dict(a=10, b=20, c=30))]),
])
def test_cache_key_consistency(d):
    """Computing the cache key for the same dict repeatedly must yield one unique key."""
    assert len(calculate_cache_key_multiple_times(d)) == 1
def fetch_idv_child_outlays(award_id: int, award_id_column) -> dict:
    """Sum final-period outlays for an IDV's child and grandchild awards.

    Runs the same aggregate query twice, once with the child-award CTE and
    once with the grandchild-award CTE, and returns
    ``{'child_award_total_outlay': ..., 'grandchild_award_total_outlay': ...}``
    with ``None`` when a query produced no rows.
    """
    # When matching on a column other than the numeric 'award_id', the value
    # is interpolated into SQL as a string literal, so escape single quotes.
    if (award_id_column != 'award_id'):
        award_id = re.sub("[']", "''", award_id)
    # Template: {child_sql} selects the relevant (grand)child award ids;
    # outlays are summed only from final-balance submissions, restricted to
    # awards signed on/after FY2020 start whose earliest transaction's fiscal
    # year matches the submission's reporting year, and to awards that had a
    # non-zero outlay in that period.
    sql = "\n with child_cte (award_id) as ({child_sql}),\n date_signed_outlay_amounts (award_id, last_period_total_outlay) as (\n SELECT faba. award_id, COALESCE(sum(COALESCE(faba.gross_outlay_amount_by_award_cpe,0)\n + COALESCE(faba.ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe, 0)\n + COALESCE(faba.ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe, 0)), 0) as last_period_total_outlay\n FROM\n financial_accounts_by_awards faba\n INNER JOIN submission_attributes sa\n ON faba.submission_id = sa.submission_id\n INNER JOIN vw_awards a\n ON faba.award_id = a.id\n AND a.date_signed >= '2019-10-01'\n INNER JOIN child_cte a2 ON faba.award_id = a2.award_id\n INNER JOIN vw_transaction_normalized tn ON tn.id = a.earliest_transaction_id\n WHERE sa.is_final_balances_for_fy AND sa.reporting_fiscal_year = tn.fiscal_year\n GROUP BY faba.award_id\n )\n SELECT sum(CASE WHEN sa.is_final_balances_for_fy = TRUE THEN (COALESCE(faba.gross_outlay_amount_by_award_cpe,0)\n + COALESCE(faba.ussgl487200_down_adj_pri_ppaid_undel_orders_oblig_refund_cpe, 0)\n + COALESCE(faba.ussgl497200_down_adj_pri_paid_deliv_orders_oblig_refund_cpe, 0)) END) AS total_outlay\n FROM\n financial_accounts_by_awards faba\n INNER JOIN submission_attributes sa\n ON faba.submission_id = sa.submission_id\n INNER JOIN date_signed_outlay_amounts o ON faba.award_id = o.award_id AND o.last_period_total_outlay != 0;\n "
    child_results = execute_sql_to_ordered_dictionary(sql.format(child_sql=child_award_sql.format(award_id=award_id, award_id_column=award_id_column)))
    grandchild_results = execute_sql_to_ordered_dictionary(sql.format(child_sql=grandchild_award_sql.format(award_id=award_id, award_id_column=award_id_column)))
    # No rows means no outlays: surface None rather than 0.
    if (len(child_results) == 0):
        child_results = None
    else:
        child_results = child_results[0]['total_outlay']
    if (len(grandchild_results) == 0):
        grandchild_results = None
    else:
        grandchild_results = grandchild_results[0]['total_outlay']
    return {'child_award_total_outlay': child_results, 'grandchild_award_total_outlay': grandchild_results}
class LabelledForm(object):
    """An ordered collection of :class:`Term` objects supporting form algebra.

    Addition/subtraction combine terms (or plain UFL forms wrapped as terms);
    scalar multiplication/division scale every term's form.  ``label_map``
    filters/rewrites terms, and ``form`` collapses the collection back into a
    single UFL form.
    """
    __slots__ = ['terms']

    def __init__(self, *terms: Sequence[Term]):
        """Accept either a sequence of Terms or a single LabelledForm to copy."""
        if ((len(terms) == 1) and isinstance(terms[0], LabelledForm)):
            self.terms = terms[0].terms
        else:
            if any([(type(term) is not Term) for term in list(terms)]):
                raise TypeError('Can only pass terms or a LabelledForm to LabelledForm')
            self.terms = list(terms)

    def __add__(self, other: Union[(ufl.Form, Term, 'LabelledForm')]) -> 'LabelledForm':
        """Concatenate terms; bare UFL forms are wrapped in an unlabelled Term."""
        if isinstance(other, ufl.Form):
            return LabelledForm(*self, Term(other))
        elif (type(other) is Term):
            return LabelledForm(*self, other)
        elif (type(other) is LabelledForm):
            return LabelledForm(*self, *other)
        elif (other is None):
            # Adding None is a no-op (convenient for optional contributions).
            return self
        else:
            return NotImplemented
    # Addition is commutative for all supported operand types.
    __radd__ = __add__

    def __sub__(self, other: Union[(ufl.Form, Term, 'LabelledForm')]) -> 'LabelledForm':
        """Subtraction = addition of the operand scaled by Constant(-1)."""
        if (type(other) is Term):
            return LabelledForm(*self, (Constant((- 1.0)) * other))
        elif (type(other) is LabelledForm):
            return LabelledForm(*self, *[(Constant((- 1.0)) * t) for t in other])
        elif (other is None):
            return self
        else:
            # Assume a bare UFL form: negate and wrap it.
            return LabelledForm(*self, Term((Constant((- 1.0)) * other)))

    def __mul__(self, other: Union[(float, Constant, ufl.algebra.Product)]) -> 'LabelledForm':
        """Scale every term's form by *other*, preserving its labels."""
        return self.label_map(all_terms, (lambda t: Term((other * t.form), t.labels)))

    def __truediv__(self, other: Union[(float, Constant, ufl.algebra.Product)]) -> 'LabelledForm':
        # Division is implemented as multiplication by the reciprocal.
        return (self * (Constant(1.0) / other))
    __rmul__ = __mul__

    def __iter__(self) -> Sequence:
        """Iterate over the underlying terms."""
        return iter(self.terms)

    def __len__(self) -> int:
        """Number of terms in the form."""
        return len(self.terms)

    def label_map(self, term_filter: Callable[([Term], bool)], map_if_true: Callable[([Term], Optional[Term])]=identity, map_if_false: Callable[([Term], Optional[Term])]=identity) -> 'LabelledForm':
        """Return a new LabelledForm with *map_if_true* applied to terms passing
        *term_filter* and *map_if_false* to the rest; maps returning None drop
        the term.  NullTerm seeds the reduction so an all-dropped result is valid.
        """
        new_labelled_form = LabelledForm(functools.reduce(operator.add, filter((lambda t: (t is not None)), ((map_if_true(t) if term_filter(t) else map_if_false(t)) for t in self.terms)), NullTerm))
        # Strip the NullTerm seed back out of the result.
        new_labelled_form.terms = list(filter((lambda t: (t is not NullTerm)), new_labelled_form.terms))
        return new_labelled_form

    def form(self) -> ufl.Form:
        """Collapse all terms into a single UFL form (labels are discarded)."""
        if (len(self.terms) == 0):
            raise TypeError('The labelled form cannot return a form as it has no terms')
        else:
            return functools.reduce(operator.add, (t.form for t in self.terms))
class ACCESS_MASK(object):
    """Windows ACCESS_MASK bit-field helper.

    NOTE(review): the flattened source had collapsed all generic/special
    rights into a single value (1048576, which is only SYNCHRONIZE); the
    standard winnt.h constants are restored here.
    """
    GENERIC_READ = 0x80000000
    GENERIC_WRITE = 0x40000000
    GENERIC_EXECUTE = 0x20000000
    GENERIC_ALL = 0x10000000
    MAXIMUM_ALLOWED = 0x02000000
    ACCESS_SYSTEM_SECURITY = 0x01000000
    SYNCHRONIZE = 0x00100000            # 1048576 -- the one value the dump preserved
    WRITE_OWNER = 0x00080000            # 524288
    WRITE_DACL = 0x00040000             # 262144
    READ_CONTROL = 0x00020000           # 131072
    DELETE = 0x00010000                 # 65536

    def __init__(self, mask):
        # Raw access-mask integer.
        self.mask = mask

    def has_priv(self, priv):
        """True when every bit of *priv* is set in the mask."""
        return (self.mask & priv) == priv

    def set_priv(self, priv):
        """Grant the bits of *priv*."""
        self.mask |= priv

    def remove_priv(self, priv):
        # Clear the bits instead of XOR-toggling them: the original `^=`
        # would *set* a privilege that was not present.
        self.mask &= ~priv
class EmailDomain(UUIDModel, CreatedUpdatedAt, WorkspaceBase):
    """Sending domain registered with an email provider, plus its DNS records.

    NOTE(review): the flattened source had reduced the decorators to a bare
    ``_property`` fragment; the ``@property`` / ``@records.setter`` pair is
    restored here.
    """
    __tablename__ = 'email_domains'

    email_provider: Mapped[str] = mapped_column(String(length=255), nullable=False)
    domain_id: Mapped[str] = mapped_column(String(length=255), nullable=False)
    domain: Mapped[str] = mapped_column(String(length=255), nullable=False, unique=True)
    # Raw DNS records as stored in the DB (JSON list of dicts).
    _records: Mapped[list[dict[str, Any]]] = mapped_column('records', JSON, nullable=False)

    @property
    def records(self) -> list[EmailDomainDNSRecord]:
        """DNS records deserialized into EmailDomainDNSRecord objects."""
        return [EmailDomainDNSRecord(**record) for record in self._records]

    @records.setter
    def records(self, records: list[dict[str, Any]]):
        self._records = records

    def is_verified(self) -> bool:
        """True only when every DNS record has been verified."""
        for record in self.records:
            if not record.verified:
                return False
        return True
class UserQuery(gh.ObjectType):
    """GraphQL query slice exposing user lookup and the login helper object."""

    # Single user lookup by primary key.
    user = gh.Field(User, id=gh.Int(required=True, description='ID'), description='')

    def resolve_user(root, info, id):
        ctx = info.context
        # Raises for callers lacking the player.view_user permission.
        require_perm(ctx, 'player.view_user')
        # None (GraphQL null) when no such user exists.
        return models.User.objects.filter(id=id).first()

    login = gh.Field(Login, description='')

    def resolve_login(root, info):
        # Login is a namespace object; its own resolvers do the real work.
        return Login()
class OptionSeriesFunnelSonificationTracksPointgrouping(Options):
    """Highcharts funnel sonification ``tracks.pointGrouping`` options (generated).

    The flattened source had lost the ``@property`` / ``@<name>.setter``
    decorators, so each setter def silently shadowed its getter; restored here.
    """

    @property
    def algorithm(self): return self._config_get('minmax')
    @algorithm.setter
    def algorithm(self, text: str): self._config(text, js_type=False)
    @property
    def enabled(self): return self._config_get(True)
    @enabled.setter
    def enabled(self, flag: bool): self._config(flag, js_type=False)
    @property
    def groupTimespan(self): return self._config_get(15)
    @groupTimespan.setter
    def groupTimespan(self, num: float): self._config(num, js_type=False)
    @property
    def prop(self): return self._config_get('y')
    @prop.setter
    def prop(self, text: str): self._config(text, js_type=False)
class OptionSeriesNetworkgraphDataDatalabels(Options):
    """Highcharts ``series.networkgraph.data.dataLabels`` options (generated).

    The flattened source had lost the ``@property`` / ``@<name>.setter``
    decorators, so each setter def silently shadowed its getter; restored
    here.  Sub-option accessors (animation, filter, textPath) are read-only.
    """

    @property
    def align(self): return self._config_get('center')
    @align.setter
    def align(self, text: str): self._config(text, js_type=False)
    @property
    def allowOverlap(self): return self._config_get(False)
    @allowOverlap.setter
    def allowOverlap(self, flag: bool): self._config(flag, js_type=False)
    @property
    def animation(self) -> 'OptionSeriesNetworkgraphDataDatalabelsAnimation':
        return self._config_sub_data('animation', OptionSeriesNetworkgraphDataDatalabelsAnimation)
    @property
    def backgroundColor(self): return self._config_get(None)
    @backgroundColor.setter
    def backgroundColor(self, text: str): self._config(text, js_type=False)
    @property
    def borderColor(self): return self._config_get(None)
    @borderColor.setter
    def borderColor(self, text: str): self._config(text, js_type=False)
    @property
    def borderRadius(self): return self._config_get(0)
    @borderRadius.setter
    def borderRadius(self, num: float): self._config(num, js_type=False)
    @property
    def borderWidth(self): return self._config_get(0)
    @borderWidth.setter
    def borderWidth(self, num: float): self._config(num, js_type=False)
    @property
    def className(self): return self._config_get(None)
    @className.setter
    def className(self, text: str): self._config(text, js_type=False)
    @property
    def color(self): return self._config_get(None)
    @color.setter
    def color(self, text: str): self._config(text, js_type=False)
    @property
    def crop(self): return self._config_get(True)
    @crop.setter
    def crop(self, flag: bool): self._config(flag, js_type=False)
    @property
    def defer(self): return self._config_get(True)
    @defer.setter
    def defer(self, flag: bool): self._config(flag, js_type=False)
    @property
    def enabled(self): return self._config_get(False)
    @enabled.setter
    def enabled(self, flag: bool): self._config(flag, js_type=False)
    @property
    def filter(self) -> 'OptionSeriesNetworkgraphDataDatalabelsFilter':
        return self._config_sub_data('filter', OptionSeriesNetworkgraphDataDatalabelsFilter)
    @property
    def format(self): return self._config_get('point.value')
    @format.setter
    def format(self, text: str): self._config(text, js_type=False)
    @property
    def formatter(self): return self._config_get(None)
    @formatter.setter
    def formatter(self, value: Any): self._config(value, js_type=False)
    @property
    def inside(self): return self._config_get(None)
    @inside.setter
    def inside(self, flag: bool): self._config(flag, js_type=False)
    @property
    def nullFormat(self): return self._config_get(None)
    @nullFormat.setter
    def nullFormat(self, flag: bool): self._config(flag, js_type=False)
    @property
    def nullFormatter(self): return self._config_get(None)
    @nullFormatter.setter
    def nullFormatter(self, value: Any): self._config(value, js_type=False)
    @property
    def overflow(self): return self._config_get('justify')
    @overflow.setter
    def overflow(self, text: str): self._config(text, js_type=False)
    @property
    def padding(self): return self._config_get(5)
    @padding.setter
    def padding(self, num: float): self._config(num, js_type=False)
    @property
    def position(self): return self._config_get('center')
    @position.setter
    def position(self, text: str): self._config(text, js_type=False)
    @property
    def rotation(self): return self._config_get(0)
    @rotation.setter
    def rotation(self, num: float): self._config(num, js_type=False)
    @property
    def shadow(self): return self._config_get(False)
    @shadow.setter
    def shadow(self, flag: bool): self._config(flag, js_type=False)
    @property
    def shape(self): return self._config_get('square')
    @shape.setter
    def shape(self, text: str): self._config(text, js_type=False)
    @property
    def style(self): return self._config_get(None)
    @style.setter
    def style(self, value: Any): self._config(value, js_type=False)
    @property
    def textPath(self) -> 'OptionSeriesNetworkgraphDataDatalabelsTextpath':
        return self._config_sub_data('textPath', OptionSeriesNetworkgraphDataDatalabelsTextpath)
    @property
    def useHTML(self): return self._config_get(False)
    @useHTML.setter
    def useHTML(self, flag: bool): self._config(flag, js_type=False)
    @property
    def verticalAlign(self): return self._config_get('bottom')
    @verticalAlign.setter
    def verticalAlign(self, text: str): self._config(text, js_type=False)
    @property
    def x(self): return self._config_get(0)
    @x.setter
    def x(self, num: float): self._config(num, js_type=False)
    @property
    def y(self): return self._config_get(0)
    @y.setter
    def y(self, num: float): self._config(num, js_type=False)
    @property
    def zIndex(self): return self._config_get(6)
    @zIndex.setter
    def zIndex(self, num: float): self._config(num, js_type=False)
class RevSharePolicy(AbstractObject):
    """Facebook Marketing API RevSharePolicy node (generated-code style)."""

    def __init__(self, api=None):
        super(RevSharePolicy, self).__init__()
        self._isRevSharePolicy = True
        self._api = api

    class Field(AbstractObject.Field):
        policy_id = 'policy_id'
        policy_name = 'policy_name'

    _field_types = {
        'policy_id': 'string',
        'policy_name': 'string',
    }

    # Restored @classmethod: the flattened source had lost the decorator but
    # the `cls` parameter (and the SDK's generated-code convention) require it.
    @classmethod
    def _get_field_enum_info(cls):
        """No enum-typed fields on this node."""
        field_enum_info = {}
        return field_enum_info
def return_child_observation_data_as_dict(child_observations, obs, obs_length):
    """Bundle child observation rows under their parent observation *obs*.

    Enriches each child in place (select options list, specimen received
    time) and reports whether any child carries a result and whether any is
    approved.
    """
    rows = []
    has_result = False
    obs_approved = False
    for child in child_observations:
        if child.get('permitted_data_type'):
            obs_length += 1
        # Select-type observations get their newline-separated options split out.
        if child.get('permitted_data_type') == 'Select' and child.get('options'):
            child['options_list'] = child.get('options').split('\n')
        if child.get('specimen'):
            child['received_time'] = frappe.get_value('Specimen', child.get('specimen'), 'received_time')
        rows.append({'observation': child})
        # Any populated result field (data/text/select) marks the parent as resulted.
        if child.get('result_data') or child.get('result_text') or child.get('result_select') not in [None, '', 'Null']:
            has_result = True
        if child.get('status') == 'Approved':
            obs_approved = True
    return {
        'has_component': True,
        'observation': obs.get('name'),
        obs.get('name'): rows,
        'display_name': obs.get('observation_template'),
        'practitioner_name': obs.get('practitioner_name'),
        'healthcare_practitioner': obs.get('healthcare_practitioner'),
        'description': obs.get('description'),
        'has_result': has_result,
        'obs_approved': obs_approved,
    }
def set_spec_debug(debug_option):
    """Flip the ``debug=`` and ``console=`` flags inside the PyInstaller spec files.

    Relies on each spec currently containing the opposite boolean literal,
    which is replaced in place.
    """
    old_flag = not debug_option
    for spec_path in (ONEDIR_SPEC, PORTABLE_SPEC, UPDATER_SPEC_FILE):
        with open(spec_path, 'r+', encoding='utf-8') as spec:
            text = spec.read()
            text = text.replace(f'debug={old_flag}', f'debug={debug_option}')
            text = text.replace(f'console={old_flag}', f'console={debug_option}')
            # Rewrite the file from the start and drop any leftover tail.
            spec.seek(0)
            spec.write(text)
            spec.truncate()
# NOTE(review): this test arrived corrupted from extraction -- `block_number`
# lost its integer value (it is chained onto the tx-hash assignment, and the
# hash itself looks truncated) and the `debt_purchase_amount=` /
# `received_amount=` keyword values are missing, so this is not valid Python
# as-is.  Restore the original literals from the upstream test fixtures.
def test_c_token_liquidation(trace_classifier: TraceClassifier):
    block_number = transaction_hash = '0x012215bedd00147c58e1fb2abbfc13c260190dc9cfc490be3e343'
    # Expected Compound v2 cToken liquidation extracted from the fixture block.
    liquidations = [Liquidation(liquidated_user='0xacdd5528c1c92bb5278efa06cdade4d8', liquidator_user='0xe0090ec6895c087a393f0e45f1f85098a6c33bef', debt_token_address='0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48', debt_purchase_amount=, received_amount=, received_token_address='0x70e36f6bf80a52b3b46b3af8e106cc0ed743e8e4', protocol=Protocol.compound_v2, transaction_hash=transaction_hash, trace_address=[1], block_number=block_number)]
    block = load_test_block(block_number)
    classified_traces = trace_classifier.classify(block.traces)
    result = get_liquidations(classified_traces)
    # Every expected liquidation must appear (result may contain extras).
    for liquidation in liquidations:
        assert (liquidation in result)
class RequiresDirective(docutils.parsers.rst.Directive):
    """reST ``requires`` directive: renders "Requires X, Y." inside a RequiresNode."""

    has_content = False
    required_arguments = 1
    optional_arguments = 0

    def run(self):
        # The single argument is a comma-separated list of requirement names.
        names = tuple(part.strip() for part in self.arguments[0].split(','))
        node = RequiresNode('requires')
        node.document = self.state.document
        sphinx.util.nodes.set_source_info(self, node)
        text = 'Requires {}.'.format(', '.join(names))
        paragraph = docutils.nodes.paragraph('', docutils.nodes.Text(text, text), translatable=False)
        node.append(paragraph)
        return [node]
class OptionSeriesDumbbellSonificationDefaultspeechoptionsMappingVolume(Options):
    """Highcharts dumbbell sonification speech ``mapping.volume`` options (generated).

    The flattened source had lost the ``@property`` / ``@<name>.setter``
    decorators, so each setter def silently shadowed its getter; restored here.
    """

    @property
    def mapFunction(self): return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any): self._config(value, js_type=False)
    @property
    def mapTo(self): return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str): self._config(text, js_type=False)
    @property
    def max(self): return self._config_get(None)
    @max.setter
    def max(self, num: float): self._config(num, js_type=False)
    @property
    def min(self): return self._config_get(None)
    @min.setter
    def min(self, num: float): self._config(num, js_type=False)
    @property
    def within(self): return self._config_get(None)
    @within.setter
    def within(self, value: Any): self._config(value, js_type=False)
def status_output(draw, driver_name: str, jobid: int, status: JobStatus) -> str:
    """Hypothesis helper: fabricate one scheduler's status output for *jobid*.

    *draw* is a hypothesis draw function used to pick among equivalent
    driver-specific spellings of *status*.  LOCAL drivers produce no output;
    unknown drivers raise ValueError.
    """
    if (driver_name == 'TORQUE'):
        job_status = draw(st.sampled_from(job_status_as_torque(status)))
        # NOTE(review): the original multi-line layout of this qstat-style
        # block was flattened to one line in this dump -- confirm the exact
        # whitespace against the parser before trusting it.
        return dedent(f''' Job Id: {jobid}.s034-lcam Job_Name = jobname Job_Owner = owner queue = normal {job_status} ''')
    if (driver_name == 'LSF'):
        # bjobs-style header plus a single job row.
        return f'''JOBID USER STAT QUEUE FROM_HOST EXEC_HOST JOB_NAME SUBMIT_TIME {jobid} pytest DONE normal host exec_host name {datetime.datetime.now()} '''
    if (driver_name == 'SLURM'):
        job_status = draw(st.sampled_from(job_status_as_slurm(status)))
        # squeue-style minimal "<id> <state>" line.
        return f'{jobid} {job_status}'
    if (driver_name == 'LOCAL'):
        return ''
    raise ValueError(f'Unknown driver_name {driver_name}')
class OptionPlotoptionsFunnelSonificationTracksMappingTime(Options):
    """Highcharts funnel sonification ``tracks.mapping.time`` options (generated).

    The flattened source had lost the ``@property`` / ``@<name>.setter``
    decorators, so each setter def silently shadowed its getter; restored here.
    """

    @property
    def mapFunction(self): return self._config_get(None)
    @mapFunction.setter
    def mapFunction(self, value: Any): self._config(value, js_type=False)
    @property
    def mapTo(self): return self._config_get(None)
    @mapTo.setter
    def mapTo(self, text: str): self._config(text, js_type=False)
    @property
    def max(self): return self._config_get(None)
    @max.setter
    def max(self, num: float): self._config(num, js_type=False)
    @property
    def min(self): return self._config_get(None)
    @min.setter
    def min(self, num: float): self._config(num, js_type=False)
    @property
    def within(self): return self._config_get(None)
    @within.setter
    def within(self, value: Any): self._config(value, js_type=False)
def extractFleurdelystranslationsTumblrCom(item):
    """Release-feed parser for fleurdelystranslations.tumblr.com posts.

    Returns a release message for recognized series tags, None for
    non-chapter/preview posts, and False when no tag matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip posts with no chapter/volume info and preview posts outright.
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # tag -> (series name, translation type); insertion order matches the
    # original lookup order.
    tag_to_series = {
        'mhneb': ('Mulan Has No Elder Brother', 'translated'),
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tag, (series, tl_type) in tag_to_series.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the '@pytest.mark.' prefix was stripped by extraction (only
# '.amg' remained); restored here.
@pytest.mark.amg
def test_amg_basic():
    """Smoke-test the AMG-preconditioned solve of the velocity sub-block and
    pin its iteration count to catch convergence regressions."""
    mat_A = load_matrix_step_noslip()
    # Side effect: populates the PETSc options database for the velocity block.
    petsc_options = initialize_velocity_block_petsc_options()
    L_sizes = mat_A.getSizes()
    index_sets = build_amg_index_sets(L_sizes)
    F_ksp = initialize_asm_ksp_obj(mat_A.createSubMatrix(index_sets[0], index_sets[0]))
    (b, x) = create_petsc_vecs(mat_A.createSubMatrix(index_sets[0], index_sets[0]))
    F_ksp.solve(b, x)
    # Exact iteration count expected for this fixture matrix.
    assert (F_ksp.its == 51)
class Resource(testing.SimpleTestResource):
    """Test resource whose verb handlers capture request args and apply
    default responses.

    NOTE(review): the '@' of each decorator was stripped by extraction,
    leaving bare '(testing....)' expressions; restored here.
    """

    @testing.capture_responder_args
    @testing.set_resp_defaults
    def on_put(self, req, resp, **kwargs):
        pass

    @testing.capture_responder_args
    @testing.set_resp_defaults
    def on_patch(self, req, resp, **kwargs):
        pass

    @testing.capture_responder_args
    @testing.set_resp_defaults
    def on_delete(self, req, resp, **kwargs):
        pass

    @testing.capture_responder_args
    @testing.set_resp_defaults
    def on_head(self, req, resp, **kwargs):
        pass

    @testing.capture_responder_args
    @testing.set_resp_defaults
    def on_options(self, req, resp, **kwargs):
        pass
def _get_neighbor_conf(neigh_ip_address):
    """Return the NeighborConf configured for *neigh_ip_address*.

    Raises RuntimeConfigError when no neighbor with that IP is configured.
    """
    conf = CORE_MANAGER.neighbors_conf.get_neighbor_conf(neigh_ip_address)
    if not conf:
        message = 'No Neighbor configuration with IP address %s' % neigh_ip_address
        raise RuntimeConfigError(desc=message)
    # Sanity check: the store must only hold NeighborConf instances.
    assert isinstance(conf, NeighborConf)
    return conf
def write_obj(folder, fname, mesh, idx, save_material=True, feat=None, resolution=[256, 256]):
    """Export mesh instance *idx* as a Wavefront OBJ (plus optional MTL).

    Positions/normals/texcoords are taken per-instance (index *idx*) while
    face index buffers are shared (index 0).  Face entries use v/vt/vn with
    empty slots when texcoords or normals are absent.
    NOTE(review): ``resolution=[256, 256]`` is a mutable default argument --
    harmless here (never mutated) but worth fixing upstream.
    """
    obj_file = os.path.join(folder, (fname + '.obj'))
    print('Writing mesh: ', obj_file)
    with open(obj_file, 'w') as f:
        # Material library reference must come first.
        f.write(f'''mtllib {fname}.mtl
''')
        f.write('g default\n')
        # Pull each attribute to CPU numpy; any of them may be absent (None).
        v_pos = (mesh.v_pos[idx].detach().cpu().numpy() if (mesh.v_pos is not None) else None)
        v_nrm = (mesh.v_nrm[idx].detach().cpu().numpy() if (mesh.v_nrm is not None) else None)
        v_tex = (mesh.v_tex[idx].detach().cpu().numpy() if (mesh.v_tex is not None) else None)
        # Topology is shared across instances, hence index 0.
        t_pos_idx = (mesh.t_pos_idx[0].detach().cpu().numpy() if (mesh.t_pos_idx is not None) else None)
        t_nrm_idx = (mesh.t_nrm_idx[0].detach().cpu().numpy() if (mesh.t_nrm_idx is not None) else None)
        t_tex_idx = (mesh.t_tex_idx[0].detach().cpu().numpy() if (mesh.t_tex_idx is not None) else None)
        print((' writing %d vertices' % len(v_pos)))
        for v in v_pos:
            f.write('v {} {} {} \n'.format(v[0], v[1], v[2]))
        if ((v_tex is not None) and save_material):
            print((' writing %d texcoords' % len(v_tex)))
            assert (len(t_pos_idx) == len(t_tex_idx))
            for v in v_tex:
                # OBJ uses a bottom-left UV origin, hence the vertical flip.
                f.write('vt {} {} \n'.format(v[0], (1.0 - v[1])))
        if (v_nrm is not None):
            print((' writing %d normals' % len(v_nrm)))
            assert (len(t_pos_idx) == len(t_nrm_idx))
            for v in v_nrm:
                f.write('vn {} {} {}\n'.format(v[0], v[1], v[2]))
        f.write('s 1 \n')
        f.write('g pMesh1\n')
        f.write('usemtl defaultMat\n')
        print((' writing %d faces' % len(t_pos_idx)))
        for i in range(len(t_pos_idx)):
            f.write('f ')
            for j in range(3):
                # OBJ indices are 1-based; missing vt/vn slots stay empty.
                f.write((' %s/%s/%s' % (str((t_pos_idx[i][j] + 1)), ('' if (v_tex is None) else str((t_tex_idx[i][j] + 1))), ('' if (v_nrm is None) else str((t_nrm_idx[i][j] + 1))))))
            f.write('\n')
    if (save_material and (mesh.material is not None)):
        mtl_file = os.path.join(folder, (fname + '.mtl'))
        print('Writing material: ', mtl_file)
        material.save_mtl(mtl_file, mesh.material, mesh=mesh.get_n(idx), feat=feat, resolution=resolution)
    print('Done exporting mesh')
# NOTE(review): '_meta(...)' is almost certainly a decorator whose '@' (and
# possibly a name prefix) was lost in extraction; the string literals below
# also appear to have lost their non-ASCII text.  Restore both from upstream
# before relying on this block.
_meta(definition.SinsackHatCard)
class SinsackHatCard():
    # Card metadata -- name/illustrator/cv strings were emptied by extraction.
    name = ''
    illustrator = ''
    cv = ''
    description = '2<style=Card.Name></style>,1-8,2,<style=Card.Name></style>'

    def is_action_valid(self, c, tl):
        """Validate the chosen target list; returns (ok, hint-text)."""
        if (not tl):
            return (False, '')
        t = tl[0]
        # Different hint when targeting oneself vs. another player
        # (hint strings are garbled in this dump).
        if (self.me is t):
            return (True, '?')
        return (True, ',......,')

    effect_string = suppress_launch_card_effect_string
def analyze(document: str, bracket_skip_len=None) -> Iterator[Iterator[List[Token]]]:
    """Tokenize *document* paragraph by paragraph and yield each paragraph's
    sentence segmentation.

    Token offsets are kept relative to the full document via the paragraph
    offsets produced by preprocessing.
    """
    tokenizer = Tokenizer(replace_not_contraction=False)
    for offset, paragraph in preprocess_with_offsets(document):
        yield segment(tokenizer.tokenize(paragraph, offset), bracket_skip_len)
class Solution(object):
    """LeetCode 824 -- Goat Latin."""

    def toGoatLatin(self, S):
        """Translate sentence *S* into Goat Latin.

        Vowel-initial words keep their letters; consonant-initial words move
        the first letter to the end.  Every word gains 'ma' plus i trailing
        'a's, where i is its 1-based position.
        """
        out = []
        for index, word in enumerate(S.split(' '), start=1):
            if word[0].lower() in 'aeiou':
                stem = word
            else:
                # Rotate the leading consonant to the end.
                stem = word[1:] + word[0]
            out.append(stem + 'ma' + 'a' * index)
        return ' '.join(out)
def extractTuzhitranslationsWordpressCom(item):
    """Release-feed parser for tuzhitranslations.wordpress.com posts.

    Returns a release message for recognized series tags, None for
    non-chapter/preview posts, and False when no tag matched.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip posts with no chapter/volume info and preview posts outright.
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    # tag -> (series name, translation type); insertion order matches the
    # original lookup order.
    tag_to_series = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tag, (series, tl_type) in tag_to_series.items():
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class SyncSharedSparseServer(SyncServer):
    """Sync FL server that computes one random sparsity mask and shares it
    with every client via the channel.

    NOTE(review): ``global_mask_params`` is accessed as an attribute in
    ``receive_update_from_client``, so its ``@property`` decorator (lost in
    the flattened source) is restored here.
    """

    def __init__(self, *, global_model: IFLModel, channel: Optional[SparseMaskChannel] = None, **kwargs):
        init_self_cfg(self, component_class=__class__, config_class=SyncSharedSparseServerConfig, **kwargs)
        super().__init__(global_model=global_model, channel=channel, **kwargs)
        # This server only works with a SparseMaskChannel using random sparsity.
        if not isinstance(self._channel, SparseMaskChannel):
            raise TypeError('SyncSharedSparseServer expects channel of type SparseMaskChannel, '
                            f'{type(self._channel)} given.')
        if self._channel.sparsity_method != 'random':
            # Report the offending value itself (the original printed the
            # type of the string, which is always <class 'str'>).
            raise TypeError('SyncSharedSparseServer expects channel sparsity method '
                            f'of type random. {self._channel.sparsity_method} given.')
        # Shared mask (param name -> mask tensor), recomputed by update_mask_params().
        self._global_mask_params: Dict[str, Tensor] = {}

    @property
    def global_mask_params(self):
        """Current shared sparsity mask."""
        return self._global_mask_params

    def update_mask_params(self, aggregated_model: nn.Module, sparsity_method: str):
        """Recompute the shared mask from the aggregated model's parameters."""
        self._global_mask_params = self._channel.compute_mask(aggregated_model.state_dict(), sparsity_method)

    def receive_update_from_client(self, message: Message):
        # Attach the shared mask so the channel sparsifies every client
        # update consistently.
        message.sparsity_mask_params = self.global_mask_params
        super().receive_update_from_client(message)
# NOTE(review): the '@pytest.mark.' prefixes were stripped by extraction
# (only '.parametrize' fragments remained); restored here.
@pytest.mark.parametrize('value', [-1, 1, 2], ids=lambda x: 'Scaling[%d]' % x)
@pytest.mark.parametrize('typ', ['number', 'Constant', 'Function'], ids=lambda x: 'Type=%s' % x)
@pytest.mark.parametrize('degree', [0, 1, 2], ids=lambda x: 'DG(%d)' % x)
def test_scalar_scaled_mass(m, value, typ, degree):
    """Scaling a DG mass form by c (plain number, Constant or DG0 Function)
    must scale the assembled matrix values accordingly, and sums of scaled
    forms must behave additively."""
    if typ == 'number':
        c = value
    elif typ == 'Constant':
        c = Constant(value)
    elif typ == 'Function':
        V = FunctionSpace(m, 'DG', 0)
        c = Function(V)
        c.assign(value)
    V = FunctionSpace(m, 'DG', degree)
    u = TrialFunction(V)
    v = TestFunction(V)
    mass = assemble(inner(u, v) * dx)
    scaled = assemble((c * inner(u, v)) * dx)
    assert np.allclose(mass.M.values * value, scaled.M.values)
    # (c + 1) * mass when adding the unscaled form.
    scaled_sum = assemble(((c * inner(u, v)) * dx) + (inner(u, v) * dx))
    assert np.allclose(mass.M.values * (value + 1), scaled_sum.M.values)
# NOTE(review): the '@pytest.mark.' prefixes were stripped by extraction
# (only '.parametrize'/'.skipif' fragments remained); restored here.
@pytest.mark.parametrize('request_1__time_requested', [(datetime.utcnow() - timedelta(hours=6))])
@pytest.mark.parametrize('request_1__duration', [5000])
@pytest.mark.parametrize('request_1__status_code', [500])
@pytest.mark.parametrize('request_2__time_requested', [(datetime.utcnow() - timedelta(days=1, hours=6))])
@pytest.mark.parametrize('request_2__duration', [100])
@pytest.mark.skipif((sys.version_info < (3,)), reason="For some reason, this doesn't work in python 2.7.")
def test_make_report_post_not_significant(dashboard_user, endpoint, request_1, request_2, session):
    """One slow request vs one fast baseline request must not be reported as
    statistically significant, although the percentual diff is computed."""
    epoch = datetime(1970, 1, 1)
    # Compare the last 24h against the 24h before that.
    response = dashboard_user.post('dashboard/api/reporting/make_report/intervals', json={
        'interval': {
            'from': ((datetime.utcnow() - timedelta(days=1)) - epoch).total_seconds(),
            'to': (datetime.utcnow() - epoch).total_seconds(),
        },
        'baseline_interval': {
            'from': ((datetime.utcnow() - timedelta(days=2)) - epoch).total_seconds(),
            'to': ((datetime.utcnow() - timedelta(days=1)) - epoch).total_seconds(),
        },
    })
    assert response.status_code == 200
    # One summary row per known endpoint.
    assert len(response.json['summaries']) == session.query(Endpoint).count()
    [data] = [row for row in response.json['summaries'] if row['endpoint_id'] == endpoint.id]
    assert data['endpoint_id'] == endpoint.id
    assert data['endpoint_name'] == endpoint.name
    assert not data['has_anything_significant']
    question1, question2 = data['answers']
    assert question1['type'] == 'MEDIAN_LATENCY'
    # 5000ms vs a 100ms baseline = +4900%.
    assert question1['percentual_diff'] == 4900
    assert question1['median'] == request_1.duration
    assert question1['latencies_samples'] == {'baseline': [request_2.duration], 'comparison': [request_1.duration]}
    assert not question1['is_significant']
    assert question1['baseline_median'] == request_2.duration
    assert question2['type'] == 'STATUS_CODE_DISTRIBUTION'
    assert not question2['is_significant']
    assert question2['percentages'] is None
# NOTE(review): the '@pytest.mark.' prefixes were stripped by extraction
# (only '.parametrize' fragments remained); restored here.
@pytest.mark.parametrize('_selector', _selectors)
@pytest.mark.parametrize('_classifier, _regressor, _roc, _r2', _estimators_and_results)
def test_fit_initial_model_performance(_selector, _classifier, _regressor, _roc, _r2, df_test):
    """fit() must record the expected initial model performance for both a
    classifier (roc_auc by default) and a regressor (r2 scoring)."""
    X, y = df_test
    # threshold=-100 keeps every feature so only the baseline score matters.
    sel = _selector(_classifier, threshold=-100).fit(X, y)
    assert np.round(sel.initial_model_performance_, 4) == _roc
    sel = _selector(_regressor, scoring='r2').fit(X, y)
    assert np.round(sel.initial_model_performance_, 4) == _r2
class DevP2PHandshakeParams(NamedTuple):
    """Parameters our node advertises in the devp2p Hello handshake."""
    client_version_string: str
    listen_port: int
    version: int

    def get_base_protocol_class(self) -> Type[BaseP2PProtocol]:
        """Map the negotiated p2p *version* to its protocol class.

        Raises ValueError for any version other than 4 or 5.
        """
        if self.version == 5:
            return P2PProtocolV5
        elif self.version == 4:
            return P2PProtocolV4
        else:
            # ValueError instead of a bare Exception: still caught by callers
            # handling Exception, but idiomatic for an invalid value.
            raise ValueError(f'Unknown protocol version: {self.version}. Expected one of `4` or `5`')
class TimePicker(Control):
    """Material time-picker dialog control.

    NOTE(review): the flattened source had lost every '@property' /
    '@<name>.setter' decorator (only fragments such as '_label_text.setter'
    survived); they are restored here from the surviving fragments and the
    getter/setter pairing of the methods.
    """

    def __init__(self, ref: Optional[Ref] = None, expand: Optional[Union[bool, int]] = None,
                 col: Optional[ResponsiveNumber] = None, opacity: OptionalNumber = None,
                 tooltip: Optional[str] = None, visible: Optional[bool] = None,
                 disabled: Optional[bool] = None, data: Any = None, open: bool = False,
                 value: Optional[time] = None,
                 time_picker_entry_mode: Optional[TimePickerEntryMode] = None,
                 hour_label_text: Optional[str] = None, minute_label_text: Optional[str] = None,
                 help_text: Optional[str] = None, cancel_text: Optional[str] = None,
                 confirm_text: Optional[str] = None, error_invalid_text: Optional[str] = None,
                 on_change=None, on_dismiss=None):
        Control.__init__(self, ref=ref, expand=expand, col=col, opacity=opacity,
                         tooltip=tooltip, visible=visible, disabled=disabled, data=data)
        self.value = value
        self.help_text = help_text
        self.cancel_text = cancel_text
        self.confirm_text = confirm_text
        self.error_invalid_text = error_invalid_text
        self.hour_label_text = hour_label_text
        self.minute_label_text = minute_label_text
        self.time_picker_entry_mode = time_picker_entry_mode
        self.on_change = on_change
        self.on_dismiss = on_dismiss
        self.open = open

    def _get_control_name(self):
        return 'timepicker'

    def pick_time(self):
        # Opening the dialog is done by flipping the 'open' attribute.
        self.open = True
        self.update()

    async def pick_time_async(self):
        self.open = True
        await self.update_async()

    # open
    @property
    def open(self) -> Optional[bool]:
        return self._get_attr('open', data_type='bool', def_value=False)

    @open.setter
    def open(self, value: Optional[bool]):
        self._set_attr('open', value)

    # value
    @property
    def value(self) -> Optional[time]:
        value_string = self._get_attr('value', def_value=None)
        # Guard before splitting: the original split unconditionally and
        # raised AttributeError when no value was set.
        if not value_string:
            return None
        hour, minute = value_string.split(':')[:2]
        return time(hour=int(hour), minute=int(minute))

    @value.setter
    def value(self, value: Optional[Union[time, str]]):
        if isinstance(value, time):
            value = value.strftime('%H:%M')
        self._set_attr('value', value)

    # hour_label_text
    @property
    def hour_label_text(self) -> Optional[str]:
        return self._get_attr('hourLabelText', def_value=None)

    @hour_label_text.setter
    def hour_label_text(self, value: Optional[str]):
        self._set_attr('hourLabelText', value)

    # minute_label_text
    @property
    def minute_label_text(self) -> Optional[str]:
        return self._get_attr('minuteLabelText', def_value=None)

    @minute_label_text.setter
    def minute_label_text(self, value: Optional[str]):
        self._set_attr('minuteLabelText', value)

    # help_text
    @property
    def help_text(self) -> Optional[str]:
        return self._get_attr('helpText', def_value=None)

    @help_text.setter
    def help_text(self, value: Optional[str]):
        self._set_attr('helpText', value)

    # cancel_text
    @property
    def cancel_text(self) -> Optional[str]:
        return self._get_attr('cancelText', def_value=None)

    @cancel_text.setter
    def cancel_text(self, value: Optional[str]):
        self._set_attr('cancelText', value)

    # confirm_text
    @property
    def confirm_text(self) -> Optional[str]:
        return self._get_attr('confirmText', def_value=None)

    @confirm_text.setter
    def confirm_text(self, value: Optional[str]):
        self._set_attr('confirmText', value)

    # error_invalid_text
    @property
    def error_invalid_text(self) -> Optional[str]:
        return self._get_attr('errorInvalidText', def_value=None)

    @error_invalid_text.setter
    def error_invalid_text(self, value: Optional[str]):
        self._set_attr('errorInvalidText', value)

    # time_picker_entry_mode
    @property
    def time_picker_entry_mode(self) -> Optional[TimePickerEntryMode]:
        return self.__time_picker_entry_mode

    @time_picker_entry_mode.setter
    def time_picker_entry_mode(self, value: Optional[TimePickerEntryMode]):
        self.__time_picker_entry_mode = value
        self._set_attr('timePickerEntryMode', value.value if value is not None else None)

    # on_change
    @property
    def on_change(self):
        return self._get_event_handler('change')

    @on_change.setter
    def on_change(self, handler):
        self._add_event_handler('change', handler)

    # on_dismiss
    @property
    def on_dismiss(self):
        return self._get_event_handler('dismiss')

    @on_dismiss.setter
    def on_dismiss(self, handler):
        self._add_event_handler('dismiss', handler)
class Discriminator(nn.Module):
    """MLP discriminator for GAN training.

    Flattens the input image and maps it through four fully-connected layers
    to a single probability in [0, 1].
    """

    def __init__(self, img_shape: Tuple[(int, int, int)], hidden_dim: int=1024) -> None:
        """Args:
            img_shape: (channels, height, width) of the input images.
            hidden_dim: width of the first hidden layer; later layers halve it.
        """
        super().__init__()
        in_dim = int(np.prod(img_shape))
        self.fc1 = nn.Linear(in_dim, hidden_dim)
        self.fc2 = nn.Linear(self.fc1.out_features, (self.fc1.out_features // 2))
        self.fc3 = nn.Linear(self.fc2.out_features, (self.fc2.out_features // 2))
        self.fc4 = nn.Linear(self.fc3.out_features, 1)

    def forward(self, img: torch.Tensor) -> torch.Tensor:
        """Return per-sample real/fake probabilities of shape (batch, 1)."""
        x = img.view(img.size(0), (- 1))
        for layer in (self.fc1, self.fc2, self.fc3):
            x = F.leaky_relu(layer(x), 0.2)
            # Fix: F.dropout defaults to training=True, so the original kept
            # dropping activations even in eval() mode; honor self.training.
            x = F.dropout(x, 0.3, training=self.training)
        return torch.sigmoid(self.fc4(x))
class ConditionCall(NamedTuple):
    """A (method, port, position) triple identifying a condition's callee."""

    method: mariana_trench.Method
    port: mariana_trench.Port
    position: mariana_trench.Position

    # NOTE(review): neither factory takes self/cls -- these were clearly
    # @staticmethods whose decorators were stripped; restored here.
    @staticmethod
    def from_call_info(call_info: mariana_trench.CallInfo) -> 'ConditionCall':
        """Build from a CallInfo whose method must be present.

        Raises:
            sapp.ParseError: if the call info has no method.
        """
        if (call_info.method is None):
            raise sapp.ParseError(f'Cannot construct a ConditionCall without a valid method {call_info}')
        return ConditionCall(call_info.method, call_info.port, call_info.position)

    @staticmethod
    def from_origin(origin: mariana_trench.Origin, call_info: mariana_trench.CallInfo) -> 'ConditionCall':
        """Build from an origin frame, taking the position from the call info."""
        return ConditionCall(method=origin.callee_name, port=origin.callee_port, position=call_info.position)
def _apply_updates_dom0():
    """Apply pending dom0 updates via `qubes-dom0-update`.

    Returns:
        UpdateStatus.REBOOT_REQUIRED on success (dom0 updates always need a
        reboot), UpdateStatus.UPDATES_FAILED if the updater exits non-zero.
    """
    sdlog.info('Updating dom0')
    update_cmd = ['sudo', 'qubes-dom0-update', '-y']
    try:
        subprocess.check_call(update_cmd)
    except subprocess.CalledProcessError as err:
        sdlog.error('An error has occurred updating dom0. Please contact your administrator.')
        sdlog.error(str(err))
        return UpdateStatus.UPDATES_FAILED
    sdlog.info('dom0 updates have been applied and a reboot is required.')
    return UpdateStatus.REBOOT_REQUIRED
class Inventory(object):
    """Inventory service facade.

    Wraps inventory crawling (run_inventory), optional model import
    (run_import) and DataAccess-based persistence.
    """

    def __init__(self, config):
        """Args:
            config: service configuration; provides the DB engine, scoped
                sessions and background execution.
        """
        self.config = config
        # Serializes create() calls so only one crawl runs at a time.
        self._create_lock = threading.Lock()
        init_storage(self.config.get_engine())

    def create(self, background, model_name):
        """Run a new inventory crawl, optionally importing a model afterwards.

        Generator: yields progress objects from the crawler queue and, for
        foreground runs, finally the background task's result.

        Args:
            background: if True, yield only the first progress message and
                let the crawl continue in the background.
            model_name: if set, import the finished inventory into this model.
        """
        with self._create_lock:
            queue = Queue()
            if background:
                # Forwards only the first message for background runs.
                progresser = FirstMessageQueueProgresser(queue)
            else:
                progresser = QueueProgresser(queue)

            def do_inventory():
                # Worker body: reports via `queue`; a trailing None sentinel
                # terminates the foreground consumer loop on failure.
                with self.config.scoped_session() as session:
                    try:
                        result = run_inventory(self.config, queue, session, progresser, background)
                        if (not result):
                            LOGGER.error('Error during inventory run.')
                            queue.put(None)
                            return None
                        if model_name:
                            run_import(self.config.client(), model_name, result.inventory_index_id, background)
                        return result.get_summary()
                    except Exception as e:
                        # Forward the error to the consumer, then the sentinel.
                        LOGGER.exception(e)
                        queue.put(e)
                        queue.put(None)

            if background:
                self.config.run_in_background(do_inventory)
                (yield queue.get())
            else:
                result = self.config.run_in_background(do_inventory)
                for progress in iter(queue.get, None):
                    if isinstance(progress, Exception):
                        raise progress
                    (yield progress)
                if result:
                    (yield result.get())

    def list(self):
        """Yield all stored inventory index entries."""
        with self.config.scoped_session() as session:
            for result in DataAccess.list(session):
                (yield result)

    def get(self, inventory_id):
        """Return the inventory index identified by `inventory_id`."""
        with self.config.scoped_session() as session:
            result = DataAccess.get(session, inventory_id)
            return result

    def delete(self, inventory_id):
        """Delete the inventory identified by `inventory_id` and return it."""
        with self.config.scoped_session() as session:
            result = DataAccess.delete(session, inventory_id)
            return result

    def purge(self, retention_days):
        """Purge inventory data older than the retention period.

        Args:
            retention_days: days of data to keep; falls back to the configured
                default when falsy. Negative values disable purging.

        Returns:
            str: human-readable summary of what was (not) purged.
        """
        LOGGER.info('retention_days is: %s', retention_days)
        if (not retention_days):
            LOGGER.info('retention_days is not specified. Will use configuration default.')
            retention_days = self.config.inventory_config.retention_days
        retention_days = int(retention_days)
        if (retention_days < 0):
            # Negative retention means "keep forever".
            result_message = 'Purge is disabled. Nothing will be purged.'
            LOGGER.info(result_message)
            return result_message
        utc_now = date_time.get_utc_now_datetime()
        cutoff_datetime = (utc_now - datetime.timedelta(days=retention_days))
        LOGGER.info('Cut-off datetime to start purging is: %s', cutoff_datetime)
        with self.config.scoped_session() as session:
            inventory_indexes_to_purge = DataAccess.get_inventory_indexes_older_than_cutoff(session, cutoff_datetime)
            if (not inventory_indexes_to_purge):
                result_message = 'No inventory to be purged.'
                LOGGER.info(result_message)
                return result_message
            purged_inventory_indexes = []
            for inventory_index in inventory_indexes_to_purge:
                # delete() opens its own scoped session per index.
                _ = self.delete(inventory_index.id)
                purged_inventory_indexes.append(str(inventory_index.id))
            purged_inventory_indexes_as_str = ', '.join(purged_inventory_indexes)
            result_message = 'Inventory data from these inventory indexes have been purged: {}'.format(purged_inventory_indexes_as_str)
            LOGGER.info(result_message)
            return result_message
def __getattr__(name):
    """Lazily resolve module attributes (PEP 562 module __getattr__).

    Supports three lookup tables: plain relative re-exports, toolkit-backed
    objects, and imports guarded by an optional dependency. Resolved values
    are cached in the module globals so this hook runs at most once per name.
    """
    _missing = object()
    value = _missing
    if name in _relative_imports:
        from importlib import import_module
        target = _relative_imports[name]
        value = getattr(import_module(f'pyface.{target}'), name)
    elif name in _toolkit_imports:
        from pyface.toolkit import toolkit_object
        target = _toolkit_imports[name]
        value = toolkit_object(f'{target}:{name}')
    elif name in _optional_imports:
        from importlib import import_module
        import logging
        from pyface.util._optional_dependencies import optional_import
        dependency, target = _optional_imports[name]
        # If the dependency is missing, optional_import suppresses the
        # ImportError, `value` stays unset, and we fall through to the
        # AttributeError below.
        with optional_import(dependency, msg=f'{name} is not available due to missing {dependency}.', logger=logging.getLogger(__name__)):
            module = import_module(f'pyface.{target}')
            value = getattr(module, name)
    if value is _missing:
        raise AttributeError(f'module {__name__!r} has no attribute {name!r}')
    globals()[name] = value
    return value
def test_repcode_invalid_in_objects_no_value(tmpdir, merge_files_oneLR, assert_log, assert_message_count):
    """An attribute with an invalid reprcode and no value raises on access,
    while the sibling object's explicitly-set attribute still parses."""
    path = os.path.join(str(tmpdir), 'invalid-repcode-object-no-value.dlis')
    parts = [
        'data/chap3/start.dlis.part',
        'data/chap3/template/global-default.dlis.part',
        'data/chap3/object/object.dlis.part',
        'data/chap3/objattr/reprcode-invalid-no-value.dlis.part',
        'data/chap3/object/object2.dlis.part',
        'data/chap3/objattr/all-set.dlis.part',
    ]
    merge_files_oneLR(path, parts)
    with dlis.load(path) as (f, *tail):
        first = f.object('VERY_MUCH_TESTY_SET', 'OBJECT', 1, 1)
        with pytest.raises(RuntimeError) as excinfo:
            _ = first['GLOBAL_DEFAULT_ATTRIBUTE']
        assert 'invalid representation code' in str(excinfo.value)
        assert_log('value is not explicitly set')
        second = f.object('VERY_MUCH_TESTY_SET', 'OBJECT2', 1, 1)
        attr = second.attic['GLOBAL_DEFAULT_ATTRIBUTE']
        assert attr.value == [1, 2, 3, 4]
        assert_message_count('value is not explicitly set', 1)
        assert_message_count('One or more attributes', 1)
def extractCiaranHillock(item):
    """Feed parser for 'Ciaran Hillock' releases.

    Returns a release message for known series, None for previews/manga or
    unparseable titles, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tags = item['tags']
    if 'Manga' in tags:
        return None
    if 'WATTT' in tags:
        return buildReleaseMessageWithType(item, 'WATTT', vol, chp, frag=frag, postfix=postfix)
    return False
def test_even_non_custom_analyzers_can_have_params():
    """Built-in analyzer types still serialize their extra parameters."""
    whitespace_analyzer = analysis.analyzer('whitespace', type='pattern', pattern='\\\\s+')
    m = mapping.Mapping()
    m.field('title', 'text', analyzer=whitespace_analyzer)
    expected = {'analyzer': {'whitespace': {'type': 'pattern', 'pattern': '\\\\s+'}}}
    assert expected == m._collect_analysis()
def mock_access_to_su_command_is_restricted_pass(self, cmd):
    """Test double for command execution: emulates a host where access to
    `su` is restricted (pam_wheel enabled, wheel-like group present).

    Returns:
        SimpleNamespace with returncode/stderr/stdout mimicking a completed
        process result.
    """
    returncode = 0
    stderr = ['']
    if '/etc/pam.d/su' in cmd:
        stdout = ['auth required pam_wheel.so use_uid group=<group_name>']
    elif '/etc/group' in cmd:
        stdout = ['pytest:x:1000:']
    else:
        # Fix: the original left `stdout` unassigned for any other command,
        # raising UnboundLocalError instead of returning an empty result.
        stdout = ['']
    return SimpleNamespace(returncode=returncode, stderr=stderr, stdout=stdout)
def _handle_optimizer(trainer): if ('optimizer' not in trainer): return trainer['client'] = (trainer['client'] if ('client' in trainer) else {}) client = trainer['client'] client['optimizer'] = trainer['optimizer'] del trainer['optimizer'] optimizer = client['optimizer'] if ('type' not in optimizer): pass elif ('sgd' == optimizer['type'].lower()): optimizer['_base_'] = 'base_optimizer_sgd' elif ('fedprox' == optimizer['type'].lower()): optimizer['_base_'] = 'base_optimizer_fedprox' optimizer.pop('type', None)
# NOTE(review): the original leading `.compilertest` fragment is a stripped
# decorator; restored as `@pytest.mark.compilertest` -- confirm against the
# repo's other compiler tests.
@pytest.mark.compilertest
def test_irratelimit_grpcsvc_version_v3():
    """A RateLimitService with protocol_version "v3" produces the default
    envoy ratelimit config and records no IR errors."""
    yaml = '\n---\napiVersion: getambassador.io/v3alpha1\nkind: RateLimitService\nmetadata:\n name: myrls\n namespace: default\nspec:\n service: {}\n protocol_version: "v3"\n'.format(SERVICE_NAME)
    econf = _get_envoy_config(yaml)
    conf = _get_rl_config(econf.as_dict())
    assert conf
    assert (conf.get('typed_config') == _get_ratelimit_default_conf())
    assert ('ir.ratelimit' not in econf.ir.aconf.errors)
def generate_rules():
    """Render the udev rules file contents for all supported devices.

    Returns:
        str: rules granting rw access (MODE 0666) to each known mouse's
        hidraw and usb device nodes.
    """
    # Improvement: build with a list + join instead of repeated string
    # concatenation (quadratic in the original).
    chunks = ['# Generated by rivalcfg v%s\n' % VERSION]
    chunks.append('# Do not edit this file. It can be regenerated with the following command:\n')
    chunks.append('# \n')
    chunks.append('# rivalcfg --update-udev\n\n')
    for profile in PROFILES.values():
        ids = (profile['vendor_id'], profile['product_id'])
        chunks.append('# %s\n' % profile['name'])
        chunks.append('SUBSYSTEM=="hidraw", ATTRS{idVendor}=="%04x", ATTRS{idProduct}=="%04x", MODE="0666"\n' % ids)
        chunks.append('SUBSYSTEM=="usb", ATTRS{idVendor}=="%04x", ATTRS{idProduct}=="%04x", MODE="0666"\n\n' % ids)
    return ''.join(chunks)
class OptionPlotoptionsPictorialSonificationTracksMappingTremoloSpeed(Options):
    """Generated Highcharts option proxy for
    `plotOptions.pictorial.sonification.tracks.mapping.tremolo.speed`.

    NOTE(review): each option appeared twice (getter + setter signatures)
    with the `@property`/`@<name>.setter` decorators stripped; restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
class TestTypedObjectManager(BaseEvenniaTest):
    """Tests for the typed-object manager's tag search (`get_by_tag`) and the
    `tags` handler's batch helpers."""

    def _manager(self, methodname, *args, **kwargs):
        # Run the named manager method on obj1's class and return a list.
        return list(getattr(self.obj1.__class__.objects, methodname)(*args, **kwargs))

    def test_get_by_tag_no_category(self):
        """get_by_tag with key(s) only; multiple keys are AND-ed together."""
        self.obj1.tags.add('tag1')
        self.obj1.tags.add('tag2')
        self.obj1.tags.add('tag2c')
        self.obj2.tags.add('tag2')
        self.obj2.tags.add('tag2a')
        self.obj2.tags.add('tag2b')
        self.obj2.tags.add('tag3 with spaces')
        self.obj2.tags.add('tag4')
        self.obj2.tags.add('tag2c')
        self.assertEqual(self._manager('get_by_tag', 'tag1'), [self.obj1])
        self.assertEqual(set(self._manager('get_by_tag', 'tag2')), set([self.obj1, self.obj2]))
        self.assertEqual(self._manager('get_by_tag', 'tag2a'), [self.obj2])
        self.assertEqual(self._manager('get_by_tag', 'tag3 with spaces'), [self.obj2])
        self.assertEqual(self._manager('get_by_tag', ['tag2a', 'tag2b']), [self.obj2])
        # AND semantics: no single object carries both of these tags.
        self.assertEqual(self._manager('get_by_tag', ['tag2a', 'tag1']), [])
        self.assertEqual(self._manager('get_by_tag', ['tag2a', 'tag4', 'tag2c']), [self.obj2])

    def test_get_by_tag_and_category(self):
        """get_by_tag with key+category pairs and with category-only lookups."""
        self.obj1.tags.add('tag5', 'category1')
        self.obj1.tags.add('tag6')
        self.obj1.tags.add('tag7', 'category1')
        self.obj1.tags.add('tag6', 'category3')
        self.obj1.tags.add('tag7', 'category4')
        self.obj2.tags.add('tag5', 'category1')
        self.obj2.tags.add('tag5', 'category2')
        self.obj2.tags.add('tag6', 'category3')
        self.obj2.tags.add('tag7', 'category1')
        self.obj2.tags.add('tag7', 'category5')
        self.obj1.tags.add('tag8', 'category6')
        self.obj2.tags.add('tag9', 'category6')
        # Key + single category.
        self.assertEqual(self._manager('get_by_tag', 'tag5', 'category1'), [self.obj1, self.obj2])
        self.assertEqual(self._manager('get_by_tag', 'tag6', 'category1'), [])
        self.assertEqual(self._manager('get_by_tag', 'tag6', 'category3'), [self.obj1, self.obj2])
        # Lists of keys and categories.
        self.assertEqual(self._manager('get_by_tag', ['tag5', 'tag6'], ['category1', 'category3']), [self.obj1, self.obj2])
        self.assertEqual(self._manager('get_by_tag', ['tag5', 'tag7'], 'category1'), [self.obj1, self.obj2])
        # Category-only lookups.
        self.assertEqual(self._manager('get_by_tag', category='category1'), [self.obj1, self.obj2])
        self.assertEqual(self._manager('get_by_tag', category='category2'), [self.obj2])
        self.assertEqual(self._manager('get_by_tag', category=['category1', 'category3']), [self.obj1, self.obj2])
        self.assertEqual(self._manager('get_by_tag', category=['category1', 'category2']), [self.obj1, self.obj2])
        self.assertEqual(self._manager('get_by_tag', category=['category5', 'category4']), [])
        self.assertEqual(self._manager('get_by_tag', category='category1'), [self.obj1, self.obj2])
        self.assertEqual(self._manager('get_by_tag', category='category6'), [self.obj1, self.obj2])

    def test_get_tag_with_all(self):
        """match='all' requires every key/category pair to be present."""
        self.obj1.tags.add('tagA', 'categoryA')
        self.assertEqual(self._manager('get_by_tag', ['tagA', 'tagB'], ['categoryA', 'categoryB'], match='all'), [])

    def test_get_tag_with_any(self):
        """match='any' requires at least one key/category pair to be present."""
        self.obj1.tags.add('tagA', 'categoryA')
        self.assertEqual(self._manager('get_by_tag', ['tagA', 'tagB'], ['categoryA', 'categoryB'], match='any'), [self.obj1])

    def test_get_tag_with_any_including_nones(self):
        """A None entry in the category list is tolerated with match='any'."""
        self.obj1.tags.add('tagA', 'categoryA')
        self.assertEqual(self._manager('get_by_tag', ['tagA', 'tagB'], ['categoryA', 'categoryB', None], match='any'), [self.obj1])

    def test_get_tag_withnomatch(self):
        """No hit at all returns an empty list even with match='any'."""
        self.obj1.tags.add('tagC', 'categoryC')
        self.assertEqual(self._manager('get_by_tag', ['tagA', 'tagB'], ['categoryA', 'categoryB'], match='any'), [])

    def test_batch_add(self):
        """batch_add accepts bare keys, (key, category) and (key, category, data)."""
        tags = ['tag1', ('tag2', 'category2'), 'tag3', ('tag4', 'category4', 'data4')]
        self.obj1.tags.batch_add(*tags)
        self.assertEqual(self.obj1.tags.get('tag1'), 'tag1')
        tagobj = self.obj1.tags.get('tag4', category='category4', return_tagobj=True)
        self.assertEqual(tagobj.db_key, 'tag4')
        self.assertEqual(tagobj.db_category, 'category4')
        self.assertEqual(tagobj.db_data, 'data4')
# NOTE(review): the two bare parenthesized lines above the def were stripped
# mock-patch decorators; restored below (the innermost patch maps to the
# first mock argument). Confirm the exact patch helper name used in this file.
@patch(_generate_image_name_from_tag_mock_name)
@patch('src.lib.docker_management.docker_image_builder.build_missing_envoy_docker_image')
def test_build_envoy_image_from_source(mock_build_missing_image, mock_generate_image_from_tag):
    """build_envoy_image_from_source builds the image when missing and
    returns the tag generated from the requested envoy tag."""
    mock_build_missing_image.return_value = None
    mock_generate_image_from_tag.return_value = _DEFAULT_ENVOY_IMAGE_TAG
    manager = generate_image_manager_with_source_url()
    image_tag = image_builder.build_envoy_image_from_source(manager, 'envoy_tag')
    assert (image_tag == _DEFAULT_ENVOY_IMAGE_TAG)
    mock_generate_image_from_tag.assert_called_once()
    mock_build_missing_image.assert_called_once_with(manager, 'envoy_tag')
def download_background_video(background_config: Tuple[(str, str, str, Any)]):
    """Download one background video into assets/backgrounds/video/.

    Args:
        background_config: (uri, filename, credit, extra) tuple describing
            the video to fetch; the file is saved as '<credit>-<filename>'.
    """
    Path('./assets/backgrounds/video/').mkdir(parents=True, exist_ok=True)
    (uri, filename, credit, _) = background_config
    # Fix: use the unpacked `filename` in the target path -- the original
    # contained a literal "(unknown)" placeholder, so every video collided
    # on the same name and the cache check never differentiated files.
    if Path(f'assets/backgrounds/video/{credit}-{filename}').is_file():
        return
    print_step("We need to download the backgrounds videos. they are fairly large but it's only done once. ")
    print_substep('Downloading the backgrounds videos... please be patient ')
    print_substep(f'Downloading {filename} from {uri}')
    ydl_opts = {'format': 'bestvideo[height<=1080][ext=mp4]', 'outtmpl': f'assets/backgrounds/video/{credit}-{filename}', 'retries': 10}
    with yt_dlp.YoutubeDL(ydl_opts) as ydl:
        # download() canonically takes a list of URLs.
        ydl.download([uri])
    print_substep('Background video downloaded successfully! ', style='bold green')
class OptionPlotoptionsTreemapAccessibility(Options):
    """Generated Highcharts option proxy for
    `plotOptions.treemap.accessibility`.

    NOTE(review): each scalar option appeared as duplicate defs (getter plus
    setter signature) with `@property`/`@<name>.setter` decorators stripped;
    restored here. Sub-option accessors are read-only properties.
    """

    @property
    def description(self):
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def descriptionFormat(self):
        return self._config_get(None)

    @descriptionFormat.setter
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def exposeAsGroupOnly(self):
        return self._config_get(None)

    @exposeAsGroupOnly.setter
    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def keyboardNavigation(self) -> 'OptionPlotoptionsTreemapAccessibilityKeyboardnavigation':
        return self._config_sub_data('keyboardNavigation', OptionPlotoptionsTreemapAccessibilityKeyboardnavigation)

    @property
    def point(self) -> 'OptionPlotoptionsTreemapAccessibilityPoint':
        return self._config_sub_data('point', OptionPlotoptionsTreemapAccessibilityPoint)
def mailchimp_transactional_dataset_config(db: Session, mailchimp_transactional_connection_config: ConnectionConfig, mailchimp_transactional_dataset: Dict[(str, Any)]) -> Generator:
    """Pytest fixture: a DatasetConfig wired to the Mailchimp Transactional
    connection, torn down after the test.

    Renames the shared connection fixture to the dataset's fides_key,
    persists the raw dataset dict as a CtlDataset, links both via a
    DatasetConfig, yields it, then deletes what it created (the connection
    rename is not reverted).
    """
    fides_key = mailchimp_transactional_dataset['fides_key']
    # Align the shared connection fixture with this dataset's key.
    mailchimp_transactional_connection_config.name = fides_key
    mailchimp_transactional_connection_config.key = fides_key
    mailchimp_transactional_connection_config.save(db=db)
    ctl_dataset = CtlDataset.create_from_dataset_dict(db, mailchimp_transactional_dataset)
    dataset = DatasetConfig.create(db=db, data={'connection_config_id': mailchimp_transactional_connection_config.id, 'fides_key': fides_key, 'ctl_dataset_id': ctl_dataset.id})
    (yield dataset)
    # Teardown: remove the link first, then the dataset it references.
    dataset.delete(db=db)
    ctl_dataset.delete(db=db)
def test_as_tuple_incorrect():
    """Invalid element types inside AsTuple raise descriptive TypeErrors."""
    bad_nested = [torch.ones(3, 3), 'Glove80']
    nested_dict = [AsDict(torch.full((5, 5), 80), torch.full((6, 6), -1))]
    with pytest.raises(TypeError, match='Tensor, str'):
        AsTuple(torch.full((2, 2), 42), bad_nested, None, nested_dict)
    with pytest.raises(TypeError, match='unsupported.*int'):
        InvalidAsTuple(42)
def test_value_mape_test_render_json() -> None:
    """TestValueMAPE serializes to the expected JSON payload."""
    frame = pd.DataFrame({'category_feature': ['n', 'd', 'p', 'n'], 'numerical_feature': [0, 1, 2, 5], 'target': [0, 0, 0, 1], 'prediction': [0, 0, 0, 0]})
    suite = TestSuite(tests=[TestValueMAPE()])
    suite.run(current_data=frame, reference_data=frame, column_mapping=ColumnMapping())
    raw_json = suite.json()
    assert isinstance(raw_json, str)
    payload = json.loads(raw_json)['tests'][0]
    expected = {'description': 'The MAPE is 25.0. The test threshold is eq=25 2.5.', 'group': 'regression', 'name': 'Mean Absolute Percentage Error (MAPE)', 'parameters': {'condition': {'eq': {'absolute': 1e-12, 'relative': 0.1, 'value': 25.0}}, 'value': 25.0}, 'status': 'SUCCESS'}
    assert payload == expected
class ImportSortingTransformer(cst.CSTTransformer):
    """LibCST transformer that sorts import blocks at module level and inside
    every indented block, tracking the current indentation string."""

    def __init__(self, config: Config, module: cst.Module, sorter: ImportSorter) -> None:
        self.config = config
        self.module = module
        self.sorter = sorter
        # Maps updated statement nodes back to their originals (filled in
        # leave_SimpleStatementLine) so originals can be recovered later.
        self.statement_map: Dict[(cst.CSTNode, cst.SimpleStatementLine)] = {}
        self.default_indent: str = module.default_indent
        # Accumulated indentation of the block currently being visited.
        self.indent: str = ''

    def get_original_node(self, node: cst.CSTNode) -> cst.CSTNode:
        """Return the original statement for an updated node (or the node itself)."""
        return self.statement_map.get(node, node)

    def on_visit(self, node: cst.CSTNode) -> bool:
        # Imports never live inside expressions or small statements, so do
        # not descend into them at all.
        if isinstance(node, (cst.BaseExpression, cst.BaseSmallStatement)):
            return False
        return super().on_visit(node)

    def leave_SimpleStatementLine(self, original_node: cst.SimpleStatementLine, updated_node: cst.SimpleStatementLine) -> cst.BaseStatement:
        # Record the updated->original mapping; the node is left unchanged.
        self.statement_map[updated_node] = original_node
        return updated_node

    def leave_Module(self, original_node: cst.Module, updated_node: cst.Module) -> cst.Module:
        # Module top level sorts with no indentation.
        sorted_body = self.sorter.find_and_sort_blocks(updated_node.body, module=self.module, indent='')
        return updated_node.with_changes(body=sorted_body)

    def visit_IndentedBlock(self, node: cst.IndentedBlock) -> Optional[bool]:
        # Entering a block: push its indent (None means the module default).
        node_indent = node.indent
        self.indent += (self.default_indent if (node_indent is None) else node_indent)
        return True

    def leave_IndentedBlock(self, original_node: cst.IndentedBlock, updated_node: cst.IndentedBlock) -> cst.BaseSuite:
        node_indent = original_node.indent
        if (node_indent is None):
            node_indent = self.default_indent
        sorted_body = self.sorter.find_and_sort_blocks(updated_node.body, module=self.module, indent=self.indent)
        # Pop this block's indent from the accumulated prefix.
        # NOTE(review): if node_indent were ever an empty string, this slice
        # would clear the whole prefix ([:-0] == [:0]); presumably libcst
        # never yields an empty explicit indent -- verify.
        self.indent = self.indent[:(- len(node_indent))]
        return updated_node.with_changes(body=sorted_body)
class PerformanceReport():
    """Aggregates a sequence of ExecReports into summary statistics."""

    def __init__(self, exec_reports: List[ExecReport]):
        self.exec_reports = exec_reports

    def arguments(self) -> Tuple[(Any, ...)]:
        """Arguments of the most recent execution."""
        return self.exec_reports[-1].args

    def report_time(self) -> datetime:
        """Creation time of the most recent report."""
        return self.exec_reports[-1].report_created

    def number_of_runs(self) -> int:
        """Total number of recorded executions."""
        return len(self.exec_reports)

    def number_of_terminates(self) -> int:
        """How many executions were killed."""
        return sum(report.is_killed for report in self.exec_reports)

    def resources(self) -> List[ResourceRecord]:
        """Build the resource summary: elapsed time plus min/max/mean of cpu
        and memory across runs."""
        records: List[ResourceRecord] = [self._make_resource('Time passed', 'seconds', 'time_passed', None)]
        for name, unit in (('cpu', '%'), ('mem', 'kb')):
            for func in (min, max, mean):
                records.append(self._make_resource(f'{name} {func.__name__}', unit, name, cast(Callable, func)))
        return records

    def _make_resource(self, name: str, unit: str, attr_name: str, aggr_function: Optional[Callable]) -> ResourceRecord:
        mean_value, std_dev = self._count_resource(attr_name, aggr_function)
        return ResourceRecord(name, unit, mean_value, std_dev)

    def _count_resource(self, attr_name: str, aggr_function: Optional[Callable]=None) -> Tuple[(float, float)]:
        """Mean and stdev of `attr_name` across runs, each per-run value
        optionally pre-aggregated by `aggr_function` (identity when None)."""
        if not aggr_function:
            aggr_function = lambda x: x
        values = [aggr_function(getattr(report, attr_name)) for report in self.exec_reports]
        average = mean(values)
        spread = stdev(values) if len(values) > 1 else 0
        return (average, spread)
class Container(_common.FlyteIdlEntity):
    """Flyte IDL Container definition (image, command, resources, env, config).

    NOTE(review): `to_flyte_idl` reads `self.image`, `self.command`, etc.
    without calling them, proving the accessors were `@property`s whose
    decorators were stripped; likewise `from_flyte_idl(cls, ...)` requires
    `@classmethod`. Both are restored here.
    """

    def __init__(self, image, command, args, resources, env, config, data_loading_config=None):
        """Args:
            image: container image identifier.
            command: entrypoint command list.
            args: arguments passed to the command.
            resources: resource requests/limits (Resources).
            env: dict of environment variables.
            config: dict of arbitrary key/value configuration.
            data_loading_config: optional DataLoadingConfig.
        """
        self._data_loading_config = data_loading_config
        self._image = image
        self._command = command
        self._args = args
        self._resources = resources
        self._env = env
        self._config = config

    @property
    def image(self):
        return self._image

    @property
    def command(self):
        return self._command

    @property
    def args(self):
        return self._args

    @property
    def resources(self):
        return self._resources

    @property
    def env(self):
        return self._env

    def add_env(self, key: str, val: str):
        """Mutate the env map in place (used while building a task spec)."""
        self._env[key] = val

    @property
    def config(self):
        return self._config

    @property
    def data_loading_config(self):
        return self._data_loading_config

    def to_flyte_idl(self):
        """Serialize to the protobuf Container message."""
        return _core_task.Container(image=self.image, command=self.command, args=self.args, resources=self.resources.to_flyte_idl(), env=[_literals_pb2.KeyValuePair(key=k, value=v) for (k, v) in self.env.items()], config=[_literals_pb2.KeyValuePair(key=k, value=v) for (k, v) in self.config.items()], data_config=(self._data_loading_config.to_flyte_idl() if self._data_loading_config else None))

    @classmethod
    def from_flyte_idl(cls, pb2_object):
        """Deserialize from the protobuf Container message."""
        return cls(image=pb2_object.image, command=pb2_object.command, args=pb2_object.args, resources=Resources.from_flyte_idl(pb2_object.resources), env={kv.key: kv.value for kv in pb2_object.env}, config={kv.key: kv.value for kv in pb2_object.config}, data_loading_config=(DataLoadingConfig.from_flyte_idl(pb2_object.data_config) if pb2_object.HasField('data_config') else None))
def is_date(value):
    """Return True if *value* parses as YYYY, YYYY-MM or YYYY-MM-DD.

    Raises:
        ValueError: chained from the last parse failure when no format matches.
    """
    last_error = None
    for fmt in ('%Y', '%Y-%m', '%Y-%m-%d'):
        try:
            datetime.strptime(value, fmt)
            return True
        except ValueError as exc:
            last_error = exc
    msg = f'Invalid date {value}.'
    raise ValueError(msg) from last_error
def test_create_station_store_pax(db, client, jwt, user):
    """POST /v1/station-store-paxs creates a pax record for a registration
    station and returns it with timestamps."""
    user.is_super_admin = True
    event = EventFactoryBasic()
    microlocation = MicrolocationSubFactory(event=event)
    station = StationFactory(event=event, microlocation=microlocation, station_type='registration')
    session = SessionSubFactory(event=event, microlocation=microlocation)
    db.session.commit()
    payload = {
        'data': {
            'type': 'station-store-pax',
            'attributes': {'current_pax': 10},
            'relationships': {
                'station': {'data': {'id': str(station.id), 'type': 'station'}},
                'session': {'data': {'id': str(session.id), 'type': 'session'}},
            },
        }
    }
    response = client.post('/v1/station-store-paxs', data=json.dumps(payload), content_type='application/vnd.api+json', headers=jwt)
    assert response.status_code == 201
    body = response.json['data']
    assert body['type'] == 'station-store-pax'
    attributes = body['attributes']
    assert attributes['current-pax'] == 10
    assert attributes['created-at'] is not None
    assert attributes['modified-at'] is not None
def patient_leave_service_unit(inpatient_record, check_out, leave_from):
    """Check the patient out of `leave_from`: mark the matching, still-open
    occupancy as left, stamp its check-out time, vacate the unit and save."""
    occupancies = inpatient_record.inpatient_occupancies
    if not occupancies:
        return
    for occupancy in occupancies:
        # Skip rows already left or belonging to a different unit.
        if occupancy.left == 1 or occupancy.service_unit != leave_from:
            continue
        occupancy.left = True
        occupancy.check_out = check_out
        frappe.db.set_value('Healthcare Service Unit', occupancy.service_unit, 'occupancy_status', 'Vacant')
    inpatient_record.save(ignore_permissions=True)
def to_lms(S, h):
    """Convert polar OKLab chroma `S` and hue `h` (radians) to LMS.

    Builds the (a, b) chroma components, applies the OKLAB_TO_LMS3 affine
    rows, then cubes each channel (the OKLab nonlinearity inverse).
    """
    a = S * np.cos(h)
    b = S * np.sin(h)
    channels = []
    for row in OKLAB_TO_LMS3:
        linear = row[0] + (row[1] * a) + (row[2] * b)
        channels.append(linear * linear * linear)
    return tuple(channels)
class MyLinkedList(LinkedList):
    """Linked list with cycle-start detection (Floyd's tortoise and hare)."""

    def find_loop_start(self):
        """Return the node where a cycle begins, or None if the list is acyclic."""
        head = self.head
        # Empty or single-node list: nothing to detect.
        if head is None or head.next is None:
            return None
        tortoise = head
        hare = head
        # Phase 1: 1x vs 2x pointers until they meet (cycle) or the hare
        # walks off the end (no cycle).
        while hare.next is not None:
            tortoise = tortoise.next
            hare = hare.next.next
            if hare is None:
                return None
            if tortoise == hare:
                break
        # Phase 2: restart the tortoise at the head; advancing both at 1x
        # meets exactly at the cycle entry. If phase 1 ended without a
        # meeting, the hare walks off the tail here and we return None.
        tortoise = self.head
        while tortoise != hare:
            tortoise = tortoise.next
            hare = hare.next
            if hare is None:
                return None
        return tortoise
def extract87Percent(item):
    """Feed parser for the '87Percent' group: map release tags to series
    titles, returning None for previews/unparseable titles and False when
    no tag matches."""
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # Checked in the same order as the original if-chain.
    series_by_tag = [
        ('Return of the former hero', 'Return of the Former Hero'),
        ('Dragon egg', 'Reincarnated as a dragons egg ~Lets aim to be the strongest~'),
        ('Summoning at random', 'Summoning at Random'),
        ('Legend', ''),
        ('Death game', 'The world is fun as it has become a death game'),
        ('Elf Tensei', 'Elf Tensei Kara no Cheat Kenkoku-ki'),
    ]
    for tag, series in series_by_tag:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix)
    return False
class Remote(Calculator):
    """Calculator that runs another calculator remotely over SSH (fabric).

    Serializes the wrapped calculator spec to YAML, runs `pysis` in a remote
    temporary directory, pulls the results archive back and parses it locally.
    """

    def __init__(self, remote_calc, host, prefix='', **kwargs):
        """Args:
            remote_calc: dict-like calculator spec executed on the remote end.
            host: SSH host passed to fabric.Connection.
            prefix: shell prefix (e.g. environment activation) for remote
                commands.
        """
        super().__init__(**kwargs)
        self.remote_calc = remote_calc
        self.host = host
        self.prefix = prefix
        self.calc_inp = yaml.dump(self.remote_calc)
        # Fixed artifact names used on both ends of the transfer.
        self.tar_fn = 'pysis.tar.gz'
        self.yaml_fn = 'inp.yaml'
        # Template for the remote pysis input; filled per call.
        # NOTE(review): .copy() in run_calculation is shallow, so these
        # nested dicts are shared across calls; harmless here because fn and
        # run_func are always reset -- verify if the template grows.
        self.run_dict = {'geom': {'fn': None}, 'calc': {'run_func': None}}
        self.run_dict['calc'].update(remote_calc)

    def run_calculation(self, atoms, coords, run_func='get_energy'):
        """Execute `run_func` remotely for the given geometry and return the
        parsed results dict."""
        con = Connection(self.host)
        # Create a scratch directory on the remote host.
        res = con.run('mktemp -d', hide=True)
        tmp_dir = res.stdout.strip()
        xyz_str = self.prepare_xyz_string(atoms, coords)
        run_dict = self.run_dict.copy()
        run_dict['geom']['fn'] = xyz_str
        run_dict['calc']['run_func'] = run_func
        # Write the pysis input locally, then ship it over.
        with open(self.yaml_fn, 'w') as handle:
            yaml.dump(run_dict, handle)
        with con.cd(tmp_dir):
            tmp_parent = Path(tmp_dir).parent
            tar_target = (tmp_parent / self.tar_fn)
            con.put(self.yaml_fn, tmp_dir)
            with con.prefix(self.prefix):
                con.run(f'pysis {self.yaml_fn}', hide=True)
            # Archive the whole scratch dir, fetch it, then clean up remotely.
            con.run(f'tar -czf {tar_target} .')
            con.get(tar_target, self.tar_fn)
            con.run(f'rm -r {tmp_dir}')
            return self.parse_results()

    def parse_results(self):
        """Extract and decode the calculator results from the fetched tarball."""
        with tarfile.open(self.tar_fn, 'r:gz') as tfile:
            as_json = tfile.extractfile('./calculator_000.000.results').read()
            results = json_to_results(as_json)
            return results

    def get_energy(self, atoms, coords, **prepare_kwargs):
        """Remote energy calculation."""
        return self.run_calculation(atoms, coords, run_func='get_energy')

    def get_forces(self, atoms, coords, **prepare_kwargs):
        """Remote forces calculation."""
        return self.run_calculation(atoms, coords, run_func='get_forces')

    def get_hessian(self, atoms, coords, **prepare_kwargs):
        """Remote Hessian calculation."""
        return self.run_calculation(atoms, coords, run_func='get_hessian')
class ApiExecutionError(Error):
    """Raised when a GCP API call fails; wraps the underlying HTTP error."""

    CUSTOM_ERROR_MESSAGE = 'GCP API Error: unable to get {0} from GCP:\n{1}\n{2}'

    def __init__(self, resource_name, e, resource_key=None, resource_value=None):
        """Args:
            resource_name: name of the resource being fetched.
            e: the underlying HTTP error; must expose `.content` bytes.
            resource_key/resource_value: optional key/value appended to the
                resource name for context.
        """
        if (resource_key and resource_value):
            resource_name = API_EXECUTION_ERROR_ARG_FORMAT.format(resource_name, resource_key, resource_value)
        super(ApiExecutionError, self).__init__(self.CUSTOM_ERROR_MESSAGE.format(resource_name, e, e.content.decode('utf-8')))
        # Fix: the original `self. = e` is a syntax error (attribute name
        # lost in transit). NOTE(review): restored as `http_error` -- confirm
        # the attribute name expected by callers.
        self.http_error = e
class OptionSeriesTreegraphSonificationDefaultspeechoptionsMappingVolume(Options):
    """Generated Highcharts option proxy for
    `series.treegraph.sonification.defaultSpeechOptions.mapping.volume`.

    NOTE(review): each option appeared twice (getter + setter signatures)
    with the `@property`/`@<name>.setter` decorators stripped; restored here.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the original leading `.compilertest` fragment is a stripped
# decorator; restored as `@pytest.mark.compilertest` -- confirm against the
# repo's other compiler tests.
@pytest.mark.compilertest
def test_long_cluster_1(tmp_path):
    """Config caching must be transparent: builds with and without the cache
    produce identical configs, both initially and after applying a delta."""
    builder1 = Builder(logger, tmp_path, 'cache_test_3.yaml')
    builder2 = Builder(logger, tmp_path, 'cache_test_3.yaml', enable_cache=False)
    b1 = builder1.build()
    b2 = builder2.build()
    print('checking baseline...')
    builder1.check('baseline', b1, b2, strip_cache_keys=True)
    builder1.apply_yaml('cache_delta_3.yaml')
    builder2.apply_yaml('cache_delta_3.yaml')
    b1 = builder1.build()
    b2 = builder2.build()
    print('checking after apply...')
    builder1.check('after apply', b1, b2, strip_cache_keys=True)
    print('test_long_cluster_1 done')
def parse_debug_info_method_parameter_list(dex_object, offset):
    """Decode the ULEB128-encoded debug-info parameter-name list starting at
    `offset` in the dex content; NO_INDEX (-1) entries are skipped."""
    names = []
    consumed, current_line = get_uleb128(dex_object.m_content[offset:offset + 5])
    offset += consumed
    consumed, parameters_size = get_uleb128(dex_object.m_content[offset:offset + 5])
    offset += consumed
    for _ in range(parameters_size):
        consumed, string_idx = get_uleb128p1(dex_object.m_content[offset:offset + 5])
        offset += consumed
        if string_idx != -1:
            names.append(dex_object.getstringbyid(string_idx))
    return names
class TestFuzzTHBattleFaith(object):
    """End-to-end fuzz test: one server core plus six bot clients play a full
    THBattleFaith game with random inputs."""

    def testFuzzTHBattleFaith(self):
        env = Environ()
        t = EventTap()
        me = gevent.getcurrent()

        def fail_crash(g):
            # Propagate an in-game crash to this (main) greenlet.
            e = Exception('GAME CRASH')
            e.__cause__ = g.runner.exception
            gevent.kill(me, e)
            return g

        s = env.server_core()
        cl = BatchList([env.client_core() for _ in range(6)])
        # Tap all event streams so specific events can be awaited below.
        t.tap(s, *cl)
        c1r = BatchList(cl[1:])
        c = cl[0]
        names = ('Reimu', 'Marisa', 'Youmu', 'Sakuya', 'Satori', 'Koishi', 'Remilia', 'Flandre')
        for (i, name) in zip(cl, names):
            i.auth.login(name)
        wait()
        assert all(cl.auth.pid)
        # First client creates the room; the remaining five join it.
        c.room.create('Test1', 'THBattleFaith', {})
        wait()
        gid = c.game.gid_of(t[c.events.game_joined])
        c1r.room.join(gid)
        wait()
        assert (([gid] * 6) == [i.game.gid_of(t[i.events.game_joined]) for i in cl])
        s.events.game_crashed += fail_crash
        for i in cl:
            g = t[i.events.game_joined]
            i.events.game_crashed += fail_crash
            # Random-input bot drives every client.
            g.event_observer = BotUserInputHandler(g)
        cl.room.get_ready()
        wait()
        assert (([gid] * 6) == [i.game.gid_of(t[i.events.game_started]) for i in cl])
        wait()

        def game_ended(g):
            # A normal game end unwinds the main greenlet via GameEnded.
            gevent.kill(me, GameEnded())
            return g

        s.events.game_ended += game_ended
        [i.game.start_game(t[i.events.game_started]) for i in cl]
        try:
            # Run the game to completion; GameEnded is the success signal.
            let_it_go(*cl)
        except GameEnded:
            pass
class ExampleProtobufSerializer(Serializer):
    """Protobuf serializer for Message objects, with or without dialogue info.

    NOTE(review): `encode`/`decode` take no self/cls, so they were clearly
    `@staticmethod`s whose decorators were stripped; restored here.
    """

    @staticmethod
    def encode(msg: Message) -> bytes:
        """Encode `msg` to protobuf bytes; dialogue metadata, when present,
        travels in a DialogueMessage envelope with the body as a Struct."""
        message_pb = ProtobufMessage()
        if msg.has_dialogue_info:
            dialogue_message_pb = Pb2DialogueMessage()
            dialogue_message_pb.message_id = msg.message_id
            dialogue_message_pb.dialogue_starter_reference = msg.dialogue_reference[0]
            dialogue_message_pb.dialogue_responder_reference = msg.dialogue_reference[1]
            dialogue_message_pb.target = msg.target
            # Strip the dialogue fields from the body copy; they are carried
            # by the envelope instead.
            new_body = copy(msg._body)
            new_body.pop('message_id')
            new_body.pop('dialogue_reference')
            new_body.pop('target')
            body_json = Struct()
            body_json.update(new_body)
            dialogue_message_pb.content = body_json.SerializeToString()
            message_pb.dialogue_message.CopyFrom(dialogue_message_pb)
        else:
            body_json = Struct()
            body_json.update(msg._body)
            message_pb.body.CopyFrom(body_json)
        return message_pb.SerializeToString()

    @staticmethod
    def decode(obj: bytes) -> Message:
        """Decode protobuf bytes back into a TMessage.

        Raises:
            ValueError: if the oneof `message` field is unrecognized.
        """
        message_pb = ProtobufMessage()
        message_pb.ParseFromString(obj)
        message_type = message_pb.WhichOneof('message')
        if (message_type == 'body'):
            body = dict(message_pb.body)
            msg = TMessage(_body=body)
            return msg
        if (message_type == 'dialogue_message'):
            dialogue_message_pb = message_pb.dialogue_message
            message_id = dialogue_message_pb.message_id
            target = dialogue_message_pb.target
            dialogue_starter_reference = dialogue_message_pb.dialogue_starter_reference
            dialogue_responder_reference = dialogue_message_pb.dialogue_responder_reference
            body_json = Struct()
            body_json.ParseFromString(dialogue_message_pb.content)
            body = dict(body_json)
            # Re-inject the dialogue fields stripped during encode().
            body['message_id'] = message_id
            body['target'] = target
            body['dialogue_reference'] = (dialogue_starter_reference, dialogue_responder_reference)
            return TMessage(_body=body)
        raise ValueError('Message type not recognized.')
class StripHtmlPostprocessor(Postprocessor):
    """Markdown postprocessor that strips HTML comments and/or selected tag
    attributes (including inline `on*` JS handlers) from rendered output."""

    def __init__(self, strip_comments, strip_js_on_attributes, strip_attributes, md):
        # strip_comments: drop HTML comments entirely when True.
        self.strip_comments = strip_comments
        self.re_attributes = None
        # Escape user-supplied attribute names so they match literally.
        attributes = [re.escape(a.strip()) for a in strip_attributes]
        if strip_js_on_attributes:
            # Match any inline JS handler attribute (onclick, onload, ...).
            attributes.append('on[\\w]+')
        if attributes:
            self.re_attributes = re.compile((TAG_BAD_ATTR % '|'.join(attributes)), (re.DOTALL | re.UNICODE))
        super(StripHtmlPostprocessor, self).__init__(md)

    def repl(self, m):
        """Regex replacement callback: rebuild the matched tag, minus any
        comments/attributes configured for removal."""
        if m.group('comments'):
            tag = ('' if self.strip_comments else m.group('comments'))
        elif m.group('scripts'):
            # <script> tags keep their body; only attributes are filtered.
            tag = m.group('script_open')
            if (self.re_attributes is not None):
                tag += self.re_attributes.sub('', m.group('script_attr'))
            else:
                tag += m.group('script_attr')
            tag += m.group('script_rest')
        elif m.group('close_tag'):
            # Closing tags carry no attributes; pass through untouched.
            tag = m.group(0)
        else:
            tag = m.group('open')
            if (self.re_attributes is not None):
                tag += self.re_attributes.sub('', m.group('attr'))
            else:
                tag += m.group('attr')
            tag += m.group('close')
        return tag

    def run(self, text):
        # Skip the regex pass entirely when nothing is configured for removal.
        strip = (self.strip_comments or self.strip_js_on_attributes or self.re_attributes)
        return (RE_TAG_HTML.sub(self.repl, text) if strip else text)
class RecentFileManager(object):
    """Per-DCC recent-file list, persisted as JSON in the local cache folder.

    Lists are capped at ``defaults.max_recent_files`` and kept in
    most-recently-used order.
    """

    def __new__(cls):
        return super(RecentFileManager, cls).__new__(cls)

    @classmethod  # NOTE(review): uses `cls` and is called on the class in restore() — decorator was evidently lost; restored.
    def cache_file_full_path(cls):
        """Return the normalized absolute path of the cache file."""
        from anima import defaults
        return os.path.normpath(os.path.expandvars(os.path.expanduser(os.path.join(defaults.local_cache_folder, defaults.recent_file_name))))

    def __init__(self):
        # Mapping of dcc_name -> list of file paths, most recent first.
        self.recent_files = dict()
        self.restore()

    def save(self):
        """Serialize the current state to the cache file as pretty JSON."""
        dumped_data = json.dumps(self.recent_files, sort_keys=True, indent=4, separators=(',', ': '))
        self._write_data(dumped_data)

    def _write_data(self, data):
        """Write *data* to the cache file, creating its directory if needed."""
        file_full_path = self.cache_file_full_path()
        file_path = os.path.dirname(file_full_path)
        try:
            os.makedirs(file_path)
        except OSError:
            # Directory already exists; the write below proceeds regardless.
            pass
        finally:
            with open(file_full_path, 'w+') as data_file:
                data_file.writelines(data)

    def restore(self):
        """Load state from the cache file; missing/corrupt files are ignored."""
        try:
            with open(RecentFileManager.cache_file_full_path(), 'r') as s:
                self.recent_files = json.loads(s.read())
        except (IOError, ValueError):
            pass
        from anima import defaults
        # Enforce the cap in case the file was written with a larger limit.
        for dcc in self.recent_files:
            self.recent_files[dcc] = self.recent_files[dcc][:defaults.max_recent_files]

    def add(self, dcc_name, file_path):
        """Move *file_path* to the front of *dcc_name*'s list and persist."""
        if (dcc_name not in self.recent_files):
            self.recent_files[dcc_name] = []
        if (file_path in self.recent_files[dcc_name]):
            self.recent_files[dcc_name].remove(file_path)
        self.recent_files[dcc_name].insert(0, file_path)
        from anima import defaults
        self.recent_files[dcc_name] = self.recent_files[dcc_name][:defaults.max_recent_files]
        self.save()

    def remove(self, dcc_name, file_path):
        """Remove *file_path* from *dcc_name*'s list (does not persist)."""
        self[dcc_name].remove(file_path)

    def __getitem__(self, item):
        return self.recent_files[item]

    def __setitem__(self, key, value):
        self.recent_files[key] = value
class TestCheckUdevRules(object):
    """Tests for udev.are_rules_up_to_date() version matching."""

    # NOTE(review): `rules` is consumed as a pytest fixture by the tests
    # below; the `@pytest.fixture` decorator appears to have been lost in
    # this copy — restore it against the original file.
    def rules(self):
        # Sample rules-file header containing the generator's version string.
        return '# Generated by rivalcfg v42.0.0\nFoo\nBar...\n'

    def test_right_version(self, rules):
        # Version embedded in the rules header matches the current version.
        assert (udev.are_rules_up_to_date(rules, current_version='42.0.0') is True)

    def test_wrong_version(self, rules):
        # Mismatching version means the rules file is stale.
        assert (udev.are_rules_up_to_date(rules, current_version='13.37.0') is False)
def bootstrap_cli(cmdline_args=None) -> Optional[int]:
    """Entry point for the uFBT bootstrap CLI.

    Parses global flags, dispatches to the selected subcommand and returns
    its exit code (1 when no subcommand was given, 2 on failure).
    """
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d [%(levelname).1s] %(message)s',
        level=logging.INFO,
        datefmt='%H:%M:%S',
    )

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--no-check-certificate',
        help='Disable SSL certificate verification',
        action='store_true',
        default=False,
    )
    parser.add_argument(
        '--ufbt-home',
        '-d',
        help='uFBT state directory',
        default=os.environ.get('UFBT_HOME', DEFAULT_UFBT_HOME),
    )
    parser.add_argument(
        '--force',
        '-f',
        help='Force operation',
        action='store_true',
        default=False,
    )
    parser.add_argument(
        '--verbose',
        help='Enable extra logging',
        action='store_true',
        default=False,
    )

    subparsers = parser.add_subparsers()
    for command_cls in bootstrap_subcommand_classes:
        command_cls().add_to_parser(subparsers)

    args = parser.parse_args(cmdline_args)

    if args.verbose:
        logging.getLogger().setLevel(logging.DEBUG)

    if args.no_check_certificate:
        # Install a permissive SSL context for all subsequent SDK downloads.
        import ssl

        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        BaseSdkLoader._SSL_CONTEXT = ctx

    # No subcommand selected: show usage and signal an error.
    if 'func' not in args:
        parser.print_help()
        return 1

    try:
        return args.func(args)
    except Exception as e:
        log.error(f'Failed to run operation: {e}. See --verbose for details')
        if args.verbose:
            raise
        return 2
# NOTE(review): the class decorator was garbled in the source; restored to the
# conventional instruction-type registration call matching the visible
# argument list — confirm against the original file.
@OFPInstruction.register_instruction_type([ofproto.OFPIT_STAT_TRIGGER])
class OFPInstructionStatTrigger(OFPInstruction):
    """Statistics trigger instruction (OFPIT_STAT_TRIGGER).

    Attributes:
        flags: OFPSTF_* trigger flag bitmap.
        thresholds: OFPStats carrying the trigger threshold values.
    """

    def __init__(self, flags, thresholds, type_=None, len_=None):
        super(OFPInstructionStatTrigger, self).__init__()
        self.type = ofproto.OFPIT_STAT_TRIGGER
        self.len = len_
        self.flags = flags
        self.thresholds = thresholds

    @classmethod  # NOTE(review): takes `cls` — decorator was evidently lost; restored.
    def parser(cls, buf, offset):
        """Parse one instruction from *buf* at *offset*."""
        (type_, len_, flags) = struct.unpack_from(ofproto.OFP_INSTRUCTION_STAT_TRIGGER_PACK_STR0, buf, offset)
        offset += 8  # fixed-size header precedes the stats payload
        thresholds = OFPStats.parser(buf, offset)
        inst = cls(flags, thresholds)
        inst.len = len_
        return inst

    def serialize(self, buf, offset):
        """Serialize into *buf* at *offset*; total length = header + stats."""
        # Stats payload is written first so its length can go in the header.
        stats_len = self.thresholds.serialize(buf, (offset + 8))
        self.len = (8 + stats_len)
        msg_pack_into(ofproto.OFP_INSTRUCTION_STAT_TRIGGER_PACK_STR0, buf, offset, self.type, self.len, self.flags)
# NOTE(review): the parametrize decorator was garbled (leading `@pytest.mark`
# lost, leaving a syntax error); restored — confirm against the original file.
@pytest.mark.parametrize('tracing_bits,expected', [('00', {'recorded': 0}), ('01', {'recorded': 1})])
def test_tracing_options(tracing_bits, expected):
    """The traceparent trace-flags field drives the `recorded` option."""
    header = '00-0af7651916cd43dd8448eb211c80319c-b7ad6b-{}'.format(tracing_bits)
    trace_parent = TraceParent.from_string(header)
    assert (trace_parent.trace_options.recorded == expected['recorded'])
class NamespaceNotActiveError(Exception):
    """Raised when an operation targets a namespace whose active cluster is
    not the cluster that received the request."""

    # Declared fields (camelCase kept for wire/interface compatibility).
    message: str
    namespaceName: str
    currentCluster: str
    activeCluster: str

    def __init__(self, message: str, namespaceName: str, currentCluster: str, activeCluster: str):
        # NOTE(review): the annotated fields had no initializer (a @dataclass
        # decorator may have been lost); an explicit __init__ with the same
        # field order is provided so instances are constructible either way.
        super().__init__(message)
        self.message = message
        self.namespaceName = namespaceName
        self.currentCluster = currentCluster
        self.activeCluster = activeCluster

    def namespace_name(self):
        """Return the namespace the failed operation targeted."""
        return self.namespaceName

    def current_cluster(self):
        """Return the cluster that received the request."""
        return self.currentCluster

    def active_cluster(self):
        """Return the cluster that is currently active for the namespace."""
        return self.activeCluster
def client1(sock, readq, initial_timeout):
    """Scripted client for a protocol test.

    Sends byte sequences over *sock* and asserts which bytes the peer echoes
    back onto *readq*. NOTE(review): from the assertions it looks like the
    peer only echoes while toggled 'on' ('^' on / '$' off) and replies with
    each byte incremented by one — confirm against the server implementation.
    """
    # Wait (up to initial_timeout) for the peer's greeting byte.
    assert_queue_contains(readq, b'*', timeout=initial_timeout)
    sock.send(b'abcdef')
    assert_queue_empty(readq)
    sock.send(b'^')  # toggle echo on (no reply expected for the toggle)
    assert_queue_empty(readq)
    sock.send(b'f')
    assert_queue_contains(readq, b'g')
    sock.send(b'1234')
    assert_queue_contains(readq, b'2')
    assert_queue_contains(readq, b'3')
    assert_queue_contains(readq, b'4')
    assert_queue_contains(readq, b'5')
    sock.send(b'$')  # toggle echo off
    assert_queue_empty(readq)
    sock.send(b'1234')
    assert_queue_empty(readq)
    sock.send(b'^')  # toggle echo back on
    sock.send(b'xy')
    assert_queue_contains(readq, b'y')
    assert_queue_contains(readq, b'z')
def bulk_generate_invoices(sales_orders: List[SOCode], warehouse_allocation: Optional[WHAllocation]=None, request_id=None, client=None):
    """Generate Unicommerce invoices for a batch of Sales Orders.

    Each order is processed independently: a failure on one order is logged
    (with DB rollback) and does not stop the rest of the batch.

    Args:
        sales_orders: Sales Order codes to invoice.
        warehouse_allocation: optional mapping of SO code -> allocation.
        request_id: frappe request id for background-job log correlation.
        client: optional pre-built UnicommerceAPIClient (created if omitted).
    """
    if (client is None):
        client = UnicommerceAPIClient()
    frappe.flags.request_id = request_id
    update_invoicing_status(sales_orders, 'Queued')
    failed_orders = []
    for so_code in sales_orders:
        try:
            so = frappe.get_doc('Sales Order', so_code)
            channel = so.get(CHANNEL_ID_FIELD)
            channel_config = frappe.get_cached_doc('Unicommerce Channel', channel)
            # Allocation is keyed by SO code and may legitimately be absent.
            wh_allocation = (warehouse_allocation.get(so_code) if warehouse_allocation else None)
            _generate_invoice(client, so, channel_config, warehouse_allocation=wh_allocation)
        except Exception as e:
            # Best-effort batch: record the failure and continue.
            create_unicommerce_log(status='Failure', exception=e, rollback=True, make_new=True)
            failed_orders.append(so_code)
    _log_invoice_generation(sales_orders, failed_orders)
# NOTE(review): the class- and method-level decorators were garbled in the
# source (original text is a syntax error). They are restored below as
# `requires_toolkit(...)` and `unittest.mock.patch(...)`, matching the visible
# argument lists — confirm against the original file and its imports.
@requires_toolkit([ToolkitName.qt, ToolkitName.wx])
class TestSimpleFileEditor(BaseTestMixin, unittest.TestCase):
    """UI tests for the simple (text-field) mode of FileEditor."""

    def setUp(self):
        BaseTestMixin.setUp(self)

    def tearDown(self):
        BaseTestMixin.tearDown(self)

    @requires_toolkit([ToolkitName.qt])
    def test_simple_editor_set_text_to_nonexisting_path(self):
        # Pressing Enter commits the typed path even if it does not exist.
        view = View(Item('filepath', editor=FileEditor()))
        obj = FileModel()
        tester = UITester()
        with tester.create_ui(obj, dict(view=view)) as ui:
            filepath_field = tester.find_by_name(ui, 'filepath')
            filepath_field.perform(KeySequence('some_file.txt'))
            filepath_field.perform(KeyClick('Enter'))
            self.assertEqual(obj.filepath, 'some_file.txt')

    def test_simple_editor_display_path(self):
        # Setting the trait programmatically updates the displayed text.
        view = View(Item('filepath', editor=FileEditor()))
        obj = FileModel()
        tester = UITester()
        with tester.create_ui(obj, dict(view=view)) as ui:
            filepath_field = tester.find_by_name(ui, 'filepath')
            self.assertEqual(filepath_field.inspect(DisplayedText()), '')
            obj.filepath = 'some_file.txt'
            self.assertEqual(filepath_field.inspect(DisplayedText()), 'some_file.txt')

    @requires_toolkit([ToolkitName.qt])
    def test_simple_editor_auto_set_text(self):
        # With auto_set, each keystroke updates the trait without Enter.
        view = View(Item('filepath', editor=FileEditor(auto_set=True)))
        obj = FileModel()
        tester = UITester()
        with tester.create_ui(obj, dict(view=view)) as ui:
            filepath_field = tester.find_by_name(ui, 'filepath')
            filepath_field.perform(KeySequence('some_file.txt'))
            self.assertEqual(obj.filepath, 'some_file.txt')

    def test_simple_editor_reset_text_if_validation_error(self):
        # A nonexistent path fails validation and the field is reset.
        view = View(Item('existing_filepath', editor=FileEditor()))
        obj = FileModel()
        tester = UITester()
        with tester.create_ui(obj, dict(view=view)) as ui:
            filepath_field = tester.find_by_name(ui, 'existing_filepath')
            filepath_field.perform(KeySequence('some_file.txt'))
            filepath_field.perform(KeyClick('Enter'))
            self.assertEqual(obj.existing_filepath, '')
            self.assertEqual(filepath_field.inspect(DisplayedText()), '')

    @unittest.mock.patch('pyface.api.FileDialog.open', autospec=True,
                         side_effect=trait_set_side_effect(return_code=OK, path='some_file.txt'))
    def test_show_file_dialog(self, mock_open):
        # Accepting the (mocked) file dialog writes the chosen path back.
        view = View(Item('filepath', editor=FileEditor()))
        obj = FileModel()
        tester = UITester()
        with tester.create_ui(obj, dict(view=view)) as ui:
            editor = ui.get_editors('filepath')[0]
            editor.show_file_dialog()
            self.assertEqual(editor.value, 'some_file.txt')

    @unittest.mock.patch('pyface.api.FileDialog.open', autospec=True,
                         side_effect=trait_set_side_effect(return_code=OK, path='some_file.txt'))
    def test_show_file_dialog_truncate_ext(self, mock_open):
        # With truncate_ext, the extension is stripped from the chosen path.
        view = View(Item('filepath', editor=FileEditor(truncate_ext=True)))
        obj = FileModel()
        tester = UITester()
        with tester.create_ui(obj, dict(view=view)) as ui:
            editor = ui.get_editors('filepath')[0]
            editor.show_file_dialog()
            self.assertEqual(editor.value, 'some_file')
def main():
    """Export a trained PaddleOCR model to inference format.

    For distillation architectures each sub-model is exported into its own
    subdirectory; otherwise the single model is exported under
    ``<save_inference_dir>/inference``.
    """
    FLAGS = ArgsParser().parse_args()
    config = load_config(FLAGS.config)
    merge_config(FLAGS.opt)
    logger = get_logger()
    post_process_class = build_post_process(config['PostProcess'], config['Global'])
    if hasattr(post_process_class, 'character'):
        # Size the recognition head to the post-processor's character set.
        char_num = len(getattr(post_process_class, 'character'))
        if (config['Architecture']['algorithm'] in ['Distillation']):
            for key in config['Architecture']['Models']:
                config['Architecture']['Models'][key]['Head']['out_channels'] = char_num
                # Inference only needs final outputs, not intermediate feats.
                config['Architecture']['Models'][key]['return_all_feats'] = False
        else:
            config['Architecture']['Head']['out_channels'] = char_num
    model = build_model(config['Architecture'])
    _ = load_dygraph_params(config, model, logger, None)
    model.eval()
    save_path = config['Global']['save_inference_dir']
    arch_config = config['Architecture']
    if (arch_config['algorithm'] in ['Distillation']):
        # Export each distillation sub-model into its own subdirectory.
        archs = list(arch_config['Models'].values())
        for (idx, name) in enumerate(model.model_name_list):
            sub_model_save_path = os.path.join(save_path, name, 'inference')
            export_single_model(model.model_list[idx], archs[idx], sub_model_save_path, logger)
    else:
        save_path = os.path.join(save_path, 'inference')
        export_single_model(model, arch_config, save_path, logger)
def dump_login_status(self, fileDir=None):
    """Persist login state (version, login info, cookies, storage) for hot
    reload, defaulting to ``self.hotReloadDir``.

    An existing session file is replaced via a unique temp file + rename so
    it is never left half-written.
    """
    target = (fileDir or self.hotReloadDir)
    state = {
        'version': VERSION,
        'loginInfo': self.loginInfo,
        'cookies': self.s.cookies.get_dict(),
        'storage': self.storageClass.dumps(),
    }
    if os.path.exists(target):
        logger.debug('Attempting to overwrite session file.')
        tmp = f'{target}.{secrets.token_urlsafe(8)}'
        logger.debug(f'Write session file to {tmp}.')
        with open(tmp, 'wb') as fh:
            pickle.dump(state, fh)
        logger.debug(f'Remove old session file at {target}')
        os.unlink(target)
        logger.debug(f'Move new session file from {tmp} to {target}')
        os.rename(tmp, target)
        logger.debug(f'Session file overwrite completed.')
    else:
        # No existing file: write directly.
        with open(target, 'wb') as fh:
            pickle.dump(state, fh)
    logger.debug('Dump login status for hot reload successfully.')
class _ArrayOmittedIndices():
    """AST-building wrapper for an array expression whose indices were
    omitted; arithmetic operators combine ASTs while checking shapes."""

    def __init__(self, ast, shape):
        self.ast = tuple(ast)      # expression AST as nested tuples
        self.shape = tuple(shape)  # array shape of the expression
        self.ndim = len(self.shape)

    def __add__(self, other):
        # Addition requires identical shapes.
        if (self.shape != other.shape):
            raise _IntermediateError('Cannot add arrays with omitted indices because the shapes differ: {}, {}.'.format(self.shape, other.shape))
        return _ArrayOmittedIndices(('add', self.ast, other.ast), self.shape)

    def __sub__(self, other):
        # Subtraction requires identical shapes.
        if (self.shape != other.shape):
            raise _IntermediateError('Cannot subtract arrays with omitted indices because the shapes differ: {}, {}.'.format(self.shape, other.shape))
        return _ArrayOmittedIndices(('sub', self.ast, other.ast), self.shape)

    def __mul__(self, other):
        # Only scalar (0-d) * array is supported; broadcast the scalar by
        # appending each axis of the right operand.
        if (self.ndim != 0):
            raise _IntermediateError('Arrays with omitted indices cannot be multiplied.')
        self_ast = self.ast
        for n in other.shape:
            self_ast = ('append_axis', self_ast, _(n))
        return _ArrayOmittedIndices(('mul', self_ast, other.ast), other.shape)

    def __neg__(self):
        return _ArrayOmittedIndices(('neg', self.ast), self.shape)

    def __truediv__(self, other):
        # Division only by a scalar (0-d) denominator.
        if (other.ndim > 0):
            raise _IntermediateError('A denominator must have dimension 0.')
        return _ArrayOmittedIndices(('truediv', self.ast, other.ast), self.shape)

    def __pow__(self, other):
        # Exponent must be a scalar (0-d).
        if (other.ndim > 0):
            raise _IntermediateError('An exponent must have dimension 0.')
        return _ArrayOmittedIndices(('pow', self.ast, other.ast), self.shape)

    def replace(self, ast=None):
        """Return a copy, optionally with a new AST but the same shape."""
        return _ArrayOmittedIndices((self.ast if (ast is None) else ast), self.shape)
def event_contract(w3, wait_for_transaction, wait_for_block, address_conversion_func):
    """Fixture: deploy the event test contract and return a bound instance.

    Deploys via EVENT_CONTRACT_DATA from the coinbase account, waits for the
    receipt, and sanity-checks the deployment before returning.
    """
    wait_for_block(w3)
    event_contract_factory = w3.eth.contract(**EVENT_CONTRACT_DATA)
    deploy_txn_hash = event_contract_factory.constructor().transact({'from': w3.eth.coinbase, 'gas': 1000000})
    deploy_receipt = wait_for_transaction(w3, deploy_txn_hash)
    contract_address = address_conversion_func(deploy_receipt['contractAddress'])
    # Verify the on-chain code matches the factory's runtime bytecode.
    bytecode = w3.eth.get_code(contract_address)
    assert (bytecode == event_contract_factory.bytecode_runtime)
    event_contract = event_contract_factory(address=contract_address)
    assert (event_contract.address == contract_address)
    return event_contract
class CastPlayer(xbmc.Player):
    """Kodi player wrapper that reports playback state back to a cast client."""

    def __init__(self, cast):
        super(CastPlayer, self).__init__()
        self.cast = cast
        self.from_yt = False  # True while playing content started via the cast session

    def play_from_youtube(self, url):
        """Start playback of a cast-initiated URL."""
        self.from_yt = True
        self.play(url)

    @property  # NOTE(review): read as `self.status_code` below — decorator was evidently lost; restored.
    def status_code(self):
        """Current cast status code derived from the Kodi player state."""
        if xbmc.getCondVisibility('Player.Paused'):
            return STATUS_PAUSED
        if self.isPlaying():
            return STATUS_PLAYING
        return STATUS_STOPPED

    def playing(self):
        return xbmc.getCondVisibility('Player.Playing')

    def __should_report(self):
        # Only report for cast-initiated playback with a connected client.
        return (self.cast.has_client and self.from_yt)

    def __report_state_change(self, status_code=None):
        if (not self.__should_report()):
            return
        if (status_code is None):
            status_code = self.status_code
        self.cast.report_state_change(status_code, int(self.getTime()), int(self.getTotalTime()))

    def onPlayBackStarted(self):
        if (not self.__should_report()):
            return
        self.cast.report_now_playing()
        # Poll every 5s so the client sees playback position progress.
        while (self.isPlaying() and self.__should_report() and (not monitor.abortRequested())):
            self.__report_state_change()
            monitor.waitForAbort(5)

    def onPlayBackResumed(self):
        self.__report_state_change()

    def onPlayBackPaused(self):
        self.__report_state_change()

    def onPlayBackEnded(self):
        # Clear the cast flag before reporting so the session ends cleanly.
        should_report = self.__should_report()
        self.from_yt = False
        if should_report:
            self.cast.report_playback_ended()

    def onPlayBackSeek(self, time, seek_offset):
        # During a seek the player is buffering; report LOADING explicitly.
        self.__report_state_change(status_code=STATUS_LOADING)

    def onPlayBackStopped(self):
        if self.__should_report():
            self.cast.report_playback_stopped()
class Received():
    """Container for request data with frame-name-keyed get/set helpers.

    When ``name`` is omitted, ``get``/``set`` use the calling function's name
    as the key (via ``sys._getframe``). Values stored through ``set`` are also
    tagged for inclusion in ``response()``.
    """

    # Optional callable applied to the data by s(); must be set by the user.
    data_sent = None

    def __init__(self, data=None, tags=None):
        self.__data = data
        self.__tags = tags
        self.__response_tags = set()

    def set(self, value: Any, name: str=None):
        """Store *value* under *name* (default: caller's function name)."""
        self.__response_tags.add((name or sys._getframe().f_back.f_code.co_name))
        self.__data[(name or sys._getframe().f_back.f_code.co_name)] = value

    def get(self, name: str=None):
        """Fetch the value for *name* (default: caller's function name),
        falling back to the global events.data when no data was supplied."""
        if (self.__data is not None):
            return self.__data[(name or sys._getframe().f_back.f_code.co_name)]
        return events.data[(name or sys._getframe().f_back.f_code.co_name)]

    def s(self):
        """Apply the configured data_sent callable to the stored data."""
        if (self.data_sent is None):
            raise ValueError('data_sent must be defined')
        return self.data_sent(self.__data)

    @classmethod  # NOTE(review): takes `cls` and constructs via cls(...) — decorator was evidently lost; restored.
    def flask_request(cls, req):
        """Build a Received from a flask request (JSON body for POST)."""
        if (req.method == 'POST'):
            return cls(req.get_json())
        return cls(req)

    def response(self):
        """Serialize only the values stored via set() as a JSON object."""
        data = {}
        for t in list(self.__response_tags):
            data[t] = self.__data[t]
        return json.dumps(data)
def test_iter_sse_id_retry() -> None:
    """A bare `retry:` field yields one default event carrying the retry value."""
    # NOTE(review): the stream class base and the response construction were
    # garbled in the source (syntax error); reconstructed below assuming the
    # httpx API (SyncByteStream / Response) — confirm against the original.
    class Body(httpx.SyncByteStream):
        def __iter__(self) -> Iterator[bytes]:
            yield b'retry: 10000\n'
            yield b'\n'

    response = httpx.Response(200, headers={'content-type': 'text/event-stream'}, stream=Body())
    events = list(EventSource(response).iter_sse())
    assert (len(events) == 1)
    assert (events[0].event == 'message')
    assert (events[0].data == '')
    assert (events[0].id == '')
    assert (events[0].retry == 10000)
def extractXCrossJ(item):
    """Map a raw feed item onto a release message for the XCrossJ group.

    Returns None for previews / items without a chapter or volume, a release
    message for recognized series, and False otherwise.
    """
    title = item['title']
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(title)
    if ((not (chp or vol)) or ('preview' in title.lower())):
        return None
    if ('Character Analysis' in title):
        return False
    tags = item['tags']
    if ('Cross Gun' in tags):
        return buildReleaseMessageWithType(item, 'Cross Gun', vol, chp, frag=frag, postfix=postfix, tl_type='oel')
    if ('Konjiki no Moji Tsukai' in title):
        # The chapter name follows the colon in the title.
        konjiki_postfix = title.split(':', 1)[-1].strip()
        return buildReleaseMessageWithType(item, 'Konjiki no Wordmaster', vol, chp, frag=frag, postfix=konjiki_postfix)
    # Remaining series are recognized purely by tag, with the parsed postfix.
    for series_tag in (
        'Shinwa Densetsu no Eiyuu no Isekaitan',
        'Isekai Mahou wa Okureteru',
        'Nidome no Jinsei wo Isekai de',
    ):
        if (series_tag in tags):
            return buildReleaseMessageWithType(item, series_tag, vol, chp, frag=frag, postfix=postfix)
    return False
# NOTE(review): the event-registration decorator was garbled in the source;
# restored to the conventional SQLAlchemy form matching the visible argument
# list — confirm against the original file.
@event.listens_for(TimeLog.__table__, 'after_create')
def add_exclude_constraint(table, connection, **kwargs):
    """After the TimeLogs table is created on PostgreSQL, install an EXCLUDE
    constraint that rejects overlapping time logs for the same resource.

    Requires the btree_gist extension and a ts_to_box() SQL helper mapping a
    timestamp range onto a BOX so GiST can test overlap (&&). Each DDL step is
    best-effort: failures (e.g. object already exists) are logged and ignored.
    On non-PostgreSQL backends this is a no-op.
    """
    from sqlalchemy import DDL
    from sqlalchemy.exc import ProgrammingError, InternalError
    if (connection.engine.dialect.name == 'postgresql'):
        logger.debug('add_exclude_constraint is Running!')
        create_extension = DDL('CREATE EXTENSION btree_gist;')
        try:
            logger.debug('running "btree_gist" extension creation!')
            create_extension.execute(bind=connection)
            logger.debug('successfully created "btree_gist" extension!')
        except (ProgrammingError, InternalError) as e:
            # Most likely the extension already exists.
            logger.debug(('add_exclude_constraint: %s' % e))
        ts_to_box = DDL("CREATE FUNCTION ts_to_box(TIMESTAMPTZ, TIMESTAMPTZ)\nRETURNS BOX\nAS\n$$\n SELECT BOX(\n POINT(DATE_PART('epoch', $1), 0),\n POINT(DATE_PART('epoch', $2 - interval '1 minute'), 1)\n )\n$$\nLANGUAGE 'sql'\nIMMUTABLE;\n")
        try:
            logger.debug('creating ts_to_box function!')
            ts_to_box.execute(bind=connection)
            logger.debug('successfully created ts_to_box function')
        except (ProgrammingError, InternalError) as e:
            logger.debug(('failed creating ts_to_box function!: %s' % e))
        exclude_constraint = DDL('ALTER TABLE "TimeLogs" ADD CONSTRAINT\n overlapping_time_logs EXCLUDE USING GIST (\n resource_id WITH =,\n ts_to_box(start, "end") WITH &&\n )')
        try:
            logger.debug('running ExcludeConstraint for "TimeLogs" table creation!')
            exclude_constraint.execute(bind=connection)
            logger.debug('successfully created ExcludeConstraint for "TimeLogs" table!')
        except (ProgrammingError, InternalError) as e:
            logger.debug(('failed creating ExcludeConstraint for TimeLogs table!: %s' % e))
    else:
        logger.debug('it is not a PostgreSQL database not creating Exclude Constraint')
# NOTE(review): the class decorator was garbled in the source; restored as the
# Traits `provides` interface declaration matching the visible argument —
# confirm against the original file.
@provides(ISplashScreen)
class SplashScreen(MSplashScreen, Window):
    """Qt splash screen that can display a status text message."""

    # The image shown on the splash screen.
    image = Image(ImageResource('splash'))
    # Log level at or above which intercepted messages are shown.
    log_level = Int(DEBUG)
    # Whether intercepted log messages should be shown on the splash screen.
    show_log_messages = Bool(True)
    # The current status text.
    text = Str()
    # Optional text colour; defaults to black when unset.
    text_color = Any()
    # Optional font for the status text.
    text_font = Any()
    # Nominal text position (Qt anchors the message itself).
    text_location = Tuple(5, 5)

    def _create_control(self, parent):
        # Build the QSplashScreen and display the initial message.
        splash_screen = QtGui.QSplashScreen(self.image.create_image())
        self._qt4_show_message(splash_screen)
        return splash_screen

    def _text_changed(self):
        # Trait-change handler: refresh the message when `text` changes.
        if (self.control is not None):
            self._qt4_show_message(self.control)

    def _qt4_show_message(self, control):
        if (self.text_font is not None):
            control.setFont(self.text_font)
        if (self.text_color is None):
            text_color = QtCore.Qt.GlobalColor.black
        else:
            # Translate the traits colour value into a QColor.
            text_color = QtGui.QColor(self.text_color)
        control.showMessage(self.text, QtCore.Qt.AlignmentFlag.AlignLeft, text_color)
# NOTE(review): the class and method decorators were garbled in the source;
# restored as `@use_defaults()` and `@post_dump` based on the visible suffixes
# — confirm against the original file.
@use_defaults()
class UserSchemaPublic(SoftDeletionSchema):
    """Public user schema exposing only non-sensitive profile fields."""

    class Meta():
        type_ = 'user'
        self_view = 'v1.user_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize
    id = fields.Str(dump_only=True)
    email = TrimmedEmail(required=True)
    avatar_url = fields.Url(allow_none=True)
    first_name = fields.Str(allow_none=True)
    last_name = fields.Str(allow_none=True)
    public_name = fields.Str(allow_none=True)
    is_profile_public = fields.Bool(default=False, allow_none=False)
    original_image_url = fields.Url(dump_only=True, allow_none=True)
    thumbnail_image_url = fields.Url(dump_only=True, allow_none=True)
    small_image_url = fields.Url(dump_only=True, allow_none=True)
    icon_image_url = fields.Url(dump_only=True, allow_none=True)
    was_registered_with_order = fields.Boolean()

    @post_dump
    def handle_deleted_or_private_users(self, data):
        """Mask deleted users from everyone except staff and the user themself."""
        if (not data):
            return data
        can_access = (require_current_user() and (current_user.is_staff or (current_user.id == data.id)))
        if ((data.deleted_at is not None) and (not can_access)):
            # Replace the record with an anonymized placeholder.
            user = User(id=0, email='', first_name='deleted', last_name='user')
            return user
        return data
def graph(data, name, engine=None, inv=False):
    """Render a dependency graph as ``out/<name>.png``.

    *data* is an iterable of (child, parent) pairs; edges run parent->child
    unless *inv* is set, in which case they run child->parent.
    """
    g = Digraph()
    # Use a monospace font everywhere and box-shaped nodes.
    g.attr('graph', fontname='mono')
    g.attr('node', fontname='mono')
    g.attr('node', shape='box')
    if engine:
        g.engine = engine
    for child, parent in data:
        src, dst = (child, parent) if inv else (parent, child)
        g.edge(src.strip(), dst.strip())
    g.render(('out/' + name), format='png', cleanup=True)
class IMPALALearner(acme.Learner):
    """IMPALA (V-trace actor-critic) learner with pmap-based multi-device
    training driven by a Reverb sample iterator."""

    def __init__(self, obs_spec: specs.Array, unroll_fn: networks.PolicyValueRNN, initial_state_fn: Callable[([], hk.LSTMState)], iterator: Iterator[reverb.ReplaySample], optimizer: optax.GradientTransformation, random_key: networks.PRNGKey, discount: float=0.99, entropy_cost: float=0.0, baseline_cost: float=1.0, max_abs_reward: float=np.inf, counter: counting.Counter=None, logger: loggers.Logger=None, devices: Optional[Sequence[jax.xla.Device]]=None, prefetch_size: int=2, num_prefetch_threads: Optional[int]=None):
        # Train on all local devices unless an explicit device list is given;
        # state/prefetching only live on devices that are actually local.
        local_devices = jax.local_devices()
        self._devices = (devices or local_devices)
        self._local_devices = [d for d in self._devices if (d in local_devices)]
        # Haiku-transform the network constructors into pure (init, apply) pairs.
        unroll_fn = hk.without_apply_rng(hk.transform(unroll_fn, apply_rng=True))
        initial_state_fn = hk.without_apply_rng(hk.transform(initial_state_fn, apply_rng=True))
        loss_fn = impala_loss(unroll_fn, discount=discount, max_abs_reward=max_abs_reward, baseline_cost=baseline_cost, entropy_cost=entropy_cost)

        def sgd_step(state: TrainingState, sample: reverb.ReplaySample) -> Tuple[(TrainingState, Dict[(str, jnp.ndarray)])]:
            """One optimizer step on a batch; gradients averaged across replicas."""
            grad_fn = jax.value_and_grad(loss_fn, has_aux=True)
            ((loss_value, stats), gradients) = grad_fn(state.params, sample)
            # Average gradients over the pmap axis so replicas stay in sync.
            gradients = jax.lax.pmean(gradients, _PMAP_AXIS_NAME)
            grad_norm_unclipped = optax.global_norm(gradients)
            (updates, new_opt_state) = optimizer.update(gradients, state.opt_state)
            new_params = optax.apply_updates(state.params, updates)
            weight_norm = optimizers.l2_norm(new_params)
            metrics = {'loss': loss_value, 'weight_norm': weight_norm, 'grad_norm_unclipped': grad_norm_unclipped, **stats}
            new_state = TrainingState(params=new_params, opt_state=new_opt_state)
            return (new_state, metrics)

        def make_initial_state(key: jnp.ndarray) -> TrainingState:
            """Build initial network params and optimizer state from dummies."""
            dummy_obs = utils.zeros_like(obs_spec)
            dummy_obs = utils.add_batch_dim(dummy_obs)
            dummy_reset = jnp.array([False])
            initial_state = initial_state_fn.apply(None)
            initial_params = unroll_fn.init(key, dummy_obs, initial_state, dummy_reset)
            initial_opt_state = optimizer.init(initial_params)
            return TrainingState(params=initial_params, opt_state=initial_opt_state)
        state = make_initial_state(random_key)
        # Keep one replica of the training state per local device.
        self._state = utils.replicate_in_all_devices(state, self._local_devices)
        if (num_prefetch_threads is None):
            num_prefetch_threads = len(self._local_devices)
        self._prefetched_iterator = utils.sharded_prefetch(iterator, buffer_size=prefetch_size, devices=self._local_devices, num_threads=num_prefetch_threads)
        self._sgd_step = jax.pmap(sgd_step, axis_name=_PMAP_AXIS_NAME, devices=self._devices)
        self._counter = (counter or counting.Counter())
        self._logger = (logger or loggers.make_default_logger('learner'))

    def step(self):
        """Run one pmapped SGD step and log metrics and counters."""
        samples = next(self._prefetched_iterator)
        start = time.time()
        (self._state, results) = self._sgd_step(self._state, samples)
        # Metrics are replicated across devices; report a single copy.
        results = utils.get_from_first_device(results)
        counts = self._counter.increment(steps=1, time_elapsed=(time.time() - start))
        self._logger.write({**results, **counts})

    def get_variables(self, names: Sequence[str]) -> List[networks.Params]:
        # All replicas are identical; serve params from the first device.
        del names
        return [utils.get_from_first_device(self._state.params, as_numpy=False)]

    def save(self) -> TrainingState:
        # Un-replicate before checkpointing.
        return jax.tree_map(utils.get_from_first_device, self._state)

    def restore(self, state: TrainingState):
        # Re-replicate the restored state onto all local devices.
        self._state = utils.replicate_in_all_devices(state, self._local_devices)
def ws(event_loop: asyncio.AbstractEventLoop):
    """Fixture-style generator: run *event_loop* in a background thread and
    yield a helper that starts a websocket server on that loop.

    The server answers plain-HTTP GET /healthcheck with 200 before the
    websocket handshake; other paths proceed to the websocket handler.
    Teardown stops the loop and joins the thread.
    """
    t = Thread(target=event_loop.run_forever)
    t.start()

    async def _process_request(path, request_headers):
        # Short-circuit health checks; returning None (implicit for other
        # paths) lets the websocket handshake continue.
        if (path == '/healthcheck'):
            return (HTTPStatus.OK, {}, b'')

    def _start_ws(host: str, port: int, handler, ssl=None, sock=None):
        # An explicitly supplied socket takes precedence over host/port.
        kwargs = {'process_request': _process_request, 'ssl': ssl}
        if sock:
            kwargs['sock'] = sock
        else:
            kwargs['host'] = host
            kwargs['port'] = port
        # Schedule server startup on the loop thread (thread-safe call).
        event_loop.call_soon_threadsafe(asyncio.ensure_future, websockets.server.serve(handler, **kwargs))
    (yield _start_ws)
    # Teardown: stop the loop from its own thread, then join.
    event_loop.call_soon_threadsafe(event_loop.stop)
    t.join()
class OptionSeriesStreamgraphAccessibility(Options):
    """Accessibility options for streamgraph series.

    NOTE(review): each option appeared as an undecorated getter/setter method
    pair (the second definition silently shadowed the first); restored as
    properties matching the accessor pattern — confirm against the original.
    """

    @property
    def description(self):
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def descriptionFormat(self):
        return self._config_get(None)

    @descriptionFormat.setter
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def exposeAsGroupOnly(self):
        return self._config_get(None)

    @exposeAsGroupOnly.setter
    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def keyboardNavigation(self) -> 'OptionSeriesStreamgraphAccessibilityKeyboardnavigation':
        """Sub-options object for keyboard navigation (read-only accessor)."""
        return self._config_sub_data('keyboardNavigation', OptionSeriesStreamgraphAccessibilityKeyboardnavigation)

    @property
    def point(self) -> 'OptionSeriesStreamgraphAccessibilityPoint':
        """Sub-options object for point accessibility (read-only accessor)."""
        return self._config_sub_data('point', OptionSeriesStreamgraphAccessibilityPoint)
class OptionSeriesPieSonificationDefaultinstrumentoptionsMappingLowpassResonance(Options):
    """Lowpass-resonance mapping options for pie-series sonification.

    NOTE(review): each option appeared as an undecorated getter/setter method
    pair (the second definition silently shadowed the first); restored as
    properties matching the accessor pattern — confirm against the original.
    """

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)