import select
from errno import EINTR

# `AMQPConnectionError` is imported from the surrounding package in the original source.


class Poller(object):

    def __init__(self, fileno, exceptions, timeout=5):
        self.select = select
        self._fileno = fileno
        self._exceptions = exceptions
        self.timeout = timeout

    # Restored `@property`: the select() call below passes `self.fileno` as a
    # plain file descriptor, which only works if this is a property.
    @property
    def fileno(self):
        return self._fileno

    def is_ready(self):
        try:
            ready, _, _ = self.select.select([self.fileno], [], [], self.timeout)
            return bool(ready)
        except self.select.error as why:
            if why.args[0] != EINTR:
                self._exceptions.append(AMQPConnectionError(why))
            return False
class Persistence(Options):
    # The duplicated method names in the source are stripped @property/@setter
    # pairs; the decorators are restored here.

    @property
    def sort(self):
        return self._config_get()

    @sort.setter
    def sort(self, val):
        self._config(val)

    @property
    def filter(self):
        return self._config_get()

    @filter.setter
    def filter(self, val):
        self._config(val)

    @property
    def group(self) -> PersistenceGroup:
        return self.has_attribute(PersistenceGroup)

    @property
    def page(self) -> PersistencePage:
        return self.has_attribute(PersistencePage)

    @property
    def columns(self):
        return self._config_get()

    @columns.setter
    def columns(self, val):
        self._config(val)
def compress_zstd(input_file):
    import zstandard as zstd

    output_file = f'{input_file}.zs'
    cctx = zstd.ZstdCompressor()
    with open(input_file, 'rb') as f_in, open(output_file, 'wb') as f_out:
        compressed_data = cctx.compress(f_in.read())
        f_out.write(compressed_data)
    print(f'Compressed file: {output_file}')
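# For round-tripping, a minimal decompression counterpart (a sketch, not part of
# the original source; assumes files produced by compress_zstd above, whose
# frames embed the content size):
def decompress_zstd(input_file):
    import zstandard as zstd

    output_file = input_file.removesuffix('.zs')  # undo the '.zs' suffix added above
    dctx = zstd.ZstdDecompressor()
    with open(input_file, 'rb') as f_in, open(output_file, 'wb') as f_out:
        f_out.write(dctx.decompress(f_in.read()))
    print(f'Decompressed file: {output_file}')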
class IDPP(Interpolator):

    def interpolate(self, initial_geom, final_geom, **kwargs):
        linear_interpol = super().interpolate(initial_geom, final_geom)
        idpp_geoms = [initial_geom] + linear_interpol + [final_geom]
        align_geoms(idpp_geoms)
        for geom in idpp_geoms:
            geom.coords *= BOHR2ANG
        initial_pd = pdist(initial_geom.coords3d)
        final_pd = pdist(final_geom.coords3d)
        steps = 1 + self.between
        pd_diff = (final_pd - initial_pd) / steps
        for i, geom in enumerate(idpp_geoms):
            geom.set_calculator(IDPPCalculator(initial_pd + i * pd_diff))
        neb = NEB(idpp_geoms)
        opt_kwargs = {
            'max_cycles': 1000,
            'rms_force': 0.01,
            'align': False,
            'check_coord_diffs': False,
        }
        opt = FIRE(neb, **opt_kwargs)
        opt.run()
        for geom in idpp_geoms:
            geom.clear()
            geom.coords *= ANG2BOHR
        interpolated_geoms = idpp_geoms[1:-1]
        return interpolated_geoms
def test_wallet_storage_database_nonexistent_creates(tmp_path) -> None:
    wallet_filepath = os.path.join(tmp_path, 'walletfile')
    storage = WalletStorage(wallet_filepath)
    try:
        assert type(storage._store) is DatabaseStore
        assert storage.get('migration') == MIGRATION_CURRENT
    finally:
        storage.close()
class Dispatcher(object):

    def __init__(self, comm=None, on=False):
        self.comm = comm
        self.on = on

    def __call__(self, func, func_args, func_kwargs, profile_name=''):
        if not self.on:
            return func(*func_args, **func_kwargs)
        return self.profile_function(func, func_args, func_kwargs, profile_name)

    def profile_function(self, func, args, kwargs, profile_name):
        comm = self.comm
        # Profiling requires an MPI communicator (the source checked
        # `type(comm) is None`, which is always False).
        if comm is None:
            raise ValueError('The Dispatcher does not have a valid Comm object')
        prof = Profile()
        func_return = prof.runcall(func, *args, **kwargs)
        profile_rank_name = profile_name + str(comm.rank())
        stripped_profile_name = profile_name + '_c' + str(comm.rank())
        prof.dump_stats(profile_rank_name)
        comm.barrier()
        if comm.isMaster():
            import io
            profilingLog = io.StringIO()
            stats = pstats.Stats(profile_rank_name, stream=profilingLog)
            stats.__dict__['files'] = ['Maximum times across MPI tasks for',
                                       stats.__dict__['files'][0]]
            statsm = stats.stats
            for i in range(1, comm.size()):
                pstatsi = pstats.Stats(profile_name + str(i))
                statsi = pstatsi.stats
                stats.__dict__['files'].append(pstatsi.__dict__['files'][0])
                for f, c in statsi.items():
                    if f in statsm:
                        if c[2] > statsm[f][2]:
                            statsm[f] = c
                    else:
                        statsm[f] = c
            stats.sort_stats('cumulative')
            stats.print_stats(30)
            stats.sort_stats('time')
            stats.print_stats(30)
            logEvent(profilingLog.getvalue())
            msg = '\nWall clock percentage of top 20 calls\n\n'
            total = 0.0
            for f in stats.__dict__['fcn_list'][0:20]:
                if f[0] == '~':
                    fname = f[-1].strip('<').strip('>')
                else:
                    fname = "function '{2:s}' at {0:s}:{1:d}".format(*f)
                msg += '{0:11.1%} {1:s}\n'.format(
                    old_div(statsm[f][2], stats.__dict__['total_tt']), str(fname))
                total += old_div(statsm[f][2], stats.__dict__['total_tt'])
            logEvent(msg)
            logEvent('Representing ' + repr(total * 100.0) + '%')
        return func_return
def named_parameter(**kwargs):
    if kwargs['default'] is False and kwargs['conv'] is is_true:
        del kwargs['default']
        return FlagParameter(value=True, **kwargs)
    elif kwargs['conv'] is _implicit_converters[int]:
        return IntOptionParameter(**kwargs)
    else:
        return OptionParameter(**kwargs)
class OptionSeriesTimelineStatesInactive(Options):
    # Stripped @property/@setter decorators restored.

    @property
    def animation(self) -> 'OptionSeriesTimelineStatesInactiveAnimation':
        return self._config_sub_data('animation', OptionSeriesTimelineStatesInactiveAnimation)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def opacity(self):
        return self._config_get(0.2)

    @opacity.setter
    def opacity(self, num: float):
        self._config(num, js_type=False)
def test_line_continuations():
    file_path = test_dir / 'parse' / 'line_continuations.f90'
    file = FortranFile(str(file_path))
    err_str, _ = file.load_from_disk()
    assert err_str is None
    try:
        file.parse()
        assert True
    except Exception as e:
        print(e)
        assert False
class ForemastRunner:

    def __init__(self):
        debug_flag()
        self.email = os.getenv('EMAIL')
        self.env = os.getenv('ENV')
        self.group = os.getenv('PROJECT')
        self.region = os.getenv('REGION')
        self.repo = os.getenv('GIT_REPO')
        self.runway_dir = os.getenv('RUNWAY_DIR')
        self.artifact_path = os.getenv('ARTIFACT_PATH')
        self.artifact_version = os.getenv('ARTIFACT_VERSION')
        self.artifact_branch = os.getenv('ARTIFACT_BRANCH', 'master')
        self.promote_stage = os.getenv('PROMOTE_STAGE', 'latest')
        self.provider = os.getenv('PROVIDER', 'aws')
        self.git_project = '{}/{}'.format(self.group, self.repo)
        parsed = foremastutils.Parser(self.git_project)
        generated = foremastutils.Generator(*parsed.parse_url(), formats=consts.APP_FORMATS)
        self.app = generated.app_name()
        self.trigger_job = generated.jenkins()['name']
        self.git_short = generated.gitlab()['main']
        self.raw_path = './raw.properties'
        self.json_path = self.raw_path + '.json'
        self.configs = None

    def write_configs(self):
        utils.banner('Generating Configs')
        if not self.runway_dir:
            app_configs = configs.process_git_configs(git_short=self.git_short)
        else:
            app_configs = configs.process_runway_configs(runway_dir=self.runway_dir)
        self.configs = configs.write_variables(app_configs=app_configs,
                                               out_file=self.raw_path,
                                               git_short=self.git_short)

    def create_app(self):
        utils.banner('Creating Spinnaker App')
        spinnakerapp = SpinnakerApp(provider=self.provider, app=self.app,
                                    email=self.email, project=self.group,
                                    repo=self.repo,
                                    pipeline_config=self.configs['pipeline'])
        spinnakerapp.create()

    def create_pipeline(self, onetime=None):
        utils.banner('Creating Pipeline')
        kwargs = {
            'app': self.app,
            'trigger_job': self.trigger_job,
            'prop_path': self.json_path,
            'base': None,
            'runway_dir': self.runway_dir,
        }
        pipeline_type = self.configs['pipeline']['type']
        if pipeline_type not in consts.ALLOWED_TYPES and pipeline_type not in consts.MANUAL_TYPES:
            raise NotImplementedError('Pipeline type "{0}" not permitted.'.format(pipeline_type))
        if not onetime:
            if pipeline_type == 'lambda':
                spinnakerpipeline = pipeline.SpinnakerPipelineLambda(**kwargs)
            elif pipeline_type == 's3':
                spinnakerpipeline = pipeline.SpinnakerPipelineS3(**kwargs)
            elif pipeline_type == 'datapipeline':
                spinnakerpipeline = pipeline.SpinnakerPipelineDataPipeline(**kwargs)
            elif pipeline_type == 'stepfunction':
                spinnakerpipeline = pipeline.SpinnakerPipelineStepFunction(**kwargs)
            elif pipeline_type in consts.MANUAL_TYPES:
                spinnakerpipeline = pipeline.SpinnakerPipelineManual(**kwargs)
            elif pipeline_type == 'cloudfunction':
                spinnakerpipeline = pipeline.SpinnakerPipelineCloudFunction(**kwargs)
            else:
                spinnakerpipeline = pipeline.SpinnakerPipeline(**kwargs)
        else:
            spinnakerpipeline = pipeline.SpinnakerPipelineOnetime(onetime=onetime, **kwargs)
        spinnakerpipeline.create_pipeline()

    def create_aws_iam(self):
        utils.banner('Creating AWS IAM')
        iam.create_iam_resources(env=self.env, app=self.app)

    def create_gcp_iam(self):
        utils.banner('Creating GCP IAM')
        env = self.get_current_gcp_env()
        gcp_iam_client = GcpIamResourceClient(env=env, app_name=self.app,
                                              group_name=self.group,
                                              repo_name=self.repo,
                                              configs=self.configs)
        gcp_iam_client.create_iam_resources()

    def create_archaius(self):
        utils.banner('Creating S3')
        s3.init_properties(env=self.env, app=self.app)

    def create_s3app(self):
        utils.banner('Creating S3 App Infrastructure')
        primary_region = self.configs['pipeline']['primary_region']
        s3obj = s3.S3Apps(app=self.app, env=self.env, region=self.region,
                          prop_path=self.json_path, primary_region=primary_region)
        s3obj.create_bucket()

    def deploy_s3app(self):
        utils.banner('Deploying S3 App')
        primary_region = self.configs['pipeline']['primary_region']
        s3obj = s3.S3Deployment(app=self.app, env=self.env, region=self.region,
                                prop_path=self.json_path,
                                artifact_path=self.artifact_path,
                                artifact_version=self.artifact_version,
                                artifact_branch=self.artifact_branch,
                                primary_region=primary_region)
        s3obj.upload_artifacts()

    def promote_s3app(self):
        utils.banner('Promoting S3 App')
        primary_region = self.configs['pipeline']['primary_region']
        s3obj = s3.S3Deployment(app=self.app, env=self.env, region=self.region,
                                prop_path=self.json_path,
                                artifact_path=self.artifact_path,
                                artifact_version=self.artifact_version,
                                artifact_branch=self.artifact_branch,
                                primary_region=primary_region)
        s3obj.promote_artifacts(promote_stage=self.promote_stage)

    def create_secgroups(self):
        utils.banner('Creating Security Group')
        sgobj = securitygroup.SpinnakerSecurityGroup(app=self.app, env=self.env,
                                                     region=self.region,
                                                     prop_path=self.json_path)
        sgobj.create_security_group()

    def create_awslambda(self):
        utils.banner('Creating Lambda Function')
        awslambdaobj = awslambda.LambdaFunction(app=self.app, env=self.env,
                                                region=self.region,
                                                prop_path=self.json_path,
                                                artifact_path=None)
        awslambdaobj.deploy_lambda_function()
        utils.banner('Creating Lambda Event')
        lambdaeventobj = awslambda.LambdaEvent(app=self.app, env=self.env,
                                               region=self.region,
                                               prop_path=self.json_path)
        lambdaeventobj.create_lambda_events()

    def deploy_awslambda(self):
        utils.banner('Deploying Lambda Function')
        lambda_function = awslambda.LambdaFunction(app=self.app, env=self.env,
                                                   region=self.region,
                                                   prop_path=self.json_path,
                                                   artifact_path=self.artifact_path)
        lambda_function.deploy_lambda_function()
        utils.banner('Creating Lambda Event')
        lambdaeventobj = awslambda.LambdaEvent(app=self.app, env=self.env,
                                               region=self.region,
                                               prop_path=self.json_path)
        lambdaeventobj.create_lambda_events()

    def create_elb(self):
        utils.banner('Creating ELB')
        elbobj = elb.SpinnakerELB(app=self.app, env=self.env, region=self.region,
                                  prop_path=self.json_path)
        elbobj.create_elb()

    def create_dns(self):
        utils.banner('Creating DNS')
        elb_subnet = self.configs[self.env]['elb']['subnet_purpose']
        regions = self.configs[self.env]['regions']
        failover = self.configs[self.env]['dns']['failover_dns']
        primary_region = self.configs['pipeline']['primary_region']
        regionspecific_dns = self.configs[self.env]['dns']['region_specific']
        dnsobj = dns.SpinnakerDns(app=self.app, env=self.env, region=self.region,
                                  prop_path=self.json_path, elb_subnet=elb_subnet)
        if len(regions) > 1 and failover:
            dnsobj.create_elb_dns(regionspecific=True)
            dnsobj.create_failover_dns(primary_region=primary_region)
        else:
            if regionspecific_dns:
                dnsobj.create_elb_dns(regionspecific=True)
            if self.region == primary_region:
                dnsobj.create_elb_dns(regionspecific=False)

    def create_autoscaling_policy(self):
        utils.banner('Creating Scaling Policy')
        policyobj = autoscaling_policy.AutoScalingPolicy(app=self.app, env=self.env,
                                                         region=self.region,
                                                         prop_path=self.json_path)
        policyobj.create_policy()

    def create_scheduled_actions(self):
        utils.banner('Creating Scheduled Actions')
        actionsobj = scheduled_actions.ScheduledActions(app=self.app, env=self.env,
                                                        region=self.region,
                                                        prop_path=self.json_path)
        actionsobj.create_scheduled_actions()

    def create_datapipeline(self):
        utils.banner('Creating Data Pipeline')
        dpobj = datapipeline.AWSDataPipeline(app=self.app, env=self.env,
                                             region=self.region,
                                             prop_path=self.json_path)
        dpobj.create_datapipeline()
        dpobj.set_pipeline_definition()
        if self.configs[self.env].get('datapipeline').get('activate_on_deploy'):
            dpobj.activate_pipeline()

    def create_stepfunction(self):
        utils.banner('Creating AWS Step Function')
        sfnobj = stepfunction.AWSStepFunction(app=self.app, env=self.env,
                                              region=self.region,
                                              prop_path=self.json_path)
        sfnobj.create_stepfunction()

    def deploy_cloudfunction(self):
        utils.banner('Creating GCP Cloud Function')
        env = self.get_current_gcp_env()
        cloud_function_client = CloudFunctionsClient(self.app, env, self.configs)
        cloud_function_client.prepare_client()
        cloud_function_client.deploy_function(self.artifact_path, self.region)
        LOG.info('Finished deploying cloud function')

    def slack_notify(self):
        utils.banner('Sending slack notification')
        if self.env.startswith('prod'):
            notify = slacknotify.SlackNotification(app=self.app, env=self.env,
                                                   prop_path=self.json_path)
            notify.post_message()
        else:
            LOG.info('No slack message sent, not production environment')

    def get_current_gcp_env(self):
        all_gcp_envs = GcpEnvironment.get_environments_from_config()
        if self.env not in all_gcp_envs:
            raise ForemastError('GCP environment %s not found in configuration', self.env)
        return all_gcp_envs[self.env]

    def cleanup(self):
        os.remove(self.raw_path)

    def check_env_defined(self):
        if not self.env:
            raise ForemastError('Environment not set')
        if self.env not in self.configs:
            raise ForemastError(
                "Environment '{}' not found in pipeline configs. ".format(self.env)
                + 'Check pipeline.json and application-master-{}.json'.format(self.env))
def add_fcos_configs(cfg):
    cfg.MODEL.FCOS = CN()
    cfg.MODEL.FCOS.NUM_CLASSES = 80
    cfg.MODEL.FCOS.IN_FEATURES = ['p3', 'p4', 'p5', 'p6', 'p7']
    cfg.MODEL.FCOS.NUM_CONVS = 4
    cfg.MODEL.FCOS.HEAD_NORM = 'GN'
    cfg.MODEL.FCOS.SCORE_THRESH_TEST = 0.04
    cfg.MODEL.FCOS.TOPK_CANDIDATES_TEST = 1000
    cfg.MODEL.FCOS.NMS_THRESH_TEST = 0.6
    cfg.MODEL.FCOS.FOCAL_LOSS_ALPHA = 0.25
    cfg.MODEL.FCOS.FOCAL_LOSS_GAMMA = 2.0
    cfg.FCOS_PREPARE_FOR_EXPORT = 'default_fcos_prepare_for_export'
def convert_to_bitvec(val):
    if isinstance(val, z3.BoolRef):
        return z3.If(val, z3.BitVecVal(1, 256), z3.BitVecVal(0, 256))
    elif isinstance(val, bool):
        return z3.BitVecVal(1, 256) if val else z3.BitVecVal(0, 256)
    elif isinstance(val, int):
        return z3.BitVecVal(val, 256)
    else:
        return z3.simplify(val)
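# A quick sanity check of convert_to_bitvec (illustrative only, not part of the
# original source; assumes the z3-solver package is installed):
import z3

x = z3.BitVec('x', 256)
assert convert_to_bitvec(True).as_long() == 1    # bool -> 256-bit constant 1
assert convert_to_bitvec(42).as_long() == 42     # int -> BitVecVal
cond = convert_to_bitvec(x > 0)                  # BoolRef -> If(cond, 1, 0)
assert z3.simplify(z3.substitute(cond, (x, z3.BitVecVal(5, 256)))).as_long() == 1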
class PluginHandler:

    def __init__(self, ert: 'EnKFMain', notifier: 'ErtNotifier',
                 plugin_jobs: List['WorkflowJob'], parent_window):
        self.__ert = ert
        self.__plugins = []
        for job in plugin_jobs:
            plugin = Plugin(self.__ert, notifier, job)
            self.__plugins.append(plugin)
            plugin.setParentWindow(parent_window)
        self.__plugins = sorted(self.__plugins, key=Plugin.getName)

    def ert(self) -> 'EnKFMain':
        return self.__ert

    def __iter__(self) -> Iterator[Plugin]:
        index = 0
        while index < len(self.__plugins):
            yield self.__plugins[index]
            index += 1

    def __getitem__(self, index) -> Plugin:
        return self.__plugins[index]

    def __len__(self):
        return len(self.__plugins)
class TestListIOSApps(BaseProjectManagementTest):
    # Both endpoint URLs were truncated in extraction; left as placeholders.
    _LISTING_URL = ''
    _LISTING_PAGE_2_URL = ''

    def test_list_ios_apps(self):
        recorder = self._instrument_service(statuses=[200], responses=[LIST_IOS_APPS_RESPONSE])
        ios_apps = project_management.list_ios_apps()
        expected_app_ids = set(['1::ios:ca5cade5', '1::ios:ca5cade5cafe'])
        assert set(app.app_id for app in ios_apps) == expected_app_ids
        assert len(recorder) == 1
        self._assert_request_is_correct(recorder[0], 'GET', TestListIOSApps._LISTING_URL)

    def test_list_ios_apps_rpc_error(self):
        recorder = self._instrument_service(statuses=[503], responses=[UNAVAILABLE_RESPONSE])
        with pytest.raises(exceptions.UnavailableError) as excinfo:
            project_management.list_ios_apps()
        assert 'Backend servers are over capacity' in str(excinfo.value)
        assert excinfo.value.cause is not None
        # The attribute name after `excinfo.value.` was lost in extraction;
        # `http_response` is assumed from the paired `cause` check.
        assert excinfo.value.http_response is not None
        assert len(recorder) == 1

    def test_list_ios_apps_empty_list(self):
        recorder = self._instrument_service(statuses=[200], responses=[json.dumps(dict())])
        ios_apps = project_management.list_ios_apps()
        assert ios_apps == []
        assert len(recorder) == 1
        self._assert_request_is_correct(recorder[0], 'GET', TestListIOSApps._LISTING_URL)

    def test_list_ios_apps_multiple_pages(self):
        recorder = self._instrument_service(
            statuses=[200, 200],
            responses=[LIST_IOS_APPS_PAGE_1_RESPONSE, LIST_IOS_APPS_PAGE_2_RESPONSE])
        ios_apps = project_management.list_ios_apps()
        expected_app_ids = set(['1::ios:ca5cade5', '1::ios:ca5cade5cafe'])
        assert set(app.app_id for app in ios_apps) == expected_app_ids
        assert len(recorder) == 2
        self._assert_request_is_correct(recorder[0], 'GET', TestListIOSApps._LISTING_URL)
        self._assert_request_is_correct(recorder[1], 'GET', TestListIOSApps._LISTING_PAGE_2_URL)

    def test_list_ios_apps_multiple_pages_rpc_error(self):
        recorder = self._instrument_service(
            statuses=[200, 503],
            responses=[LIST_IOS_APPS_PAGE_1_RESPONSE, UNAVAILABLE_RESPONSE])
        with pytest.raises(exceptions.UnavailableError) as excinfo:
            project_management.list_ios_apps()
        assert 'Backend servers are over capacity' in str(excinfo.value)
        assert excinfo.value.cause is not None
        assert excinfo.value.http_response is not None
        assert len(recorder) == 2
class DeleteAllSubjects:

    class Request:
        pass

    def __init__(self, domain: str, port: str, api_key: str):
        self.add_subject = SubjectClient(api_key=api_key, domain=domain, port=port)

    def execute(self) -> dict:
        result: dict = self.add_subject.delete()
        return result
# The leading `@pytest.mark` was stripped in extraction; restored here. The
# `@task`/`@workflow` decorators on the inner functions are an assumption, since
# the flattening dropped decorators and the type-check failure implies them.
@pytest.mark.skipif(not can_import('magic'), reason='Libmagic is not installed')
def test_mismatching_file_types(local_dummy_txt_file):
    @task
    def t1(path: FlyteFile[typing.TypeVar('txt')]) -> FlyteFile[typing.TypeVar('jpeg')]:
        return path

    @workflow
    def my_wf(path: FlyteFile[typing.TypeVar('txt')]) -> FlyteFile[typing.TypeVar('jpeg')]:
        f = t1(path=path)
        return f

    with pytest.raises(TypeError) as excinfo:
        my_wf(path=local_dummy_txt_file)
    assert 'Incorrect file type, expected image/jpeg, got text/plain' in str(excinfo.value)
import functools
import math


class Solution:

    def minCut(self, s: str) -> int:
        n = len(s)

        # The bare `_cache(None)` tokens in the source are stripped decorators,
        # restored here as functools.lru_cache.
        @functools.lru_cache(None)
        def isPalindrome(l, r):
            if l >= r:
                return True
            if s[l] != s[r]:
                return False
            return isPalindrome(l + 1, r - 1)

        @functools.lru_cache(None)
        def dp(i):
            if i == n:
                return 0
            ans = math.inf
            for j in range(i, n):
                if isPalindrome(i, j):
                    ans = min(ans, dp(j + 1) + 1)
            return ans

        return dp(0) - 1
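# Quick checks for minCut (illustrative; uses the class defined above):
assert Solution().minCut('aab') == 1   # one cut: 'aa' | 'b'
assert Solution().minCut('a') == 0     # a single palindrome needs no cut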
class ShanghaiBackwardsHeader(BlockHeaderSedesAPI):
    # Stripped @classmethod decorators restored (both methods take `cls`).

    @classmethod
    def serialize(cls, obj: BlockHeaderAPI) -> List[bytes]:
        return obj.serialize(obj)

    @classmethod
    def deserialize(cls, encoded: List[bytes]) -> BlockHeaderAPI:
        num_fields = len(encoded)
        if num_fields == 17:
            return ShanghaiBlockHeader.deserialize(encoded)
        if num_fields == 16:
            return LondonBlockHeader.deserialize(encoded)
        elif num_fields == 15:
            return BlockHeader.deserialize(encoded)
        else:
            raise ValueError(
                f'Unexpected number of fields in block header. '
                f'Got {num_fields} in {encoded!r}')
def create_perturbed_dataclass(perturbed_ds, perturbations_per_sample: int,
                               original_dataset_size: int, perturbation_type: str):
    total_perturbations = len(perturbed_ds.data) * perturbations_per_sample
    return PerturbedTextDataset(
        data=perturbed_ds.data,
        metadata=perturbed_ds.meta,
        total_perturbations=total_perturbations,
        original_dataset_size=original_dataset_size,
        perturbations_per_sample=perturbations_per_sample,
        perturbation_type=perturbation_type,
    )
from dataclasses import dataclass


# `@dataclass` is assumed: the bare annotated fields and keyword construction
# via `cls(...)` imply dataclass-style field generation.
@dataclass
class Filehead:
    version_number: int
    year: int
    version_bound: int
    type_of_grid: TypeOfGrid
    rock_model: RockModel
    grid_format: GridFormat

    @classmethod
    def from_egrid(cls, values: list[int]):
        if len(values) < 7:
            raise ValueError(f'Filehead given too few values, {len(values)} < 7')
        return cls(
            version_number=values[0],
            year=values[1],
            version_bound=values[3],
            type_of_grid=TypeOfGrid.alternate_code(values[4]),
            rock_model=RockModel(values[5]),
            grid_format=GridFormat(values[6]),
        )

    def to_egrid(self) -> np.ndarray:
        result = np.zeros((100,), dtype=np.int32)
        result[0] = self.version_number
        result[1] = self.year
        result[3] = self.version_bound
        result[4] = self.type_of_grid.alternate_value
        result[5] = self.rock_model.value
        result[6] = self.grid_format.value
        return result
class FiltersAPI(APIClient):

    def get_filters(
        self,
        test_results_totals: Dict[Optional[str], TotalsSchema],
        test_runs_totals: Dict[Optional[str], TotalsSchema],
        models: Dict[str, NormalizedModelSchema],
        sources: Dict[str, NormalizedSourceSchema],
        models_runs: List[ModelRunsSchema],
    ) -> FiltersSchema:
        test_results_filters = self._get_test_filters(test_results_totals, models, sources)
        test_runs_filters = self._get_test_filters(test_runs_totals, models, sources)
        model_runs_filters = self._get_model_runs_filters(models_runs)
        return FiltersSchema(
            test_results=test_results_filters,
            test_runs=test_runs_filters,
            model_runs=model_runs_filters,
        )

    # Neither helper takes `self`; stripped @staticmethod decorators restored.
    @staticmethod
    def _get_test_filters(
        totals: Dict[Optional[str], TotalsSchema],
        models: Dict[str, NormalizedModelSchema],
        sources: Dict[str, NormalizedSourceSchema],
    ) -> List[FilterSchema]:
        failures_filter = FilterSchema(name='failures', display_name='Failures')
        warnings_filter = FilterSchema(name='warnings', display_name='Warnings')
        errors_filter = FilterSchema(name='errors', display_name='Errors')
        passed_filter = FilterSchema(name='passed', display_name='Passed')
        no_tests_filter = FilterSchema(name='no_test', display_name='No Tests')
        totals_models_ids = totals.keys()
        artifacts: List[ArtifactSchema] = [*models.values(), *sources.values()]
        for artifact in artifacts:
            if artifact.unique_id and artifact.unique_id not in totals_models_ids:
                no_tests_filter.add_model_unique_id(artifact.unique_id)
        for model_unique_id, total in totals.items():
            if total.failures:
                failures_filter.add_model_unique_id(model_unique_id)
            if total.warnings:
                warnings_filter.add_model_unique_id(model_unique_id)
            if total.errors:
                errors_filter.add_model_unique_id(model_unique_id)
            if total.passed:
                passed_filter.add_model_unique_id(model_unique_id)
            if (not total.failures and not total.warnings
                    and not total.errors and not total.passed):
                no_tests_filter.add_model_unique_id(model_unique_id)
        filters = [failures_filter, warnings_filter, errors_filter,
                   passed_filter, no_tests_filter]
        return [filter for filter in filters if len(filter.model_unique_ids)]

    @staticmethod
    def _get_model_runs_filters(models_runs: List[ModelRunsSchema]) -> List[FilterSchema]:
        successful_runs_filter = FilterSchema(name='success', display_name='Successful Runs')
        failed_runs_filter = FilterSchema(name='errors', display_name='Failed Runs')
        no_runs_filter = FilterSchema(name='no_runs', display_name='No Runs')
        for model_runs in models_runs:
            totals = model_runs.totals
            unique_id = model_runs.unique_id
            if totals.success:
                successful_runs_filter.add_model_unique_id(unique_id)
            if totals.errors:
                failed_runs_filter.add_model_unique_id(unique_id)
            if not totals.success and not totals.errors:
                no_runs_filter.add_model_unique_id(unique_id)
        filters = [successful_runs_filter, failed_runs_filter, no_runs_filter]
        return [filter for filter in filters if len(filter.model_unique_ids)]
class OptionSeriesWindbarbSonificationTracksMappingLowpassResonance(Options):
    # Stripped @property/@setter decorators restored.

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
def test_cli_ethpm_with_type_error_exception(cli_tester, testproject):
    cli_tester.monkeypatch.setattr(
        'brownie._cli.ethpm._list',
        lambda project_path: cli_tester.raise_type_error_exception('foobar'))
    cli_tester.run_and_test_parameters('ethpm list', parameters=None)
    assert cli_tester.mock_subroutines.called is False
    assert cli_tester.mock_subroutines.call_count == 0
def test_working_hours_attribute_is_working_properly():
    import copy

    from stalker import defaults
    working_hours = copy.copy(defaults.working_hours)
    working_hours['sun'] = [[540, 1000]]
    working_hours['sat'] = [[500, 800], [900, 1440]]
    wh = WorkingHours()
    wh.working_hours = working_hours
    assert wh.working_hours == working_hours
    assert wh.working_hours['sun'] == working_hours['sun']
    assert wh.working_hours['sat'] == working_hours['sat']
def text2kata(text: str) -> str:
    parsed = pyopenjtalk.run_frontend(text)
    res = []
    for parts in parsed:
        # The arguments of .replace() held non-ASCII characters lost in extraction.
        word, yomi = replace_punctuation(parts['orig']), parts['pron'].replace('', '')
        if yomi:
            if re.match(_MARKS, yomi):
                if len(word) > 1:
                    word = [replace_punctuation(i) for i in list(word)]
                    yomi = word
                    res += yomi
                    # The source also did `sep += word` here, but `sep` is not
                    # defined anywhere in this snippet, so that line is dropped.
                    continue
                elif word not in rep_map.keys() and word not in rep_map.values():
                    word = ','
                yomi = word
            res.append(yomi)
        elif word in _SYMBOL_TOKENS:
            res.append(word)
        elif word in ('', ''):
            # Both membership literals and the appended literal held Japanese
            # characters lost in extraction.
            res.append('')
        elif word in _NO_YOMI_TOKENS:
            pass
        else:
            res.append(word)
    return hira2kata(''.join(res))
# The leading `@pytest.fixture` was stripped in extraction; restored here.
@pytest.fixture(scope='function')
def manual_dataset_config(integration_manual_config: ConnectionConfig, db: Session,
                          example_datasets: List[Dict]) -> Generator:
    manual_dataset = example_datasets[8]
    fides_key = manual_dataset['fides_key']
    integration_manual_config.name = fides_key
    integration_manual_config.key = fides_key
    integration_manual_config.save(db=db)
    ctl_dataset = CtlDataset.create_from_dataset_dict(db, manual_dataset)
    dataset = DatasetConfig.create(db=db, data={
        'connection_config_id': integration_manual_config.id,
        'fides_key': fides_key,
        'ctl_dataset_id': ctl_dataset.id,
    })
    yield dataset
    dataset.delete(db=db)
    ctl_dataset.delete(db=db)
def from_ethpm(uri: str) -> 'TempProject':
    manifest = get_manifest(uri)
    compiler_config = {
        'evm_version': None,
        'solc': {'version': None, 'optimize': True, 'runs': 200},
        'vyper': {'version': None},
    }
    project = TempProject(manifest['package_name'], manifest['sources'], compiler_config)
    if web3.isConnected():
        for contract_name in project.keys():
            for address in get_deployment_addresses(manifest, contract_name):
                project[contract_name].at(address)
    return project
class OptionSeriesVennStatesSelect(Options):
    # Stripped @property/@setter decorators restored.

    @property
    def animation(self) -> 'OptionSeriesVennStatesSelectAnimation':
        return self._config_sub_data('animation', OptionSeriesVennStatesSelectAnimation)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def marker(self) -> 'OptionSeriesVennStatesSelectMarker':
        return self._config_sub_data('marker', OptionSeriesVennStatesSelectMarker)
class MergePipeline:

    def __init__(self, pipeline_list: List, execute_merge_on):
        self.pipeline_list = pipeline_list
        self.execute_merge_on = execute_merge_on

    def fit(self, index):
        for pipeline in self.pipeline_list:
            pipeline.fit(index)

    def _single_batch(self, batch):
        dfs = []
        for pipeline in self.pipeline_list:
            dfs.append(pipeline.execute(batch))
        batch_result = reduce(
            lambda l, r: pd.merge(l, r, on=self.execute_merge_on, how='left'), dfs)
        return batch_result

    def execute(self, index, batch_size=None) -> pd.DataFrame:
        if batch_size is None:
            batch_size = len(index)
        batches = [index[k:k + batch_size] for k in range(0, len(index), batch_size)]
        result = []
        for batch in batches:
            result.append(self._single_batch(batch))
        result = pd.concat(result, axis=0)
        return result
class OptionSeriesAreasplineSonificationContexttracksMappingGapbetweennotes(Options):
    # Stripped @property/@setter decorators restored.

    @property
    def mapFunction(self):
        return self._config_get(None)

    @mapFunction.setter
    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mapTo(self):
        return self._config_get(None)

    @mapTo.setter
    def mapTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def max(self):
        return self._config_get(None)

    @max.setter
    def max(self, num: float):
        self._config(num, js_type=False)

    @property
    def min(self):
        return self._config_get(None)

    @min.setter
    def min(self, num: float):
        self._config(num, js_type=False)

    @property
    def within(self):
        return self._config_get(None)

    @within.setter
    def within(self, value: Any):
        self._config(value, js_type=False)
# The tokens `_plot _dependency(plt, 'matplotlib')` are remnants of stripped
# decorators; `@check_dependency(plt, 'matplotlib')` is reconstructed from the
# visible arguments, while the other decorator is not recoverable.
@check_dependency(plt, 'matplotlib')
def plot_spectra(freqs, power_spectra, log_freqs=False, log_powers=False,
                 freq_range=None, colors=None, labels=None, ax=None, **plot_kwargs):
    ax = check_ax(ax, plot_kwargs.pop('figsize', PLT_FIGSIZES['spectral']))
    plot_kwargs = check_plot_kwargs(plot_kwargs, {'linewidth': 2.0})
    grid = plot_kwargs.pop('grid', True)
    if freq_range is not None:
        freq_range = np.log10(freq_range) if log_freqs else freq_range
    plt_powers = (np.reshape(power_spectra, (1, -1))
                  if isinstance(freqs, np.ndarray) and np.ndim(power_spectra) == 1
                  else power_spectra)
    plt_freqs = (repeat(freqs)
                 if isinstance(freqs, np.ndarray) and freqs.ndim == 1
                 else freqs)
    labels = (plot_kwargs.pop('label')
              if 'label' in plot_kwargs.keys() and labels is None
              else labels)
    labels = repeat(labels) if not isinstance(labels, list) else cycle(labels)
    colors = repeat(colors) if not isinstance(colors, list) else cycle(colors)
    for freqs, powers, color, label in zip(plt_freqs, plt_powers, colors, labels):
        freqs = np.log10(freqs) if log_freqs else freqs
        powers = np.log10(powers) if log_powers else powers
        if color:
            plot_kwargs['color'] = color
        ax.plot(freqs, powers, label=label, **plot_kwargs)
    ax.set_xlim(freq_range)
    style_spectrum_plot(ax, log_freqs, log_powers, grid)
def extractNovellibrariumBlogspotCom(item):
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('Banished to Another World', 'Banished to Another World', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag,
                                               postfix=postfix, tl_type=tl_type)
    titlemap = [
        ('The Dragon Talisman, Chapter ', 'The Dragon Talisman', 'translated'),
        ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'),
        ('Master of Dungeon', 'Master of Dungeon', 'oel'),
    ]
    for titlecomponent, name, tl_type in titlemap:
        if titlecomponent.lower() in item['title'].lower():
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag,
                                               postfix=postfix, tl_type=tl_type)
    return False
class Test(unittest.TestCase):

    def test_utcnow(self):
        dt_now = datetime.utcnow()
        now = udatetime.utcnow()
        self.assertIsInstance(now, datetime)
        self.assertEqual(now.year, dt_now.year)
        self.assertEqual(now.month, dt_now.month)
        self.assertEqual(now.day, dt_now.day)
        self.assertEqual(now.hour, dt_now.hour)
        self.assertEqual(now.minute, dt_now.minute)
        self.assertEqual(now.second, dt_now.second)
        self.assertEqual(now.utcoffset(), timedelta(0))
        self.assertEqual(now.dst(), NO_DST)

    def test_now(self):
        dt_now = datetime.now()
        now = udatetime.now()
        self.assertIsInstance(now, datetime)
        self.assertEqual(now.year, dt_now.year)
        self.assertEqual(now.month, dt_now.month)
        self.assertEqual(now.day, dt_now.day)
        self.assertEqual(now.hour, dt_now.hour)
        self.assertEqual(now.minute, dt_now.minute)
        self.assertEqual(now.second, dt_now.second)

    def test_from_and_to_string(self):
        rfc3339 = '2016-07-15T12:33:20.123000+01:30'
        dt = udatetime.from_string(rfc3339)
        self.assertIsInstance(dt, datetime)
        self.assertEqual(dt.year, 2016)
        self.assertEqual(dt.month, 7)
        self.assertEqual(dt.day, 15)
        self.assertEqual(dt.hour, 12)
        self.assertEqual(dt.minute, 33)
        self.assertEqual(dt.second, 20)
        self.assertEqual(dt.microsecond, 123000)
        self.assertEqual(dt.utcoffset(), timedelta(hours=1, minutes=30))
        self.assertEqual(dt.dst(), NO_DST)
        self.assertEqual(udatetime.to_string(dt), rfc3339)
        rfc3339 = '2016-07-18T12:58:26.485897-02:00'
        dt = udatetime.from_string(rfc3339)
        self.assertEqual(udatetime.to_string(dt), rfc3339)

    def test_fromtimestamp(self):
        DAY = 86400
        HOUR = 3600
        TZ_CEST = udatetime.TZFixedOffset(60 * 2)
        for t in range(0, DAY - 2 * HOUR, HOUR):
            dt = datetime.fromtimestamp(t)
            udt = udatetime.fromtimestamp(t)
            self.assertIsInstance(udt, datetime)
            self.assertEqual(udt.year, dt.year)
            self.assertEqual(udt.month, dt.month)
            self.assertEqual(udt.day, dt.day)
            self.assertEqual(udt.hour, dt.hour)
            self.assertEqual(udt.minute, dt.minute)
            self.assertEqual(udt.second, dt.second)
            self.assertEqual(udt.microsecond, dt.microsecond)
            self.assertEqual(udt.utcoffset(), timedelta(0))
            self.assertEqual(udt.dst(), NO_DST)
        for t in range(0, DAY, HOUR):
            dt = datetime.fromtimestamp(t, TZ_CEST)
            udt = udatetime.fromtimestamp(t, TZ_CEST)
            self.assertIsInstance(udt, datetime)
            self.assertEqual(udt.year, dt.year)
            self.assertEqual(udt.month, dt.month)
            self.assertEqual(udt.day, dt.day)
            self.assertEqual(udt.hour, dt.hour)
            self.assertEqual(udt.minute, dt.minute)
            self.assertEqual(udt.second, dt.second)
            self.assertEqual(udt.microsecond, dt.microsecond)
            self.assertEqual(udt.utcoffset(), timedelta(hours=2))
            self.assertEqual(udt.dst(), NO_DST)
        for t in range(0, DAY * -1, HOUR * -1):
            dt = datetime.fromtimestamp(t, TZ_CEST)
            udt = udatetime.fromtimestamp(t, TZ_CEST)
            self.assertIsInstance(udt, datetime)
            self.assertEqual(udt.year, dt.year)
            self.assertEqual(udt.month, dt.month)
            self.assertEqual(udt.day, dt.day)
            self.assertEqual(udt.hour, dt.hour)
            self.assertEqual(udt.minute, dt.minute)
            self.assertEqual(udt.second, dt.second)
            self.assertEqual(udt.microsecond, dt.microsecond)
            self.assertEqual(udt.utcoffset(), timedelta(hours=2))
            self.assertEqual(udt.dst(), NO_DST)

    def test_utcfromtimestamp(self):
        DAY = 86400
        HOUR = 3600
        for t in range(0, DAY, HOUR):
            dt = datetime.utcfromtimestamp(t)
            udt = udatetime.utcfromtimestamp(t)
            self.assertIsInstance(udt, datetime)
            self.assertEqual(udt.year, dt.year)
            self.assertEqual(udt.month, dt.month)
            self.assertEqual(udt.day, dt.day)
            self.assertEqual(udt.hour, dt.hour)
            self.assertEqual(udt.minute, dt.minute)
            self.assertEqual(udt.second, dt.second)
            self.assertEqual(udt.microsecond, dt.microsecond)
            self.assertEqual(udt.utcoffset(), timedelta(0))
            self.assertEqual(udt.dst(), NO_DST)
        for t in range(0, DAY * -1, HOUR * -1):
            dt = datetime.utcfromtimestamp(t)
            udt = udatetime.utcfromtimestamp(t)
            self.assertIsInstance(udt, datetime)
            self.assertEqual(udt.year, dt.year)
            self.assertEqual(udt.month, dt.month)
            self.assertEqual(udt.day, dt.day)
            self.assertEqual(udt.hour, dt.hour)
            self.assertEqual(udt.minute, dt.minute)
            self.assertEqual(udt.second, dt.second)
            self.assertEqual(udt.microsecond, dt.microsecond)
            self.assertEqual(udt.utcoffset(), timedelta(0))
            self.assertEqual(udt.dst(), NO_DST)

    def test_broken_from_string(self):
        invalid = [
            '2016-07-15 12:33:20.123000+01:30',
            '2016-13-15T12:33:20.123000+01:30',
            'T12:33:20.123000+01:30',
            'Hello World',
            '2016-07-15 12:33:20.123000+01:302016-07-15 12:33:20.123000+01:30',
            '2016-07-15T12:33:20.1Z0',
            '2016-07-15T12:33:20.1 +01:30f',
        ]
        for r in invalid:
            with self.assertRaises(ValueError):
                udatetime.from_string(r)

    def test_ok_from_string(self):
        rfc3339s = [
            '2016-07-15 T 12:33:20.123000 +01:30',
            '2016-07-15 T 12:33:20.123000 +01:30',
            '2016-07-15T12:33:20.123 +01:30',
            '2016-07-15T12:33:20 +01:30',
            '2016-07-15T12:33:20 Z',
            '2016-07-15T12:33:20',
            '2016-07-15t12:33:20',
            '2016-07-15T12:33:20.1 +01:30',
        ]
        for r in rfc3339s:
            self.assertIsInstance(udatetime.from_string(r), datetime)

    def test_tzone(self):
        rfc3339 = '2016-07-15T12:33:20.123000+01:30'
        dt = udatetime.from_string(rfc3339)
        offset = dt.tzinfo.utcoffset()
        dst = dt.tzinfo.dst()
        self.assertIsInstance(offset, timedelta)
        self.assertEqual(offset.total_seconds() / 60, 90)
        self.assertEqual(dst, NO_DST)
        rfc3339 = '2016-07-15T12:33:20.123000Z'
        dt = udatetime.from_string(rfc3339)
        offset = dt.tzinfo.utcoffset()
        dst = dt.tzinfo.dst()
        self.assertIsInstance(offset, timedelta)
        self.assertEqual(offset.total_seconds(), 0)
        self.assertEqual(dst, NO_DST)
        rfc3339 = '2016-07-15T12:33:20.123000-02:00'
        dt = udatetime.from_string(rfc3339)
        offset = dt.tzinfo.utcoffset()
        dst = dt.tzinfo.dst()
        self.assertIsInstance(offset, timedelta)
        self.assertEqual(offset.total_seconds() / 60, -120)
        self.assertEqual(dst, NO_DST)

    def test_precision(self):
        t = .549871
        dt = datetime.fromtimestamp(t)
        udt = udatetime.fromtimestamp(t)
        self.assertEqual(udt.microsecond, dt.microsecond)

    def test_raise_on_not_TZFixedOffset(self):
        class TZInvalid(tzinfo):
            def utcoffset(self, dt=None):
                return timedelta(seconds=0)

            def dst(self, dt=None):
                return timedelta(seconds=0)

        dt = datetime.now(TZInvalid())
        with self.assertRaises(ValueError):
            udatetime.to_string(dt)

    def test_variable_fraction(self):
        rfc3339 = '2016-07-15T12:33:20.1'
        d1 = udatetime.from_string(rfc3339 + '0' * 5 + 'Z')
        for x in range(0, 6):
            d2 = udatetime.from_string(rfc3339 + '0' * x + 'Z')
            self.assertEqual(d1, d2)
        self.assertEqual(udatetime.from_string('2016-07-15T12:33:20.123Z'),
                         udatetime.from_string('2016-07-15T12:33:20.123000Z'))
        self.assertEqual(udatetime.from_string('2016-07-15T12:33:20.0Z'),
                         udatetime.from_string('2016-07-15T12:33:20Z'))
class Source(Box, AbstractSource, ABC):
    source_time: SourceTimeType = pydantic.Field(
        ...,
        title='Source Time',
        description='Specification of the source time-dependence.',
        discriminator=TYPE_TAG_STR,
    )

    # The `_property` tokens in the source were stripped `@property` decorators.
    @property
    def plot_params(self) -> PlotParams:
        return plot_params_source

    @property
    def geometry(self) -> Box:
        return Box(center=self.center, size=self.size)

    @property
    def _injection_axis(self):
        return None

    @property
    def _dir_vector(self) -> Tuple[float, float, float]:
        return None

    @property
    def _pol_vector(self) -> Tuple[float, float, float]:
        return None

    # Stripped decorator restored as a pydantic validator; the decorator name is
    # an assumption reconstructed from the surviving arguments.
    @pydantic.validator('source_time', always=True)
    def _freqs_lower_bound(cls, val):
        _assert_min_freq(val.freq0, msg_start="'source_time.freq0'")
        return val

    def plot(self, x: float = None, y: float = None, z: float = None,
             ax: Ax = None, **patch_kwargs) -> Ax:
        kwargs_arrow_base = patch_kwargs.pop('arrow_base', None)
        ax = Box.plot(self, x=x, y=y, z=z, ax=ax, **patch_kwargs)
        kwargs_alpha = patch_kwargs.get('alpha')
        arrow_alpha = ARROW_ALPHA if kwargs_alpha is None else kwargs_alpha
        if self._dir_vector is not None:
            bend_radius = None
            bend_axis = None
            if hasattr(self, 'mode_spec'):
                bend_radius = self.mode_spec.bend_radius
                bend_axis = self._bend_axis
            ax = self._plot_arrow(x=x, y=y, z=z, ax=ax, direction=self._dir_vector,
                                  bend_radius=bend_radius, bend_axis=bend_axis,
                                  color=ARROW_COLOR_SOURCE, alpha=arrow_alpha,
                                  both_dirs=False, arrow_base=kwargs_arrow_base)
        if self._pol_vector is not None:
            ax = self._plot_arrow(x=x, y=y, z=z, ax=ax, direction=self._pol_vector,
                                  color=ARROW_COLOR_POLARIZATION, alpha=arrow_alpha,
                                  both_dirs=False, arrow_base=kwargs_arrow_base)
        return ax
class AEATestCaseManyFlaky(AEATestCaseMany):
    run_count: int = 0

    @classmethod
    def setup_class(cls) -> None:
        super(AEATestCaseManyFlaky, cls).setup_class()
        if len(cls.method_list) > 1:
            raise ValueError(f'{cls.__name__} can only contain one test method!')
        cls.run_count += 1

    @classmethod
    def teardown_class(cls) -> None:
        super(AEATestCaseManyFlaky, cls).teardown_class()
def _generate_python_code_line(opt):
    if opt.is_flag():
        funcargs = opt.opttext() + ", action='store_true'"
    elif opt.is_option():
        funcargs = opt.opttext()
        if opt.type == ArgType.STRING:
            choices = opt.value.split(',')
            if len(choices) > 1:
                funcargs += f', choices={choices}'
                value = f"'{choices[0]}'"
            else:
                value = f"'{opt.value}'"
        elif opt.type == ArgType.FILE:
            value = f"'{opt.value}'"
        else:
            value = opt.value
        default_str = 'None' if opt.type is ArgType.FILE else str(value)
        funcargs += f', default={default_str}'
        if opt.type is not ArgType.STRING:
            funcargs += f', type={opt.type}'
    elif opt.is_positional():
        if opt.value.isidentifier():
            funcargs = f"'{opt.value}'"
        else:
            funcargs = f"'{opt.var_name}'"
        if opt.type is not ArgType.STRING:
            funcargs += f', type={opt.type}'
    else:
        raise RuntimeError('Invalid options provided')
    if opt.type is not None:
        if opt.type == ArgType.INT:
            helptext = 'an int value'
        elif opt.type == ArgType.FLOAT:
            helptext = 'a float value'
        elif opt.type == ArgType.FILE:
            helptext = 'a filename'
        elif opt.type == ArgType.STRING:
            helptext = 'a string'
        else:
            raise RuntimeError('Invalid type setting')
    elif opt.is_flag():
        helptext = f'{opt.desc} flag'
    funcargs += f", help='{helptext}'"
    return f'parser.add_argument({funcargs})'
class reversed(_coconut_has_iter):
    __slots__ = ()
    __doc__ = getattr(_coconut.reversed, '__doc__', '<see help(py_reversed)>')

    def __new__(cls, iterable):
        if _coconut.isinstance(iterable, _coconut.range):
            return iterable[::-1]
        if (_coconut.getattr(iterable, '__reversed__', None) is None
                or _coconut.isinstance(iterable, (_coconut.list, _coconut.tuple))):
            self = _coconut_has_iter.__new__(cls, iterable)
            return self
        return _coconut.reversed(iterable)

    def __repr__(self):
        return 'reversed(%s)' % (_coconut.repr(self.iter),)

    def __reduce__(self):
        return (self.__class__, (self.iter,))

    def __copy__(self):
        return self.__class__(self.get_new_iter())

    def __iter__(self):
        return _coconut.iter(_coconut.reversed(self.iter))

    def __getitem__(self, index):
        if _coconut.isinstance(index, _coconut.slice):
            return _coconut_iter_getitem(
                self.iter,
                _coconut.slice(
                    -(index.start + 1) if index.start is not None else None,
                    -(index.stop + 1) if index.stop else None,
                    -(index.step if index.step is not None else 1),
                ),
            )
        return _coconut_iter_getitem(self.iter, -(index + 1))

    def __reversed__(self):
        return self.iter

    def __len__(self):
        if not _coconut.isinstance(self.iter, _coconut.abc.Sized):
            return _coconut.NotImplemented
        return _coconut.len(self.iter)

    def __contains__(self, elem):
        return elem in self.iter

    def count(self, elem):
        return self.iter.count(elem)

    def index(self, elem):
        return _coconut.len(self.iter) - self.iter.index(elem) - 1

    def __fmap__(self, func):
        return self.__class__(_coconut_map(func, self.iter))
class TestUnshardedLightningDLRM(unittest.TestCase):

    def test_train_model(self) -> None:
        num_embeddings = 100
        embedding_dim = 10
        num_dense = 50
        eb1_config = EmbeddingBagConfig(name='t1', embedding_dim=embedding_dim,
                                        num_embeddings=num_embeddings,
                                        feature_names=['f1', 'f3'])
        eb2_config = EmbeddingBagConfig(name='t2', embedding_dim=embedding_dim,
                                        num_embeddings=num_embeddings,
                                        feature_names=['f2'])
        ebc = EmbeddingBagCollection(tables=[eb1_config, eb2_config])
        model = UnshardedLightningDLRM(ebc, dense_in_features=num_dense,
                                       dense_arch_layer_sizes=[20, embedding_dim],
                                       over_arch_layer_sizes=[5, 1])
        datamodule = RandomRecDataModule(num_dense=num_dense)
        trainer = pl.Trainer(max_epochs=3, enable_checkpointing=False,
                             limit_train_batches=100, limit_val_batches=100,
                             limit_test_batches=100, logger=False)
        batch = next(iter(datamodule.init_loader))
        model(dense_features=batch.dense_features, sparse_features=batch.sparse_features)
        trainer.fit(model, datamodule=datamodule)
        trainer.test(model, datamodule=datamodule)
def test_updates_query_total(bodhi_container, db_container):
    db_ip = db_container.get_IPv4s()[0]
    query = 'SELECT COUNT(*) FROM updates'
    conn = psycopg2.connect('dbname=bodhi2 user=postgres host={}'.format(db_ip))
    with conn:
        with conn.cursor() as curs:
            curs.execute(query)
            total = curs.fetchone()[0]
    conn.close()
    result = run_cli(bodhi_container, ['updates', 'query'])
    assert result.exit_code == 0
    last_line = result.output.split('\n')[-2]
    assert last_line == '{} updates found ({} shown)'.format(total, min(total, 20))
def test_raises_only_field_errors_unexpected_missing(unknown_event_id_field_error,
                                                     invalid_organization_id_field_error):
    errors = [unknown_event_id_field_error, invalid_organization_id_field_error]
    with pytest.raises(pytest.raises.Exception):
        with raises_only_field_errors({'event_id': 'MISSING'}):
            raise Client.CallActionError(actions=[ActionResponse(action='', errors=errors)])
def test_ttcompile_ttf_to_woff_without_zopfli(tmpdir):
    inttx = os.path.join('Tests', 'ttx', 'data', 'TestTTF.ttx')
    outwoff = tmpdir.join('TestTTF.woff')
    options = ttx.Options([], 1)
    options.flavor = 'woff'
    ttx.ttCompile(inttx, str(outwoff), options)
    assert outwoff.check(file=True)
    ttf = TTFont(str(outwoff))
    expected_tables = ('head', 'hhea', 'maxp', 'OS/2', 'name', 'cmap', 'hmtx',
                       'fpgm', 'prep', 'cvt ', 'loca', 'glyf', 'post', 'gasp', 'DSIG')
    for table in expected_tables:
        assert table in ttf
def test_write_to_runpath_produces_the_transformed_field_in_storage(
        snake_oil_field_example, storage):
    ensemble_config = snake_oil_field_example.ensemble_config
    experiment_id = storage.create_experiment(
        parameters=ensemble_config.parameter_configuration)
    prior_ensemble = storage.create_ensemble(experiment_id, name='prior', ensemble_size=5)
    active_realizations = [0, 3, 4]
    sample_prior(prior_ensemble, active_realizations)
    permx_field = ensemble_config['PERMX']
    assert (permx_field.nx, permx_field.ny, permx_field.nz) == (10, 10, 5)
    assert permx_field.truncation_min is None
    assert permx_field.truncation_max is None
    assert permx_field.input_transformation is None
    assert permx_field.output_transformation is None
    for real in active_realizations:
        permx_field.write_to_runpath(Path(f'export/with/path/{real}'), real, prior_ensemble)
        assert read_field(
            f'export/with/path/{real}/permx.grdecl', 'PERMX', permx_field.mask,
            Shape(permx_field.nx, permx_field.ny, permx_field.nz),
        ).flatten().tolist() == pytest.approx(
            permx_field._transform_data(
                permx_field._fetch_from_ensemble(real, prior_ensemble)
            ).flatten().tolist())
    for real in [1, 2]:
        with pytest.raises(KeyError,
                           match=f"No dataset 'PERMX' in storage for realization {real}"):
            permx_field.write_to_runpath(Path(f'export/with/path/{real}'), real, prior_ensemble)
        assert not os.path.isfile(f'export/with/path/{real}/permx.grdecl')
class TestOSHelpers(unittest.TestCase):

    def test_current_os(self):
        for plat in ['linux', 'linux2']:
            with mock.patch.object(sys, 'platform', plat):
                self.assertEqual(current_os(), 'linux')
        with mock.patch.object(sys, 'platform', 'darwin'):
            self.assertEqual(current_os(), 'macos')
        for plat in ['win32', 'cygwin']:
            with mock.patch.object(sys, 'platform', plat):
                self.assertEqual(current_os(), 'windows')
        with mock.patch.object(sys, 'platform', 'unknown'):
            with self.assertRaises(Exception):
                current_os()

    def test_other_oses(self):
        plat_and_others = [
            ('linux', ['windows', 'macos']),
            ('darwin', ['windows', 'linux']),
            ('win32', ['linux', 'macos']),
        ]
        for plat, others in plat_and_others:
            with mock.patch.object(sys, 'platform', plat):
                self.assertEqual(set(others), set(other_oses()))
def _ragged_forward(layer: Model[Padded, Padded], Xr: Ragged,
                    is_train: bool) -> Tuple[Ragged, Callable]:
    list2padded = layer.ops.list2padded
    padded2list = layer.ops.padded2list
    unflatten = layer.ops.unflatten
    flatten = layer.ops.flatten
    Yp, get_dXp = layer(list2padded(unflatten(Xr.data, Xr.lengths)), is_train)

    def backprop(dYr: Ragged):
        flattened = flatten(padded2list(get_dXp(list2padded(unflatten(dYr.data, dYr.lengths)))))
        return Ragged(flattened, dYr.lengths)

    flattened = flatten(padded2list(Yp))
    return (Ragged(flattened, Xr.lengths), backprop)
class TestTachoMotorSpeedPValue(ptc.ParameterizedTestCase):

    # Renamed from `test_speed_i_negative` in the source, a typo in this
    # speed_p test suite.
    def test_speed_p_negative(self):
        with self.assertRaises(IOError):
            self._param['motor'].speed_p = -1

    def test_speed_p_zero(self):
        self._param['motor'].speed_p = 0
        self.assertEqual(self._param['motor'].speed_p, 0)

    def test_speed_p_positive(self):
        self._param['motor'].speed_p = 1
        self.assertEqual(self._param['motor'].speed_p, 1)

    def test_speed_p_after_reset(self):
        self._param['motor'].speed_p = 1
        self._param['motor'].command = 'reset'
        if 'speed_pid' in self._param:
            expected = self._param['speed_pid']['kP']
        else:
            expected = motor_info[self._param['motor'].driver_name]['speed_p']
        self.assertEqual(self._param['motor'].speed_p, expected)
class QuadLimbDark(pm.Flat):
    __citations__ = ('kipping13',)

    def __init__(self, *args, **kwargs):
        add_citations_to_model(self.__citations__, kwargs.get('model', None))
        shape = kwargs.get('shape', 2)
        try:
            if list(shape)[0] != 2:
                raise ValueError('the first dimension should be exactly 2')
        except TypeError:
            if shape != 2:
                raise ValueError('the first dimension should be exactly 2')
        kwargs['shape'] = shape
        kwargs['transform'] = tr.quad_limb_dark
        super(QuadLimbDark, self).__init__(*args, **kwargs)
        default = np.zeros(shape)
        default[0] = np.sqrt(0.5)
        default[1] = 0.0
        self._default = default

    def _random(self, size=None):
        q = np.moveaxis(np.random.uniform(0, 1, size=size), 0, -len(self.shape))
        sqrtq1 = np.sqrt(q[0])
        twoq2 = 2 * q[1]
        u = np.stack([sqrtq1 * twoq2, sqrtq1 * (1 - twoq2)], axis=0)
        return np.moveaxis(u, 0, -len(self.shape))

    def random(self, point=None, size=None):
        return generate_samples(self._random, dist_shape=self.shape,
                                broadcast_shape=self.shape, size=size)

    def logp(self, value):
        return tt.zeros_like(tt.as_tensor_variable(value))
class OptionSeriesTilemapSonificationContexttracks(Options):
    # Stripped @property/@setter decorators restored.

    @property
    def activeWhen(self) -> 'OptionSeriesTilemapSonificationContexttracksActivewhen':
        return self._config_sub_data(
            'activeWhen', OptionSeriesTilemapSonificationContexttracksActivewhen)

    @property
    def instrument(self):
        return self._config_get('piano')

    @instrument.setter
    def instrument(self, text: str):
        self._config(text, js_type=False)

    @property
    def mapping(self) -> 'OptionSeriesTilemapSonificationContexttracksMapping':
        return self._config_sub_data(
            'mapping', OptionSeriesTilemapSonificationContexttracksMapping)

    @property
    def midiName(self):
        return self._config_get(None)

    @midiName.setter
    def midiName(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointGrouping(self) -> 'OptionSeriesTilemapSonificationContexttracksPointgrouping':
        return self._config_sub_data(
            'pointGrouping', OptionSeriesTilemapSonificationContexttracksPointgrouping)

    @property
    def roundToMusicalNotes(self):
        return self._config_get(True)

    @roundToMusicalNotes.setter
    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showPlayMarker(self):
        return self._config_get(True)

    @showPlayMarker.setter
    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def timeInterval(self):
        return self._config_get(None)

    @timeInterval.setter
    def timeInterval(self, num: float):
        self._config(num, js_type=False)

    @property
    def type(self):
        return self._config_get('instrument')

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)

    @property
    def valueInterval(self):
        return self._config_get(None)

    @valueInterval.setter
    def valueInterval(self, num: float):
        self._config(num, js_type=False)

    @property
    def valueMapFunction(self):
        return self._config_get('linear')

    @valueMapFunction.setter
    def valueMapFunction(self, value: Any):
        self._config(value, js_type=False)

    @property
    def valueProp(self):
        return self._config_get('"x"')

    @valueProp.setter
    def valueProp(self, text: str):
        self._config(text, js_type=False)
def extractHereticunboundWordpressCom(item):
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag,
                                               postfix=postfix, tl_type=tl_type)
    return False
def encrypt(rHost='127.0.0.1', rUsername='user_iptvpro', rPassword='',
            rDatabase='xtream_iptvpro', rServerID=1, rPort=7999):
    # Python 2 code: relies on itertools.izip and str.encode('base64').
    try:
        os.remove('/home/xtreamcodes/iptv_xtream_codes/config')
    except:
        pass
    rf = open('/home/xtreamcodes/iptv_xtream_codes/config', 'wb')
    # XOR the JSON config against a repeating key, then base64-encode it.
    rf.write(''.join(chr(ord(c) ^ ord(k)) for c, k in izip(
        '{"host":"%s","db_user":"%s","db_pass":"%s","db_name":"%s","server_id":"%d", "db_port":"%d", "pconnect":"0"}'
        % (rHost, rUsername, rPassword, rDatabase, rServerID, rPort),
        cycle('5709650b0dc6de575025b1'))).encode('base64').replace('\n', ''))
    rf.close()
def jwt_none(url, method, headers, body, jwt_loc, jwt_key, jwt_token, jwt_data, scanid=None):
    # Forge a token signed with the 'none' algorithm and replay the request.
    encoded_jwt = jwt.encode(jwt_data, '', algorithm='none')
    if jwt_loc == 'url':
        # `value` (and `key` below) are not defined in this snippet; they come
        # from the enclosing module in the original source.
        url = url.replace(value[0], encoded_jwt)
    if jwt_loc == 'header':
        headers[key] = encoded_jwt
    jwt_request = req.api_request(url, method, headers, body)
    if str(jwt_request.status_code)[0] == '5' or str(jwt_request.status_code)[0] == '4':
        pass
    else:
        print('%s[+] API is vulnerable to JWT none algo vulnerability %s'
              % (api_logger.R, api_logger.W))
        attack_result = {
            'id': 8,
            'scanid': scanid,
            'url': url,
            'alert': 'JWT none Algorithm vulnerability',
            'impact': 'High',
            'req_headers': headers,
            'req_body': body,
            'res_headers': jwt_request.headers,
            'res_body': jwt_request.text,
        }
        dbupdate.insert_record(attack_result)
        return True
class AttachmentView(PermissionRequiredMixin, DetailView):
    model = Attachment

    def render_to_response(self, context, **response_kwargs):
        filename = os.path.basename(self.object.file.name)
        content_type, _ = mimetypes.guess_type(self.object.file.name)
        if not content_type:
            content_type = 'text/plain'
        response = HttpResponse(self.object.file, content_type=content_type)
        response['Content-Disposition'] = 'attachment; filename={}'.format(filename)
        return response

    def get_controlled_object(self):
        return self.get_object().post.topic.forum

    def perform_permissions_check(self, user, obj, perms):
        return self.request.forum_permission_handler.can_download_files(obj, user)
class TestOPAWDLParser(unittest.TestCase):

    def setUp(self):
        self.parser = OPAWDLParser()
        self.test_plugin_name = 'test_plugin'
        self.test_cmd_args_list = ['-a=b', '-c=d']
        self.test_opawdl_state = OPAWDLState(plugin_name=self.test_plugin_name,
                                             cmd_args_list=self.test_cmd_args_list)
        self.test_state_name = 'state_1'
        self.test_opawdl_workflow = OPAWDLWorkflow(
            starts_at=self.test_state_name,
            states={self.test_state_name: self.test_opawdl_state})

    def test_parse_json_str_to_workflow(self):
        valid_workflow = OPAWDLWorkflow(
            starts_at=self.test_state_name,
            states={self.test_state_name: self.test_opawdl_state})
        result = self.parser.parse_json_str_to_workflow(str(valid_workflow))
        self.assertEqual(result, valid_workflow)

    def test_parse_json_str_to_workflow_no_end(self):
        test_state_no_end = self.test_opawdl_state
        test_state_no_end.is_end = False
        test_workflow_no_end = OPAWDLWorkflow(
            starts_at=self.test_state_name,
            states={self.test_state_name: test_state_no_end})
        with self.assertRaisesRegex(Exception,
                                    'Input workflow string does not have an ending state.'):
            self.parser.parse_json_str_to_workflow(str(test_workflow_no_end))

    def test_parse_json_str_to_workflow_multiple_ends(self):
        test_workflow_multiple_ends = OPAWDLWorkflow(
            starts_at=self.test_state_name,
            states={self.test_state_name: self.test_opawdl_state,
                    'state_2': self.test_opawdl_state})
        with self.assertRaisesRegex(Exception, 'Input workflow string has multiple'):
            self.parser.parse_json_str_to_workflow(str(test_workflow_multiple_ends))
# The leading `@pytest.mark` was stripped in extraction; restored here.
@pytest.mark.django_db
def test_validate_recipient_id_failures():
    recipient_id = 'a52a7544-829b-c925-e1ba-d04d3171c09a-P'
    baker.make('recipient.RecipientProfile', **TEST_RECIPIENT_PROFILES[recipient_id])

    def call_validate_recipient_id(recipient_id):
        try:
            recipients.validate_recipient_id(recipient_id)
            return False
        except InvalidParameterException:
            return True

    recipient_id = 'broken_recipient_id'
    assert call_validate_recipient_id(recipient_id) is True
    recipient_id = 'broken_recipient-id'
    assert call_validate_recipient_id(recipient_id) is True
    recipient_id = 'broken_recipient-R'
    assert call_validate_recipient_id(recipient_id) is True
    recipient_id = '-fdbe-3fc5-9252-000000-R'
    assert call_validate_recipient_id(recipient_id) is True
def _parse_pc_validator_config(pc_config: Dict[str, Any]) -> PCValidatorConfig:
    raw_pc_validator_config = pc_config['dependency'].get('PCValidatorConfig')
    if not raw_pc_validator_config:
        storage_svc_region = pc_config['dependency']['StorageService']['constructor']['region']
        return PCValidatorConfig(region=storage_svc_region)
    return reflect.get_instance(raw_pc_validator_config, PCValidatorConfig)
def test_get_architecture_stats(stats_updater, backend_db):
    insert_test_fw(backend_db, 'root_fw', vendor='foobar')
    insert_test_fo(backend_db, 'fo1', parent_fw='root_fw', analysis={
        'cpu_architecture': generate_analysis_entry(summary=['MIPS, 32-bit, big endian (M)'])})
    insert_test_fo(backend_db, 'fo2', parent_fw='root_fw', analysis={
        'cpu_architecture': generate_analysis_entry(summary=['ARM, 32-bit, big endian (M)'])})
    insert_test_fo(backend_db, 'fo3', parent_fw='root_fw', analysis={
        'cpu_architecture': generate_analysis_entry(summary=['MIPS, 32-bit, big endian (M)'])})
    assert stats_updater.get_architecture_stats() == {'cpu_architecture': [('MIPS, 32-bit', 1)]}
    stats_updater.set_match({'vendor': 'foobar'})
    assert stats_updater.get_architecture_stats() == {'cpu_architecture': [('MIPS, 32-bit', 1)]}
    stats_updater.set_match({'vendor': 'something else'})
    assert stats_updater.get_architecture_stats() == {'cpu_architecture': []}
class NonlinearAD_SteadyState(LinearAD_SteadyState):

    def __init__(self, b=1.0, q=2, a=0.5, r=1):
        LinearAD_SteadyState.__init__(self, b, a)
        self.r_ = r
        self.q_ = q
        if q == 2 and r == 1:
            if b != 0.0:
                def f(rtmC):
                    return rtmC * tanh(b * rtmC / a) - 1.0

                def df(rtmC):
                    return (rtmC * old_div(1.0, cosh(b * rtmC / a) ** 2) * old_div(b, a)
                            + tanh(b * rtmC / a))

                logEvent('Solving for sqrt(-C) for q=2,r=1')
                rtmC = sqrt(1.5)
                while abs(f(rtmC)) > 1e-08:
                    rtmC -= old_div(f(rtmC), df(rtmC))
                logEvent('sqrt(-C)=' + repr(rtmC))
                self.rtmC_ = rtmC
                self.sol_ = lambda x: self.rtmC_ * tanh(
                    ((-self.b_) * self.rtmC_ / self.a_) * (x - 1.0))
            else:
                self.sol_ = lambda x: 1.0 - x
        elif q == 1 and r == 2:
            logEvent('Solving for C in q=1,r=2')

            def f(C):
                return 2.0 * C * (log(C - 1.0) - log(C)) + 2.0 + old_div(self.b_, self.a_)

            def df(C):
                return (2.0 * (log(C - 1.0) - log(C))
                        + 2.0 * C * (old_div(1.0, (C - 1.0)) - old_div(1.0, C)))

            C = 1.0 + 1e-10
            f0 = f(C)
            print(f0)
            while abs(f(C)) > 1e-07 * abs(f0) + 1e-07:
                dC = old_div(-f(C), df(C))
                logEvent('dc')
                print(dC)
                Ctmp = C + dC
                while abs(f(Ctmp)) > 0.99 * abs(f(C)) or Ctmp <= 1.0:
                    print(f(Ctmp))
                    print(f(C))
                    logEvent('ls')
                    dC *= 0.9
                    Ctmp = C + dC
                logEvent('out')
                print(Ctmp)
                print(f(Ctmp))
                print(df(Ctmp))
                C = Ctmp
            logEvent('C=' + repr(C))
            self.nlC_ = C
            self.nlD_ = 0.5 * (2.0 * C * log(C * (C - 1)) - 4.0 * C + 2.0
                               - old_div(self.b_, self.a_))
            logEvent('D=' + repr(self.nlD_))
        else:
            logEvent('q,r not implemented')

    def uOfX(self, X):
        x = X[0]
        if self.q_ == 2 and self.r_ == 1:
            return self.sol_(x)
        elif self.q_ == 1 and self.r_ == 2:
            def f(u):
                return (2.0 * (self.nlC_ * log(self.nlC_ - u) - (self.nlC_ - u))
                        - self.nlD_ - self.b_ * x / self.a_)

            def df(u):
                return 2.0 * self.nlC_ / (u - self.nlC_) + 2.0

            u = LinearAD_SteadyState.uOfX(self, X)
            f0 = f(u)
            while abs(f(u)) > 1e-06 * abs(f0) + 1e-06:
                u -= old_div(f(u), df(u))
            return u
        else:
            logEvent('q,r not implemented')
class FacebookOAuth2(BaseOAuth2[Dict[(str, Any)]]): display_name = 'Facebook' logo_svg = LOGO_SVG def __init__(self, client_id: str, client_secret: str, scopes: Optional[List[str]]=BASE_SCOPES, name: str='facebook'): super().__init__(client_id, client_secret, AUTHORIZE_ENDPOINT, ACCESS_TOKEN_ENDPOINT, name=name, base_scopes=scopes) async def get_long_lived_access_token(self, token: str): async with self.get_httpx_client() as client: response = (await client.post(self.access_token_endpoint, data={'grant_type': 'fb_exchange_token', 'fb_exchange_token': token, 'client_id': self.client_id, 'client_secret': self.client_secret})) data = cast(Dict[(str, Any)], response.json()) if (response.status_code >= 400): raise GetLongLivedAccessTokenError(data) return OAuth2Token(data) async def get_id_email(self, token: str) -> Tuple[(str, Optional[str])]: async with self.get_httpx_client() as client: response = (await client.get(PROFILE_ENDPOINT, params={'fields': 'id,email', 'access_token': token})) if (response.status_code >= 400): raise GetIdEmailError(response.json()) data = cast(Dict[(str, Any)], response.json()) return (data['id'], data.get('email'))
@pytest.mark.parametrize('method', ['resize', 'sample']) def test_resize_and_sample_errors(method): with Image(filename='rose:') as img: with raises(TypeError): getattr(img, method)(width='100') with raises(TypeError): getattr(img, method)(height='100') with raises(ValueError): getattr(img, method)(width=0) with raises(ValueError): getattr(img, method)(height=0) with raises(ValueError): getattr(img, method)(width=(- 5)) with raises(ValueError): getattr(img, method)(height=(- 5))
def test_normalize_availability_on_method(): norm_decorator = normalize('param', ['a', 'b']) availability_decorator_1 = availability(C1) class A(): @norm_decorator @availability_decorator_1 def method1(self, level, param, step): return (level, param, step) assert (A().method1(level='1000', param='a', step='24') == ('1000', 'a', '24')) with pytest.raises(ValueError): A().method1(level='1032100', param='a', step='24')
class OptionPlotoptionsVennSonificationTracksMappingGapbetweennotes(Options): @property def mapFunction(self): return self._config_get(None) @mapFunction.setter def mapFunction(self, value: Any): self._config(value, js_type=False) @property def mapTo(self): return self._config_get(None) @mapTo.setter def mapTo(self, text: str): self._config(text, js_type=False) @property def max(self): return self._config_get(None) @max.setter def max(self, num: float): self._config(num, js_type=False) @property def min(self): return self._config_get(None) @min.setter def min(self, num: float): self._config(num, js_type=False) @property def within(self): return self._config_get(None) @within.setter def within(self, value: Any): self._config(value, js_type=False)
def on_order_completed(order): if (not ((order.status == 'completed') or (order.status == 'placed'))): return create_pdf_tickets_for_holder(order) send_email_to_attendees(order) notify_ticket_purchase_attendee(order) if (order.payment_mode in ['free', 'bank', 'cheque', 'onsite']): order.completed_at = datetime.utcnow() organizer_set = set(filter(bool, ((order.event.organizers + order.event.coorganizers) + [order.event.owner]))) send_order_purchase_organizer_email(order, organizer_set) notify_ticket_purchase_organizer(order)
@app.route('/workflow/task-view') def task_view(): project_id = request.args.get('project_id') workflow_name = request.args.get('workflow_name') project_meta = store.get_project_by_id(project_id) if (project_meta is None): raise Exception('The project({}) for the workflow({}) is not found.'.format(project_id, workflow_name)) workflow_meta = store.get_workflow_by_name(project_name=project_meta.name, workflow_name=workflow_name) if (workflow_meta is None): raise Exception('The workflow({}) of the project({}) is not found.'.format(workflow_name, project_id)) else: return '{}/graph?dag_id={}'.format(airflow, '{}.{}'.format(project_meta.name, workflow_name))
class Migration(migrations.Migration): dependencies = [('bank', '0004_auto__0900')] operations = [migrations.AddField(model_name='transaction', name='date', field=models.DateTimeField(default=django.utils.timezone.now)), migrations.AddField(model_name='transaction', name='updated', field=models.DateTimeField(default=datetime.datetime(2016, 11, 20, 17, 22, 58, 790015, tzinfo=utc), auto_now=True), preserve_default=False)]
def _format_json_str(jstr): result = [] inside_quotes = False last_char = ' ' for char in jstr: if ((last_char != '\\') and (char == '"')): inside_quotes = (not inside_quotes) last_char = char if ((not inside_quotes) and (char == '\n')): continue if (inside_quotes and (char == '\n')): char = '\\n' if (inside_quotes and (char == '\t')): char = '\\t' result.append(char) return ''.join(result)
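# Usage sketch for _format_json_str above: raw newlines and tabs inside quoted JSON
# strings are escaped, while newlines between tokens are simply dropped.
raw = '{"msg": "line one\nline two",\n "n": 1}'
assert _format_json_str(raw) == '{"msg": "line one\\nline two", "n": 1}'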
def test_transfer16(la: LogicAnalyzer, slave: SPISlave): la.capture(4, block=False) value = slave.transfer16(WRITE_DATA16) la.stop() (sck, sdo, cs, sdi) = la.fetch_data() sdi_initstate = la.get_initial_states()[SDI[0]] assert (len(cs) == (CS_START + CS_STOP)) assert (len(sck) == SCK_WRITE16) assert (len(sdo) == SDO_WRITE_DATA16) assert verify_value(value, sck, sdi_initstate, sdi)
class FBetaTopKMetric(TopKMetric): k: int beta: Optional[float] min_rel_score: Optional[int] no_feedback_users: bool _precision_recall_calculation: PrecisionRecallCalculation def __init__(self, k: int, beta: Optional[float]=1.0, min_rel_score: Optional[int]=None, no_feedback_users: bool=False, options: AnyOptions=None) -> None: self.k = k self.beta = beta self.min_rel_score = min_rel_score self.no_feedback_users = no_feedback_users self._precision_recall_calculation = PrecisionRecallCalculation(max(k, 10), min_rel_score) super().__init__(options=options, k=k, min_rel_score=min_rel_score, no_feedback_users=no_feedback_users) def calculate(self, data: InputData) -> TopKMetricResult: if self.no_feedback_users: pr_key = 'precision_include_no_feedback' rc_key = 'recall_include_no_feedback' else: pr_key = 'precision' rc_key = 'recall' result = self._precision_recall_calculation.get_result() current = pd.Series(index=result.current['k'], data=self.fbeta(result.current[pr_key], result.current[rc_key])) ref_data = result.reference reference: Optional[pd.Series] = None if (ref_data is not None): reference = pd.Series(index=ref_data['k'], data=self.fbeta(ref_data[pr_key], ref_data[rc_key])) return TopKMetricResult(k=self.k, reference=reference, current=current) def fbeta(self, precision, recall): beta_sqr = (self.beta ** 2) precision_arr = np.array(precision) recall_arr = np.array(recall) return ((((1 + beta_sqr) * precision_arr) * recall_arr) / ((beta_sqr * precision_arr) + recall_arr)) def key(self) -> str: return ''
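# Worked example of the fbeta combination above; with beta=1 it reduces to the
# familiar F1 score 2*P*R / (P + R), e.g. P=0.5, R=0.25 gives 1/3.
import numpy as np
beta_sqr = 1.0 ** 2
precision, recall = np.array([0.5]), np.array([0.25])
f1 = (((1 + beta_sqr) * precision) * recall) / ((beta_sqr * precision) + recall)
assert np.isclose(f1[0], 1 / 3)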
def test_main_exit_invalid_location(varfont, tmpdir, capsys): fontfile = str((tmpdir / 'PartialInstancerTest-VF.ttf')) varfont.save(fontfile) with pytest.raises(SystemExit): instancer.main([fontfile, 'wght:100']) captured = capsys.readouterr() assert ('invalid location format' in captured.err)
def pretty_print(tree: Tree, name: str='root', depth: int=0) -> None: pad = ((' ' * depth) * 2) print(f'{pad}{name}({tree.value})') if (tree.left is not None): pretty_print(tree.left, name='left', depth=(depth + 1)) if (tree.right is not None): pretty_print(tree.right, name='right', depth=(depth + 1))
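# Usage sketch for pretty_print above, assuming a hypothetical Tree type with the
# 'value', 'left', and 'right' attributes that the function accesses.
from dataclasses import dataclass
from typing import Optional

@dataclass
class Tree:
    value: int
    left: Optional['Tree'] = None
    right: Optional['Tree'] = None

pretty_print(Tree(1, left=Tree(2), right=Tree(3)))
# prints:
# root(1)
#   left(2)
#   right(3)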
class PopStatsProcessor(AbstractGamestateDataProcessor): ID = 'pop_stats' DEPENDENCIES = [CountryProcessor.ID, SpeciesProcessor.ID, FactionProcessor.ID, CountryDataProcessor.ID] def __init__(self): super().__init__() self.country_by_planet_id = None def initialize_data(self): self._initialize_planet_owner_dict() def extract_data_from_gamestate(self, dependencies): def init_dict(): return dict(pop_count=0, crime=0, happiness=0, power=0) countries_dict = dependencies[CountryProcessor.ID] country_data_dict = dependencies[CountryDataProcessor.ID] (species_dict, robot_species) = dependencies[SpeciesProcessor.ID] faction_by_ingame_id = dependencies[FactionProcessor.ID] for (country_id_in_game, country_model) in countries_dict.items(): if ((not config.CONFIG.read_all_countries) and (not country_model.is_player)): continue if (country_id_in_game in self._basic_info.other_players): continue country_data = country_data_dict[country_id_in_game] stats_by_species = {} stats_by_faction = {} stats_by_job = {} stats_by_stratum = {} stats_by_ethos = {} stats_by_planet = {} for pop_dict in self._gamestate_dict['pop'].values(): if (not isinstance(pop_dict, dict)): continue planet_id = pop_dict.get('planet') planet_country_id_in_game = self.country_by_planet_id.get(planet_id) if (planet_country_id_in_game != country_id_in_game): continue species_id = pop_dict.get('species') job = pop_dict.get('job', 'unemployed') stratum = pop_dict.get('category', 'unknown stratum') faction_id = pop_dict.get('pop_faction') if (faction_id is None): if (stratum == 'slave'): faction_id = FactionProcessor.SLAVE_FACTION_ID elif (species_id in robot_species): faction_id = FactionProcessor.NON_SENTIENT_ROBOT_FACTION_ID elif (stratum == 'purge'): faction_id = FactionProcessor.PURGE_FACTION_ID else: faction_id = FactionProcessor.NO_FACTION_ID ethos = pop_dict.get('ethos', {}).get('ethic') if (not isinstance(ethos, str)): ethos = 'ethic_no_ethos' crime = pop_dict.get('crime', 0.0) happiness = pop_dict.get('happiness', 0.0) power = pop_dict.get('power', 0.0) if (species_id not in stats_by_species): stats_by_species[species_id] = init_dict() if (faction_id not in stats_by_faction): stats_by_faction[faction_id] = init_dict() if (job not in stats_by_job): stats_by_job[job] = init_dict() if (stratum not in stats_by_stratum): stats_by_stratum[stratum] = init_dict() if (ethos not in stats_by_ethos): stats_by_ethos[ethos] = init_dict() if (planet_id not in stats_by_planet): stats_by_planet[planet_id] = init_dict() stats_by_species[species_id]['pop_count'] += 1 stats_by_faction[faction_id]['pop_count'] += 1 stats_by_job[job]['pop_count'] += 1 stats_by_stratum[stratum]['pop_count'] += 1 stats_by_ethos[ethos]['pop_count'] += 1 stats_by_planet[planet_id]['pop_count'] += 1 stats_by_species[species_id]['crime'] += crime stats_by_faction[faction_id]['crime'] += crime stats_by_job[job]['crime'] += crime stats_by_stratum[stratum]['crime'] += crime stats_by_ethos[ethos]['crime'] += crime stats_by_planet[planet_id]['crime'] += crime stats_by_species[species_id]['happiness'] += happiness stats_by_faction[faction_id]['happiness'] += happiness stats_by_job[job]['happiness'] += happiness stats_by_stratum[stratum]['happiness'] += happiness stats_by_ethos[ethos]['happiness'] += happiness stats_by_planet[planet_id]['happiness'] += happiness stats_by_species[species_id]['power'] += power stats_by_faction[faction_id]['power'] += power stats_by_job[job]['power'] += power stats_by_stratum[stratum]['power'] += power stats_by_ethos[ethos]['power'] += power 
stats_by_planet[planet_id]['power'] += power for (species_id, stats) in stats_by_species.items(): if (stats['pop_count'] == 0): continue if ((species_id is None) or (species_id not in species_dict)): continue stats['crime'] /= stats['pop_count'] stats['happiness'] /= stats['pop_count'] stats['power'] /= stats['pop_count'] species = species_dict[species_id] self._session.add(datamodel.PopStatsBySpecies(country_data=country_data, species=species, **stats)) gamestate_dict_factions = self._gamestate_dict.get('pop_factions') if (not isinstance(gamestate_dict_factions, dict)): gamestate_dict_factions = {} for (faction_id, stats) in stats_by_faction.items(): if (stats['pop_count'] == 0): continue faction = faction_by_ingame_id.get(faction_id) if (faction is None): continue faction_dict = gamestate_dict_factions.get(faction_id, {}) if (not isinstance(faction_dict, dict)): faction_dict = {} stats['crime'] /= stats['pop_count'] stats['happiness'] /= stats['pop_count'] stats['power'] /= stats['pop_count'] stats['faction_approval'] = faction_dict.get('faction_approval', 0.0) stats['support'] = faction_dict.get('support', 0.0) self._session.add(datamodel.PopStatsByFaction(country_data=country_data, faction=faction, **stats)) for (planet_id, stats) in stats_by_planet.items(): if (stats['pop_count'] == 0): continue stats['crime'] /= stats['pop_count'] stats['happiness'] /= stats['pop_count'] stats['power'] /= stats['pop_count'] planet_dict = self._gamestate_dict['planets']['planet'].get(planet_id) if (not isinstance(planet_dict, dict)): continue stats['migration'] = planet_dict.get('migration', 0.0) stats['free_amenities'] = planet_dict.get('free_amenities', 0.0) stats['free_housing'] = planet_dict.get('free_housing', 0.0) stats['stability'] = planet_dict.get('stability', 0.0) planet = self._session.query(datamodel.Planet).filter_by(planet_id_in_game=planet_id).one_or_none() if (planet is None): logger.warning(f'{self._basic_info.logger_str} Could not find planet with ID {planet_id}!') continue self._session.add(datamodel.PlanetStats(country_data=country_data, planet=planet, **stats)) for (job, stats) in stats_by_job.items(): if (stats['pop_count'] == 0): continue stats['crime'] /= stats['pop_count'] stats['happiness'] /= stats['pop_count'] stats['power'] /= stats['pop_count'] job = self._get_or_add_shared_description(job) self._session.add(datamodel.PopStatsByJob(country_data=country_data, db_job_description=job, **stats)) for (stratum, stats) in stats_by_stratum.items(): if (stats['pop_count'] == 0): continue stats['crime'] /= stats['pop_count'] stats['happiness'] /= stats['pop_count'] stats['power'] /= stats['pop_count'] stratum = self._get_or_add_shared_description(stratum) self._session.add(datamodel.PopStatsByStratum(country_data=country_data, db_stratum_description=stratum, **stats)) for (ethos, stats) in stats_by_ethos.items(): if (stats['pop_count'] == 0): continue stats['crime'] /= stats['pop_count'] stats['happiness'] /= stats['pop_count'] stats['power'] /= stats['pop_count'] ethos = self._get_or_add_shared_description(ethos) self._session.add(datamodel.PopStatsByEthos(country_data=country_data, db_ethos_description=ethos, **stats)) def _initialize_planet_owner_dict(self): self.country_by_planet_id = {} for (country_id, country_dict) in sorted(self._gamestate_dict['country'].items()): if (not isinstance(country_dict, dict)): continue for planet_id in country_dict.get('owned_planets', []): self.country_by_planet_id[planet_id] = country_id
@run_only_with_numba def test_laplace(): region = ((- 10000.0), 10000.0, (- 10000.0), 10000.0) coords = vd.grid_coordinates(region, shape=(10, 10), extra_coords=300) prisms = [[1000.0, 7000.0, (- 5000.0), 2000.0, (- 1000.0), (- 500)], [(- 4000.0), 1000.0, 4000.0, 10000.0, (- 2000.0), 200]] densities = [2670.0, 2900.0] diagonal_components = {field: prism_gravity(coords, prisms, densities, field=field) for field in ('g_ee', 'g_nn', 'g_zz')} npt.assert_allclose((diagonal_components['g_ee'] + diagonal_components['g_nn']), (- diagonal_components['g_zz']))
def extractJapmtlWordpressCom(item): (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title']) if ((not (chp or vol)) or ('preview' in item['title'].lower())): return None tagmap = [("one day, the engagement was suddenly cancelled. ......my little sister's.", "one day, the engagement was suddenly cancelled. ......my little sister's.", 'translated'), ('villainess (?) and my engagement cancellation', 'villainess (?) and my engagement cancellation', 'translated'), ('beloved villain flips the skies', 'beloved villain flips the skies', 'translated'), ("slow life villainess doesn't notice the prince's fondness", "slow life villainess doesn't notice the prince's fondness", 'translated'), ('is the villain not allowed to fall in love?', 'is the villain not allowed to fall in love?', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')] for (tagname, name, tl_type) in tagmap: if (tagname in item['tags']): return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) return False
@dataclass class DecisionEvents(): events: List[HistoryEvent] decision_events: List[HistoryEvent] replay: bool replay_current_time_milliseconds: datetime.datetime next_decision_event_id: int markers: List[HistoryEvent] = field(default_factory=list) def __post_init__(self): for event in self.decision_events: if (event.event_type == EventType.EVENT_TYPE_MARKER_RECORDED): self.markers.append(event) def get_optional_decision_event(self, event_id) -> Optional[HistoryEvent]: index = (event_id - self.next_decision_event_id) if ((index < 0) or (index >= len(self.decision_events))): return None else: return self.decision_events[index]
@pytest.fixture(scope='module') def dolt_install(): tmp_dir = tempfile.TemporaryDirectory() dir_path = os.path.dirname(os.path.realpath(__file__)) install_script = os.path.join(dir_path, '..', '..', 'flytekit-dolt', 'scripts', 'install.sh') try: dolt_path = os.path.join(tmp_dir.name, 'dolt') subprocess.run(install_script, env={'INSTALL_PATH': tmp_dir.name}) for (attr, value) in [('user.name', 'First Last'), ('user.email', 'first.')]: subprocess.run([dolt_path, 'config', '--global', '--add', attr, value]) dolt.set_dolt_path(dolt_path) (yield dolt_path) finally: shutil.rmtree(tmp_dir.name) shutil.rmtree((Path.home() / '.dolt'))
class AttributeGrammar(Grammar): def __init__(self, classes, rule_extractor=..., check_acyclicity=False, **kwargs): super().__init__(classes, **kwargs) if (rule_extractor is ...): raise DeprecationWarning('A rule extractor must be passed explicitly.') self.attributes = {} (self.synthesized_rules, self.inheritable_rules, self.synthesized_attributes, self.inherited_attributes) = rule_extractor(self, classes) for key in itertools.chain(self.synthesized_attributes, self.inherited_attributes): self.attributes[key] = (self.synthesized_attributes.get(key, {}) | self.inherited_attributes.get(key, {})) self.validate_attribute_grammar() from securify.grammar.attributes.dependencies import AttributeDependenceRelations self.dependence_relations = AttributeDependenceRelations(self) if check_acyclicity: self.validate_acyclicity() def attribute_visitor(self): return AttributeVisitor(self) def overridden_attributes(self, node): production = self.production_of(node) attributes = self.attributes[production] overridden = {a for a in attributes if (a in node.__dict__)} return overridden def validate_attribute_grammar(self): self.validate_grammar() self.validate_rule_targets() return True def validate_acyclicity(self): if (not self.is_acyclic): raise AttributeGrammarValidationError(f'''Attribute grammar is not acyclic. See the lower dependence relation for more information: {self.lower_dependence}''') def validate_rule_dependencies(self): def validate_dependencies(symbol, rule): production = self.productions[symbol] for dependency in rule.dependencies: node = dependency.node attr = dependency.attribute if ((node not in production) and (node != 'self')): raise AttributeGrammarValidationError(f"Child '{node}' not in productions of symbol {symbol.__name__}") dependency_symbols = ([symbol] if (node == 'self') else production[node].symbol) for dependency_symbol in dependency_symbols: available_attributes = self.attributes[dependency_symbol] if (attr not in available_attributes): raise AttributeGrammarValidationError(f"Attribute '{attr}' not available for rule '{rule}' of symbol '{dependency_symbol.__name__}'.") rules = {k: {*self.synthesized_rules.get(k, {}).values(), *self.inheritable_rules.get(k, {}).values()} for k in {*self.synthesized_rules.keys(), *self.inheritable_rules.keys()}} for (symbol, rules) in rules.items(): for rule in rules: if isinstance(rule, (SynthesizeRule, PushdownRule)): validate_dependencies(symbol, rule) return True def validate_rule_targets(self): for (s, ps) in self.productions.items(): inheritable_rules = {(r.target.node, r.target.attribute): r for rules in self.inheritable_rules[s].values() for r in rules} for (n, p) in ps.items(): required_attributes = {a for a in self.inherited_attributes[p.symbol]} for a in required_attributes: if ((n, a) not in inheritable_rules): raise AttributeGrammarValidationError(f"Rule '{s.__name__}' defines child '{n}' which requires an attribute '{a}', but a corresponding semantic rule could not be found.") def local_functional_dependence(self): return self.dependence_relations.local_functional_dependence def lower_dependence(self): return self.dependence_relations.lower_dependence def lower_dependence_combined(self): return self.dependence_relations.lower_dependence_combined def is_acyclic(self): return self.dependence_relations.is_acyclic def is_absolutely_acyclic(self): return self.dependence_relations.is_absolutely_acyclic def grammar_info(self): def render_rule_dependencies(rule): return {k: ', '.join([t.attribute for t in g]) for (k, g) in 
itertools.groupby(rule.dependencies, (lambda r: r.node))} def render_semantic_rule(rule): if isinstance(rule, SynthesizeRule): return (f'self.{rule.name}', {'dependencies': render_rule_dependencies(rule), 'type': 'synthesized', 'annotations': rule.annotations, 'sourceLocation': ({'file': rule.source_location.file, 'line': rule.source_location.line} if rule.source_location else None)}) if isinstance(rule, PushdownRule): return (f'{rule.target.node}.{rule.target.attribute}', {'dependencies': render_rule_dependencies(rule), 'type': 'inherited', 'annotations': rule.annotations, 'sourceLocation': ({'file': rule.source_location.file, 'line': rule.source_location.line} if rule.source_location else None)}) raise AttributeGrammarError('Wtf') return {**super().grammar_info(), 'attributes': {'synthesized': {symbol.__name__: list(attributes) for (symbol, attributes) in self.synthesized_attributes.items() if (len(attributes) > 0)}, 'inherited': {symbol.__name__: list(attributes) for (symbol, attributes) in self.inherited_attributes.items() if (len(attributes) > 0)}}, 'semanticRules': {symbol.__name__: {r: data for (_, rules2) in [*self.synthesized_rules[symbol].items(), *self.inheritable_rules[symbol].items()] for rule in rules2 if (len(rules2) > 0) for (r, data) in [render_semantic_rule(rule)]} for symbol in (self.synthesized_rules.keys() | self.inheritable_rules.keys())}}
class OptionPlotoptionsBellcurveSonificationDefaultinstrumentoptionsMappingPitch(Options): @property def mapFunction(self): return self._config_get(None) @mapFunction.setter def mapFunction(self, value: Any): self._config(value, js_type=False) @property def mapTo(self): return self._config_get('y') @mapTo.setter def mapTo(self, text: str): self._config(text, js_type=False) @property def max(self): return self._config_get('c6') @max.setter def max(self, text: str): self._config(text, js_type=False) @property def min(self): return self._config_get('c2') @min.setter def min(self, text: str): self._config(text, js_type=False) @property def scale(self): return self._config_get(None) @scale.setter def scale(self, value: Any): self._config(value, js_type=False) @property def within(self): return self._config_get('yAxis') @within.setter def within(self, text: str): self._config(text, js_type=False)
class TestComment(): def test_url_flag(self, mocked_client_class): mocked_client_class.send_request.return_value = client_test_data.EXAMPLE_COMMENT_MUNCH runner = testing.CliRunner() result = runner.invoke(cli.comment, ['nodejs-grunt-wrap-0.3.0-2.fc25', 'After installing this I found $100.', '--url', ' '--karma', '1']) assert (result.exit_code == 0) assert (result.output == client_test_data.EXPECTED_COMMENT_OUTPUT) mocked_client_class.send_request.assert_called_once_with('comments/', verb='POST', auth=True, data={'csrf_token': 'a_csrf_token', 'text': 'After installing this I found $100.', 'update': 'nodejs-grunt-wrap-0.3.0-2.fc25', 'karma': 1})
def extractReleasethatwitchWordpressCom(item): (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title']) if ((not (chp or vol)) or ('preview' in item['title'].lower())): return None tagmap = [('Release that Witch', 'Release that Witch', 'translated')] for (tagname, name, tl_type) in tagmap: if (tagname in item['tags']): return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) return False
def draw_background(screen, tile_img_file, field_rect): tile_img = pygame.image.load(tile_img_file).convert_alpha() img_rect = tile_img.get_rect() nrows = (int((screen.get_height() / img_rect.height)) + 1) ncols = (int((screen.get_width() / img_rect.width)) + 1) for y in range(nrows): for x in range(ncols): img_rect.topleft = ((x * img_rect.width), (y * img_rect.height)) screen.blit(tile_img, img_rect) field_color = (109, 41, 1) boundary_rect = Rect((field_rect.left - 4), (field_rect.top - 4), (field_rect.width + 8), (field_rect.height + 8)) pygame.draw.rect(screen, (0, 0, 0), boundary_rect) pygame.draw.rect(screen, field_color, field_rect)
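# The +1 in nrows/ncols above guarantees the tiles cover the whole screen when its
# size is not an exact multiple of the tile size: for a 640x480 screen and 100x100
# tiles, 4 full rows plus one partial row, 6 full columns plus one partial column.
nrows = int(480 / 100) + 1
ncols = int(640 / 100) + 1
assert (nrows, ncols) == (5, 7)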
def extractStarveCleric(item): (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title']) if ((not (chp or vol or frag)) or ('preview' in item['title'].lower())): return None if ('protected' in item['title'].lower()): return None tagmap = [("Library of Heaven's Path", "Library of Heaven's Path", 'translated'), ('The Experimental Diaries of A Crazy Lich', 'The Experimental Diaries of A Crazy Lich', 'translated'), ('ninth special district', 'ninth special district', 'translated'), ('Tian Ying', 'Tian Ying', 'translated'), ('The Adonis Next Door', 'The Adonis Next Door', 'translated'), ('The Diary of the Truant Death God', 'The Diary of the Truant Death God', 'translated'), ('Dao Tian Xian Tu', 'Dao Tian Xian Tu', 'translated'), ('Rebirth - First Class Magician', 'Rebirth - First Class Magician', 'translated'), ('The Records of the Human Emperor', 'The Records of the Human Emperor', 'translated')] for (tagname, name, tl_type) in tagmap: if (tagname in item['tags']): return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) return False
def main(page: Page): page.title = 'MyApp' def window_event(e): if (e.data == 'close'): page.dialog = confirm_dialog confirm_dialog.open = True page.update() page.window_prevent_close = True page.on_window_event = window_event def yes_click(e): page.window_destroy() def no_click(e): confirm_dialog.open = False page.update() confirm_dialog = AlertDialog(modal=True, title=Text('Please confirm'), content=Text('Do you really want to exit this app?'), actions=[ElevatedButton('Yes', on_click=yes_click), OutlinedButton('No', on_click=no_click)], actions_alignment='end') page.add(Text('Try exiting this app by clicking window\'s "Close" button!'))
def _add_peaks_dot(model, plt_log, ax, **plot_kwargs): defaults = {'color': PLT_COLORS['periodic'], 'alpha': 0.6, 'lw': 2.5, 'ms': 6} plot_kwargs = check_plot_kwargs(plot_kwargs, defaults) for peak in model.peak_params_: ap_point = np.interp(peak[0], model.freqs, model._ap_fit) freq_point = (np.log10(peak[0]) if plt_log else peak[0]) ax.plot([freq_point, freq_point], [ap_point, (ap_point + peak[1])], **plot_kwargs) ax.plot(freq_point, (ap_point + peak[1]), marker='o', **plot_kwargs)
class Actions(): def __init__(self, endpoint, sdnc): self.endpoint = endpoint self.sdnc = sdnc def mirror_endpoint(self): status = False if self.sdnc: endpoint_data = self.endpoint.endpoint_data if self.sdnc.mirror_mac(endpoint_data['mac'], endpoint_data['segment'], endpoint_data['port']): collector = Collector(self.endpoint, endpoint_data['segment']) if collector.nic: status = collector.start_collector() else: status = True return status def unmirror_endpoint(self): status = False if self.sdnc: endpoint_data = self.endpoint.endpoint_data if self.sdnc.unmirror_mac(endpoint_data['mac'], endpoint_data['segment'], endpoint_data['port']): collector = Collector(self.endpoint, endpoint_data['segment']) if collector.nic: status = collector.stop_collector() else: status = True return status def coprocess_endpoint(self): status = False if self.sdnc: endpoint_data = self.endpoint.endpoint_data if (self.sdnc.volos and self.sdnc.volos.enabled): acl = VolosAcl(self.endpoint, acl_dir=self.sdnc.volos.acl_dir, copro_vlans=[self.sdnc.volos.copro_vlan], copro_port=self.sdnc.volos.copro_port) endpoints = [self.endpoint] force_apply_rules = [acl.acl_key] coprocess_rules_files = [acl.acl_file] port_list = self.sdnc.volos.get_port_list(endpoint_data['mac'], ipv4=endpoint_data.get('ipv4', None), ipv6=endpoint_data.get('ipv6', None)) if (acl.ensure_acls_dir() and acl.write_acl_file(port_list)): status = self.sdnc.update_acls(rules_file=None, endpoints=endpoints, force_apply_rules=force_apply_rules, coprocess_rules_files=coprocess_rules_files) else: status = True return status def uncoprocess_endpoint(self): status = False if self.sdnc: if (self.sdnc.volos and self.sdnc.volos.enabled): acl = VolosAcl(self.endpoint, acl_dir=self.sdnc.volos.acl_dir, copro_vlans=[self.sdnc.volos.copro_vlan], copro_port=self.sdnc.volos.copro_port) endpoints = [self.endpoint] force_remove_rules = [acl.acl_key] if self.sdnc.update_acls(rules_file=None, endpoints=endpoints, force_remove_rules=force_remove_rules, coprocess_rules_files=None): status = acl.delete_acl_file() else: status = True return status def update_acls(self, rules_file=None, endpoints=None, force_apply_rules=None, force_remove_rules=None): status = False if self.sdnc: status = self.sdnc.update_acls(rules_file=rules_file, endpoints=endpoints, force_apply_rules=force_apply_rules, force_remove_rules=force_remove_rules) return status
class SourceHutBackendTests(DatabaseTestCase): def setUp(self): super().setUp() create_distro(self.session) self.create_project() def create_project(self): self.projects = {} project = models.Project(homepage=' name='builds.sr.ht', backend=BACKEND) self.session.add(project) self.projects['valid_with_homepage'] = project project = models.Project(homepage='scdoc', name='scdoc', version_url='sircmpwn/scdoc', backend=BACKEND) self.session.add(project) self.projects['valid_with_version_url'] = project project = models.Project(homepage=' name='hare', version_url='sircmpwn/hare', backend=BACKEND) self.session.add(project) self.projects['valid_without_release'] = project self.session.commit() def test_get_version(self): project = self.projects['valid_with_homepage'] exp = '0.82.8' obs = backend.SourceHutBackend.get_version(project) self.assertEqual(obs, exp) project = self.projects['valid_with_version_url'] exp = '1.11.2' obs = backend.SourceHutBackend.get_version(project) self.assertEqual(obs, exp) project = self.projects['valid_without_release'] self.assertRaises(AnityaPluginException, backend.SourceHutBackend.get_version, project) def test_get_version_url_project_homepage_only(self): project = self.projects['valid_with_homepage'] exp = ' obs = backend.SourceHutBackend.get_version_url(project) self.assertEqual(obs, exp) def test_get_version_url_project_version_url_only(self): project = self.projects['valid_with_version_url'] exp = ' obs = backend.SourceHutBackend.get_version_url(project) self.assertEqual(obs, exp) def test_get_versions_invalid_url(self): project = models.Project(homepage=' name='invalid', backend=BACKEND) self.assertRaises(AnityaPluginException, backend.SourceHutBackend.get_versions, project) def test_get_versions_invalid_status_code(self): project = models.Project(homepage=' name='invalid', backend=BACKEND) exp_url = ' with mock.patch('anitya.lib.backends.BaseBackend.call_url') as m_call: m_call.return_value = mock.Mock(status_code=404) self.assertRaises(AnityaPluginException, backend.SourceHutBackend.get_versions, project) m_call.assert_called_with(exp_url, None) def test_get_versions_not_modified(self): project = models.Project(homepage=' name='invalid', backend=BACKEND) exp_url = ' with mock.patch('anitya.lib.backends.BaseBackend.call_url') as m_call: m_call.return_value = mock.Mock(status_code=304) versions = backend.SourceHutBackend.get_versions(project) m_call.assert_called_with(exp_url, None) self.assertEqual(versions, []) def test_get_versions_project_homepage_only(self): project = self.projects['valid_with_homepage'] exp = ['0.82.7', '0.82.6', '0.82.8', '0.82.3', '0.82.4', '0.82.2', '0.82.1', '0.81.2', '0.82.5', '0.82.0', '0.81.1', '0.81.0', '0.80.0', '0.79.2', '0.79.1', '0.79.0', '0.78.0', '0.77.0', '0.76.0', '0.75.3'] obs = backend.SourceHutBackend.get_versions(project) self.assertEqual(obs, exp) def test_get_versions_project_version_url_only(self): project = self.projects['valid_with_version_url'] exp = ['1.11.2', '1.11.1', '1.11.0', '1.10.1', '1.10.0', '1.9.7', '1.9.6', '1.9.5', '1.9.4', '1.9.3', '1.9.2', '1.9.1', '1.9.0', '1.8.1', '1.8.0', '1.6.1', '1.6.0', '1.5.2', '1.5.1', '1.5.0'] obs = backend.SourceHutBackend.get_versions(project) self.assertEqual(obs, exp) def test_get_versions_project_without_any_release(self): project = self.projects['valid_without_release'] self.assertRaises(AnityaPluginException, backend.SourceHutBackend.get_versions, project)
@pytest.fixture(scope='module', autouse=True) def prepare_db(): dbs_path = pathlib.Path(DBT_PROJECT_DIR, 'dbs') dbs_path.mkdir(exist_ok=True, parents=True) database_file = pathlib.Path(dbs_path, 'database_name.db') database_file.touch() check_call(['dbt', '--log-format', 'json', 'seed', '--project-dir', DBT_PROJECT_DIR, '--profiles-dir', DBT_PROFILES_DIR, '--profile', DBT_PROFILE]) (yield) database_file.unlink()
def strhex2float(x, signed=True, n_word=None, n_frac=None, return_sizes=False): x = x.replace('0x', '') if (n_word is None): n_word = (len(x) * 4) x_bin = bin(int(x, 16)) if (len(x_bin[2:]) < n_word): x_bin = (('0b' + ('0' * (n_word - len(x_bin[2:])))) + x_bin[2:]) (val, signed, n_word, n_frac) = strbin2float(x_bin, signed, n_word, n_frac, return_sizes=True) if return_sizes: return (val, signed, n_word, n_frac) else: return val
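# Self-contained sketch of the two's-complement fixed-point decoding that
# strhex2float above delegates to strbin2float (not shown); this helper is an
# assumption about that behaviour, not the original implementation.
def decode_fixed(x_hex, n_word, n_frac, signed=True):
    raw = int(x_hex, 16)
    if signed and raw >= (1 << (n_word - 1)):
        raw -= (1 << n_word)  # fold the upper half into the negative range
    return raw / (1 << n_frac)

assert decode_fixed('0x4000', n_word=16, n_frac=15) == 0.5
assert decode_fixed('0xC000', n_word=16, n_frac=15) == -0.5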
@pytest.mark.frontend_config_overwrite({'results_per_page': 10}) @pytest.mark.WebInterfaceUnitTestConfig(database_mock_class=DbMock) class TestAppQuickSearch(): def test_quick_search_file_name(self, test_client): assert (TEST_FW_2.uid in _start_quick_search(test_client, TEST_FW_2.file_name)) def test_quick_search_device_name(self, test_client): assert (TEST_FW_2.uid in _start_quick_search(test_client, TEST_FW_2.device_name)) def test_quick_search_vendor(self, test_client): assert (TEST_FW_2.uid in _start_quick_search(test_client, TEST_FW_2.vendor)) def test_quick_search_sha256(self, test_client): assert (TEST_FW_2.uid in _start_quick_search(test_client, TEST_FW_2.sha256)) def test_quick_search_tags(self, test_client): assert (TEST_FW_2.uid in _start_quick_search(test_client, list(TEST_FW_2.tags)[0]))
def frequency_limit_decorator(key=None, cache_id='nid', msg=',{}!', mode='cache', timeout=TIME_OUT, hook=default_hook_function): def frequency_decorator(fun): @wraps(fun) def cache_control(*args, **kwargs): class_obj = args[0] cache_key = (key or class_obj.__class__.__name__) cache_ctl = CacheModel(mode, *args) _data = cache_ctl.get(cache_key, {}) get_id_the = kwargs.get(cache_id) old_time = _data.get(get_id_the, 0) now_time = time.time() time_difference = math.ceil((now_time - old_time)) if (time_difference < timeout): data = {'msg': msg.format((timeout - time_difference)), 'code': 567} return JsonResponse(data) res = fun(*args, **kwargs) _data[get_id_the] = time.time() cache_ctl.set(cache_key, _data) try: hook(*args, **kwargs) except HookException as e: return JsonResponse({'code': 767, 'msg': e.msg}) return res return cache_control return frequency_decorator
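# Framework-free sketch of the throttling pattern behind frequency_limit_decorator
# above: a repeat call is rejected until `timeout` seconds have passed since the
# last accepted one (a plain dict stands in for CacheModel here).
import math
import time

_last_seen = {}

def throttled(user_id, timeout=5):
    now = time.time()
    elapsed = math.ceil(now - _last_seen.get(user_id, 0))
    if elapsed < timeout:
        return {'code': 567, 'msg': f'retry in {timeout - elapsed}s'}
    _last_seen[user_id] = now
    return {'code': 200, 'msg': 'ok'}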
def get_registered_stattest(stattest_func: Optional[PossibleStatTestType], feature_type: Optional[ColumnType]=None, engine: Optional[Type[Engine]]=None) -> StatTest: if isinstance(stattest_func, StatTest): return stattest_func if (callable(stattest_func) and (stattest_func not in _registered_stat_test_funcs)): stat_test = StatTest(name='', display_name=f"custom function '{stattest_func.__name__}'", allowed_feature_types=[]) add_stattest_impl(stat_test, (engine or PythonEngine), create_impl_wrapper(stattest_func)) return stat_test if (callable(stattest_func) and (stattest_func in _registered_stat_test_funcs)): stattest_name = _registered_stat_test_funcs[stattest_func] elif isinstance(stattest_func, str): stattest_name = stattest_func else: raise ValueError(f'Unexpected type of stattest argument ({type(stattest_func)}), expected: str or Callable') funcs = _registered_stat_tests.get(stattest_name, None) if ((funcs is None) or (feature_type is None)): raise StatTestNotFoundError(stattest_name) func = funcs.get(feature_type) if (func is None): raise StatTestInvalidFeatureTypeError(stattest_name, feature_type) return func
@click.command('add-domain', help='Add a custom domain to a particular site') @click.argument('domain') @click.option('--site', prompt=True) @click.option('--ssl-certificate', help='Absolute path to SSL Certificate') @click.option('--ssl-certificate-key', help='Absolute path to SSL Certificate Key') def add_domain(domain, site=None, ssl_certificate=None, ssl_certificate_key=None): if (not site): print('Please specify site') sys.exit(1) from bench.config.site_config import add_domain add_domain(site, domain, ssl_certificate, ssl_certificate_key, bench_path='.')
def test_issue_60_v0_4_6(): cfg = Config(dtype_notation='Q', rounding='around') t_fxp = Fxp(0.0, 1, n_int=16, n_frac=15, config=cfg) t_int = Fxp(0.0, 1, n_int=13, n_frac=0, config=cfg) arr = [(- 5), 0, 14.8, 7961.625] fullprec = Fxp(arr, like=t_fxp) rounded_direct = Fxp(arr, like=t_int) rounded = Fxp(fullprec, like=t_int) assert np.all((rounded_direct == rounded)) scalar_full = Fxp(7961.625, like=t_fxp) scalar_round_direct = Fxp(7961.625, like=t_int) scalar_round = Fxp(scalar_full, like=t_int) assert (scalar_round_direct == scalar_round)
def main(): ap = argparse.ArgumentParser(description='Generate Xerox twolcs for Finnish') ap.add_argument('--quiet', '-q', action='store_false', dest='verbose', default=False, help='do not print output to stdout while processing') ap.add_argument('--verbose', '-v', action='store_true', default=False, help='print each step to stdout while processing') ap.add_argument('--deletion', '-d', type=float, default=1.0, metavar='DW', help='weight each deletion DW') ap.add_argument('--addition', '-a', type=float, default=1.0, metavar='AW', help='weight each addition AW') ap.add_argument('--swap', '-s', type=float, default=1.0, metavar='SW', help='weight each swap SW') ap.add_argument('--change', '-c', type=float, default=1.0, metavar='CW', help='weight each change CW') ap.add_argument('--output', '-o', type=argparse.FileType('w'), required=True, metavar='OFILE', help='write output to OFILE') args = ap.parse_args() if args.verbose: print('Writing everything to', args.output.name) if args.verbose: print('Creating EDs') print(0, 0.0, sep='\t', file=args.output) for c in ((fin_lowercase + fin_uppercase) + fin_symbols): print(0, 0, c, c, 0.0, sep='\t', file=args.output) print(0, 1, c, '_EPSILON_SYMBOL_', args.deletion, sep='\t', file=args.output) print(0, 1, '_EPSILON_SYMBOL_', c, args.addition, sep='\t', file=args.output) print(1, 1, c, c, 0.0, sep='\t', file=args.output) print(1, 0.0, sep='\t', file=args.output) i = 2 for c in fin_lowercase: for g in fin_lowercase: print(0, i, c, g, 0.0, sep='\t', file=args.output) print(i, 1, g, c, args.swap, sep='\t', file=args.output) print(i, args.change, sep='\t', file=args.output) i += 1 exit(0)
def duno_filtro(img): contrast = ImageEnhance.Contrast(img) contrastado = contrast.enhance(1.1) brightness = ImageEnhance.Brightness(contrastado) brilho = brightness.enhance(1.1) saturation = ImageEnhance.Color(brilho) saturada = saturation.enhance(1.3) saturada.save('beijo_editada.jpg')
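# Self-contained variant of the enhancement chain above, built on an in-memory
# image so it runs without an input file; factors above 1.0 strengthen each property.
from PIL import Image, ImageEnhance

img = Image.new('RGB', (64, 64), (120, 80, 40))
img = ImageEnhance.Contrast(img).enhance(1.1)
img = ImageEnhance.Brightness(img).enhance(1.1)
img = ImageEnhance.Color(img).enhance(1.3)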
class ElementNewton(proteus.NonlinearSolvers.NonlinearSolver): def __init__(self, linearSolver, F, J=None, du=None, par_du=None, rtol_r=0.0001, atol_r=1e-16, rtol_du=0.0001, atol_du=1e-16, maxIts=100, norm=l2Norm, convergenceTest='r', computeRates=True, printInfo=True, fullNewton=True, directSolver=False, EWtol=True, maxLSits=100): import copy self.par_du = par_du if (par_du is not None): F.dim_proc = par_du.dim_proc NonlinearSolver.__init__(self, F, J, du, rtol_r, atol_r, rtol_du, atol_du, maxIts, norm, convergenceTest, computeRates, printInfo) self.updateJacobian = True self.fullNewton = fullNewton self.linearSolver = linearSolver self.directSolver = directSolver self.lineSearch = True self.EWtol = EWtol self.maxLSits = maxLSits if self.linearSolver.computeEigenvalues: self.JLast = copy.deepcopy(self.J) self.J_t_J = copy.deepcopy(self.J) self.dJ_t_dJ = copy.deepcopy(self.J) self.JLsolver = LU(self.J_t_J, computeEigenvalues=True) self.dJLsolver = LU(self.dJ_t_dJ, computeEigenvalues=True) self.u0 = numpy.zeros(self.F.dim, 'd') def info(self): return 'Not Implemented' def solve(self, u, r=None, b=None, par_u=None, par_r=None): self.F.maxIts = self.maxIts self.F.maxLSits = self.maxLSits self.F.atol = self.atol_r self.F.elementSolve(u, r) self.failedFlag = False return self.failedFlag
@requires_toolkit([ToolkitName.qt, ToolkitName.wx]) class TestUIWrapperInteractionRegistries(unittest.TestCase): def test_registry_priority(self): registry1 = StubRegistry(handler=(lambda w, l: 1), supported_interaction_classes=[str]) registry2 = StubRegistry(handler=(lambda w, l: 2), supported_interaction_classes=[str]) wrapper = example_ui_wrapper(registries=[registry2, registry1]) value = wrapper.inspect('some string') self.assertEqual(value, 2) wrapper = example_ui_wrapper(registries=[registry1, registry2]) value = wrapper.inspect('some other string') self.assertEqual(value, 1) def test_registry_selection(self): registry1 = StubRegistry() registry2_handler = mock.Mock() registry2 = StubRegistry(handler=registry2_handler, supported_interaction_classes=[int]) wrapper = example_ui_wrapper(registries=[registry1, registry2]) wrapper.perform(123) self.assertEqual(registry2_handler.call_count, 1) def test_registry_all_declined(self): wrapper = example_ui_wrapper(registries=[StubRegistry(supported_interaction_classes=[int]), StubRegistry(supported_interaction_classes=[float])]) with self.assertRaises(InteractionNotSupported) as exception_context: wrapper.perform(None) self.assertCountEqual(exception_context.exception.supported, [int, float])
def extractCanemfurryWordpressCom(item): (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title']) if ((not (chp or vol)) or ('preview' in item['title'].lower())): return None if ((item['tags'] == ['Releases']) or (item['tags'] == ['Uncategorized'])): titlemap = [('TDD Chapter ', 'The Defeated Dragon', 'translated'), ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'), ('Master of Dungeon', 'Master of Dungeon', 'oel')] for (titlecomponent, name, tl_type) in titlemap: if (titlecomponent.lower() in item['title'].lower()): return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) tagmap = [('TDD', 'The Defeated Dragon', 'translated'), ('The Defeated Dragon', 'The Defeated Dragon', 'translated'), ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')] for (tagname, name, tl_type) in tagmap: if (tagname in item['tags']): return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) return False
@dataclass class DataBase(): dbase: str = '' kfile: str = '' pword: Optional[str] = None atype: Optional[str] = None totp: bool = False is_active: bool = False kpo: Optional[str] = None def __post_init__(self): self.dbase = (realpath(expanduser(self.dbase)) if self.dbase else '') self.kfile = (realpath(expanduser(self.kfile)) if self.kfile else '')
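# Usage sketch for the DataBase dataclass above: __post_init__ normalizes any
# user-supplied paths ('~' expanded, path made absolute); the file name here
# is hypothetical.
from os.path import expanduser, realpath

db = DataBase(dbase='~/passwords.kdbx')
assert db.dbase == realpath(expanduser('~/passwords.kdbx'))
assert db.kfile == ''  # empty inputs are left untouched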
class BadgeFieldFormListPost(ResourceList): def before_post(self, args, kwargs, data): require_relationship(['badge_form'], data) if (not has_access('is_coorganizer', badge_form=data['badge_form'])): raise ObjectNotFound({'parameter': 'badge_form'}, f"Custom Form: {data['badge_form']} not found") schema = BadgeFieldFormSchema methods = ['POST'] data_layer = {'session': db.session, 'model': BadgeFieldForms}
@_admin_required def BookingManageCreate(request, location_slug): username = '' if (request.method == 'POST'): location = get_object_or_404(Location, slug=location_slug) notify = request.POST.get('email_announce') logger.debug('notify was set to:') logger.debug(notify) try: username = request.POST.get('username') the_user = User.objects.get(username=username) except User.DoesNotExist: messages.add_message(request, messages.INFO, ('There is no user with the username %s' % username)) return HttpResponseRedirect(reverse('booking_manage_create', args=(location.slug,))) form = AdminBookingForm(request.POST) if form.is_valid(): use = form.save(commit=False) use.location = location use.user = the_user if use.suggest_drft(): use.accounted_by = Use.DRFT use.save() use.status = request.POST.get('status') use.save() booking = Booking(use=use) booking.reset_rate() if notify: new_booking_notify(booking) messages.add_message(request, messages.INFO, ('The booking for %s %s was created.' % (use.user.first_name, use.user.last_name))) return HttpResponseRedirect(reverse('booking_manage', args=(location.slug, booking.id))) else: logger.debug('the form had errors') logger.debug(form.errors) else: form = AdminBookingForm() username = request.GET.get('username', '') all_users = User.objects.all().order_by('username') return render(request, 'booking_manage_create.html', {'all_users': all_users, 'booking_statuses': Booking.BOOKING_STATUSES, 'username': username})
class GPTask(HasMentions, HasActivity, Document): on_delete_cascade = ['GP Comment', 'GP Activity'] on_delete_set_null = ['GP Notification'] activities = ['Task Value Changed'] mentions_field = 'description' def before_insert(self): if (not self.status): self.status = 'Backlog' def after_insert(self): self.update_tasks_count(1) def on_update(self): self.update_project_progress() self.notify_mentions() self.log_value_updates() self.update_search_index() def log_value_updates(self): fields = ['title', 'description', 'status', 'priority', 'assigned_to', 'due_date', 'project'] for field in fields: prev_doc = self.get_doc_before_save() if (prev_doc and (str(self.get(field)) != str(prev_doc.get(field)))): self.log_activity('Task Value Changed', data={'field': field, 'field_label': self.meta.get_label(field), 'old_value': prev_doc.get(field), 'new_value': self.get(field)}) def update_search_index(self): if (self.has_value_changed('title') or self.has_value_changed('description')): search = GameplanSearch() search.index_doc(self) def on_trash(self): self.update_tasks_count((- 1)) search = GameplanSearch() search.remove_doc(self) def update_tasks_count(self, delta=1): if (not self.project): return current_tasks_count = (frappe.db.get_value('GP Project', self.project, 'tasks_count') or 0) frappe.db.set_value('GP Project', self.project, 'tasks_count', (current_tasks_count + delta)) def update_project_progress(self): if (self.project and self.has_value_changed('is_completed')): frappe.get_doc('GP Project', self.project).update_progress() @frappe.whitelist() def track_visit(self): GPNotification.clear_notifications(task=self.name)
def main(): build_dir = 'gateware' platform = Platform() if ('load' in sys.argv[1:]): prog = platform.create_programmer() prog.load_bitstream(os.path.join(build_dir, 'impl', 'pnr', 'project.fs')) exit() if ('sim' in sys.argv[1:]): ring = RingSerialCtrl() run_simulation(ring, test(), clocks={'sys': (.0 / .0)}, vcd_name='sim.vcd') exit() design = Tuto(platform) platform.build(design, build_dir=build_dir)