code
stringlengths
281
23.7M
def test_remove_empties():
    """Only keys whose values are truly "empty" (None, '', [], empty arrays)
    are dropped by remove_empties(); falsy-but-meaningful values such as
    0, 0.0 and '0' must survive.
    """
    raw = {
        'good value': 'val',
        'empty string': '',
        'string 0': '0',
        'int 0': 0,
        'float 0': 0.0,
        'None': None,
        'good list': [2],
        'empty list': [],
        'list with empty string': [''],
        'list with 0': [0],
        'list with None': [None],
        'good numpy array': np.array([5.6]),
        'empty numpy array': np.empty(0),
        'numpy array with emtpy string': np.array(['']),
        'numpy array with 0': np.array([0]),
        'numpy array with None': np.array(None),
        'empty multidimensional array': np.array([['', None], [None, '']]),
        'multidimensional array with zero': np.array([['', None], [0, '']]),
    }
    survivors = [
        'good value',
        'string 0',
        'int 0',
        'float 0',
        'good list',
        'list with 0',
        'good numpy array',
        'numpy array with 0',
        'multidimensional array with zero',
    ]
    cleaned = remove_empties(raw)
    assert cleaned.keys() == set(survivors)
def paginate(client, path, params):
    """Yield every element from a paginated API, following 'next' links.

    Fetches the first page at *path* with *params*, then keeps requesting
    the URL found in each response's 'next' key until it is absent/falsy.
    """
    page = request(client, path, params=params)
    yield from page['elements']
    while page.get('next'):
        # The 'next' URL already encodes any query parameters.
        page = request(client, page.get('next'))
        yield from page['elements']
def test_logging_handler_emit_error_non_str_message(capsys, elasticapm_client):
    """When the internal _emit blows up, the handler must print the top-level
    error plus the original (non-str) log message to stderr instead of raising.
    """
    handler = LoggingHandler(elasticapm_client)
    handler._emit = lambda: 1 / 0  # force an internal failure
    record = LogRecord('x', 1, '/ab/c/', 10, ValueError('oh no'), (), None)
    handler.emit(record)
    out, err = capsys.readouterr()
    assert 'Top level ElasticAPM exception caught' in err
    assert 'oh no' in err
class PyperfResults:
    """Wrapper around raw pyperf benchmark-results data plus its backing file.

    NOTE(review): the collapsed source had lost all decorators.  The
    ``cls``-first signatures and the attribute-style accesses elsewhere in
    the class (``self.raw_benchmarks[0]``, ``self.by_bench[name]``,
    ``self.metadata``, ``self.suites``) show which methods were
    classmethods/properties; they are restored below — confirm against
    project history.
    """

    BENCHMARKS = Benchmarks()

    # Lazily-computed caches (set on first property access).
    _metadata: 'PyperfResultsMetadata'
    _uploadid: PyperfUploadID
    _by_bench: Mapping[str, Any]
    _by_suite: Mapping[SuiteType, Any]

    @classmethod
    def get_metadata_raw(cls, data) -> MutableMapping[str, str]:
        """Return the top-level "metadata" section of the pyperf JSON."""
        return data['metadata']

    @classmethod
    def iter_benchmarks_from_data(cls, data) -> Iterator[str]:
        """Yield each raw benchmark entry."""
        yield from data['benchmarks']

    @classmethod
    def get_benchmark_name(cls, benchdata) -> str:
        return benchdata['metadata']['name']

    @classmethod
    def get_benchmark_metadata_raw(cls, benchdata) -> MutableMapping[str, str]:
        return benchdata['metadata']

    @classmethod
    def iter_benchmark_runs_from_data(
        cls, benchdata,
    ) -> Iterator[Tuple[Mapping[str, str], List[float], List[float]]]:
        """Yield (metadata, warmups, values) per run; warmups/values may be None."""
        for rundata in benchdata['runs']:
            yield (rundata['metadata'], rundata.get('warmups'), rundata.get('values'))

    @classmethod
    def _validate_data(cls, data) -> None:
        """Raise if *data* is not a well-formed version-1.0 results payload."""
        if data['version'] == '1.0':
            for key in ('metadata', 'benchmarks', 'version'):
                if key not in data:
                    raise ValueError(f'invalid results data (missing {key})')
        else:
            raise NotImplementedError(data['version'])

    def __init__(self, data, resfile: Any):
        if not data:
            raise ValueError('missing data')
        if not resfile:
            raise ValueError('missing refile')
        self._validate_data(data)
        self._data = data
        self._resfile = PyperfResultsFile.from_raw(resfile)
        self._modified = False

    def _copy(self) -> 'PyperfResults':
        # Shallow copy sharing the same data/resfile objects; bypasses __init__.
        cls = type(self)
        copied = cls.__new__(cls)
        copied._data = self._data
        copied._resfile = self._resfile
        return copied

    def __eq__(self, other):
        raise NotImplementedError

    @property
    def data(self) -> Mapping[str, Any]:
        return self._data

    @property
    def raw(self) -> Mapping[str, Any]:
        return self._data

    @property
    def raw_metadata(self) -> Mapping[str, str]:
        return self._data['metadata']

    @property
    def raw_benchmarks(self) -> List[Mapping[str, Any]]:
        return self._data['benchmarks']

    @property
    def metadata(self) -> 'PyperfResultsMetadata':
        try:
            return self._metadata
        except AttributeError:
            self._metadata = PyperfResultsMetadata.from_full_results(self._data)
            return self._metadata

    @property
    def version(self) -> str:
        return self._data['version']

    @property
    def resfile(self) -> 'PyperfResultsFile':
        return self._resfile

    @property
    def filename(self) -> str:
        return self._resfile.filename

    @property
    def date(self) -> datetime.datetime:
        # Date of the very first run of the first benchmark.
        run0 = self.raw_benchmarks[0]['runs'][0]
        date = run0['metadata']['date']
        date, _ = _utils.get_utc_datetime(date)
        return date

    @property
    def uploadid(self) -> PyperfUploadID:
        try:
            return self._uploadid
        except AttributeError:
            self._uploadid = PyperfUploadID.from_metadata(self.metadata, suite=self.suites)
            # Sanity check: the computed ID must agree with the file's ID
            # (modulo an unknown suite), unless the data has been modified.
            assert (
                getattr(self, '_modified', False)
                or self._resfile.uploadid is None
                or self._uploadid == self._resfile.uploadid
                or (str(self._uploadid._replace(suite=PyperfUploadID.SUITE_NOT_KNOWN))
                    == str(self._resfile.uploadid._replace(suite=PyperfUploadID.SUITE_NOT_KNOWN)))
            ), (self._uploadid, self._resfile.uploadid)
            return self._uploadid

    @property
    def suite(self) -> str:
        return self.uploadid.suite

    @property
    def suites(self) -> Iterable[SuiteType]:
        return sorted(self.by_suite)

    @property
    def by_bench(self) -> Mapping[str, Any]:
        try:
            return self._by_bench
        except AttributeError:
            self._by_bench = dict(self._iter_benchmarks())
            return self._by_bench

    @property
    def by_suite(self) -> Mapping[SuiteType, Any]:
        try:
            return self._by_suite
        except AttributeError:
            self._by_suite = self._collate_suites()
            return self._by_suite

    def _collate_suites(self) -> Mapping[SuiteType, Any]:
        """Group benchmark data by (normalized) suite name."""
        by_suite: Dict[SuiteType, Any] = {}
        names = [n for (n, _) in self._iter_benchmarks()]
        if names:
            bench_suites = self.BENCHMARKS.get_suites(names, 'unknown')
            for name, suite in bench_suites.items():
                normalized_suite = PyperfUploadID.normalize_suite(suite)
                if normalized_suite not in by_suite:
                    by_suite[normalized_suite] = []
                data = self.by_bench[name]
                by_suite[normalized_suite].append(data)
        else:
            logger.warning(f'empty results {self}')
        return by_suite

    def _iter_benchmarks(self) -> Iterator[Tuple[str, Any]]:
        """Yield (name, raw benchmark data) pairs."""
        for benchdata in self.iter_benchmarks_from_data(self._data):
            name = self.get_benchmark_name(benchdata)
            yield (name, benchdata)

    def split_benchmarks(self) -> Mapping[SuiteType, 'PyperfResults']:
        """Split a multi-suite results object into one PyperfResults per suite."""
        if self.suite is not PyperfUploadID.MULTI_SUITE:
            assert self.suite is not PyperfUploadID.SUITE_NOT_KNOWN
            raise Exception(f'already split ({self.suite})')
        by_suite = {}
        for suite, benchmarks in self.by_suite.items():
            # Clone everything except the benchmarks list, then narrow it.
            by_suite[suite] = {k: v for k, v in self._data.items() if k != 'benchmarks'}
            by_suite[suite]['benchmarks'] = benchmarks
        by_suite_resolved = {}
        for suite, data in by_suite.items():
            results = self._copy()
            results._data = data
            results._by_suite = {suite: data['benchmarks'][0]}
            results._modified = True
            by_suite_resolved[suite] = results
        return by_suite_resolved

    def copy_to(self, filename: Union['PyperfResultsFile', str],
                resultsroot: Optional[str] = None, *,
                compressed: Optional[bool] = None) -> 'PyperfResults':
        """Copy these results to *filename*, returning the new PyperfResults."""
        if self._resfile is None:
            raise ValueError
        if isinstance(filename, PyperfResultsFile):
            filename_str = filename.filename
        else:
            filename_str = filename
        if (not self._modified) and os.path.exists(self._resfile.filename):
            # Unmodified on-disk results: a plain file copy suffices.
            resfile = self._resfile.copy_to(filename_str, resultsroot, compressed=compressed)
        else:
            # Modified (or missing source file): serialize the in-memory data.
            resfile = PyperfResultsFile(filename_str, resultsroot, compressed=compressed)
            resfile.write(self)
        copied = self._copy()
        copied._resfile = resfile
        return copied
def convert_document_file(desktop, input_file: str, output_file: str, output_ext: str,
                          remove_line_no: bool = False, remove_redline: bool = False,
                          remove_header_footer: bool = False):
    """Convert *input_file* to *output_file* via a UNO desktop instance.

    The output filter is chosen from *output_ext*; optional flags strip line
    numbering, tracked changes (redlines) and headers/footers before saving.
    Raises RuntimeError when the document cannot be loaded.
    """
    output_filter_name = FILTER_NAME_BY_EXT[output_ext]
    input_file_url = uno.systemPathToFileUrl(os.path.realpath(input_file))
    load_props = dict_to_property_values({'Hidden': True, 'ReadOnly': True})
    document = desktop.loadComponentFromURL(input_file_url, '_blank', 0, load_props)
    if not document:
        raise RuntimeError('failed to load document: %s' % input_file_url)
    try:
        if remove_line_no:
            document.getLineNumberingProperties().IsOn = False
        if remove_header_footer:
            disable_document_header_footer(document)
        if remove_redline:
            document.setPropertyValue('RedlineDisplayType', RedlineDisplayType.NONE)
        output_url = 'file://' + os.path.abspath(output_file)
        LOGGER.debug('output_url: %s', output_url)
        save_props = dict_to_property_values({'FilterName': output_filter_name})
        document.storeToURL(output_url, save_props)
    finally:
        # Always close the document, even if conversion failed.
        document.close(True)
class Worker(mode.Worker):
    """Faust application worker: wires the app, sensors and console spinner
    into a :pypi:`mode` worker and handles startup/shutdown presentation.
    """

    logger = logger

    app: AppT
    sensors: Set[SensorT]
    workdir: Path
    spinner: Optional[terminal.Spinner]
    _shutdown_immediately: bool = False

    def __init__(
        self,
        app: AppT,
        *services: ServiceT,
        sensors: Optional[Iterable[SensorT]] = None,
        debug: bool = DEBUG,
        quiet: bool = False,
        loglevel: Union[str, int, None] = None,
        logfile: Union[str, IO, None] = None,
        stdout: IO = sys.stdout,
        stderr: IO = sys.stderr,
        blocking_timeout: Optional[float] = None,
        workdir: Union[Path, str, None] = None,
        console_port: int = CONSOLE_PORT,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        redirect_stdouts: Optional[bool] = None,
        redirect_stdouts_level: Optional[Severity] = None,
        logging_config: Optional[Dict] = None,
        **kwargs: Any,
    ) -> None:
        self.app = app
        self.sensors = set(sensors or [])
        self.workdir = Path(workdir or Path.cwd())
        conf = app.conf
        # Fall back to app configuration when redirect settings are not given.
        if redirect_stdouts is None:
            redirect_stdouts = conf.worker_redirect_stdouts
        if redirect_stdouts_level is None:
            redirect_stdouts_level = conf.worker_redirect_stdouts_level or logging.INFO
        if logging_config is None and app.conf.logging_config:
            logging_config = dict(app.conf.logging_config)
        super().__init__(
            *services,
            debug=debug,
            quiet=quiet,
            loglevel=loglevel,
            logfile=logfile,
            loghandlers=app.conf.loghandlers,
            stdout=stdout,
            stderr=stderr,
            blocking_timeout=blocking_timeout or 0.0,
            console_port=console_port,
            redirect_stdouts=redirect_stdouts,
            redirect_stdouts_level=redirect_stdouts_level,
            logging_config=logging_config,
            loop=loop,
            **kwargs,
        )
        self.spinner = terminal.Spinner(file=self.stdout)

    async def on_start(self) -> None:
        """Mark the app as running inside a worker before starting."""
        self.app.in_worker = True
        await super().on_start()

    def _on_sigint(self) -> None:
        self._flag_as_shutdown_by_signal()
        super()._on_sigint()

    def _on_sigterm(self) -> None:
        self._flag_as_shutdown_by_signal()
        super()._on_sigterm()

    def _flag_as_shutdown_by_signal(self) -> None:
        # Remember we are shutting down due to a signal and stop the spinner.
        self._shutdown_immediately = True
        if self.spinner:
            self.spinner.stop()

    async def maybe_start_blockdetection(self) -> None:
        """Start the blocking detector only when a timeout is configured."""
        if self.blocking_timeout:
            await self.blocking_detector.maybe_start()

    async def on_startup_finished(self) -> None:
        if self._shutdown_immediately:
            return self._on_shutdown_immediately()
        await self.maybe_start_blockdetection()
        self._on_startup_end_spinner()

    def _on_startup_end_spinner(self) -> None:
        # Terminal gets a spinner finish; non-tty/quiet mode gets a log line.
        if self.spinner:
            self.spinner.finish()
            if self.spinner.file.isatty():
                self.say(' ')
            else:
                self.say(' OK ^')
        else:
            self.log.info('Ready')

    def _on_shutdown_immediately(self) -> None:
        self.say('')

    def on_init_dependencies(self) -> Iterable[ServiceT]:
        """Attach the app (and its sensors) as dependencies of this worker."""
        self.app.beacon.reattach(self.beacon)
        for sensor in self.sensors:
            self.app.sensors.add(sensor)
        self.app.on_startup_finished = self.on_startup_finished
        return chain(self.services, [self.app])

    async def on_first_start(self) -> None:
        self.change_workdir(self.workdir)
        await self.default_on_first_start()
        self.autodiscover()

    def change_workdir(self, path: Path) -> None:
        """chdir into *path* unless we are already there."""
        if path and path.absolute() != path.cwd().absolute():
            os.chdir(path.absolute())

    def autodiscover(self) -> None:
        if self.app.conf.autodiscover:
            self.app.discover()

    def _setproctitle(self, info: str, *, ident: str = PSIDENT) -> None:
        setproctitle(f'{ident} -{info}- {self._proc_ident()}')

    def _proc_ident(self) -> str:
        conf = self.app.conf
        return f'{conf.id} {self._proc_web_ident()} {conf.datadir.absolute()}'

    def _proc_web_ident(self) -> str:
        conf = self.app.conf
        if conf.web_transport.scheme == 'unix':
            return f'{conf.web_transport}'
        return f'-p {conf.web_port}'

    async def on_execute(self) -> None:
        self._setproctitle('init')
        if self.spinner and self.spinner.file.isatty():
            self._say('starting ', end='', flush=True)

    def on_worker_shutdown(self) -> None:
        self._setproctitle('stopping')
        if self.spinner and self.spinner.file.isatty():
            self.spinner.reset()
            self._say('stopping ', end='', flush=True)

    def on_setup_root_logger(self, logger: logging.Logger, level: int) -> None:
        self._disable_spinner_if_level_below_WARN(level)
        self._setup_spinner_handler(logger, level)

    def _disable_spinner_if_level_below_WARN(self, level: int) -> None:
        # Verbose logging and the spinner fight over the terminal; drop spinner.
        if level and level < logging.WARN:
            self.spinner = None

    def _setup_spinner_handler(self, logger: logging.Logger, level: int) -> None:
        if self.spinner:
            logger.handlers[0].setLevel(level)
            logger.addHandler(terminal.SpinnerHandler(self.spinner, level=logging.DEBUG))
            logger.setLevel(logging.DEBUG)
@pytest.fixture
def f_mock_calls():
    """Patch out heavy collaborators of MockBuilder and yield the
    GentlyTimeoutedPopen call list for assertions.

    NOTE(review): the decorator was mangled to ``_fixture`` in the collapsed
    source; ``@pytest.fixture`` restored.  Also fixed: the ``p_popen``
    patcher was started but never stopped, leaking the patch into
    subsequent tests.
    """
    p_popen = mock.patch('copr_rpmbuild.builders.mock.GentlyTimeoutedPopen')
    dummy_patchers = [
        mock.patch('copr_rpmbuild.builders.mock.MockBuilder.mock_clean'),
        mock.patch('copr_rpmbuild.builders.mock.shutil'),
        mock.patch('copr_rpmbuild.builders.mock.locate_spec',
                   new=mock.MagicMock(return_value='spec')),
        mock.patch('copr_rpmbuild.builders.mock.get_mock_uniqueext',
                   new=mock.MagicMock(return_value='0')),
    ]
    for patcher in dummy_patchers:
        patcher.start()
    yield_val = p_popen.start()
    yield_val.return_value = mock.MagicMock(returncode=0)
    yield yield_val.call_args_list
    for patcher in dummy_patchers:
        patcher.stop()
    # Bug fix: stop the Popen patcher too (previously leaked).
    p_popen.stop()
def main():
    """Compile DispatchProxyService.proto with grpc_tools.protoc.

    Paths are derived from this script's location; stubs are generated
    into the project root.  Returns the protoc exit status (0 on success,
    otherwise subprocess.check_call raises).
    """
    script_dir = Path(__file__).parent.resolve()
    proto_filedir = script_dir.parent.resolve()
    proto_filename = 'DispatchProxyService.proto'
    project_dir = proto_filedir.parent.parent.resolve()
    proto_filepath = proto_filedir / proto_filename
    # protoc search path and both output dirs are the project root.
    cmd = [
        sys.executable, '-m', 'grpc_tools.protoc',
        f'--proto_path={project_dir}',
        f'--python_out={project_dir}',
        f'--grpc_python_out={project_dir}',
        f'{proto_filepath}',
    ]
    return subprocess.check_call(cmd, cwd=project_dir)
def extractKtlchamberWordpressCom(item):
    """Build a release message for known series on ktlchamber.wordpress.com.

    Returns None for previews/untagged chapters, a release message for a
    recognized series tag, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip items with no chapter/volume info, and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('God of Music', 'God of Music', 'translated'),
        ('Possessing Nothing', 'Possessing Nothing', 'translated'),
        ('One Man Army', 'One Man Army', 'translated'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class NetworkDBComponent(AsyncioIsolatedComponent):
    """Runs the blacklist and ETH1 peer-tracking database servers.

    NOTE(review): decorators were lost in the collapsed source.  The
    ``cls``-first signatures mark the classmethods, and the ``_tuple``
    residue on ``_get_services`` was presumably eth-utils' ``@to_tuple``
    (materializes the generator before ``do_run`` awaits the services) —
    confirm both against project history.
    """

    name = 'Network Database'
    endpoint_name = 'network-db'

    def is_enabled(self) -> bool:
        return not self._boot_info.args.disable_networkdb_component

    @classmethod
    def configure_parser(cls, arg_parser: ArgumentParser, subparser: _SubParsersAction) -> None:
        """Register network-db CLI flags and the remove-network-db subcommand."""
        tracking_parser = arg_parser.add_argument_group('network db')
        tracking_parser.add_argument(
            '--network-tracking-backend',
            help='Configure whether nodes are tracked and how. (sqlite3: persistent tracking across runs from an on-disk sqlite3 database, memory: tracking only in memory, do-not-track: no tracking)',
            action=NormalizeTrackingBackend,
            choices=('sqlite3', 'memory', 'do-not-track'),
            default=TrackingBackend.SQLITE3,
            type=str,
        )
        tracking_parser.add_argument(
            '--disable-networkdb-component',
            help="Disables the builtin 'Network Database' component. **WARNING**: disabling this API without a proper replacement will cause your trinity node to crash.",
            action='store_true',
        )
        tracking_parser.add_argument(
            '--disable-blacklistdb',
            help='Disables the blacklist database server component of the Network Database component. **WARNING**: disabling this API without a proper replacement will cause your trinity node to crash.',
            action='store_true',
        )
        tracking_parser.add_argument(
            '--disable-eth1-peer-db',
            help='Disables the ETH1.0 peer database server component of the Network Database component. **WARNING**: disabling this API without a proper replacement will cause your trinity node to crash.',
            action='store_true',
        )
        tracking_parser.add_argument(
            '--enable-experimental-eth1-peer-tracking',
            help='Enables the experimental tracking of metadata about successful connections to Eth1 peers.',
            action='store_true',
        )
        remove_db_parser = subparser.add_parser(
            'remove-network-db',
            help='Remove the on-disk sqlite database that tracks data about the p2p network',
        )
        remove_db_parser.set_defaults(func=cls.clear_enr_db)

    @classmethod
    def validate_cli(cls, boot_info: BootInfo) -> None:
        """Fail fast if the on-disk network database cannot be loaded."""
        try:
            get_tracking_database(get_networkdb_path(boot_info.trinity_config))
        except BadDatabaseError as err:
            raise ValidationError(
                f'Error loading network database. Trying removing database with `remove-network-db` command: {err}'
            ) from err

    @classmethod
    def clear_enr_db(cls, args: Namespace, trinity_config: TrinityConfig) -> None:
        """Delete the on-disk network database, if present."""
        db_path = get_networkdb_path(trinity_config)
        if db_path.exists():
            cls.logger.info('Removing network database at: %s', db_path.resolve())
            db_path.unlink()
        else:
            cls.logger.info('No network database found at: %s', db_path.resolve())

    # Cached sqlite session shared by blacklist and eth1 trackers.
    _session: ClassVar[Session] = None

    @classmethod
    def _get_database_session(cls, boot_info: BootInfo) -> Session:
        if cls._session is None:
            cls._session = get_tracking_database(get_networkdb_path(boot_info.trinity_config))
        return cls._session

    @classmethod
    def _get_blacklist_tracker(cls, boot_info: BootInfo) -> BaseConnectionTracker:
        """Pick the blacklist tracker implementation for the chosen backend."""
        backend = boot_info.args.network_tracking_backend
        if backend is TrackingBackend.SQLITE3:
            session = cls._get_database_session(boot_info)
            return SQLiteConnectionTracker(session)
        elif backend is TrackingBackend.MEMORY:
            return MemoryConnectionTracker()
        elif backend is TrackingBackend.DO_NOT_TRACK:
            return NoopConnectionTracker()
        else:
            raise Exception(f'INVARIANT: {backend}')

    @classmethod
    def _get_blacklist_service(cls, boot_info: BootInfo,
                               event_bus: EndpointAPI) -> ConnectionTrackerServer:
        tracker = cls._get_blacklist_tracker(boot_info)
        return ConnectionTrackerServer(event_bus=event_bus, tracker=tracker)

    @classmethod
    def _get_eth1_tracker(cls, boot_info: BootInfo) -> BaseEth1PeerTracker:
        """Pick the eth1 peer tracker; no-op unless experimental flag is set."""
        if not boot_info.args.enable_experimental_eth1_peer_tracking:
            return NoopEth1PeerTracker()
        backend = boot_info.args.network_tracking_backend
        if backend is TrackingBackend.SQLITE3:
            session = cls._get_database_session(boot_info)
            # Only track eth/63 peers for now.
            protocols = ('eth',)
            protocol_versions = (63,)
            return SQLiteEth1PeerTracker(
                session,
                network_id=boot_info.trinity_config.network_id,
                protocols=protocols,
                protocol_versions=protocol_versions,
            )
        elif backend is TrackingBackend.MEMORY:
            return MemoryEth1PeerTracker()
        elif backend is TrackingBackend.DO_NOT_TRACK:
            return NoopEth1PeerTracker()
        else:
            raise Exception(f'INVARIANT: {backend}')

    @classmethod
    def _get_eth1_peer_server(cls, boot_info: BootInfo,
                              event_bus: EndpointAPI) -> PeerDBServer:
        tracker = cls._get_eth1_tracker(boot_info)
        return PeerDBServer(event_bus=event_bus, tracker=tracker)

    @classmethod
    @to_tuple
    def _get_services(cls, boot_info: BootInfo,
                      event_bus: EndpointAPI) -> Iterable[Service]:
        """Yield the services that should run, honoring the disable flags."""
        if boot_info.args.disable_blacklistdb:
            cls.logger.warning('Blacklist Database disabled via CLI flag')
            return
        else:
            yield cls._get_blacklist_service(boot_info, event_bus)
        if boot_info.args.disable_eth1_peer_db:
            cls.logger.warning('ETH1 Peer Database disabled via CLI flag')
        else:
            yield cls._get_eth1_peer_server(boot_info, event_bus)

    async def do_run(self, event_bus: EndpointAPI) -> None:
        """Run the enabled tracker services until cancelled."""
        boot_info = self._boot_info
        try:
            tracker_services = self._get_services(boot_info, event_bus)
        except BadDatabaseError as err:
            self.logger.exception(f'Unrecoverable error in Network Component: {err}')
        await run_background_asyncio_services(tracker_services)
@dataclass
class BluetoothLEAdvertisement:
    """Parsed BLE advertisement built from a BluetoothLEAdvertisementResponse.

    NOTE(review): the decorator was mangled to ``_dataclass_decorator`` in the
    collapsed source; plain ``@dataclass`` restored — confirm frozen/slots
    flags against project history.  ``from_pb`` clearly takes the class first
    (alternate constructor), so ``@classmethod`` was restored as well.
    """

    address: int
    rssi: int
    address_type: int
    name: str
    service_uuids: list[str]
    service_data: dict[str, bytes]
    manufacturer_data: dict[int, bytes]

    @classmethod
    def from_pb(cls, data: 'BluetoothLEAdvertisementResponse') -> 'BluetoothLEAdvertisement':
        """Decode the protobuf response into a BluetoothLEAdvertisement.

        Each of manufacturer/service data comes in either the new ``data``
        form or the ``legacy_data`` (list of ints) form; the first entry
        decides which decoding applies to the whole list.
        """
        _uuid_convert = _cached_uuid_converter
        if (raw_manufacturer_data := data.manufacturer_data):
            if raw_manufacturer_data[0].data:
                manufacturer_data = {int(v.uuid, 16): v.data for v in raw_manufacturer_data}
            else:
                manufacturer_data = {int(v.uuid, 16): bytes(v.legacy_data) for v in raw_manufacturer_data}
        else:
            manufacturer_data = {}
        if (raw_service_data := data.service_data):
            if raw_service_data[0].data:
                service_data = {_uuid_convert(v.uuid): v.data for v in raw_service_data}
            else:
                service_data = {_uuid_convert(v.uuid): bytes(v.legacy_data) for v in raw_service_data}
        else:
            service_data = {}
        if (raw_service_uuids := data.service_uuids):
            service_uuids = [_uuid_convert(v) for v in raw_service_uuids]
        else:
            service_uuids = []
        return cls(
            address=data.address,
            rssi=data.rssi,
            address_type=data.address_type,
            name=data.name.decode('utf-8', errors='replace'),
            service_uuids=service_uuids,
            service_data=service_data,
            manufacturer_data=manufacturer_data,
        )
# NOTE(review): the decorator was mangled to a bare tuple in the collapsed
# source; @pytest.mark.parametrize restored.
@pytest.mark.parametrize(
    'patterns,query,expected',
    [
        (['^a/.*'], 'a/', True),
        (['^a/.*'], 'a\\', True),
        (['^/foo/bar/.*'], '/foo/bar/blag', True),
        (['^/foo/bar/.*'], '\\foo\\bar/blag', True),
    ],
)
def test_matches(patterns: List[str], query: str, expected: bool) -> None:
    """matches() should accept both slash styles against the same pattern."""
    ret = matches(patterns, query)
    assert ret == expected
def test_upsert_existing_record():
    """Upserting by external id on an existing record returns the mocked
    204 response body and status.
    """
    for canned in ('login_response_200', 'api_version_response_200',
                   'upsert_existing_record_response_204'):
        testutil.add_response(canned)
    client = testutil.get_client()
    sobject = client.sobjects(id='999', object_type='Upsert_Object__c',
                              external_id='External_Field__c')
    update_result = sobject.upsert({'Name': 'Test Upsert Name'})
    expected_body = testutil.mock_responses['upsert_existing_record_response_204']['body']
    assert update_result[0] == expected_body
    assert update_result[1].status == 204
def test_align_get_align_summary_data(o_dir, e_dir, request):
    """Run phyluce_align_get_align_summary_data and compare its CSV output
    against the expected fixture, row by row (keyed on the first column).

    Fixes two assertion bugs from the original: ``assert cond, print(...)``
    produced a ``None`` failure message, and ``assert output`` asserted a
    non-empty path *string* (always true) instead of the file's existence.
    """
    program = 'bin/align/phyluce_align_get_align_summary_data'
    output = os.path.join(o_dir, 'gblocks-clean-align-summary.csv')
    cmd = [
        os.path.join(request.config.rootdir, program),
        '--alignments', os.path.join(e_dir, 'mafft-gblocks-clean'),
        '--input-format', 'nexus',
        '--output-stats', output,
    ]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    # Attach stderr to the failure message directly (was: print(), msg None).
    assert proc.returncode == 0, stderr.decode('utf-8')
    # Check the file actually exists (was: truthy path string, never failed).
    assert os.path.exists(output), 'There are no output files'

    def _read_csv_map(path):
        # Map first column -> remainder of the row for order-insensitive compare.
        rows = {}
        with open(path) as fh:
            for line in fh:
                parts = line.strip().split(',')
                rows[parts[0]] = ','.join(parts[1:])
        return rows

    output_dict = _read_csv_map(output)
    expected_dict = _read_csv_map(os.path.join(e_dir, 'gblocks-clean-align-summary.csv'))
    for key, value in output_dict.items():
        assert expected_dict[key] == value
def modify_span_sqs_pre(span, args, kwargs) -> None:
    """Inject distributed-tracing headers into outgoing SQS operations.

    For Send operations, adds traceparent/tracestate message attributes
    (unless that would exceed the SQS attribute limit); for ReceiveMessage,
    asks SQS to return those attributes.
    """
    operation_name = kwargs.get('operation_name', args[0])
    if span.id:
        trace_parent = span.transaction.trace_parent.copy_from(span_id=span.id)
    else:
        # Dropped span: propagate the transaction's context instead.
        transaction = execution_context.get_transaction()
        trace_parent = transaction.trace_parent.copy_from(span_id=transaction.id)
    attributes = {
        constants.TRACEPARENT_HEADER_NAME: {
            'DataType': 'String',
            'StringValue': trace_parent.to_string(),
        },
    }
    if trace_parent.tracestate:
        attributes[constants.TRACESTATE_HEADER_NAME] = {
            'DataType': 'String',
            'StringValue': trace_parent.tracestate,
        }
    if len(args) > 1:
        if operation_name in ('SendMessage', 'SendMessageBatch'):
            attributes_count = len(attributes)
            if operation_name == 'SendMessage':
                messages = [args[1]]
            else:
                messages = args[1]['Entries']
            for message in messages:
                message['MessageAttributes'] = message.get('MessageAttributes') or {}
                if len(message['MessageAttributes']) + attributes_count <= SQS_MAX_ATTRIBUTES:
                    message['MessageAttributes'].update(attributes)
                else:
                    logger.info('Not adding disttracing headers to message due to attribute limit reached')
        elif operation_name == 'ReceiveMessage':
            message_attributes = args[1].setdefault('MessageAttributeNames', [])
            if 'All' not in message_attributes:
                message_attributes.extend(
                    [constants.TRACEPARENT_HEADER_NAME, constants.TRACESTATE_HEADER_NAME])
# NOTE(review): the three class decorators were mangled to bare strings in
# the collapsed source; @mock.patch restored in the same order.
@mock.patch('aea.cli.remove.shutil.rmtree')
@mock.patch('aea.cli.remove.Path.exists', return_value=False)
@mock.patch('aea.cli.remove.try_to_load_agent_config')
class RemoveItemTestCase(TestCase):
    """Tests for remove_item when the package folder is missing."""

    def test_remove_item_item_folder_not_exists(self, *mocks):
        """remove_item must raise ClickException if the folder does not exist."""
        public_id = PublicIdMock.from_str('author/name:0.1.0')
        with pytest.raises(ClickException, match='Can not find folder for the package'):
            remove_item(ContextMock(protocols=[public_id]), 'protocol', public_id)
def test_time_based_gas_price_strategy_without_transactions():
    """With no transactions in the sampled blocks, the time-based strategy
    should fall back to the node's eth_gasPrice value.
    """
    fixture_middleware = construct_result_generator_middleware({
        'eth_getBlockByHash': _get_initial_block,
        'eth_getBlockByNumber': _get_initial_block,
        'eth_gasPrice': _get_gas_price,
    })
    w3 = Web3(provider=BaseProvider(), middlewares=[fixture_middleware])
    strategy = construct_time_based_gas_price_strategy(
        max_wait_seconds=80, sample_size=5, probability=50, weighted=True)
    w3.eth.set_gas_price_strategy(strategy)
    actual = w3.eth.generate_gas_price()
    assert actual == w3.eth.gas_price
class Box(Component):
    """A filled rectangle that can be dragged with the left mouse button.

    Uses two event states: 'normal' (idle) and 'moving' (dragging).
    """

    normal_pointer = Pointer('arrow')
    moving_pointer = Pointer('hand')
    # Cursor offset from the box origin, captured at drag start.
    offset_x = Float
    offset_y = Float
    fill_color = (0.8, 0.0, 0.1, 1.0)
    moving_color = (0.0, 0.8, 0.1, 1.0)
    resizable = ''

    def _draw_mainlayer(self, gc, view_bounds=None, mode='default'):
        """Paint the box, clipped to its own bounds."""
        with gc:
            gc.set_fill_color(self.fill_color)
            width, height = self.bounds
            x, y = self.position
            gc.clip_to_rect(x, y, width, height)
            gc.rect(x, y, width, height)
            gc.fill_path()

    def normal_left_down(self, event):
        """Enter 'moving' state and grab the mouse."""
        self.event_state = 'moving'
        event.window.set_pointer(self.moving_pointer)
        event.window.set_mouse_owner(self, event.net_transform())
        self.offset_x = event.x - self.x
        self.offset_y = event.y - self.y
        event.handled = True

    def moving_mouse_move(self, event):
        """Follow the cursor while dragging, preserving the grab offset."""
        self.position = [event.x - self.offset_x, event.y - self.offset_y]
        event.handled = True
        self.request_redraw()

    def moving_left_up(self, event):
        """Drop the box: restore pointer, release the mouse, redraw."""
        self.event_state = 'normal'
        event.window.set_pointer(self.normal_pointer)
        event.window.set_mouse_owner(None)
        event.handled = True
        self.request_redraw()

    def moving_mouse_leave(self, event):
        """Leaving the window while dragging ends the drag."""
        self.moving_left_up(event)
        event.handled = True
class TestDummyWeatherStationData(BaseSkillTestCase):
    """Tests for the weather_station skill's dummy Forecast data source.

    NOTE(review): the collapsed source had lost decorators; the ``cls``-first
    signature of ``setup`` indicates it was a @classmethod (the
    BaseSkillTestCase convention) — restored; confirm against project history.
    """

    path_to_skill = Path(ROOT_DIR, 'packages', 'fetchai', 'skills', 'weather_station')

    @classmethod
    def setup(cls):
        super().setup()
        cls.forecast = Forecast()

    def test_generate(self):
        """generate(n) should call add_data once per requested datum."""
        with patch.object(self.forecast, 'add_data') as mock_add:
            self.forecast.generate(2)
        assert mock_add.call_count == 2

    def test_add_data(self):
        """add_data should insert one row and close cursor/connection."""
        # The timestamp key plays the role of the 'idx' column.
        tagged_data = {
            'abs_pressure': 100,
            'delay': 20,
            'hum_in': 20,
            'hum_out': 20,
            int((datetime.datetime.now() - datetime.datetime.fromtimestamp(0)).total_seconds()): 20,
            'rain': 20,
            'temp_in': 20,
            'temp_out': 20,
            'wind_ave': 20,
            'wind_dir': 20,
            'wind_gust': 20,
        }
        mocked_conn = Mock(wrap=sqlite3.Connection)
        mocked_cursor = Mock(wraps=sqlite3.Cursor)
        with patch('sqlite3.connect', return_value=mocked_conn) as mock_conn, \
                patch.object(mocked_conn, 'cursor', return_value=mocked_cursor) as mock_curs, \
                patch.object(mocked_cursor, 'execute') as mock_exe, \
                patch.object(logger, 'info') as mock_logger, \
                patch.object(mocked_cursor, 'close') as mock_cur_close, \
                patch.object(mocked_conn, 'commit') as mock_con_commit, \
                patch.object(mocked_conn, 'close') as mock_con_close:
            self.forecast.add_data(tagged_data)
        mock_conn.assert_called_once()
        mock_curs.assert_called_once()
        mock_exe.assert_any_call(
            'INSERT INTO data(abs_pressure,\n delay,\n hum_in,\n hum_out,\n idx,\n rain,\n temp_in,\n temp_out,\n wind_ave,\n wind_dir,\n wind_gust) VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
            (
                tagged_data['abs_pressure'],
                tagged_data['delay'],
                tagged_data['hum_in'],
                tagged_data['hum_out'],
                int((datetime.datetime.now() - datetime.datetime.fromtimestamp(0)).total_seconds()),
                tagged_data['rain'],
                tagged_data['temp_in'],
                tagged_data['temp_out'],
                tagged_data['wind_ave'],
                tagged_data['wind_dir'],
                tagged_data['wind_gust'],
            ),
        )
        mock_logger.assert_any_call('Wheather station: I added data in the db!')
        mock_cur_close.assert_called_once()
        mock_con_commit.assert_called_once()
        mock_con_close.assert_called_once()
class SACLearner(core.Learner):
    """Soft Actor-Critic learner with twin critics, a target critic network,
    and optional adaptive entropy temperature (alpha).
    """

    def __init__(self,
                 policy_network: networks_lib.FeedForwardNetwork,
                 critic_network: networks_lib.FeedForwardNetwork,
                 random_key: jax_types.PRNGKey,
                 dataset: Iterator[reverb.ReplaySample],
                 policy_optimizer: optax.GradientTransformation,
                 critic_optimizer: optax.GradientTransformation,
                 alpha_optimizer: Optional[optax.GradientTransformation] = None,
                 entropy_coefficient: Optional[float] = None,
                 target_entropy: float = 0,
                 discount: float = 0.99,
                 tau: float = 0.005,
                 init_alpha: float = 1.0,
                 logger: Optional[loggers.Logger] = None,
                 counter: Optional[counting.Counter] = None):
        # Learn alpha automatically unless a fixed coefficient was provided.
        adaptive_entropy_coefficient = entropy_coefficient is None
        if adaptive_entropy_coefficient:
            alpha_optimizer = alpha_optimizer or optax.adam(0.0003)
        elif target_entropy:
            raise ValueError('target_entropy should not be set when entropy_coefficient is provided')

        def actor_loss(policy_params, critic_params, alpha, transitions, key):
            # Policy objective: alpha * log pi - min(Q1, Q2); also report entropy.
            action_dist = policy_network.apply(policy_params, transitions.observation)
            actions = action_dist.sample(seed=key)
            log_probs = action_dist.log_prob(actions)
            q1, q2 = critic_network.apply(critic_params, transitions.observation, actions)
            q = jnp.minimum(q1, q2)
            entropy = -log_probs.mean()
            actor_loss = (alpha * log_probs) - q
            return jnp.mean(actor_loss), {'entropy': entropy}

        def critic_loss(critic_params, critic_target_params, policy_params,
                        alpha, transitions, key):
            # Soft Bellman target from the target critic and fresh next actions.
            next_action_dist = policy_network.apply(policy_params, transitions.next_observation)
            next_actions = next_action_dist.sample(seed=key)
            next_log_probs = next_action_dist.log_prob(next_actions)
            next_q1, next_q2 = critic_network.apply(
                critic_target_params, transitions.next_observation, next_actions)
            next_q = jnp.minimum(next_q1, next_q2) - (alpha * next_log_probs)
            target = jax.lax.stop_gradient(
                transitions.reward + (transitions.discount * discount) * next_q)
            q1, q2 = critic_network.apply(
                critic_params, transitions.observation, transitions.action)
            critic_loss = jnp.square(target - q1) + jnp.square(target - q2)
            return jnp.mean(critic_loss), {'q1': q1.mean(), 'q2': q2.mean()}

        def alpha_loss(log_alpha, entropy):
            # Drive entropy towards target_entropy by adjusting log(alpha).
            return log_alpha * (entropy - target_entropy), ()

        def sgd_step(state, transitions):
            if adaptive_entropy_coefficient:
                alpha = jnp.exp(state.alpha_params)
            else:
                alpha = entropy_coefficient
            critic_key, policy_key, key = jax.random.split(state.key, 3)

            # Critic update.
            (critic_loss_value, critic_metrics), critic_grad = jax.value_and_grad(
                critic_loss, has_aux=True)(
                    state.critic_params, state.critic_target_params,
                    state.policy_params, alpha, transitions, critic_key)
            critic_updates, critic_optimizer_state = critic_optimizer.update(
                critic_grad, state.critic_optimizer_state)
            critic_params = optax.apply_updates(state.critic_params, critic_updates)

            # Actor update (uses the freshly-updated critic).
            (actor_loss_value, actor_metrics), policy_grad = jax.value_and_grad(
                actor_loss, has_aux=True)(
                    state.policy_params, critic_params, alpha, transitions, policy_key)
            policy_updates, policy_optimizer_state = policy_optimizer.update(
                policy_grad, state.policy_optimizer_state)
            policy_params = optax.apply_updates(state.policy_params, policy_updates)

            # Polyak averaging for the target critic.
            critic_target_params = optax.incremental_update(
                critic_params, state.critic_target_params, tau)

            metrics = {
                'critic_loss': critic_loss_value,
                'actor_loss': actor_loss_value,
                **critic_metrics,
                **actor_metrics,
            }
            new_state = TrainingState(
                policy_params=policy_params,
                critic_params=critic_params,
                critic_target_params=critic_target_params,
                policy_optimizer_state=policy_optimizer_state,
                critic_optimizer_state=critic_optimizer_state,
                key=key)
            if adaptive_entropy_coefficient:
                # Temperature update from the actor's measured entropy.
                (alpha_loss_value, _), alpha_grad = jax.value_and_grad(
                    alpha_loss, has_aux=True)(state.alpha_params, actor_metrics['entropy'])
                alpha_updates, alpha_optimizer_state = alpha_optimizer.update(
                    alpha_grad, state.alpha_optimizer_state)
                alpha_params = optax.apply_updates(state.alpha_params, alpha_updates)
                metrics.update({'alpha_loss': alpha_loss_value, 'alpha': jnp.exp(alpha_params)})
                new_state = new_state._replace(
                    alpha_params=alpha_params, alpha_optimizer_state=alpha_optimizer_state)
            return new_state, metrics

        self._sgd_step = jax.jit(sgd_step)

        def init_state(key):
            # NOTE(review): splits `random_key` (the ctor arg), not `key` — kept as-is.
            init_policy_key, init_critic_key, key = jax.random.split(random_key, 3)
            init_policy_params = policy_network.init(init_policy_key)
            init_critic_params = critic_network.init(init_critic_key)
            init_policy_optimizer_state = policy_optimizer.init(init_policy_params)
            init_critic_optimizer_state = critic_optimizer.init(init_critic_params)
            state = TrainingState(
                policy_params=init_policy_params,
                critic_params=init_critic_params,
                critic_target_params=init_critic_params,
                policy_optimizer_state=init_policy_optimizer_state,
                critic_optimizer_state=init_critic_optimizer_state,
                key=key)
            if adaptive_entropy_coefficient:
                init_alpha_params = jnp.array(np.log(init_alpha), dtype=jnp.float32)
                init_alpha_optimizer_state = alpha_optimizer.init(init_alpha_params)
                state = state._replace(
                    alpha_params=init_alpha_params,
                    alpha_optimizer_state=init_alpha_optimizer_state)
            return state

        self._state = init_state(random_key)
        self._iterator = dataset
        self._logger = logger or loggers.make_default_logger(label='learner', save_data=False)
        self._counter = counter or counting.Counter()
        self._timestamp = None

    def step(self):
        """Consume one replay sample, run an SGD step and log metrics."""
        sample = next(self._iterator)
        transitions: types.Transition = sample.data
        self._state, metrics = self._sgd_step(self._state, transitions)
        timestamp = time.time()
        elapsed_time = (timestamp - self._timestamp) if self._timestamp else 0
        self._timestamp = timestamp
        counts = self._counter.increment(steps=1, walltime=elapsed_time)
        self._logger.write({**metrics, **counts})

    def get_variables(self, names):
        variables = {
            'policy': self._state.policy_params,
            'critic': self._state.critic_params,
        }
        return [variables[name] for name in names]

    def save(self) -> TrainingState:
        return self._state

    def restore(self, state: TrainingState):
        self._state = state
class TestOFPActionPushVlan(unittest.TestCase):
    """OFPActionPushVlan: construction, wire parsing and serialization."""

    type_ = ofproto.OFPAT_PUSH_VLAN
    len_ = ofproto.OFP_ACTION_PUSH_SIZE
    fmt = ofproto.OFP_ACTION_PUSH_PACK_STR

    def test_init(self):
        # 33024 == 0x8100, the 802.1Q VLAN TPID.
        ethertype = 33024
        action = OFPActionPushVlan(ethertype)
        eq_(ethertype, action.ethertype)

    def _test_parser(self, ethertype):
        # Pack a wire-format action and check the parsed fields round-trip.
        wire = pack(self.fmt, self.type_, self.len_, ethertype)
        parsed = OFPActionPushVlan.parser(wire, 0)
        eq_(parsed.type, self.type_)
        eq_(parsed.len, self.len_)
        eq_(parsed.ethertype, ethertype)

    def test_parser_mid(self):
        self._test_parser(33024)

    def test_parser_max(self):
        self._test_parser(65535)

    def test_parser_min(self):
        self._test_parser(0)

    def _test_serialize(self, ethertype):
        # Serialize into a fresh buffer and unpack it back for comparison.
        action = OFPActionPushVlan(ethertype)
        wire = bytearray()
        action.serialize(wire, 0)
        unpacked = struct.unpack(self.fmt, six.binary_type(wire))
        eq_(unpacked[0], self.type_)
        eq_(unpacked[1], self.len_)
        eq_(unpacked[2], ethertype)

    def test_serialize_mid(self):
        self._test_serialize(33024)

    def test_serialize_max(self):
        self._test_serialize(65535)

    def test_serialize_min(self):
        self._test_serialize(0)
class ByteBuffer(Buffer):
    """Buffer of byte cells supporting bitmask and exact-value scans.

    Each scan walks from a start position to the end of ``self._memory``
    and returns the index of the LAST matching cell (0 when nothing
    matches), mirroring the original contract.
    """

    def __init__(self, name='ByteBuffer', clock=None):
        Buffer.__init__(self, name, clock)

    def check_set(self, search_byte, search_start_position=0):
        """Index of the last cell with any bit of ``search_byte`` set."""
        match_idx = 0
        for idx in range(search_start_position, len(self._memory)):
            if self._memory[idx][0] & search_byte:
                match_idx = idx
        return match_idx

    def check_unset(self, search_byte, search_start_position=0):
        """Index of the last cell with any bit of ``search_byte`` clear."""
        match_idx = 0
        for idx in range(search_start_position, len(self._memory)):
            # Invert the byte, then test against the mask.
            if (self._memory[idx][0] ^ 255) & search_byte:
                match_idx = idx
        return match_idx

    def check_value(self, value, search_start_position=0):
        """Index of the last cell equal to ``value``."""
        match_idx = 0
        for idx in range(search_start_position, len(self._memory)):
            if self._memory[idx][0] == value:
                match_idx = idx
        return match_idx
class TestAllTypesTOML(AllTypes):
    # Runs the shared AllTypes assertions against a TOML-backed config.
    def arg_builder(monkeypatch):
        # NOTE(review): no `self` parameter and no visible @staticmethod /
        # @pytest.fixture decorator -- presumably a fixture whose decorator
        # was stripped from this view; confirm against the original file.
        with monkeypatch.context() as m:
            # Point argv at the TOML config file consumed by ConfigArgBuilder.
            m.setattr(sys, 'argv', ['', '--config', './tests/conf/toml/test.toml'])
            config = ConfigArgBuilder(*all_configs, desc='Test Builder')
            return config.generate()
# Extract full image URLs for a gallery page from embedded page data.
# NOTE(review): the `base = '` literal appears truncated/redacted in this
# view (the base-URL constant is cut off), leaving the line syntactically
# broken -- recover the original constant before relying on this code.
# NOTE(review): `eval` on scraped page content is dangerous if the input
# is untrusted; consider a safe parser.
def get_images(html, url): base = ' s = re.search("page = '';\\s+([^\\n]+)", html).group(1) pages = eval((s + '; pages')) pages = re.search('"page_url":"([^"]+)', pages).group(1) pages = re.split('\r?\n', pages) return [(base + page) for page in pages if (page and (not page.lower().endswith('thumbs.db')))]
def personalize_template(template_contents, output_dir, user_settings, sbref_nii, dtseries_sm):
    """Write a subject-specific QC scene file and return its path.

    The filename encodes the QC mode and subject id from user_settings;
    the template text is personalized via modify_template_contents.
    """
    scene_name = 'qc{}_{}.scene'.format(user_settings.qc_mode, user_settings.subject)
    scene_path = os.path.join(output_dir, scene_name)
    with open(scene_path, 'w') as out_stream:
        personalized = modify_template_contents(template_contents, user_settings, scene_path, sbref_nii, dtseries_sm)
        out_stream.write(personalized)
    return scene_path
class OptionPlotoptionsHeatmapOnpoint(Options):
    # Generated Highcharts option wrapper for plotOptions.heatmap.onPoint.
    # NOTE(review): `id` is defined twice (getter then setter); the
    # @property / @id.setter decorators were presumably stripped from this
    # view -- as plain defs the second would shadow the first. Confirm
    # against the generated original.

    def connectorOptions(self) -> 'OptionPlotoptionsHeatmapOnpointConnectoroptions':
        # Lazily-built sub-option object for the connector line.
        return self._config_sub_data('connectorOptions', OptionPlotoptionsHeatmapOnpointConnectoroptions)

    def id(self):
        # Getter: no default value.
        return self._config_get(None)

    def id(self, text: str):
        # Setter: stored as a plain (non-JS) value.
        self._config(text, js_type=False)

    def position(self) -> 'OptionPlotoptionsHeatmapOnpointPosition':
        # Lazily-built sub-option object for the point position.
        return self._config_sub_data('position', OptionPlotoptionsHeatmapOnpointPosition)
def exposed_purge_squatter_content():
    """Scan every stored web page for domain-squatter content and dump the
    offending rows (id, url, netloc) to ``dump.json``.

    Iterates the whole ``web_pages`` table in netloc order; rows whose
    content triggers GarbageDomainSquatterException are recorded, not
    deleted.
    """
    # Processor built with mostly-dummy arguments; only checkSquatters()
    # is used below.
    proc = WebMirror.processor.HtmlProcessor.HtmlPageProcessor(baseUrls=None, pageUrl=None, pgContent=True, loggerPath=None, relinkable=None, stripTitle=None, destyle=None, preserveAttrs=None, decompose_svg=None, decompose=[], decomposeBefore=[])
    engine = WebMirror.Engine.SiteArchiver(None, None, None)  # NOTE(review): created but never used
    with db.session_context() as sess:
        print('Querying for count')
        count = sess.query(db.WebPages.id).count()
        # Fix: message had a typo ('Tital') and never showed the number.
        print('Total count:', count)
        print('Querying for rows')
        # Stream rows to keep memory bounded on large tables.
        iterable = sess.query(db.WebPages.id, db.WebPages.url, db.WebPages.netloc, db.WebPages.content).order_by(db.WebPages.netloc).yield_per(1000)
        skipped = []
        for (rid, url, netloc, content) in tqdm.tqdm(iterable, total=count):
            try:
                if content:
                    proc.checkSquatters(content)
            except GarbageDomainSquatterException:
                print('Squatter page: ', url)
                skipped.append((rid, url, netloc))
    with open('dump.json', 'w') as fp:
        json.dump(skipped, fp)
def main():
    """Symlink the vendored elementary dbt package to a local checkout.

    Expects exactly one CLI argument: the path to the local dbt package.
    Any existing symlink or directory at ELE_DBT_PKG_PATH is removed
    before the new symlink is created.
    """
    args = sys.argv[1:]
    if len(args) != 1:
        raise ValueError('Please provide the path to the local elementary dbt package as an argument.')
    target = Path(args[0]).resolve()
    # Clear whatever currently occupies the package path.
    if ELE_DBT_PKG_PATH.is_symlink():
        ELE_DBT_PKG_PATH.unlink()
    if ELE_DBT_PKG_PATH.is_dir():
        shutil.rmtree(ELE_DBT_PKG_PATH)
    ELE_DBT_PKG_PATH.symlink_to(target)
def output(keys, image, display_only=True):
    """Emit the detected key chord: print it, then either send it as a
    real key press or overlay it on `image`.

    keys: iterable of key-name strings, joined with '+' into one chord.
    image: frame to draw on when display_only is True.
    display_only: when False, actually presses the chord via `keyboard`.
    """
    keystring = '+'.join(keys)
    # Idiomatic truthiness check instead of len(keystring).
    if keystring:
        print('keys:', keystring)
        if not display_only:
            keyboard.press_and_release(keystring)
        else:
            to_display = get_key_text(keys)
            # Draw in red at the module-level frame midpoint.
            cv2.putText(image, to_display, frame_midpoint, cv2.FONT_HERSHEY_SIMPLEX, 10, (0, 0, 255), 10)
class BaseBlockCrossValidator(BaseCrossValidator, metaclass=ABCMeta):
    """Base class for spatial block cross-validators.

    Blocks are defined either by their size (`spacing`) or by the grid
    dimensions (`shape`); exactly at least one must be given.
    """

    def __init__(self, spacing=None, shape=None, n_splits=10):
        # One of the two block definitions is mandatory.
        if ((spacing is None) and (shape is None)):
            raise ValueError("Either 'spacing' or 'shape' must be provided.")
        self.spacing = spacing
        self.shape = shape
        self.n_splits = n_splits

    def split(self, X, y=None, groups=None):
        """Yield (train, test) index arrays.

        X must be an (n_samples, 2) array of coordinates.
        """
        if (X.shape[1] != 2):
            raise ValueError('X must have exactly 2 columns ({} given).'.format(X.shape[1]))
        for (train, test) in super().split(X, y, groups):
            (yield (train, test))

    def get_n_splits(self, X=None, y=None, groups=None):
        # Number of folds is fixed at construction time.
        return self.n_splits

    def _iter_test_indices(self, X=None, y=None, groups=None):
        """Yield test-set indices for each fold (subclass hook).

        NOTE(review): the body of this method appears truncated in this
        view; the original presumably raises NotImplementedError or is
        marked @abstractmethod. Confirm against the original file.
        """
class ConnectionContext():
    """Sync/async context manager that opens a DB connection on entry and
    closes it on exit.

    with_transaction / reuse_if_open are forwarded to the db's
    connection_open(_loop) call.
    """

    __slots__ = ['db', 'conn', 'with_transaction', 'reuse_if_open']

    def __init__(self, db, with_transaction=True, reuse_if_open=True):
        self.db = db
        self.conn = None
        self.with_transaction = with_transaction
        self.reuse_if_open = reuse_if_open

    def __enter__(self):
        self.conn = self.db.connection_open(with_transaction=self.with_transaction, reuse_if_open=self.reuse_if_open)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Only close when an open connection is held.
        if not self.conn:
            return
        self.db.connection_close()
        self.conn = None

    async def __aenter__(self):
        self.conn = await self.db.connection_open_loop(with_transaction=self.with_transaction, reuse_if_open=self.reuse_if_open)
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if not self.conn:
            return
        await self.db.connection_close_loop()
        self.conn = None
def test_data_quality_test_share_of_values_not_in_range() -> None:
    """TestShareOfOutRangeValues with an explicit range, then with the
    range inferred from a reference dataset."""
    current = pd.DataFrame({'feature1': [0, 1, 1, 2, 3, 4, 15], 'target': [0, 0, 2, 3, 4, 5, 1]})
    mapping = ColumnMapping(numerical_features=['feature1'])

    # 1/7 of the values fall outside [0, 10]; lt=0.1 must fail...
    strict_suite = TestSuite(tests=[TestShareOfOutRangeValues(column_name='feature1', left=0, right=10, lt=0.1)])
    strict_suite.run(current_data=current, reference_data=None, column_mapping=mapping)
    assert not strict_suite

    # ...while lt=0.5 must pass.
    loose_suite = TestSuite(tests=[TestShareOfOutRangeValues(column_name='feature1', left=0, right=10, lt=0.5)])
    loose_suite.run(current_data=current, reference_data=None, column_mapping=mapping)
    assert loose_suite

    # Same checks with the range derived from reference data.
    reference = pd.DataFrame({'feature1': [0, 1, 1, 3, 4, 5, 6, 7], 'target': [0, 0, 0, 1, 0, 0, 1, 1], 'prediction': [0, 0, 1, 1, 0, 0, 1, 1]})

    inferred_strict = TestSuite(tests=[TestShareOfOutRangeValues(column_name='feature1', lt=0.1)])
    inferred_strict.run(current_data=current, reference_data=reference, column_mapping=ColumnMapping(prediction=None, numerical_features=['feature1']))
    assert not inferred_strict

    inferred_loose = TestSuite(tests=[TestShareOfOutRangeValues(column_name='feature1', lte=0.5)])
    inferred_loose.run(current_data=current, reference_data=reference, column_mapping=ColumnMapping(prediction=None, numerical_features=['feature1']))
    assert inferred_loose
    # Rendering and JSON export should both succeed.
    assert inferred_loose.show()
    assert inferred_loose.json()
def numhess(geom, step_size=0.0001):
    """Numerical Hessian of `geom` via central finite differences of the
    forces.

    geom: object with a settable ``coords`` array and a ``forces``
        attribute (forces = -gradient, hence H = -dF/dx).
    step_size: displacement applied per coordinate.

    Returns the symmetrized (N, N) Hessian.

    Fix: the geometry's original coordinates are now restored afterwards
    (previously the last displaced geometry was left in place).
    """
    coords = geom.coords
    cnum = len(coords)
    rows = []
    try:
        for i in range(cnum):
            print(f'Step {(i + 1)}/{cnum}')
            step = np.zeros_like(coords)
            step[i] += step_size
            # Forces at +step and -step along coordinate i.
            geom.coords = coords + step
            pl_forces = geom.forces
            geom.coords = coords - step
            min_forces = geom.forces
            # Central difference; the minus sign converts dF/dx to d2E/dx2.
            rows.append(-(pl_forces - min_forces) / (2 * step_size))
    finally:
        # Restore the undisplaced geometry (bug fix).
        geom.coords = coords
    H = np.array(rows)
    # Symmetrize to damp numerical noise.
    return (H + H.T) / 2
class OptionSeriesArcdiagramSonificationDefaultspeechoptionsMappingVolume(Options):
    # Generated Highcharts wrapper for
    # series.arcdiagram.sonification.defaultSpeechOptions.mapping.volume.
    # NOTE(review): each name is defined twice (getter then setter); the
    # @property / @<name>.setter decorators were presumably stripped from
    # this view -- as written the setter def shadows the getter. Confirm
    # against the generated original.

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): the leading '@pytest.mark' of this parametrize decorator
# appears truncated at the chunk boundary; the fragment below is its tail.
.parametrize('wave, J, mode', [('db1', 1, 'zero'), ('db1', 3, 'zero'), ('db3', 1, 'symmetric'), ('db3', 2, 'reflect'), ('db2', 3, 'periodization'), ('db2', 3, 'periodic'), ('db4', 2, 'zero'), ('db3', 3, 'symmetric'), ('bior2.4', 2, 'periodization'), ('bior2.4', 2, 'periodization')])
def test_ok(wave, J, mode):
    # Forward + inverse 1-D DWT round trip; every output tensor must be
    # contiguous in memory.
    x = torch.randn(5, 4, 64).to(dev)
    dwt = DWT1DForward(J=J, wave=wave, mode=mode).to(dev)
    iwt = DWT1DInverse(wave=wave, mode=mode).to(dev)
    (yl, yh) = dwt(x)
    x2 = iwt((yl, yh))
    assert yl.is_contiguous()
    for j in range(J):
        assert yh[j].is_contiguous()
    assert x2.is_contiguous()
def test_set_auth_params():
    """After set_auth_params(), every API function carries the shared
    credentials."""
    set_auth_params(**auth_params)
    expected = {
        'app_key': app_key,
        'app_secret': app_secret,
        'oauth_token': oauth_token,
        'oauth_token_secret': oauth_token_secret,
    }
    for func in FUNCTIONS:
        params = func.get_auth_params()
        for (name, value) in expected.items():
            assert params[name] == value
def move(x, y, update_cursor=True, mutex=True):
    """Position the terminal cursor at column x, row y via an ANSI escape.

    update_cursor: also record the position in the module-level
        cursor_x / cursor_y globals.
    mutex: guard the write with the global print_lock.
    """
    global cursor_x, cursor_y
    global print_lock
    if mutex:
        print_lock.acquire()
    # CSI row;col H -- note the (y, x) order in the escape sequence.
    sys.stdout.write('\x1b[%d;%dH' % (y, x))
    sys.stdout.flush()
    if update_cursor:
        cursor_x, cursor_y = x, y
    if mutex:
        print_lock.release()
class OptionSeriesVectorDatalabels(Options):
    # Generated Highcharts wrapper for series.vector.dataLabels.
    # NOTE(review): each option name is defined twice (getter then setter);
    # the @property / @<name>.setter decorators were presumably stripped
    # from this view -- as plain defs each setter shadows its getter.
    # Confirm against the generated original. The _config_get() argument
    # is the Highcharts default for that option.

    def align(self):
        return self._config_get('center')

    def align(self, text: str):
        self._config(text, js_type=False)

    def allowOverlap(self):
        return self._config_get(False)

    def allowOverlap(self, flag: bool):
        self._config(flag, js_type=False)

    def animation(self) -> 'OptionSeriesVectorDatalabelsAnimation':
        # Lazily-built sub-option object.
        return self._config_sub_data('animation', OptionSeriesVectorDatalabelsAnimation)

    def backgroundColor(self):
        return self._config_get(None)

    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    def borderColor(self):
        return self._config_get(None)

    def borderColor(self, text: str):
        self._config(text, js_type=False)

    def borderRadius(self):
        return self._config_get(0)

    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    def borderWidth(self):
        return self._config_get(0)

    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    def className(self):
        return self._config_get(None)

    def className(self, text: str):
        self._config(text, js_type=False)

    def color(self):
        return self._config_get(None)

    def color(self, text: str):
        self._config(text, js_type=False)

    def crop(self):
        return self._config_get(True)

    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    def defer(self):
        return self._config_get(True)

    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    def enabled(self):
        return self._config_get(False)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def filter(self) -> 'OptionSeriesVectorDatalabelsFilter':
        return self._config_sub_data('filter', OptionSeriesVectorDatalabelsFilter)

    def format(self):
        return self._config_get('point.value')

    def format(self, text: str):
        self._config(text, js_type=False)

    def formatter(self):
        return self._config_get(None)

    def formatter(self, value: Any):
        self._config(value, js_type=False)

    def inside(self):
        return self._config_get(None)

    def inside(self, flag: bool):
        self._config(flag, js_type=False)

    def nullFormat(self):
        return self._config_get(None)

    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    def nullFormatter(self):
        return self._config_get(None)

    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    def overflow(self):
        return self._config_get('justify')

    def overflow(self, text: str):
        self._config(text, js_type=False)

    def padding(self):
        return self._config_get(5)

    def padding(self, num: float):
        self._config(num, js_type=False)

    def position(self):
        return self._config_get('center')

    def position(self, text: str):
        self._config(text, js_type=False)

    def rotation(self):
        return self._config_get(0)

    def rotation(self, num: float):
        self._config(num, js_type=False)

    def shadow(self):
        return self._config_get(False)

    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    def shape(self):
        return self._config_get('square')

    def shape(self, text: str):
        self._config(text, js_type=False)

    def style(self):
        return self._config_get(None)

    def style(self, value: Any):
        self._config(value, js_type=False)

    def textPath(self) -> 'OptionSeriesVectorDatalabelsTextpath':
        return self._config_sub_data('textPath', OptionSeriesVectorDatalabelsTextpath)

    def useHTML(self):
        return self._config_get(False)

    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    def verticalAlign(self):
        return self._config_get('bottom')

    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    def x(self):
        return self._config_get(0)

    def x(self, num: float):
        self._config(num, js_type=False)

    def y(self):
        return self._config_get(0)

    def y(self, num: float):
        self._config(num, js_type=False)

    def zIndex(self):
        return self._config_get(6)

    def zIndex(self, num: float):
        self._config(num, js_type=False)
class LZWDecode(PDFFilter):
    """PDF LZWDecode filter: LZW (de)compression plus predictor handling."""

    default = {'Predictor': 1, 'Columns': 1, 'Colors': 1, 'BitsPerComponent': 8, 'EarlyChange': 1}
    name = 'LZW'

    def __init__(self, params=None):
        # NOTE(review): `params` is accepted but ignored -- the filter is
        # always initialized with its defaults. Confirm whether caller
        # parameters should be merged in.
        PDFFilter.__init__(self, self.default)

    def decode(self, data):
        """LZW-decompress `data`, then undo the predictor."""
        params = self.getParams()
        # Only the default EarlyChange behaviour is implemented.
        assert (params['EarlyChange'] == 1)
        data = lzw.decompress(data)
        # Bug fix: this previously referenced an undefined name `p`,
        # raising NameError; the filter parameters come from getParams().
        data = Predictor(params['Predictor'], params['Columns'], params['BitsPerComponent']).decode(data)
        return data

    def encode(self, data):
        """LZW-compress `data`; only the identity predictor is supported."""
        params = self.getParams()
        assert (params['EarlyChange'] == 1)
        assert (params['Predictor'] == 1)
        return ''.join(lzw.compress(data))
class SameMappingDifferentNamespaces(AmbassadorTest):
    # Verifies that two Mappings sharing the SAME name in DIFFERENT
    # namespaces both take effect (no cross-namespace name collision).

    target: ServiceType  # backend service both Mappings route to

    def init(self):
        self.target = HTTP()

    def manifests(self) -> str:
        # Two namespaces, each holding an identically-named Mapping that
        # routes a distinct prefix to the same backend.
        return (((namespace_manifest('same-mapping-1') + namespace_manifest('same-mapping-2')) + self.format('\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nmetadata:\n name: {self.target.path.k8s}\n namespace: same-mapping-1\nspec:\n ambassador_id: [{self.ambassador_id}]\n hostname: "*"\n prefix: /{self.name}-1/\n service: {self.target.path.fqdn}\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nmetadata:\n name: {self.target.path.k8s}\n namespace: same-mapping-2\nspec:\n ambassador_id: [{self.ambassador_id}]\n hostname: "*"\n prefix: /{self.name}-2/\n service: {self.target.path.fqdn}\n')) + super().manifests())

    def queries(self):
        # Both prefixes must route successfully.
        (yield Query(self.url((self.name + '-1/'))))
        (yield Query(self.url((self.name + '-2/'))))
def extractNoobchanXyz(item):
    """Map a noobchan.xyz feed item to a release message.

    Returns None for items without chapter/volume info or preview posts,
    a built release message for recognized tags, and False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if not (chp or vol) or 'preview' in title_lower:
        return None
    # tag -> (series name, translation type)
    releases = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in releases.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def get_extended_data(polynomial):
    """Zero-pad `polynomial` to twice its (power-of-two) length, evaluate
    it via FFT over the extended domain, and return the evaluations in
    reverse-bit order."""
    assert is_power_of_two(len(polynomial))
    padded = polynomial + [0] * len(polynomial)
    omega = get_root_of_unity(len(padded))
    evaluations = fft(padded, MODULUS, omega, False)
    return list_to_reverse_bit_order(evaluations)
class Node():
    """Base class for computation-graph nodes.

    A node holds a list of predecessor nodes; subclasses implement
    forward()/follow() to compute values from executed predecessors.
    """

    def __init__(self, predecessors):
        assert (type(predecessors) is list), 'Predecessors must be a list, not {}'.format(type(predecessors))
        self.predecessors = predecessors
        # Subclasses that read the execution context flip this to True.
        self.depends_on_context = False

    def execute_predecessors(self, trace, context):
        """Execute every predecessor and return their values in order."""
        return [execute_op(p, trace=trace, context=context) for p in self.predecessors]

    def forward(self, *args, **kwargs):
        # Bug fix: `type(self) + '...'` raised TypeError (type + str);
        # format the message instead so the intended NotImplementedError
        # is actually raised.
        raise NotImplementedError('{} does not implement forward()'.format(type(self)))

    def follow(self, *args, **kwargs):
        # Same fix as forward().
        raise NotImplementedError('{} does not implement follow()'.format(type(self)))

    def final(self, args, **kwargs):
        """Return 'fin' only when every input tag is final, else 'var'."""
        if all(a == 'fin' for a in args):
            return 'fin'
        return 'var'

    def __nodelabel__(self):
        return str(type(self))

    def token_hint(self):
        # No token hints by default.
        return {}

    def postprocess_var(self, var_name):
        return False

    def postprocess(self, operands, value):
        pass

    def postprocess_order(self, other, **kwargs):
        # Default: no ordering preference relative to `other`.
        return 0
def test_functional(mesh1, mesh2):
    """Integrating a unit constant yields the reference cell volume on
    mesh1 and the refinement-scaled volume on mesh2."""
    one = Constant(1)
    # Volume of mesh1's reference cell; mesh2 cells are scaled by
    # 0.5 per topological dimension.
    volume = mesh1.coordinates.function_space().finat_element.cell.volume()
    scale = 0.5 ** mesh1.topological_dimension()

    assert np.allclose(assemble(one * dx(domain=mesh1)), volume)
    assert np.allclose(assemble(one * dx(domain=mesh2)), volume * scale)
    combined = assemble((one * dx(domain=mesh1)) + (one * dx(domain=mesh2)))
    assert np.allclose(combined, volume * (1 + scale))
class OptionSeriesVectorSonificationTracksPointgrouping(Options):
    # Generated Highcharts wrapper for
    # series.vector.sonification.tracks.pointGrouping.
    # NOTE(review): each name is defined twice (getter then setter); the
    # @property / @<name>.setter decorators were presumably stripped from
    # this view. The _config_get() argument is the Highcharts default.

    def algorithm(self):
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
class WafExclusionsResponseAllOf(ModelNormal):
    # Auto-generated OpenAPI model (fastly-py style).
    # NOTE(review): the bare `_property` / `_js_args_to_python_args` names
    # below look like decorators whose '@' prefix (and original names,
    # e.g. @cached_property / @convert_js_args_to_python_args) were
    # mangled in this view; as written they are expression statements
    # that would raise NameError when the class body executes. Confirm
    # against the generated original.

    allowed_values = {}
    validations = {}

    _property
    def additional_properties_type():
        # Any OpenAPI primitive/composite may appear as an extra property.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property
    def openapi_types():
        # attribute name -> (accepted types,)
        lazy_import()
        return {'data': ([WafExclusionResponseData],), 'included': (IncludedWithWafExclusion,)}

    _property
    def discriminator():
        return None

    attribute_map = {'data': 'data', 'included': 'included'}
    read_only_vars = {}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        # Build an instance from raw API data; keyword arguments only.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when configuration says so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        # Keyword-only constructor; rejects positional args and writes to
        # read-only attributes.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): this message string was split across chunk
            # lines in this view; rejoined here as a single literal.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class ReplayBuffer():
    # Fixed-capacity ring buffer of flattened transitions, stored as one
    # torch tensor per trajectory key.

    def __init__(self, N):
        # N: maximum number of stored transitions.
        self.N = N
        self.buffer = None       # dict key -> tensor [N, ...], lazily built
        self.info_buffer = None  # NOTE(review): never used in this view

    def _init_buffer(self, trajectories):
        # Allocate one [N, *feature_shape] tensor per trajectory key,
        # dropping the leading (batch, time) dims of the incoming data.
        self.buffer = {}
        for k in trajectories.keys():
            dtype = trajectories[k].dtype
            size = trajectories[k].size()
            b_size = ((self.N,) + size[2:])
            self.buffer[k] = torch.zeros(*b_size, dtype=dtype)
        self.pos = 0
        self.full = False

    def write(self, trajectories):
        # Flatten (batch, time) into one axis and write with wrap-around.
        # Assumes n <= N so indices wrap at most once.
        rs = {}  # NOTE(review): unused
        new_pos = None
        for k in trajectories.keys():
            v = trajectories[k]
            size = v.size()
            b_size = (((size[0] * size[1]),) + size[2:])
            v = v.reshape(*b_size)
            n = v.size()[0]
            overhead = (self.N - (self.pos + n))  # NOTE(review): unused
            if (new_pos is None):
                # Destination indices modulo N -- computed once; every key
                # writes the same n rows.
                new_pos = (torch.arange(n) + self.pos)
                mask = new_pos.ge(self.N).float()
                nidx = ((torch.arange(n) + self.pos) - self.N)
                new_pos = ((new_pos * (1 - mask)) + (mask * nidx)).long()
            self.buffer[k][new_pos] = v
        # Advance the write head, wrapping when the end is passed.
        self.pos = (self.pos + n)
        if (self.pos >= self.N):
            self.pos = (self.pos - self.N)
            self.full = True
        assert (self.pos < self.N)

    def size(self):
        # Number of valid transitions currently stored.
        if self.full:
            return self.N
        else:
            return self.pos

    def push(self, trajectories):
        # All trajectories must share the same (max) length before writing.
        trajectories = trajectories.trajectories
        max_length = trajectories.lengths.max().item()
        assert trajectories.lengths.eq(max_length).all()
        if (self.buffer is None):
            self._init_buffer(trajectories)
        self.write(trajectories)

    def sample(self, n=1):
        # Uniformly sample n stored transitions (with replacement).
        limit = self.pos
        if self.full:
            limit = self.N
        transitions = torch.randint(0, high=limit, size=(n,))
        d = {k: self.buffer[k][transitions] for k in self.buffer}
        return DictTensor(d)
def get_words_in_list(list_id, limit=None, last_word_token=None, audio_file_key_check=False):
    """Fetch and format the words belonging to a word list.

    Returns the formatted word list, or a dict with an 'error_message'
    key when the DynamoDB query fails.
    """
    try:
        query_response = query_dynamodb(list_id, limit=limit, last_word_token=last_word_token, audio_file_key_check=audio_file_key_check)
    except Exception as e:
        # Log and return a structured error instead of raising.
        # (Typo 'DyanmoDB' fixed; no-op f-prefix removed.)
        print('Error: DynamoDB query for word list failed.')
        print(e)
        # Bug fix: the f-prefix was on the dict KEY, not the value, so
        # {e} was never interpolated into the message.
        return {'error_message': f'Failed to query DynamoDB. Error: {e}'}
    word_list = format_word_list(query_response)
    return word_list
class LoggingCommonResponseAllOf(ModelNormal):
    # Auto-generated OpenAPI model (fastly-py style).
    # NOTE(review): the bare `_property` / `_js_args_to_python_args` names
    # below look like decorators whose '@' prefix (and original names,
    # e.g. @cached_property / @convert_js_args_to_python_args) were
    # mangled in this view; as written they are expression statements
    # that would raise NameError when the class body executes. Confirm
    # against the generated original.

    # Enumerated values accepted for the 'placement' attribute.
    allowed_values = {('placement',): {'None': None, 'NONE': 'none', 'WAF_DEBUG': 'waf_debug', 'NULL': 'null'}}
    validations = {}

    _property
    def additional_properties_type():
        # Any OpenAPI primitive/composite may appear as an extra property.
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property
    def openapi_types():
        # attribute name -> (accepted types,)
        return {'name': (str,), 'placement': (str, none_type), 'response_condition': (str, none_type), 'format': (str,)}

    _property
    def discriminator():
        return None

    attribute_map = {'name': 'name', 'placement': 'placement', 'response_condition': 'response_condition', 'format': 'format'}
    read_only_vars = {}
    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        # Build an instance from raw API data; keyword arguments only.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys when configuration says so.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        # Keyword-only constructor; rejects positional args and writes to
        # read-only attributes.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # NOTE(review): this message string was split across chunk
            # lines in this view; rejoined here as a single literal.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class MyClass():
    # Demonstrates transpiled blocks: when ts.is_transpiled, precompiled
    # blocks ('block0'/'block1') replace the pure-Python loops below.

    def __init__(self, a, b):
        # a, b: numpy arrays; shapes must broadcast in compute().
        self.a = a
        self.b = b

    def compute(self, n):
        """Run two chained accumulation loops (or their transpiled
        equivalents) and return the final array.

        Each loop adds a**2 + b**3 into a zero accumulator n times; the
        first loop's result becomes `a` for the second loop.
        NOTE(review): the Python fallback and the named ts blocks are
        assumed equivalent -- confirm the block definitions match; the
        use_block markers are matched by the transpiler, so do not rename.
        """
        a = self.a
        b = self.b
        if ts.is_transpiled:
            result = ts.use_block('block0')
        else:
            result = np.zeros_like(a)
            for _ in range(n):
                result += ((a ** 2) + (b ** 3))
        a = result
        if ts.is_transpiled:
            result = ts.use_block('block1')
        else:
            result = np.zeros_like(a)
            for _ in range(n):
                result += ((a ** 2) + (b ** 3))
        return result
class TestDeleteFailsWhenDirectoryCannotBeDeleted():
    # End-to-end CLI test: `aea delete` must abort cleanly when the agent
    # directory cannot be removed (shutil.rmtree raising OSError).

    def setup_class(cls):
        # NOTE(review): no visible @classmethod decorator in this view;
        # pytest invokes setup_class/teardown_class on the class anyway.
        cls.runner = CliRunner()
        cls.agent_name = 'myagent'
        cls.cwd = os.getcwd()
        cls.t = tempfile.mkdtemp()
        # Copy the repo's `packages` dir into the temp workspace so the
        # CLI can resolve local packages.
        dir_path = Path('packages')
        tmp_dir = (cls.t / dir_path)
        src_dir = (cls.cwd / Path(ROOT_DIR, dir_path))
        shutil.copytree(str(src_dir), str(tmp_dir))
        os.chdir(cls.t)
        result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'init', '--author', AUTHOR])
        assert (result.exit_code == 0)
        result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'create', '--local', cls.agent_name], standalone_mode=False)
        assert (result.exit_code == 0)
        # Force the deletion itself to fail.
        with unittest.mock.patch.object(shutil, 'rmtree', side_effect=OSError):
            cls.result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, 'delete', cls.agent_name], standalone_mode=False)

    def test_exit_code_equal_to_1(self):
        # Deletion failure surfaces as exit code 1.
        assert (self.result.exit_code == 1)

    def test_log_error_message(self):
        # The CLI must report the directory-deletion failure verbatim.
        s = 'An error occurred while deleting the agent directory. Aborting...'
        assert (self.result.exception.message == s)

    def teardown_class(cls):
        # Restore cwd and best-effort cleanup of the temp workspace.
        os.chdir(cls.cwd)
        try:
            shutil.rmtree(cls.t)
        except (OSError, IOError):
            pass
def get_submission_ids_for_periods(fiscal_year: int, fiscal_quarter: Optional[int], fiscal_month: Optional[int]) -> List[int]:
    """Return submission_ids of each toptier agency's latest submission
    matching the given fiscal year and quarter/month.

    Quarterly submitters are matched on fiscal_quarter, monthly
    submitters on fiscal_month.
    """
    sql = f''' select submission_id from submission_attributes where (toptier_code, reporting_fiscal_year, reporting_fiscal_period) in ( select distinct on (toptier_code) toptier_code, reporting_fiscal_year, reporting_fiscal_period from submission_attributes where reporting_fiscal_year = %(fiscal_year)s and ( (reporting_fiscal_quarter <= %(fiscal_quarter)s and quarter_format_flag is true) or (reporting_fiscal_period <= %(fiscal_month)s and quarter_format_flag is false) ) order by toptier_code, reporting_fiscal_period desc ) and ( (reporting_fiscal_quarter = %(fiscal_quarter)s and quarter_format_flag is true) or (reporting_fiscal_period = %(fiscal_month)s and quarter_format_flag is false) ) '''
    # -1 never matches a real period, so a missing quarter/month filter
    # simply selects nothing on that branch.
    bind_params = {
        'fiscal_year': fiscal_year,
        'fiscal_quarter': (fiscal_quarter or -1),
        'fiscal_month': (fiscal_month or -1),
    }
    with connection.cursor() as cursor:
        cursor.execute(sql, bind_params)
        rows = cursor.fetchall()
    return [row[0] for row in rows]
class FrontendTests(unittest.TestCase):
    # LOXI frontend tests: parse OpenFlow IDL text into an AST, then lower
    # the AST into OFInput classes/enums and compare against expectations.

    maxDiff = None  # always show full diffs on assertion failure

    def test_simple(self):
        # Two versioned sections: an enum + a struct with typed/length
        # members, a parameterized enum, and a struct with padding/list.
        ast = parser.parse('\n#version 1\n\nenum ofp_port_config {\n OFPPC_PORT_DOWN = 0x1,\n OFPPC_NO_STP = 0x2,\n OFPPC_NO_RECV = 0x4,\n OFPPC_NO_RECV_STP = 0x8,\n OFPPC_NO_FLOOD = 0x10,\n OFPPC_NO_FWD = 0x20,\n OFPPC_NO_PACKET_IN = 0x40,\n};\n\n#version 2\n\nstruct of_echo_reply(align=8) {\n uint8_t version;\n uint8_t type == 3;\n uint16_t length;\n uint32_t xid;\n of_octets_t data;\n};\n\nenum ofp_queue_op_failed_code(wire_type=uint32, bitmask=False, complete=True) {\n OFPQOFC_BAD_PORT = 0,\n OFPQOFC_BAD_QUEUE = 1,\n OFPQOFC_EPERM = 2,\n};\n\nstruct of_packet_queue {\n uint32_t queue_id;\n uint16_t len;\n pad(2);\n list(of_queue_prop_t) properties;\n};\n')
        expected_ast = [['metadata', 'version', '1'], ['enum', 'ofp_port_config', [], [['OFPPC_PORT_DOWN', [], 1], ['OFPPC_NO_STP', [], 2], ['OFPPC_NO_RECV', [], 4], ['OFPPC_NO_RECV_STP', [], 8], ['OFPPC_NO_FLOOD', [], 16], ['OFPPC_NO_FWD', [], 32], ['OFPPC_NO_PACKET_IN', [], 64]]], ['metadata', 'version', '2'], ['struct', 'of_echo_reply', [['align', '8']], None, [['data', ['scalar', 'uint8_t'], 'version'], ['type', ['scalar', 'uint8_t'], 'type', 3], ['data', ['scalar', 'uint16_t'], 'length'], ['data', ['scalar', 'uint32_t'], 'xid'], ['data', ['scalar', 'of_octets_t'], 'data']]], ['enum', 'ofp_queue_op_failed_code', [['wire_type', 'uint32'], ['bitmask', 'False'], ['complete', 'True']], [['OFPQOFC_BAD_PORT', [], 0], ['OFPQOFC_BAD_QUEUE', [], 1], ['OFPQOFC_EPERM', [], 2]]], ['struct', 'of_packet_queue', [], None, [['data', ['scalar', 'uint32_t'], 'queue_id'], ['data', ['scalar', 'uint16_t'], 'len'], ['pad', 2], ['data', ['list', 'list(of_queue_prop_t)'], 'properties']]]]
        self.assertEqual(expected_ast, ast)
        # Lower the AST; both #version sections must be reflected.
        ofinput = frontend.create_ofinput('standard-1.0', ast)
        self.assertEqual(set([1, 2]), ofinput.wire_versions)
        expected_classes = [OFClass(name='of_echo_reply', superclass=None, members=[OFDataMember('version', 'uint8_t'), OFTypeMember('type', 'uint8_t', 3), OFLengthMember('length', 'uint16_t'), OFDataMember('xid', 'uint32_t'), OFDataMember('data', 'of_octets_t')], virtual=False, params={'align': '8'}), OFClass(name='of_packet_queue', superclass=None, members=[OFDataMember('queue_id', 'uint32_t'), OFLengthMember('len', 'uint16_t'), OFPadMember(2), OFDataMember('properties', 'list(of_queue_prop_t)')], virtual=False, params={})]
        self.assertEqual(expected_classes, ofinput.classes)
        expected_enums = [OFEnum(name='ofp_port_config', entries=[OFEnumEntry('OFPPC_PORT_DOWN', 1, {}), OFEnumEntry('OFPPC_NO_STP', 2, {}), OFEnumEntry('OFPPC_NO_RECV', 4, {}), OFEnumEntry('OFPPC_NO_RECV_STP', 8, {}), OFEnumEntry('OFPPC_NO_FLOOD', 16, {}), OFEnumEntry('OFPPC_NO_FWD', 32, {}), OFEnumEntry('OFPPC_NO_PACKET_IN', 64, {})], params={}), OFEnum(name='ofp_queue_op_failed_code', entries=[OFEnumEntry('OFPQOFC_BAD_PORT', 0, {}), OFEnumEntry('OFPQOFC_BAD_QUEUE', 1, {}), OFEnumEntry('OFPQOFC_EPERM', 2, {})], params={'wire_type': 'uint32', 'bitmask': 'False', 'complete': 'True'})]
        self.assertEqual(expected_enums, ofinput.enums)

    def test_inheritance(self):
        # A virtual base struct with a discriminator ('== ?') and a
        # concrete subclass pinning the discriminator value.
        ast = parser.parse('\n#version 1\n\nstruct of_queue_prop {\n uint16_t type == ?;\n uint16_t len;\n pad(4);\n};\n\nstruct of_queue_prop_min_rate : of_queue_prop {\n uint16_t type == 1;\n uint16_t len;\n pad(4);\n uint16_t rate;\n pad(6);\n};\n')
        expected_ast = [['metadata', 'version', '1'], ['struct', 'of_queue_prop', [], None, [['discriminator', ['scalar', 'uint16_t'], 'type'], ['data', ['scalar', 'uint16_t'], 'len'], ['pad', 4]]], ['struct', 'of_queue_prop_min_rate', [], 'of_queue_prop', [['type', ['scalar', 'uint16_t'], 'type', 1], ['data', ['scalar', 'uint16_t'], 'len'], ['pad', 4], ['data', ['scalar', 'uint16_t'], 'rate'], ['pad', 6]]]]
        self.assertEqual(expected_ast, ast)
        ofinput = frontend.create_ofinput('standard-1.0', ast)
        expected_classes = [OFClass(name='of_queue_prop', superclass=None, members=[OFDiscriminatorMember('type', 'uint16_t'), OFLengthMember('len', 'uint16_t'), OFPadMember(4)], virtual=True, params={}), OFClass(name='of_queue_prop_min_rate', superclass='of_queue_prop', members=[OFTypeMember('type', 'uint16_t', 1), OFLengthMember('len', 'uint16_t'), OFPadMember(4), OFDataMember('rate', 'uint16_t'), OFPadMember(6)], virtual=False, params={})]
        self.assertEqual(expected_classes, ofinput.classes)

    def test_field_length(self):
        # A field whose value is the length of a sibling list member.
        ast = parser.parse('\n#version 1\n\nstruct of_test_entry {\n uint32_t x;\n};\n\nstruct of_test {\n uint16_t list_len == length(list);\n list(of_test_entry_t) list;\n};\n')
        expected_ast = [['metadata', 'version', '1'], ['struct', 'of_test_entry', [], None, [['data', ['scalar', 'uint32_t'], 'x']]], ['struct', 'of_test', [], None, [['field_length', ['scalar', 'uint16_t'], 'list_len', 'list'], ['data', ['list', 'list(of_test_entry_t)'], 'list']]]]
        self.assertEqual(expected_ast, ast)
        ofinput = frontend.create_ofinput('standard-1.0', ast)
        expected_classes = [OFClass(name='of_test_entry', superclass=None, virtual=False, params={}, members=[OFDataMember('x', 'uint32_t')]), OFClass(name='of_test', superclass=None, virtual=False, params={}, members=[OFFieldLengthMember('list_len', 'uint16_t', 'list'), OFDataMember('list', 'list(of_test_entry_t)')])]
        self.assertEqual(expected_classes, ofinput.classes)
class TBBasicTurnHandler(DefaultScript):
    """
    Script driving turn-based combat in a room.

    Created on the room (``self.obj``); collects every object in the room
    with an ``hp`` attribute as a fighter, rolls initiative to fix turn
    order, then ticks every ``interval`` seconds to enforce a turn timeout.
    """

    # Rule module providing roll_init / combat_cleanup / spend_action.
    rules = COMBAT_RULES

    def at_script_creation(self):
        """Set up the fight: gather fighters, roll initiative, start turn 1."""
        self.key = 'Combat Turn Handler'
        self.interval = 5
        self.persistent = True
        self.db.fighters = []
        # Any object in the room with a truthy hp attribute joins the fight.
        for thing in self.obj.contents:
            if thing.db.hp:
                self.db.fighters.append(thing)
        for fighter in self.db.fighters:
            self.initialize_for_combat(fighter)
        # Let the room point back at this handler so commands can find it.
        self.obj.db.combat_turnhandler = self
        # Highest initiative roll acts first.
        ordered_by_roll = sorted(self.db.fighters, key=self.rules.roll_init, reverse=True)
        self.db.fighters = ordered_by_roll
        self.obj.msg_contents(('Turn order is: %s ' % ', '.join((obj.key for obj in self.db.fighters))))
        self.start_turn(self.db.fighters[0])
        self.db.turn = 0
        self.db.timer = TURN_TIMEOUT

    def at_stop(self):
        """Clean combat state off every fighter when the script stops."""
        for fighter in self.db.fighters:
            if fighter:  # fighter object may have been deleted mid-fight
                self.rules.combat_cleanup(fighter)
        self.obj.db.combat_turnhandler = None

    def at_repeat(self):
        """Tick (every self.interval seconds): count down the turn timer."""
        currentchar = self.db.fighters[self.db.turn]
        self.db.timer -= self.interval
        if (self.db.timer <= 0):
            # Out of time: force a disengage, which consumes the turn.
            self.obj.msg_contents(("%s's turn timed out!" % currentchar))
            self.rules.spend_action(currentchar, 'all', action_name='disengage')
            return
        elif ((self.db.timer <= 10) and (not self.db.timeout_warning_given)):
            # One-shot warning shortly before the timeout hits.
            currentchar.msg('WARNING: About to time out!')
            self.db.timeout_warning_given = True

    def initialize_for_combat(self, character):
        """Reset a character's combat attributes for a fresh fight."""
        self.rules.combat_cleanup(character)  # clear leftovers from earlier fights
        character.db.combat_actionsleft = 0
        character.db.combat_turnhandler = self
        character.db.combat_lastaction = 'null'

    def start_turn(self, character):
        """Grant the character their per-turn actions and prompt them."""
        character.db.combat_actionsleft = ACTIONS_PER_TURN
        character.msg(("|wIt's your turn! You have %i HP remaining.|n" % character.db.hp))

    def next_turn(self):
        """Advance to the next fighter, ending combat if it is resolved."""
        # Combat ends when every fighter's last action was a disengage.
        disengage_check = True
        for fighter in self.db.fighters:
            if (fighter.db.combat_lastaction != 'disengage'):
                disengage_check = False
        if disengage_check:
            self.obj.msg_contents('All fighters have disengaged! Combat is over!')
            self.stop()
            self.delete()
            return
        # Combat also ends when only one fighter is left standing.
        # NOTE(review): this reads fighter.db.HP while the rest of the class
        # uses db.hp -- Evennia attribute lookup is typically
        # case-insensitive, but confirm this matches the attribute set on
        # characters.
        defeated_characters = 0
        for fighter in self.db.fighters:
            if (fighter.db.HP == 0):
                defeated_characters += 1
        if (defeated_characters == (len(self.db.fighters) - 1)):
            for fighter in self.db.fighters:
                if (fighter.db.HP != 0):
                    LastStanding = fighter
            self.obj.msg_contents(('Only %s remains! Combat is over!' % LastStanding))
            self.stop()
            self.delete()
            return
        # Otherwise rotate to the next fighter, wrapping around the list.
        currentchar = self.db.fighters[self.db.turn]
        self.db.turn += 1
        if (self.db.turn > (len(self.db.fighters) - 1)):
            self.db.turn = 0
        newchar = self.db.fighters[self.db.turn]
        # Credit time left until the next tick so each turn gets a full timer.
        self.db.timer = (TURN_TIMEOUT + self.time_until_next_repeat())
        self.db.timeout_warning_given = False
        self.obj.msg_contents(("%s's turn ends - %s's turn begins!" % (currentchar, newchar)))
        self.start_turn(newchar)

    def turn_end_check(self, character):
        """End the turn once the character has no actions left."""
        if (not character.db.combat_actionsleft):
            self.next_turn()
            return

    def join_fight(self, character):
        """Insert a late joiner just before the current fighter.

        Inserting at the current index and bumping self.db.turn leaves the
        current fighter's turn unchanged; the newcomer acts last in rotation.
        """
        self.db.fighters.insert(self.db.turn, character)
        self.db.turn += 1
        self.initialize_for_combat(character)
_only_with_numba
def test_equivalent_sources_cartesian_parallel(coordinates, data):
    """Serial and parallel fits must produce (numerically) the same grid."""
    serial_sources = EquivalentSources(parallel=False)
    serial_sources.fit(coordinates, data)
    parallel_sources = EquivalentSources(parallel=True)
    parallel_sources.fit(coordinates, data)
    serial_grid = serial_sources.grid(coordinates)
    parallel_grid = parallel_sources.grid(coordinates)
    npt.assert_allclose(serial_grid.scalars, parallel_grid.scalars, rtol=1e-07)
class TestOpticalCharacterRecognitionGraphicMatcher():
    """Tests for OpticalCharacterRecognitionGraphicMatcher.get_graphic_matches."""

    # NOTE(review): the decorator below appears truncated by extraction;
    # presumably pytest.mark.parametrize.
    .parametrize('ocr_text,figure_label,should_match', [('Figure 1', 'Figure 1', True), ('Figure 1', 'Figure 2', False), ('Fig 1', 'Figure 1', True), ('F 1', 'Figure 1', False), ('Fug 1', 'Figure 1', False), ('Other\nFigure 1\nMore', 'Figure 1', True)])
    def test_should_match_based_on_figure_label(self, ocr_model_mock: MagicMock, ocr_text: str, figure_label: str, should_match: bool, tmp_path: Path):
        """A graphic matches a figure iff the OCR'd text matches its label."""
        # Create a real (tiny) image file for the matcher to hand to OCR.
        local_graphic_path = (tmp_path / 'image.png')
        PIL.Image.new('RGB', (10, 10), (0, 1, 2)).save(local_graphic_path)
        # Stub the OCR output for that image.
        ocr_model_mock.predict_single.return_value.get_text.return_value = ocr_text
        semantic_graphic_1 = SemanticGraphic(layout_graphic=LayoutGraphic(coordinates=FAR_AWAY_COORDINATES_1, local_file_path=str(local_graphic_path)))
        candidate_semantic_content_1 = SemanticFigure([SemanticLabel(layout_block=LayoutBlock.for_text(figure_label))])
        result = OpticalCharacterRecognitionGraphicMatcher(ocr_model=ocr_model_mock).get_graphic_matches(semantic_graphic_list=[semantic_graphic_1], candidate_semantic_content_list=[candidate_semantic_content_1])
        LOGGER.debug('result: %r', result)
        if should_match:
            assert (len(result) == 1)
            first_match = result.graphic_matches[0]
            assert (first_match.semantic_graphic == semantic_graphic_1)
        else:
            # No match: the graphic must land in unmatched_graphics instead.
            assert (not result.graphic_matches)
            assert (result.unmatched_graphics == [semantic_graphic_1])

    def test_should_ignore_layout_graphic_without_local_path(self, ocr_model_mock: MagicMock):
        """Graphics without a local image file must be skipped, not OCR'd."""
        # If the matcher did call OCR, this side effect would fail the test.
        ocr_model_mock.predict_single.return_value.get_text.side_effect = RuntimeError
        semantic_graphic_1 = SemanticGraphic(layout_graphic=LayoutGraphic(coordinates=FAR_AWAY_COORDINATES_1, local_file_path=None))
        candidate_semantic_content_1 = SemanticFigure([SemanticLabel(layout_block=LayoutBlock.for_text('Figure 1'))])
        result = OpticalCharacterRecognitionGraphicMatcher(ocr_model=ocr_model_mock).get_graphic_matches(semantic_graphic_list=[semantic_graphic_1], candidate_semantic_content_list=[candidate_semantic_content_1])
        LOGGER.debug('result: %r', result)
        assert (not result.graphic_matches)
        assert (result.unmatched_graphics == [semantic_graphic_1])
class GraphPrinterTest(unittest.TestCase):
    """Tests for the object-graph DOT printer and the DotBuilder helper."""

    def test_print_tree(self) -> None:
        """print_graph deduplicates shared and recursive references.

        ``bar`` is referenced twice and ``d`` references itself; both must
        render as edges to a single node rather than duplicated subtrees.
        """
        bar = {'blah': [2, 3, {'abc': (6, 7, (5, 5, 6))}]}
        d: Dict[(Any, Any)] = {'foo': 2, 'bar1': bar, 'bar2': bar}
        d['self'] = d  # recursive reference; expected to appear as N0 -> N0
        observed = print_graph([d])
        expected = '\ndigraph "graph" {\n N0[label=dict];\n N10[label=5];\n N1[label=2];\n N2[label=dict];\n N3[label=list];\n N4[label=3];\n N5[label=dict];\n N6[label=tuple];\n N7[label=6];\n N8[label=7];\n N9[label=tuple];\n N0 -> N0[label=self];\n N0 -> N1[label=foo];\n N0 -> N2[label=bar1];\n N0 -> N2[label=bar2];\n N2 -> N3[label=blah];\n N3 -> N1[label=0];\n N3 -> N4[label=1];\n N3 -> N5[label=2];\n N5 -> N6[label=abc];\n N6 -> N7[label=0];\n N6 -> N8[label=1];\n N6 -> N9[label=2];\n N9 -> N10[label=0];\n N9 -> N10[label=1];\n N9 -> N7[label=2];\n}\n'
        self.assertEqual(observed.strip(), expected.strip())

    def test_builder(self) -> None:
        """DotBuilder emits comments, clustered subgraphs, nodes and edges."""
        self.maxDiff = None  # show full diff on mismatch of the DOT text
        db = DotBuilder('my_graph')
        db.with_comment('comment')
        db.start_subgraph('my_subgraph', True)
        db.with_label('graph_label')
        db.with_node('A1', 'A')
        db.with_node('A2', 'A')
        db.with_edge('A1', 'A2', 'edge_label')
        db.end_subgraph()
        observed = str(db)
        expected = '\ndigraph my_graph {\n // comment\n subgraph cluster_my_subgraph {\n label=graph_label\n A1[label=A];\n A2[label=A];\n A1 -> A2[label=edge_label];\n }\n}\n '
        self.assertEqual(observed.strip(), expected.strip())
class OptionPlotoptionsGaugeSonificationDefaultinstrumentoptionsMappingPitch(Options):
    """Generated option wrapper for
    plotOptions.gauge.sonification.defaultInstrumentOptions.mapping.pitch.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; the @property / @<name>.setter decorators look stripped by
    extraction. Tokens are left untouched.
    """

    def mapFunction(self):
        # No default mapping function.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        # Default: map pitch to the point's 'y' value.
        return self._config_get('y')

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        # Default upper pitch bound (note name 'c6').
        return self._config_get('c6')

    def max(self, text: str):
        self._config(text, js_type=False)

    def min(self):
        # Default lower pitch bound (note name 'c2').
        return self._config_get('c2')

    def min(self, text: str):
        self._config(text, js_type=False)

    def scale(self):
        # No default musical scale restriction.
        return self._config_get(None)

    def scale(self, value: Any):
        self._config(value, js_type=False)

    def within(self):
        # Default: map within the 'yAxis' extremes.
        return self._config_get('yAxis')

    def within(self, text: str):
        self._config(text, js_type=False)
class OptionSeriesVectorSonificationDefaultspeechoptionsMappingRate(Options):
    """Generated option wrapper for
    series.vector.sonification.defaultSpeechOptions.mapping.rate.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name; the @property / @<name>.setter decorators look stripped by
    extraction. Tokens are left untouched. All defaults are None (unset).
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def is_notebook():
    """Best-effort check for a notebook-style interactive environment.

    Returns
    -------
    bool
        True in Google Colab, in a Jupyter (ZMQ) kernel, or in terminal
        IPython; False when IPython is unavailable or the shell class is
        unrecognized.

    NOTE(review): returning True for ``TerminalInteractiveShell`` is
    preserved from the original, but looks questionable for an
    "is_notebook" check -- confirm intent with the callers.
    """
    try:
        from IPython import get_ipython
    except Exception:
        # No IPython at all: plain interpreter, definitely not a notebook.
        return False
    if ('COLAB_GPU' in os.environ):
        # Environment variable set by Google Colab runtimes.
        return True
    shell = get_ipython().__class__.__name__
    if (shell == 'ZMQInteractiveShell'):
        # Jupyter notebook / qtconsole kernel.
        return True
    elif (shell == 'TerminalInteractiveShell'):
        return True
    # FIX: previously fell through and implicitly returned None; return an
    # explicit bool so callers always get True/False.
    return False
def test_constant_jacobian_lvs():
    """constant_jacobian=True must keep using the stale operator after the
    coefficient changes, while constant_jacobian=False reassembles it."""
    mesh = UnitSquareMesh(2, 2)
    space = FunctionSpace(mesh, 'CG', 1)
    trial = TrialFunction(space)
    test = TestFunction(space)
    coeff = Function(space)
    coeff.assign(1)
    bilinear = ((coeff * inner(trial, test)) * dx)
    rhs_func = Function(space)
    rhs_func.assign(1)
    linear = (inner(rhs_func, test) * dx)
    solution = Function(space)

    # Non-constant Jacobian: updating the coefficient is picked up on solve.
    problem = LinearVariationalProblem(bilinear, linear, solution, constant_jacobian=False)
    solver = LinearVariationalSolver(problem)
    solver.solve()
    assert (norm(assemble((solution - rhs_func))) < 1e-07)
    coeff.assign(5)
    solver.solve()
    assert (norm(assemble(((solution * 5) - rhs_func))) < 2e-07)

    # Constant Jacobian: the operator is frozen, so the second solve is wrong.
    coeff.assign(1)
    problem = LinearVariationalProblem(bilinear, linear, solution, constant_jacobian=True)
    solver = LinearVariationalSolver(problem)
    solver.solve()
    assert (norm(assemble((solution - rhs_func))) < 1e-07)
    coeff.assign(5)
    solver.solve()
    assert (not (norm(assemble(((solution * 5) - rhs_func))) < 2e-07))
def extractMantoutranslationsWordpressCom(item):
    """Build a release message for mantoutranslations.wordpress.com items.

    Returns None for previews/non-chapter posts, a release message when a
    known tag matches, and False otherwise.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    bad_title = (not (chp or vol)) or ('preview' in item['title'].lower())
    if bad_title:
        return None
    release_map = (
        ('President Daddy Super Awesome', 'President Daddy Super Awesome', 'translated'),
        ('PDSA', 'President Daddy Super Awesome', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    # First matching tag wins.
    for (tag, series, release_type) in release_map:
        if (tag in item['tags']):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=release_type)
    return False
def test_matcher_remove(matcher, nlp):
    """Removing a rule stops it matching; removing twice raises ValueError."""
    sample = 'This is a test case.'
    rule_pattern = [{'ORTH': 'test'}, {'OP': '?'}]
    assert (len(matcher) == 0)
    matcher.add('Rule', [rule_pattern])
    assert ('Rule' in matcher)
    # Rule present: exactly one match on the sample text.
    assert (len(matcher(nlp(sample))) == 1)
    matcher.remove('Rule')
    # Rule gone: no matches any more.
    assert (len(matcher(nlp(sample))) == 0)
    with pytest.raises(ValueError):
        matcher.remove('Rule')
class WavLMLoss(torch.nn.Module):
    """WavLM-based losses: a feature-matching loss (forward) and LSGAN
    generator/discriminator losses computed on stacked WavLM hidden states.

    The WavLM model is frozen and only used as a feature extractor; ``wd``
    is the trainable discriminator head applied to the stacked features.
    """

    def __init__(self, model, wd, model_sr, slm_sr=16000):
        # model: pretrained model name/path for transformers.AutoModel.
        # wd: discriminator module over flattened hidden-state stacks.
        # model_sr -> slm_sr: resample incoming audio to WavLM's rate.
        super(WavLMLoss, self).__init__()
        self.wavlm = AutoModel.from_pretrained(model)
        self.wd = wd
        self.resample = torchaudio.transforms.Resample(model_sr, slm_sr)
        # Freeze WavLM: eval mode and no gradients through its parameters.
        self.wavlm.eval()
        for param in self.wavlm.parameters():
            param.requires_grad = False

    def forward(self, wav, y_rec):
        """Mean L1 distance between WavLM hidden states of reference ``wav``
        (no grad) and reconstruction ``y_rec`` (with grad)."""
        with torch.no_grad():
            wav_16 = self.resample(wav)
            wav_embeddings = self.wavlm(input_values=wav_16, output_hidden_states=True).hidden_states
        y_rec_16 = self.resample(y_rec)
        # NOTE(review): only y_rec is squeezed here (not wav, and not in the
        # other methods) -- presumably it carries an extra channel dim;
        # confirm against the caller.
        y_rec_embeddings = self.wavlm(input_values=y_rec_16.squeeze(), output_hidden_states=True).hidden_states
        floss = 0
        # Accumulate L1 distance over every hidden layer.
        for (er, eg) in zip(wav_embeddings, y_rec_embeddings):
            floss += torch.mean(torch.abs((er - eg)))
        return floss.mean()

    def generator(self, y_rec):
        """LSGAN generator loss: push discriminator output toward 1."""
        y_rec_16 = self.resample(y_rec)
        y_rec_embeddings = self.wavlm(input_values=y_rec_16, output_hidden_states=True).hidden_states
        # Stack all hidden layers, then flatten (layer, feature) for wd.
        y_rec_embeddings = torch.stack(y_rec_embeddings, dim=1).transpose((- 1), (- 2)).flatten(start_dim=1, end_dim=2)
        y_df_hat_g = self.wd(y_rec_embeddings)
        loss_gen = torch.mean(((1 - y_df_hat_g) ** 2))
        return loss_gen

    def discriminator(self, wav, y_rec):
        """LSGAN discriminator loss: real audio -> 1, reconstruction -> 0.

        Feature extraction runs under no_grad; only wd receives gradients.
        """
        with torch.no_grad():
            wav_16 = self.resample(wav)
            wav_embeddings = self.wavlm(input_values=wav_16, output_hidden_states=True).hidden_states
            y_rec_16 = self.resample(y_rec)
            y_rec_embeddings = self.wavlm(input_values=y_rec_16, output_hidden_states=True).hidden_states
            y_embeddings = torch.stack(wav_embeddings, dim=1).transpose((- 1), (- 2)).flatten(start_dim=1, end_dim=2)
            y_rec_embeddings = torch.stack(y_rec_embeddings, dim=1).transpose((- 1), (- 2)).flatten(start_dim=1, end_dim=2)
        y_d_rs = self.wd(y_embeddings)
        y_d_gs = self.wd(y_rec_embeddings)
        (y_df_hat_r, y_df_hat_g) = (y_d_rs, y_d_gs)
        r_loss = torch.mean(((1 - y_df_hat_r) ** 2))
        g_loss = torch.mean((y_df_hat_g ** 2))
        loss_disc_f = (r_loss + g_loss)
        return loss_disc_f.mean()

    def discriminator_forward(self, wav):
        """Discriminator scores for real audio only (features under no_grad)."""
        with torch.no_grad():
            wav_16 = self.resample(wav)
            wav_embeddings = self.wavlm(input_values=wav_16, output_hidden_states=True).hidden_states
            y_embeddings = torch.stack(wav_embeddings, dim=1).transpose((- 1), (- 2)).flatten(start_dim=1, end_dim=2)
        y_d_rs = self.wd(y_embeddings)
        return y_d_rs
# NOTE(review): the bare ``.asyncio`` / ``.workspace_host`` /
# ``.authenticated_admin`` lines look like pytest marks whose
# ``@pytest.mark`` prefix was stripped by extraction, and several parameter
# annotations (e.g. the type of ``test_client_api``) appear truncated,
# leaving the signatures syntactically broken. Tokens are left untouched.
.asyncio
.workspace_host
class TestGetUserField():
    """API tests for GET /user-fields/{id}."""

    async def test_unauthorized(self, unauthorized_api_assertions: HTTPXResponseAssertion, test_client_api: test_data: TestData):
        # Unauthenticated requests must be rejected.
        user_field = test_data['user_fields']['given_name']
        response = (await test_client_api.get(f'/user-fields/{user_field.id}'))
        unauthorized_api_assertions(response)

    .authenticated_admin
    async def test_not_existing(self, test_client_api: not_existing_uuid: uuid.UUID):
        # Unknown ids yield 404 for an authenticated admin.
        response = (await test_client_api.get(f'/user-fields/{not_existing_uuid}'))
        assert (response.status_code == status.HTTP_404_NOT_FOUND)

    .authenticated_admin
    async def test_valid(self, test_client_api: test_data: TestData):
        # Known id yields 200 for an authenticated admin.
        user_field = test_data['user_fields']['given_name']
        response = (await test_client_api.get(f'/user-fields/{user_field.id}'))
        assert (response.status_code == status.HTTP_200_OK)
def DoSpecialize(stmt_cursor, conds):
    """Rewrite the statement under ``stmt_cursor`` into a chain of
    specialized copies guarded by ``conds``::

        if conds[0]:   <copy of s>
        elif conds[1]: <copy of s>
        ...
        else:          <copy of s>

    Each branch is an alpha-renamed copy of the original statement so the
    copies can later be transformed independently.

    Raises SchedulingError if a condition is not an and/or combination of
    comparisons between indexable (integer-typed) expressions.
    Returns the (ir, fwd) pair produced by the cursor replacement.
    """
    assert conds, 'Must add at least one condition'
    s = stmt_cursor._node

    def is_valid_condition(e):
        # Valid: boolean combinations of integer comparisons only.
        assert isinstance(e, LoopIR.BinOp)
        if (e.op in ['and', 'or']):
            return (is_valid_condition(e.lhs) and is_valid_condition(e.rhs))
        elif (e.op in ['==', '!=', '<', '<=', '>', '>=']):
            return (e.lhs.type.is_indexable() and e.rhs.type.is_indexable())
        else:
            return False

    # Innermost else-branch: an unconditional copy of the statement.
    else_br = Alpha_Rename([s]).result()
    # Wrap conditions from last to first so conds[0] ends up outermost.
    for cond in reversed(conds):
        if (not is_valid_condition(cond)):
            raise SchedulingError('Invalid specialization condition. ')
        then_br = Alpha_Rename([s]).result()
        else_br = [LoopIR.If(cond, then_br, else_br, None, s.srcinfo)]
    (ir, fwd) = stmt_cursor._replace(else_br)
    return (ir, fwd)
class WafTagAttributes(ModelNormal):
    """Generated OpenAPI model for WAF tag attributes (one read-only
    ``name`` field).

    NOTE(review): the bare ``_property`` / ``_js_args_to_python_args`` lines
    look like decorators whose prefixes were stripped by extraction
    (``@cached_property`` / ``@convert_js_args_to_python_args`` in typical
    generated clients). Tokens are left untouched.
    """

    # No enum-restricted values or extra validations on this model.
    allowed_values = {}
    validations = {}

    _property
    def additional_properties_type():
        # Undeclared properties may hold any primitive or container type.
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    _property
    def openapi_types():
        # Attribute name -> tuple of accepted types.
        return {'name': (str,)}

    _property
    def discriminator():
        return None

    # Python attribute name -> JSON key.
    attribute_map = {'name': 'name'}

    read_only_vars = {'name'}

    _composed_schemas = {}

    _js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Server-data constructor: may set read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            # Positional arguments are never valid for generated models.
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally drop unknown keys instead of raising.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    _js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Client-side constructor: rejects read-only attributes."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            # Unlike _from_openapi_data, clients may not set read-only vars.
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class PolicyProcessor(AbstractGamestateDataProcessor):
    """Tracks each country's policy stances over time, storing Policy rows
    and emitting new-policy / changed-policy historical events."""

    ID = 'policy'
    DEPENDENCIES = [CountryProcessor.ID, RulerEventProcessor.ID]

    def extract_data_from_gamestate(self, dependencies):
        """Diff current gamestate policies against stored rows per country."""
        countries_dict = dependencies[CountryProcessor.ID]
        rulers_dict = dependencies[RulerEventProcessor.ID]
        for (country_id, country_model) in countries_dict.items():
            current_stance_per_policy = self._get_current_policies(country_id)
            previous_policy_by_name = self._load_previous_policies(country_model)
            for (policy_name, (current_selected, date)) in current_stance_per_policy.items():
                # Fall back to the current in-game date if none is recorded.
                policy_date_days = (datamodel.date_to_days(date) if date else self._basic_info.date_in_days)
                add_new_policy = False
                event_type = None
                event_description = None
                if (policy_name not in previous_policy_by_name):
                    # First time this policy appears for the country.
                    add_new_policy = True
                    event_type = datamodel.HistoricalEventType.new_policy
                    event_description = f'{policy_name}|{current_selected}'
                else:
                    previous_policy = previous_policy_by_name[policy_name]
                    previous_selected = previous_policy.selected.text
                    if (previous_selected != current_selected):
                        # Stance changed: retire the old row, record change.
                        add_new_policy = True
                        previous_policy.is_active = False
                        self._session.add(previous_policy)
                        event_type = datamodel.HistoricalEventType.changed_policy
                        event_description = f'{policy_name}|{previous_selected}|{current_selected}'
                if add_new_policy:
                    self._session.add(datamodel.Policy(country_model=country_model, policy_date=policy_date_days, is_active=True, policy_name=self._get_or_add_shared_description(policy_name), selected=self._get_or_add_shared_description(current_selected)))
                if (event_type and event_description):
                    self._session.add(datamodel.HistoricalEvent(event_type=event_type, country=country_model, leader=rulers_dict.get(country_id), start_date_days=policy_date_days, db_description=self._get_or_add_shared_description(event_description)))

    def _get_current_policies(self, country_id) -> dict[(str, (str, str))]:
        """Return {policy_name: (selected_stance, date)} from the gamestate."""
        country_gs_dict = self._gamestate_dict['country'][country_id]
        current_policies = country_gs_dict.get('active_policies')
        if (not isinstance(current_policies, list)):
            # Missing or malformed entry: treat as "no policies".
            current_policies = []
        current_stance_per_policy = {p.get('policy'): (p.get('selected'), p.get('date')) for p in current_policies}
        return current_stance_per_policy

    def _load_previous_policies(self, country_model):
        """Return the country's active stored Policy rows keyed by name."""
        previous_policy_by_name: dict[(str, datamodel.Policy)] = {p.policy_name.text: p for p in self._session.query(datamodel.Policy).filter_by(country_model=country_model, is_active=True).all()}
        return previous_policy_by_name
def main(page: ft.Page):
    """Infinite-scroll demo: append more rows as the column nears the end."""

    def handle_scroll(e: ft.OnScrollEvent):
        # Only load more once we are within 100px of the bottom.
        if (e.pixels < (e.max_scroll_extent - 100)):
            return
        # Non-blocking acquire: skip this event if a load is in progress.
        if (not sem.acquire(blocking=False)):
            return
        try:
            for _ in range(10):
                column.controls.append(ft.Text(f'Text line {s.i}', key=str(s.i)))
                s.i += 1
            column.update()
        finally:
            sem.release()

    column = ft.Column(
        spacing=10,
        height=200,
        width=200,
        scroll=ft.ScrollMode.ALWAYS,
        on_scroll_interval=0,
        on_scroll=handle_scroll,
    )
    # Seed the list with an initial batch of rows.
    for _ in range(50):
        column.controls.append(ft.Text(f'Text line {s.i}', key=str(s.i)))
        s.i += 1
    page.add(ft.Container(column, border=ft.border.all(1)))
def get_max_level_super_rare(save_stats: dict[(str, Any)], cat_id: int) -> int:
    """Return the max upgrade level for a super-rare cat given save progress.

    Caps, in order of precedence: 10 before clearing EoC chapter 2; 20/25
    below rank thresholds (higher thresholds for crazed cats); 30 without
    catseyes; 40 without the ancient-curse clear; otherwise 50.
    """
    user_rank = helper.calculate_user_rank(save_stats)
    cleared_eoc_2 = main_story.has_cleared_chapter(save_stats, 1)
    acient_curse_clear = uncanny.is_ancient_curse_clear(save_stats)
    crazed = is_crazed(cat_id)
    catseyes = catseyes_unlocked(save_stats)

    if (not cleared_eoc_2):
        return 10
    # Crazed cats have higher rank requirements for the same caps.
    rank_needed_for_25 = (3600 if crazed else 1000)
    if (user_rank < rank_needed_for_25):
        return 20
    rank_needed_for_30 = (3650 if crazed else 1300)
    if (user_rank < rank_needed_for_30):
        return 25
    if (not catseyes):
        return 30
    if (not acient_curse_clear):
        return 40
    return 50
def test_runs_a_known_pipeline(unittest_pipeline):
    """racecontrol.run must invoke the named pipeline exactly once."""
    cfg = config.Config()
    settings = (
        ('system', 'race.id', '28a032d1-0b03-4579-ad2a-c65316f126e9'),
        ('race', 'pipeline', 'unit-test-pipeline'),
        ('mechanic', 'distribution.version', ''),
    )
    for (section, key, value) in settings:
        cfg.add(config.Scope.benchmark, section, key, value)
    racecontrol.run(cfg)
    unittest_pipeline.target.assert_called_once_with(cfg)
class GridPath(object):
    """Computes and caches shortest-path "next steps" toward a fixed goal
    on a grid, recomputing lazily when cells are (un)blocked.
    """

    def __init__(self, nrows, ncols, goal):
        self.map = GridMap(nrows, ncols)
        self.goal = goal
        # Maps coord -> next coord on a shortest path to the goal.
        self._path_cache = {}

    def get_next(self, coord):
        """Return the next coordinate on the path from ``coord`` to the
        goal, or None if the goal is unreachable from ``coord``."""
        # Idiom fix: 'coord not in' instead of 'not (coord in ...)', and a
        # single dict.get instead of a second membership test + lookup.
        if coord not in self._path_cache:
            self._compute_path(coord)
        return self._path_cache.get(coord)

    def set_blocked(self, coord, blocked=True):
        """Mark ``coord`` (un)blocked and invalidate all cached paths."""
        self.map.set_blocked(coord, blocked)
        self._path_cache = {}

    def _compute_path(self, coord):
        # move_cost doubles as the heuristic estimate for the search.
        pf = PathFinder(self.map.successors, self.map.move_cost, self.map.move_cost)
        path_list = list(pf.compute_path(coord, self.goal))
        # Cache every cell along the path; the goal cell maps to itself.
        for i, path_coord in enumerate(path_list):
            next_i = i if i == len(path_list) - 1 else i + 1
            self._path_cache[path_coord] = path_list[next_i]
def test___setitem__checks_the_value_ranges_1():
    """Assigning out-of-range hour values raises a descriptive ValueError."""
    working_hours = WorkingHours()
    with pytest.raises(ValueError) as exc_info:
        working_hours['sun'] = [[(- 10), 100]]
    expected_message = (
        'WorkingHours.working_hours value should be a list of lists of two '
        'integers between and the range of integers should be 0-1440, not [[-10, 100]]'
    )
    assert (str(exc_info.value) == expected_message)
def test_reverting_arbitrage(trace_classifier: TraceClassifier):
    """A reverted arbitrage transaction must be flagged with its error."""
    block = load_test_block()
    swaps = get_swaps(trace_classifier.classify(block.traces))
    assert (len(swaps) == 38)
    arbitrages = get_arbitrages(list(swaps))
    assert (len(arbitrages) == 5)
    target_hash = '0x23a4dc7044666d3d4cc2d394a8017fc9d6b87018c20390d35266cea1af783e8a'
    matching = [arb for arb in arbitrages if (arb.transaction_hash == target_hash)]
    assert (matching[0].error == 'Reverted')
_touched_chat
def cmd_unsub(bot, update, args, chat=None):
    """Telegram /unsub handler: remove this chat's subscriptions to the
    given Twitter usernames and report what was (not) unsubscribed.
    """
    if (len(args) < 1):
        bot.reply(update, 'Use /unsub username1 username2 username3 ...')
        return
    tw_usernames = args
    not_found = []
    successfully_unsubscribed = []
    for tw_username in tw_usernames:
        tw_user = bot.get_tw_user(tw_username)
        # Unknown user, or no subscription in this chat: report as not found.
        if ((tw_user is None) or (Subscription.select().where((Subscription.tw_user == tw_user), (Subscription.tg_chat == chat)).count() == 0)):
            not_found.append(tw_username)
            continue
        Subscription.delete().where((Subscription.tw_user == tw_user), (Subscription.tg_chat == chat)).execute()
        successfully_unsubscribed.append(tw_user.full_name)
    reply = ''
    # FIX: was `len(x) is not 0`, an identity comparison against an int
    # literal (SyntaxWarning since 3.8 and implementation-dependent);
    # plain truthiness is the correct check.
    if not_found:
        reply += "I didn't find any subscription to {}\n\n".format(', '.join(not_found))
    if successfully_unsubscribed:
        reply += 'You are no longer subscribed to {}'.format(', '.join(successfully_unsubscribed))
    bot.reply(update, reply)
class ComboBox(BaseDropdown):
    """Dropdown widget with selectable (and optionally editable) options.

    NOTE(review): the flexx event decorators (@event.action,
    @event.reaction, @event.emitter) appear stripped by extraction on the
    methods below; tokens are left untouched.
    """

    CSS = '\n\n .flx-ComboBox {\n }\n\n .flx-ComboBox > ul {\n list-style-type: none;\n box-sizing: border-box;\n border: 1px solid #333;\n border-radius: 3px;\n margin: 0;\n padding: 2px;\n position: fixed; /* because all our widgets are overflow:hidden */\n background: white;\n z-index: 9999;\n display: none;\n }\n .flx-ComboBox.expanded > ul {\n display: block;\n max-height: 220px;\n overflow-y: auto;\n }\n\n .flx-ComboBox.expanded > ul > li:hover {\n background: rgba(0, 128, 255, 0.2);\n }\n .flx-ComboBox.expanded > ul > li.highlighted-true {\n box-shadow: inset 0 0 3px 1px rgba(0, 0, 255, 0.4);\n }\n '

    text = event.StringProp('', settable=True, doc='\n The text displayed on the widget. This property is set\n when an item is selected from the dropdown menu. When editable,\n the ``text`` is also set when the text is edited by the user.\n This property is settable programatically regardless of the\n value of ``editable``.\n ')

    selected_index = event.IntProp((- 1), settable=True, doc='\n The currently selected item index. Can be -1 if no item has\n been selected or when the text was changed manually (if editable).\n Can also be programatically set.\n ')

    selected_key = event.StringProp('', settable=True, doc="\n The currently selected item key. Can be '' if no item has\n been selected or when the text was changed manually (if editable).\n Can also be programatically set.\n ")

    placeholder_text = event.StringProp('', settable=True, doc='\n The placeholder text to display in editable mode.\n ')

    editable = event.BoolProp(False, settable=True, doc="\n Whether the combobox's text is editable.\n ")

    options = event.TupleProp((), settable=True, doc='\n A list of tuples (key, text) representing the options. Both\n keys and texts are converted to strings if they are not already.\n For items that are given as a string, the key and text are the same.\n If a dict is given, it is transformed to key-text pairs.\n ')

    _highlighted = app.LocalProperty((- 1), settable=True, doc='\n The index of the currently highlighted item.\n ')

    def set_options(self, options):
        # Normalize a dict to sorted (key, value) pairs.
        if isinstance(options, dict):
            keys = options.keys()
            keys = sorted(keys)
            options = [(k, options[k]) for k in keys]
        # Coerce every option into a (str key, str text) tuple.
        options2 = []
        for opt in options:
            if isinstance(opt, (tuple, list)):
                opt = (str(opt[0]), str(opt[1]))
            else:
                opt = (str(opt), str(opt))
            options2.append(opt)
        self._mutate_options(tuple(options2))
        # Re-apply the current selection against the new option list;
        # toggling forces the setters to run even on an equal value.
        keys = [key_text[0] for key_text in self.options]
        if (self.selected_key and (self.selected_key in keys)):
            key = self.selected_key
            self.set_selected_key('')
            self.set_selected_key(key)
        elif (0 <= self.selected_index < len(self.options)):
            index = self.selected_index
            self.set_selected_index((- 1))
            self.set_selected_index(index)
        elif self.selected_key:
            # Previously selected key is gone from the new options.
            self.set_selected_key('')
        else:
            pass

    def _deselect(self):
        # Clear selection state; keep user-typed text when editable.
        self._mutate('selected_index', (- 1))
        self._mutate('selected_key', '')
        if (not self.editable):
            self.set_text('')

    def update_selected_index(self, text):
        # Sync the selection to a text value (e.g. after manual editing).
        for (index, option) in enumerate(self.options):
            if (option[1] == text):
                self._mutate('selected_index', index)
                self._mutate('selected_key', option[0])
                return
        self._deselect()

    def set_selected_index(self, index):
        # Select by index; out-of-range indices deselect.
        if (index == self.selected_index):
            return
        elif (0 <= index < len(self.options)):
            (key, text) = self.options[index]
            self._mutate('selected_index', index)
            self._mutate('selected_key', key)
            self.set_text(text)
        else:
            self._deselect()

    def set_selected_key(self, key):
        # Select by key; unknown or empty keys deselect.
        if (key == self.selected_key):
            return
        elif key:
            for (index, option) in enumerate(self.options):
                if (option[0] == key):
                    self._mutate('selected_index', index)
                    self._mutate('selected_key', key)
                    self.set_text(option[1])
                    return
        self._deselect()

    def user_selected(self, index):
        # Emitter for user selections; only fires for valid indices.
        options = self.options
        if ((index >= 0) and (index < len(options))):
            (key, text) = options[index]
            self.set_selected_index(index)
            self.set_selected_key(key)
            self.set_text(text)
            return dict(index=index, key=key, text=text)

    def _create_dom(self):
        node = super()._create_dom()
        # Keyboard handling lives on the outer node.
        node.onkeydown = self._key_down
        return node

    def _render_dom(self):
        # Build the <li> options plus an invisible "strud" used for sizing.
        options = self.options
        option_nodes = []
        strud = []
        for i in range(len(options)):
            (key, text) = options[i]
            clsname = ('highlighted-true' if (self._highlighted == i) else '')
            # Non-breaking space keeps empty options clickable.
            li = create_element('li', dict(index=i, className=clsname), (text if len(text.strip()) else '\xa0'))
            strud += [(text + '\xa0'), create_element('span', {'class': 'flx-dd-space'}), create_element('br')]
            option_nodes.append(li)
        nodes = super()._render_dom()
        nodes[1].props.placeholder = self.placeholder_text
        nodes[(- 1)].children = strud
        nodes.append(create_element('ul', dict(onmousedown=self._ul_click), option_nodes))
        return nodes

    def __track_editable(self):
        # Mirror the editable flag onto CSS classes.
        if self.editable:
            self.node.classList.remove('editable-false')
            self.node.classList.add('editable-true')
        else:
            self.node.classList.add('editable-false')
            self.node.classList.remove('editable-true')

    def _ul_click(self, e):
        # Only <li> elements carry an index attribute.
        if hasattr(e.target, 'index'):
            self._select_from_ul(e.target.index)

    def _select_from_ul(self, index):
        self.user_selected(index)
        self._collapse()

    def _key_down(self, e):
        # Keyboard navigation of the dropdown list.
        key = e.key
        if ((not key) and e.code):
            key = e.code  # fallback for browsers without e.key
        if (not self.node.classList.contains('expanded')):
            # Collapsed: arrow keys open the list; all else passes through.
            if (key in ['ArrowUp', 'ArrowDown']):
                e.stopPropagation()
                self.expand()
            return
        if (key not in ['Escape', 'ArrowUp', 'ArrowDown', ' ', 'Enter']):
            return
        e.preventDefault()
        e.stopPropagation()
        if (key == 'Escape'):
            self._set_highlighted((- 1))
            self._collapse()
        elif ((key == 'ArrowUp') or (key == 'ArrowDown')):
            if (key == 'ArrowDown'):
                hl = (self._highlighted + 1)
            else:
                hl = (self._highlighted - 1)
            # Clamp the highlight to the valid option range.
            self._set_highlighted(min(max(hl, 0), (len(self.options) - 1)))
        elif ((key == 'Enter') or (key == ' ')):
            if ((self._highlighted >= 0) and (self._highlighted < len(self.options))):
                self._select_from_ul(self._highlighted)

    def _expand(self):
        # Position the (position:fixed) <ul> just below the widget,
        # flipping above it when there is more room there.
        rect = super()._expand()
        ul = self.outernode.children[(len(self.outernode.children) - 1)]
        ul.style.left = (rect.left + 'px')
        ul.style.width = (rect.width + 'px')
        ul.style.top = ((rect.bottom - 1) + 'px')
        space_below = (window.innerHeight - rect.bottom)
        if (space_below < ul.clientHeight):
            space_above = rect.top
            if (space_above > space_below):
                ul.style.top = (((rect.top - 1) - ul.clientHeight) + 'px')

    def _submit_text(self):
        super()._submit_text()
        # A manual edit may match an option text; sync the selection.
        self.update_selected_index(self.text)
# NOTE(review): decorator truncated by extraction; presumably
# pytest.mark.parametrize.
.parametrize('log_base, exp_age, exp_marks', _params_test_c_is_numerical)
def test_param_C_is_int_variables_is_none(log_base, exp_age, exp_marks, df_vartypes):
    """LogCpTransformer with int C and variables=None transforms all
    numerical columns and round-trips through inverse_transform."""
    int_constant = 1
    transformer = LogCpTransformer(base=log_base, C=int_constant)
    X = transformer.fit_transform(df_vartypes)
    # Build the expected frame from the parametrized expected columns.
    transf_df = df_vartypes.copy()
    transf_df['Age'] = exp_age
    transf_df['Marks'] = exp_marks
    assert (transformer.base == log_base)
    assert (transformer.variables is None)
    assert (transformer.C == int_constant)
    # Fitted attributes: both numerical columns picked up automatically.
    assert (transformer.variables_ == ['Age', 'Marks'])
    assert (transformer.n_features_in_ == 5)
    assert (transformer.C_ == int_constant)
    pd.testing.assert_frame_equal(X, transf_df)
    # Inverse transform recovers the originals up to rounding.
    Xit = transformer.inverse_transform(X)
    Xit['Age'] = Xit['Age'].round().astype('int64')
    Xit['Marks'] = Xit['Marks'].round(1)
    pd.testing.assert_frame_equal(Xit, df_vartypes)
class VrfConfListener(ConfWithIdListener, ConfWithStatsListener):
    """Base listener for VRF configuration changes.

    Subclasses must override ``on_chg_vrf_conf`` to react to
    ``VrfConf.VRF_CHG_EVT`` events emitted by ``vrf_conf``.
    """

    def __init__(self, vrf_conf):
        super(VrfConfListener, self).__init__(vrf_conf)
        # Subscribe so change events are dispatched to the subclass override.
        vrf_conf.add_listener(VrfConf.VRF_CHG_EVT, self.on_chg_vrf_conf)

    def on_chg_vrf_conf(self, evt):
        raise NotImplementedError('This method should be overridden')
# NOTE(review): decorator head truncated by extraction; given the keyword
# arguments this is presumably a Cython directive decorator, e.g.
# @cython.boundscheck(False) etc. Tokens are left untouched.
(boundscheck=False, wraparound=False, cdivision=True, nonecheck=False)
def add(img: Auint8, stateimg: Auint8, channel: int, amount: int):
    """Add ``amount`` to one channel of ``img`` in place, reading source
    pixels from ``stateimg`` and saturating results to the uint8 range
    [0, 255].
    """
    height = img.shape[0]
    width = img.shape[1]
    k = channel
    n = amount
    # Precompute a 256-entry saturating lookup table: lut[v] = clamp(v + n).
    lut: A1dC = np.empty(256, dtype=np.uint8)
    for l in range(256):
        op_result = (l + n)
        if (op_result > 255):
            op_result = 255
        elif (op_result < 0):
            op_result = 0
        lut[l] = np.uint8(op_result)
    # Apply the table to every pixel of the selected channel.
    for i in range(height):
        for j in range(width):
            img[(i, j, k)] = lut[stateimg[(i, j, k)]]
class TestInlineHiliteNoPygments(util.MdCase):
    """InlineHilite output when Pygments highlighting is disabled."""

    # Markdown extensions and settings used by util.MdCase for this test.
    extension = ['pymdownx.highlight', 'pymdownx.inlinehilite']
    extension_configs = {'pymdownx.highlight': {'use_pygments': False}, 'pymdownx.inlinehilite': {'css_class': 'inlinehilite'}}

    def test_no_pygments(self):
        # Without Pygments, language info becomes CSS classes on <code>.
        self.check_markdown('`#!python import module`.', '<p><code class="language-python inlinehilite">import module</code>.</p>')
class LinkedRDKitChorizo():
    """Linked sequence of protein residues represented as RDKit molecules.

    Parses a PDB string into per-residue blocks (``ChorizoResidue``), matches
    each residue against SMILES templates (``params``), pads residues with
    backbone fragments of their neighbors when building padded molecules,
    detects disulfide bridges, and can export PDB text and static atom
    parameters.
    """

    # SMILES fragments used as stand-ins for the C-side / N-side neighbor
    # when padding a residue.
    cterm_pad_smiles = 'CN'
    nterm_pad_smiles = 'CC=O'
    # Peptide backbone SMARTS: carbonyl C, carbonyl O, C-alpha, amide N.
    backbone_smarts = '[C:1](=[O:2])[C:3][N:4]'
    backbone = Chem.MolFromSmarts(backbone_smarts)
    # Same backbone pattern with an explicit hydrogen on the amide nitrogen.
    backboneh = Chem.MolFromSmarts('[C:1](=[O:2])[C:3][N:4][#1]')
    # Indices into the backbone SMARTS match of the adjacent residue used to
    # position the pad atoms (see _join in get_padded_mol).
    nterm_pad_backbone_smarts_idxs = (0, 2, 1)
    cterm_pad_backbone_smarts_idxs = (2, 3)
    # Reactions fusing a pad fragment onto the residue backbone.
    rxn_cterm_pad = rdChemReactions.ReactionFromSmarts(f'[N:5][C:6].{backbone_smarts}>>[C:6][N:5]{backbone_smarts}')
    rxn_nterm_pad = rdChemReactions.ReactionFromSmarts(f'[C:5][C:6]=[O:7].{backbone_smarts}>>{backbone_smarts}[C:6](=[O:7])[C:5]')

    def __init__(self, pdb_string, params=chorizo_params, mutate_res_dict=None, termini=None, deleted_residues=None, allow_bad_res=False):
        """Build the residue chain from a PDB string.

        pdb_string: full PDB file contents.
        params: residue template parameters (default: module-level chorizo_params).
        mutate_res_dict: optional {old_res_id: new_res_id} renames applied before
            template matching.
        termini: optional {res_id: 'C'|'N'} marking terminal residues.
        deleted_residues: res_ids to mark as user-deleted.
        allow_bad_res: if False, raise when any residue could not be processed.
        """
        suggested_mutations = {}
        self.residues = self._pdb_to_resblocks(pdb_string)
        res_list = self.residues.keys()
        self.termini = self._check_termini(termini, res_list)
        if (deleted_residues is None):
            deleted_residues = ()
        self._check_del_res(deleted_residues, self.residues)
        self.deleted_residues = deleted_residues
        self.mutate_res_dict = mutate_res_dict
        if (mutate_res_dict is not None):
            self._rename_residues(mutate_res_dict)
        (self.res_templates, self.ambiguous) = self._load_params(params)
        ambiguous_chosen = self.parameterize_residues(self.termini, self.ambiguous)
        suggested_mutations.update(ambiguous_chosen)
        removed_residues = self.getIgnoredResidues()
        if ((len(removed_residues) > 0) and (not allow_bad_res)):
            for res in removed_residues:
                suggested_mutations[res] = res
            print('The following mutations are suggested. For HIS, mutate to HID, HIE, or HIP.')
            print(json.dumps(suggested_mutations, indent=2))
            msg = ('The following residues could not be processed:' + pathlib.os.linesep)
            msg += self.print_residues_by_resname(removed_residues)
            raise RuntimeError(msg)
        self.disulfide_bridges = self._find_disulfide_bridges()
        # Rename CYS/CYM involved in a bridge to CYX and rebuild their mols.
        # NOTE(review): the `x not in mutate_res_dict` tests below raise
        # TypeError when mutate_res_dict is None and a non-CYX bridge partner
        # is found -- confirm intended behavior upstream.
        for (cys_1, cys_2) in self.disulfide_bridges:
            (chain_1, resname_1, resnum_1) = cys_1.split(':')
            (chain_2, resname_2, resnum_2) = cys_2.split(':')
            if ((resname_1 != 'CYX') or (resname_2 != 'CYX')):
                print(f'Likely disulfide bridge between {cys_1} and {cys_2}')
            if (resname_1 != 'CYX'):
                cyx_1 = f'{chain_1}:CYX:{resnum_1}'
                suggested_mutations[cys_1] = cyx_1
                if ((cys_1 not in mutate_res_dict) and ((cys_2 not in mutate_res_dict) or (resname_2 == 'CYX'))):
                    self._rename_residues({cys_1: cyx_1})
                    resmol = self.build_resmol(cyx_1, 'CYX')
                    if (resmol is not None):
                        self.residues[cyx_1].rdkit_mol = resmol
                    else:
                        self.residues[cyx_1].ignore_residue = True
            if (resname_2 != 'CYX'):
                cyx_2 = f'{chain_2}:CYX:{resnum_2}'
                suggested_mutations[cys_2] = cyx_2
                if ((cys_2 not in mutate_res_dict) and ((cys_1 not in mutate_res_dict) or (resname_1 == 'CYX'))):
                    self._rename_residues({cys_2: cyx_2})
                    resmol = self.build_resmol(cyx_2, 'CYX')
                    if (resmol is not None):
                        self.residues[cyx_2].rdkit_mol = resmol
                    else:
                        self.residues[cyx_2].ignore_residue = True
        # The string below is dead code kept as a literal (commented-out block).
        'to_remove = []\n for res_id in self.getIgnoredResidues():\n i = self.res_list.index(res_id)\n to_remove.append(i)\n for i in sorted(to_remove, reverse=True):\n self.res_list.pop(i) '
        self.suggested_mutations = suggested_mutations
        return

    def _find_disulfide_bridges(self):
        """Return list of (res, other_res) cysteine pairs whose sulfur atoms
        are within 2.5 Angstrom of each other."""
        cys_list = {}
        cutoff = 2.5
        bridges = []
        for res in self.residues:
            if self.residues[res].ignore_residue:
                continue
            resname = res.split(':')[1]
            if (resname in ['CYS', 'CYX', 'CYM']):
                resmol = self.residues[res].rdkit_mol
                molxyz = resmol.GetConformer().GetPositions()
                s_xyz = None
                for atom in resmol.GetAtoms():
                    if (atom.GetAtomicNum() == 16):  # sulfur
                        s_xyz = molxyz[atom.GetIdx()]
                for (cys, other_s_xyz) in cys_list.items():
                    v = (s_xyz - other_s_xyz)
                    dist = np.sqrt(np.dot(v, v))
                    if (dist < cutoff):
                        bridges.append((res, cys))
                cys_list[res] = s_xyz
        return bridges

    # NOTE(review): no `self` parameter but called as self._check_del_res(...)
    # with two arguments -- a @staticmethod decorator was likely lost in
    # extraction; confirm against the original source.
    def _check_del_res(query_res, residues):
        """Mark requested residues as user-deleted; raise if any are unknown."""
        missing = set()
        for res in query_res:
            if (res not in residues):
                missing.add(res)
            else:
                residues[res].user_deleted = True
        if (len(missing) > 0):
            msg = ('deleted_residues not found: ' + ' '.join(missing))
            raise ValueError(msg)

    # NOTE(review): likely missing @staticmethod (see _check_del_res).
    def _check_termini(termini, res_list):
        """Normalize a user {res_id: terminus} mapping to 'C'/'N' values."""
        allowed_c = ('cterm', 'c-term', 'c')
        allowed_n = ('nterm', 'n-term', 'n')
        output = {}
        if (termini is None):
            return output
        for (resn, value) in termini.items():
            if (resn not in res_list):
                raise ValueError(('%s in termini not found' % resn))
            output[resn] = []  # placeholder; overwritten below
            if (value.lower() in allowed_c):
                output[resn] = 'C'
            elif (value.lower() in allowed_n):
                output[resn] = 'N'
            else:
                raise ValueError(('termini value was %s, expected %s or %s' % (value, allowed_c, allowed_n)))
        return output

    def get_padded_mol(self, resn):
        """Return (mol, is_res_atom, mapidx) where mol is the residue padded
        with fragments of its neighbors, is_res_atom flags atoms belonging to
        the residue itself, and mapidx maps padded-mol atom indices back to
        the original residue mol."""
        def _join(mol, pad_mol, pad_smarts_mol, rxn, is_res_atom, mapidx, adjacent_mol=None, pad_smarts_idxs=None):
            # Fuse pad_mol onto mol via rxn; pad atom positions are copied
            # from the backbone match in the adjacent residue.
            pad_matches = adjacent_mol.GetSubstructMatches(pad_smarts_mol)
            if (len(pad_matches) != 1):
                raise RuntimeError(('expected 1 match but got %d' % len(pad_matches)))
            conformer = Chem.Conformer(pad_mol.GetNumAtoms())
            pad_mol.AddConformer(conformer)
            if (adjacent_mol is not None):
                for (index, smarts_index) in enumerate(pad_smarts_idxs):
                    adjacent_mol_index = pad_matches[0][smarts_index]
                    pos = adjacent_mol.GetConformer().GetAtomPosition(adjacent_mol_index)
                    pad_mol.GetConformer().SetAtomPosition(index, pos)
            (products, index_map) = react_and_map((pad_mol, mol), rxn)
            if (len(products) != 1):
                raise RuntimeError(('expected 1 reaction product but got %d' % len(products)))
            mol = products[0][0]
            # Keep only the mapping for the single product.
            index_map['reactant_idx'] = index_map['reactant_idx'][0][0]
            index_map['atom_idx'] = index_map['atom_idx'][0][0]
            Chem.SanitizeMol(mol)
            new_is_res_atom = []
            new_mapidx = {}
            for atom in mol.GetAtoms():
                index = atom.GetIdx()
                reactant_idx = index_map['reactant_idx'][index]
                if (reactant_idx == 0):
                    # atom came from the pad fragment
                    new_is_res_atom.append(False)
                elif (reactant_idx == 1):
                    # atom came from the residue mol
                    atom_idx = index_map['atom_idx'][index]
                    new_is_res_atom.append(is_res_atom[atom_idx])
                    if (atom_idx in mapidx):
                        new_mapidx[index] = mapidx[atom_idx]
                else:
                    raise RuntimeError(('we have only two reactants, got %d ?' % reactant_idx))
            return (mol, new_is_res_atom, new_mapidx)
        mol = Chem.Mol(self.residues[resn].rdkit_mol)
        is_res_atom = [True for atom in mol.GetAtoms()]
        mapidx = {atom.GetIdx(): atom.GetIdx() for atom in mol.GetAtoms()}
        # Pad the N side using the previous residue, if present and parameterized.
        if ((self.residues[resn].previous_id is not None) and (self.residues[self.residues[resn].previous_id].rdkit_mol is not None)):
            prev_resn = self.residues[resn].previous_id
            prev_mol = self.residues[prev_resn].rdkit_mol
            nterm_pad = Chem.MolFromSmiles(self.nterm_pad_smiles)
            (mol, is_res_atom, mapidx) = _join(mol, nterm_pad, self.backbone, self.rxn_nterm_pad, is_res_atom, mapidx, prev_mol, self.nterm_pad_backbone_smarts_idxs)
        # Pad the C side using the next residue, if present and parameterized.
        if ((self.residues[resn].next_id is not None) and (self.residues[self.residues[resn].next_id].rdkit_mol is not None)):
            next_resn = self.residues[resn].next_id
            next_mol = self.residues[next_resn].rdkit_mol
            cterm_pad = Chem.MolFromSmiles(self.cterm_pad_smiles)
            (mol, is_res_atom, mapidx) = _join(mol, cterm_pad, self.backbone, self.rxn_cterm_pad, is_res_atom, mapidx, next_mol, self.cterm_pad_backbone_smarts_idxs)
        # Hydrogens added here belong to the padding, not the residue.
        n_atoms_before_addhs = mol.GetNumAtoms()
        mol = Chem.AddHs(mol)
        is_res_atom.extend(([False] * (mol.GetNumAtoms() - n_atoms_before_addhs)))
        return (mol, is_res_atom, mapidx)

    def res_to_molsetup(self, res, mk_prep, is_protein_sidechain=False, cut_at_calpha=False):
        """Prepare a molsetup for one residue from its padded molecule.

        Returns (molsetup, mapidx, ignored_in_molsetup) where
        ignored_in_molsetup lists residue-atom indices flagged as ignored.
        """
        (padded_mol, is_res_atom, mapidx) = self.get_padded_mol(res)
        if is_protein_sidechain:
            bb_matches = padded_mol.GetSubstructMatches(self.backboneh)
            if (len(bb_matches) != 1):
                raise RuntimeError(('expected 1 backbone match, got %d' % len(bb_matches)))
            c_alpha = bb_matches[0][2]
        else:
            c_alpha = None
        molsetups = mk_prep.prepare(padded_mol, root_atom_index=c_alpha)
        if (len(molsetups) > 1):
            raise NotImplementedError('multiple molsetups not yet implemented for flexres')
        molsetup = molsetups[0]
        molsetup.is_sidechain = is_protein_sidechain
        ignored_in_molsetup = []
        for atom_index in molsetup.atom_ignore:
            if (atom_index < len(is_res_atom)):
                is_res = is_res_atom[atom_index]
            else:
                is_res = False
            # Ignore pad atoms; when cutting at C-alpha also ignore backbone
            # atoms other than the C-alpha itself.
            ignore = (not is_res)
            ignore |= (is_protein_sidechain and cut_at_calpha and ((atom_index != c_alpha) and (atom_index in bb_matches[0])))
            molsetup.atom_ignore[atom_index] |= ignore
            if (ignore and is_res):
                ignored_in_molsetup.append(mapidx[atom_index])
        net_charge = sum([atom.GetFormalCharge() for atom in self.residues[res].rdkit_mol.GetAtoms()])
        if (mk_prep.charge_model == 'zero'):
            net_charge = 0
        # Redistribute charges over residue atoms so they sum to net_charge.
        not_ignored_idxs = []
        charges = []
        for (i, q) in molsetup.charge.items():
            if (i in mapidx):
                charges.append(q)
                not_ignored_idxs.append(i)
        charges = rectify_charges(charges, net_charge, decimals=3)
        for (i, j) in enumerate(not_ignored_idxs):
            molsetup.charge[j] = charges[i]
        return (molsetup, mapidx, ignored_in_molsetup)

    def flexibilize_protein_sidechain(self, res, mk_prep, cut_at_calpha=False):
        """Attach a movable (flexible sidechain) molsetup to a residue."""
        (molsetup, mapidx, ignored_in_molsetup) = self.res_to_molsetup(res, mk_prep, is_protein_sidechain=True, cut_at_calpha=cut_at_calpha)
        self.residues[res].molsetup = molsetup
        self.residues[res].molsetup_mapidx = mapidx
        self.residues[res].molsetup_ignored = ignored_in_molsetup
        self.residues[res].is_movable = True
        return

    def add_molsetup_inflexible(self, res, mk_prep, cut_at_calpha=False):
        """Attach a rigid (non-movable) molsetup to a residue."""
        (molsetup, mapidx, ignored_in_molsetup) = self.res_to_molsetup(res, mk_prep, is_protein_sidechain=False, cut_at_calpha=cut_at_calpha)
        self.residues[res].molsetup = molsetup
        self.residues[res].molsetup_mapidx = mapidx
        self.residues[res].molsetup_ignored = ignored_in_molsetup
        self.residues[res].is_movable = False
        return

    # NOTE(review): likely missing @staticmethod (see _check_del_res).
    def print_residues_by_resname(removed_residues):
        """Format residue ids grouped by residue name for error messages."""
        by_resname = dict()
        for res_id in removed_residues:
            (chain, resn, resi) = res_id.split(':')
            by_resname.setdefault(resn, [])
            by_resname[resn].append(f'{chain}:{resi}')
        string = ''
        for (resname, removed_res) in by_resname.items():
            string += (f'Resname: {resname}:' + pathlib.os.linesep)
            string += (' '.join(removed_res) + pathlib.os.linesep)
        return string

    # NOTE(review): likely missing @staticmethod (see _check_del_res).
    def _load_params(params):
        """Build RDKit template mols from SMILES + per-atom property lists.

        Returns (res_templates, ambiguous) where ambiguous maps a residue
        name to its list of candidate template names.
        """
        undesired_props = ('bonds', '//', 'bond_cut_atoms', 'smiles')
        ps = Chem.SmilesParserParams()
        ps.removeHs = False
        res_templates = {}
        for resn in params:
            if (resn == 'ambiguous'):
                continue
            resmol = Chem.MolFromSmiles(params[resn]['smiles'], ps)
            # Property lists are assumed aligned with SMILES atom order.
            for (idx, atom) in enumerate(resmol.GetAtoms()):
                for propname in params[resn]:
                    if (propname in undesired_props):
                        continue
                    value = params[resn][propname][idx]
                    if (value is None):
                        continue
                    if (type(value) == bool):
                        atom.SetBoolProp(propname, value)
                    elif (type(value) == float):
                        atom.SetDoubleProp(propname, value)
                    elif (type(value) == str):
                        atom.SetProp(propname, value)
                    else:
                        raise RuntimeError('property type:', type(value), value, 'propname:', propname, 'resn:', resn)
            res_templates[resn] = resmol
        ambiguous = params['ambiguous']
        return (res_templates, ambiguous)

    # NOTE(review): likely missing @staticmethod (see _check_del_res).
    def _pdb_to_resblocks(pdb_string):
        """Split PDB text into ChorizoResidue blocks keyed by chain:resname:resnum,
        linking consecutive residues via previous_id/next_id."""
        residues = {}
        current_res_id = None
        current_res = None
        for line in pdb_string.splitlines(True):
            if (line.startswith('TER') and (current_res is not None)):
                # Chain break: terminate the current residue.
                current_res.next_id = None
                residues[current_res_id] = current_res
                current_res = None
                current_res_id = None
            if (line.startswith('ATOM') or line.startswith('HETATM')):
                resname = line[17:20].strip()
                resid = int(line[22:26].strip())
                chainid = line[21].strip()
                full_res_id = ':'.join([chainid, resname, str(resid)])
                if (full_res_id == current_res_id):
                    current_res.pdb_text += line
                else:
                    # New residue: link it to the previous one only when the
                    # residue numbers are consecutive (gap < 2).
                    if (current_res_id is not None):
                        last_resid = int(current_res_id.split(':')[2])
                        if ((resid - last_resid) < 2):
                            current_res.next_id = full_res_id
                        else:
                            current_res.next_id = None
                    current_res = ChorizoResidue(full_res_id, line)
                    if ((current_res_id is not None) and ((resid - int(current_res_id.split(':')[2])) < 2)):
                        current_res.previous_id = current_res_id
                    else:
                        current_res.previous_id = None
                    current_res_id = full_res_id
                    residues[current_res_id] = current_res
        if (current_res is not None):
            current_res.next_id = None
            residues[current_res_id] = current_res
        return residues

    def _rename_residues(self, mutate_dict):
        """Re-key residues per mutate_dict while preserving insertion order
        and fixing previous_id/next_id links of the neighbors."""
        residue_order = list(self.residues.keys())
        for res in mutate_dict:
            old_resn = res.split(':')[1]  # NOTE: unused
            new_resn = mutate_dict[res].split(':')[1]  # NOTE: unused
            self.residues[mutate_dict[res]] = self.residues.pop(res)
            self.residues[mutate_dict[res]].residue_id = mutate_dict[res]
            previous_res = self.residues[mutate_dict[res]].previous_id
            if previous_res:
                self.residues[previous_res].next_id = mutate_dict[res]
            next_res = self.residues[mutate_dict[res]].next_id
            if next_res:
                self.residues[next_res].previous_id = mutate_dict[res]
            i = residue_order.index(res)
            residue_order[i] = mutate_dict[res]
        # Rebuild the dict in the original residue order.
        for residue in residue_order:
            value = self.residues.pop(residue)
            self.residues[residue] = value

    # NOTE(review): likely missing @staticmethod (see _check_del_res).
    def add_termini(resn, res, termini, residues):
        """Prefix resn with 'C'/'N' when res is marked as a terminus.

        NOTE(review): the two ValueError messages below contain {res}-style
        placeholders but are NOT f-strings -- the f prefix appears to be
        missing; confirm and fix upstream.
        """
        next_res = residues[res].next_id
        prev_res = residues[res].previous_id
        if (termini.get(res, None) == 'C'):
            if ((next_res is not None) and (not residues[next_res].user_deleted)):
                raise ValueError('Trying to C-term {res} but {next_res=} exists')
            resn = ('C' + resn)
        elif (termini.get(res, None) == 'N'):
            if ((prev_res is not None) and (not residues[prev_res].user_deleted)):
                raise ValueError('Trying to N-term {res} but {prev_res=} exists')
            resn = ('N' + resn)
        elif (termini.get(res, None) is None):
            resn = resn
        else:
            raise ValueError(("termini must be either 'C' or 'N', not %s" % termini.get(res, None)))
        return resn

    def parameterize_residues(self, termini, ambiguous):
        """Match each residue to its best template and build its RDKit mol.

        Residues that fail PDB parsing or template lookup are flagged with
        ignore_residue. Returns {res_id: chosen_res_id} for residues whose
        name was ambiguous.
        """
        ambiguous_chosen = {}
        for res in self.residues:
            if self.residues[res].user_deleted:
                continue
            pdbmol = Chem.MolFromPDBBlock(self.residues[res].pdb_text, removeHs=False)
            if (pdbmol is None):
                self.residues[res].ignore_residue = True
                continue
            (chain, resn, resnum) = res.split(':')
            if ((resn not in self.res_templates) and (resn not in ambiguous)):
                self.residues[res].ignore_residue = True
                continue
            if (resn in ambiguous):
                possible_resn = ambiguous[resn]
            else:
                possible_resn = [resn]
            # Pick the candidate template with the fewest unmatched atoms.
            # NOTE: the loop variable deliberately shadows `resn`.
            lowest_nr_missing = 9999999
            for resn in possible_resn:
                resn = self.add_termini(resn, res, termini, self.residues)
                resmol = Chem.Mol(self.res_templates[resn])
                n_atoms = len(resmol.GetAtoms())
                atom_map = mapping_by_mcs(resmol, pdbmol)
                nr_missing = (n_atoms - len(atom_map))
                if (nr_missing < lowest_nr_missing):
                    best_resmol = resmol
                    lowest_nr_missing = nr_missing
                    best_n_atoms = n_atoms
                    best_resn = resn
            resmol = best_resmol
            n_atoms = best_n_atoms
            resn = best_resn
            if (len(possible_resn) > 1):
                ambiguous_chosen[res] = f'{chain}:{resn}:{resnum}'
            resmol = self.build_resmol(res, resn)
            if (resmol is None):
                self.residues[res].ignore_residue = True
            else:
                self.residues[res].rdkit_mol = resmol
        return ambiguous_chosen

    def build_resmol(self, res, resn):
        """Instantiate template `resn` with coordinates taken from the PDB
        block of residue `res`; compute positions for missing hydrogens.
        Returns None if non-computable atoms remain missing."""
        resmol = Chem.Mol(self.res_templates[resn])
        pdbmol = Chem.MolFromPDBBlock(self.residues[res].pdb_text, removeHs=False)
        atom_map = mapping_by_mcs(resmol, pdbmol)
        resmol.AddConformer(Chem.Conformer(resmol.GetNumAtoms()))
        resmol.GetConformer().Set3D(True)
        for (idx, pdb_idx) in atom_map.items():
            pdb_atom = pdbmol.GetAtomWithIdx(pdb_idx)
            pdb_coord = pdbmol.GetConformer().GetAtomPosition(pdb_idx)
            resmol.GetConformer().SetAtomPosition(idx, pdb_coord)
            resinfo = pdb_atom.GetPDBResidueInfo()
            resmol.GetAtomWithIdx(idx).SetDoubleProp('occupancy', resinfo.GetOccupancy())
            resmol.GetAtomWithIdx(idx).SetDoubleProp('temp_factor', resinfo.GetTempFactor())
        # Template atoms that got no PDB coordinate, keyed by atom name.
        missing_atoms = {resmol.GetAtomWithIdx(i).GetProp('atom_name'): i for i in range(resmol.GetNumAtoms()) if (i not in atom_map.keys())}
        if ('H' in missing_atoms):
            # Backbone amide H: place from the dipeptide geometry, or randomly
            # for an N-terminal residue.
            prev_res = self.residues[res].previous_id
            if (prev_res is not None):
                h_pos = h_coord_from_dipeptide(self.residues[res].pdb_text, self.residues[prev_res].pdb_text)
            else:
                h_pos = h_coord_random_n_terminal(resmol)
            resmol.GetConformer().SetAtomPosition(missing_atoms['H'], h_pos)
            resmol.GetAtomWithIdx(missing_atoms['H']).SetBoolProp('computed', True)
            missing_atoms.pop('H')
        missing_atom_elements = set([atom[0] for atom in missing_atoms.keys()])
        if (len(missing_atom_elements) > 0):
            # Recompute hydrogen positions via RDKit AddHs and copy them over.
            resmol_h = Chem.RemoveHs(resmol)
            resmol_h = Chem.AddHs(resmol_h, addCoords=True)
            h_map = mapping_by_mcs(resmol, resmol_h)
            for atom in list(missing_atoms.keys()):
                if atom.startswith('H'):
                    h_idx = missing_atoms[atom]
                    resmol.GetConformer().SetAtomPosition(h_idx, resmol_h.GetConformer().GetAtomPosition(h_map[h_idx]))
                    resmol.GetAtomWithIdx(h_idx).SetBoolProp('computed', True)
                    missing_atoms.pop(atom)
        if (len(missing_atoms) > 0):
            err = f'Could not add res={res!r} missing_atoms={missing_atoms!r}'
            print(err)
            resmol = None
        return resmol

    def mk_parameterize_all_residues(self, mk_prep):
        """Run mk_parameterize_residue over every valid residue."""
        for res in self.getValidResidues():
            self.mk_parameterize_residue(res, mk_prep)
        return

    def mk_parameterize_residue(self, res, mk_prep):
        """Copy charges, atom types, and atom params from a fresh molsetup
        onto the residue's RDKit atoms as RDKit properties."""
        (molsetup, mapidx, ignored_in_molsetup) = self.res_to_molsetup(res, mk_prep)
        resmol = self.residues[res].rdkit_mol
        for (molsetup_idx, resmol_idx) in mapidx.items():
            atom = resmol.GetAtomWithIdx(resmol_idx)
            atom.SetDoubleProp('q', molsetup.charge[molsetup_idx])
            atom.SetProp('atom_type', molsetup.atom_type[molsetup_idx])
            for (key, value_array) in molsetup.atom_params.items():
                value = value_array[molsetup_idx]
                if (type(value) == float):
                    atom.SetDoubleProp(key, value)
                elif (type(value) == bool):
                    atom.SetBoolProp(key, value)
                else:
                    atom.SetProp(key, value)
        return

    def to_pdb(self, use_modified_coords=False, modified_coords_index=0):
        """Serialize all non-deleted, non-ignored residues as PDB ATOM records.

        When use_modified_coords is set, coordinates come from the residue
        molsetup's modified_atom_positions at the requested index.
        NOTE(review): runs of spaces in the format string below may have been
        collapsed by extraction; verify column widths against the PDB spec.
        """
        pdbout = ''
        atom_count = 0
        icode = ''
        pdb_line = '{:6s}{:5d} {:^4s} {:3s} {:1s}{:4d}{:1s} {:8.3f}{:8.3f}{:8.3f} {:2s} '
        pdb_line += pathlib.os.linesep
        for res_id in self.residues:
            if (self.residues[res_id].user_deleted or self.residues[res_id].ignore_residue):
                continue
            resmol = self.residues[res_id].rdkit_mol
            if (use_modified_coords and (self.residues[res_id].molsetup is not None)):
                molsetup = self.residues[res_id].molsetup
                if (len(molsetup.modified_atom_positions) <= modified_coords_index):
                    errmsg = ('Requesting pose %d but only got %d in molsetup of %s' % (modified_coords_index, len(molsetup.modified_atom_positions), res_id))
                    raise RuntimeError(errmsg)
                p = molsetup.modified_atom_positions[modified_coords_index]
                modified_positions = molsetup.get_conformer_with_modified_positions(p).GetPositions()
                positions = {}
                for (i, j) in self.residues[res_id].molsetup_mapidx.items():
                    positions[j] = modified_positions[i]
            else:
                positions = {i: xyz for (i, xyz) in enumerate(resmol.GetConformer().GetPositions())}
            (chain, resname, resnum) = res_id.split(':')
            resnum = int(resnum)
            for (i, atom) in enumerate(resmol.GetAtoms()):
                atom_count += 1
                props = atom.GetPropsAsDict()
                atom_name = props.get('atom_name', '')
                (x, y, z) = positions[i]
                element = mini_periodic_table[atom.GetAtomicNum()]
                pdbout += pdb_line.format('ATOM', atom_count, atom_name, resname, chain, resnum, icode, x, y, z, element)
        return pdbout

    def export_static_atom_params(self, ignore_atom_types=('H',)):
        """Collect per-atom properties and coordinates of the static part.

        Returns (atom_params, coords); atom_params maps property name to a
        list aligned with coords (absent values padded with None).
        """
        atom_params = {}
        counter_atoms = 0
        coords = []
        for res_id in self.residues:
            if (self.residues[res_id].user_deleted or self.residues[res_id].ignore_residue):
                continue
            resmol = self.residues[res_id].rdkit_mol
            for atom in resmol.GetAtoms():
                props = atom.GetPropsAsDict()
                if ((len(ignore_atom_types) > 0) and (props['atom_type'] in ignore_atom_types)):
                    continue
                if self.residues[res_id].molsetup:
                    # For residues with a molsetup, export only atoms that the
                    # molsetup flagged as ignored (i.e. the static part).
                    if (atom.GetIdx() not in self.residues[res_id].molsetup_ignored):
                        continue
                for (key, value) in props.items():
                    if key.startswith('_'):
                        continue
                    # Pad new keys with None for atoms seen so far.
                    atom_params.setdefault(key, ([None] * counter_atoms))
                    atom_params[key].append(value)
                counter_atoms += 1
                # Pad keys this atom does not have.
                for key in set(atom_params).difference(props):
                    atom_params[key].append(None)
                coords.append(resmol.GetConformer().GetAtomPosition(atom.GetIdx()))
        if hasattr(self, 'param_rename'):
            for (key, new_key) in self.param_rename.items():
                atom_params[new_key] = atom_params.pop(key)
        return (atom_params, coords)

    def getUserDeletedResidues(self):
        """Residues explicitly deleted by the user."""
        return {k: v for (k, v) in self.residues.items() if (v.user_deleted == True)}

    def getNonUserDeletedResidues(self):
        """Residues not deleted by the user."""
        return {k: v for (k, v) in self.residues.items() if (v.user_deleted == False)}

    def getIgnoredResidues(self):
        """Residues flagged as unprocessable."""
        return {k: v for (k, v) in self.residues.items() if (v.ignore_residue == True)}

    def getNotIgnoredResidues(self):
        """Residues that were processed successfully."""
        return {k: v for (k, v) in self.residues.items() if (v.ignore_residue == False)}

    def getValidResidues(self):
        """Residues reported valid by ChorizoResidue.isValidResidue()."""
        return {k: v for (k, v) in self.residues.items() if v.isValidResidue()}
class HTMLConverter(PDFConverter):
    """PDFConverter that renders the laid-out page as absolutely-positioned HTML.

    Text, rectangles, and images are emitted as <span>/<div>/<img> elements with
    inline CSS; `scale` and `fontscale` control the pixel mapping, `layoutmode`
    selects between 'normal', 'exact', and 'loose' text rendering.
    """

    # Debug border colors per layout-item kind (merged in when debug is set).
    RECT_COLORS = {'figure': 'yellow', 'textline': 'magenta', 'textbox': 'cyan', 'textgroup': 'red', 'curve': 'black', 'page': 'gray'}
    TEXT_COLORS = {'textbox': 'blue', 'char': 'black'}

    # NOTE(review): rect_colors/text_colors are mutable default arguments and
    # are mutated via .update() below when debug is set -- the defaults are
    # shared across instances; confirm/fix upstream.
    def __init__(self, rsrcmgr, outfp, pageno=1, laparams=None, scale=1, fontscale=1.0, layoutmode='normal', showpageno=True, pagemargin=50, imagewriter=None, debug=0, rect_colors={'curve': 'black', 'page': 'gray'}, text_colors={'char': 'black'}):
        PDFConverter.__init__(self, rsrcmgr, outfp, pageno=pageno, laparams=laparams)
        self.scale = scale
        self.fontscale = fontscale
        self.layoutmode = layoutmode
        self.showpageno = showpageno
        self.pagemargin = pagemargin
        self.imagewriter = imagewriter
        self.rect_colors = rect_colors
        self.text_colors = text_colors
        if debug:
            self.rect_colors.update(self.RECT_COLORS)
            self.text_colors.update(self.TEXT_COLORS)
        # Running vertical offset (pages are stacked top to bottom).
        self._yoffset = self.pagemargin
        # Currently open (fontname, fontsize) span, if any.
        self._font = None
        self._fontstack = []
        self.write_header()
        return

    def write(self, text):
        # Raw write to the output stream.
        self.outfp.write(text)
        return

    def write_header(self):
        self.write('<html><head>\n')
        self.write('<meta content="text/html; charset=utf-8">\n')
        self.write('</head><body>\n')
        return

    def write_footer(self):
        # Emit a page-number index with anchors to each rendered page.
        self.write(('<div style="position:absolute; top:0px;">Page: %s</div>\n' % ', '.join((('<a href="#%s">%s</a>' % (i, i)) for i in range(1, self.pageno)))))
        self.write('</body></html>\n')
        return

    def write_text(self, text):
        # HTML-escape (q) before writing.
        self.write(q(text))
        return

    def place_rect(self, color, borderwidth, x, y, w, h):
        """Emit an absolutely-positioned empty span as a colored border box.
        `color` is a kind name looked up in rect_colors; unknown kinds are skipped."""
        color = self.rect_colors.get(color)
        if (color is not None):
            self.write(('<span style="position:absolute; border: %s %dpx solid; left:%dpx; top:%dpx; width:%dpx; height:%dpx;"></span>\n' % (color, borderwidth, (x * self.scale), ((self._yoffset - y) * self.scale), (w * self.scale), (h * self.scale))))
        return

    def place_border(self, color, borderwidth, item):
        self.place_rect(color, borderwidth, item.x0, item.y1, item.width, item.height)
        return

    def place_image(self, item, borderwidth, x, y, w, h):
        # Only emitted when an imagewriter was provided to export the bitmap.
        if (self.imagewriter is not None):
            name = self.imagewriter.export_image(item)
            self.write(('<img src="%s" border="%d" style="position:absolute; left:%dpx; top:%dpx;" width="%d" height="%d" />\n' % (q(name), borderwidth, (x * self.scale), ((self._yoffset - y) * self.scale), (w * self.scale), (h * self.scale))))
        return

    def place_text(self, color, text, x, y, size):
        """Emit escaped text in an absolutely-positioned span.
        `color` is a kind name looked up in text_colors; unknown kinds are skipped."""
        color = self.text_colors.get(color)
        if (color is not None):
            self.write(('<span style="position:absolute; color:%s; left:%dpx; top:%dpx; font-size:%dpx;">' % (color, (x * self.scale), ((self._yoffset - y) * self.scale), ((size * self.scale) * self.fontscale))))
            self.write_text(text)
            self.write('</span>\n')
        return

    def begin_div(self, color, borderwidth, x, y, w, h, writing_mode=False):
        # NOTE(review): the default writing_mode=False is interpolated with %s
        # and would render literally as "writing-mode:False" -- confirm intended.
        self._fontstack.append(self._font)
        self._font = None
        self.write(('<div style="position:absolute; border: %s %dpx solid; writing-mode:%s; left:%dpx; top:%dpx; width:%dpx; height:%dpx;">' % (color, borderwidth, writing_mode, (x * self.scale), ((self._yoffset - y) * self.scale), (w * self.scale), (h * self.scale))))
        return

    def end_div(self, color):
        # Close any open font span before closing the div; restore outer font.
        if (self._font is not None):
            self.write('</span>')
        self._font = self._fontstack.pop()
        self.write('</div>')
        return

    def put_text(self, text, fontname, fontsize):
        # Open a new font span only when the font actually changes.
        font = (fontname, fontsize)
        if (font != self._font):
            if (self._font is not None):
                self.write('</span>')
            self.write(('<span style="font-family: %s; font-size:%dpx">' % (q(fontname), ((fontsize * self.scale) * self.fontscale))))
            self._font = font
        self.write_text(text)
        return

    def put_newline(self):
        self.write('<br>')
        return

    def receive_layout(self, ltpage):
        """Render one LTPage (and all nested layout items) as HTML."""
        def show_group(item):
            # Recursively draw debug borders for text groups.
            if isinstance(item, LTTextGroup):
                self.place_border('textgroup', 1, item)
                for child in item:
                    show_group(child)
            return

        def render(item):
            # Dispatch on layout item type; recursion handles containers.
            if isinstance(item, LTPage):
                self._yoffset += item.y1
                self.place_border('page', 1, item)
                if self.showpageno:
                    self.write(('<div style="position:absolute; top:%dpx;">' % ((self._yoffset - item.y1) * self.scale)))
                    self.write(('<a name="%s">Page %s</a></div>\n' % (item.pageid, item.pageid)))
                for child in item:
                    render(child)
                if (item.groups is not None):
                    for group in item.groups:
                        show_group(group)
            elif isinstance(item, LTCurve):
                self.place_border('curve', 1, item)
            elif isinstance(item, LTFigure):
                self.begin_div('figure', 1, item.x0, item.y1, item.width, item.height)
                for child in item:
                    render(child)
                self.end_div('figure')
            elif isinstance(item, LTImage):
                self.place_image(item, 1, item.x0, item.y1, item.width, item.height)
            elif (self.layoutmode == 'exact'):
                # 'exact' mode: absolutely position every char and draw borders.
                if isinstance(item, LTTextLine):
                    self.place_border('textline', 1, item)
                    for child in item:
                        render(child)
                elif isinstance(item, LTTextBox):
                    self.place_border('textbox', 1, item)
                    self.place_text('textbox', str((item.index + 1)), item.x0, item.y1, 20)
                    for child in item:
                        render(child)
                elif isinstance(item, LTChar):
                    self.place_border('char', 1, item)
                    self.place_text('char', item.get_text(), item.x0, item.y1, item.size)
            elif isinstance(item, LTTextLine):
                for child in item:
                    render(child)
                if (self.layoutmode != 'loose'):
                    self.put_newline()
            elif isinstance(item, LTTextBox):
                self.begin_div('textbox', 1, item.x0, item.y1, item.width, item.height, item.get_writing_mode())
                for child in item:
                    render(child)
                self.end_div('textbox')
            elif isinstance(item, LTChar):
                self.put_text(item.get_text(), item.fontname, item.size)
            elif isinstance(item, LTText):
                self.write_text(item.get_text())
            return
        render(ltpage)
        self._yoffset += self.pagemargin
        return

    def close(self):
        self.write_footer()
        return
# NOTE(review): the *_mock_object parameters strongly suggest this test was
# decorated with @mock.patch(...) decorators (and probably named test_...)
# that were lost in extraction -- confirm against the original source.
def update_config(load_config_from_string_mock_object, load_config_from_string_mock_function: Callable, replace_config_placeholders_mock_object, replace_config_placeholders_mock_function: Callable, replace_dataset_placeholders_mock_object, replace_dataset_placeholders_mock_function: Callable, validation_function: Callable, db, secondary_mailchimp_instance, tertiary_mailchimp_instance, secondary_sendgrid_instance):
    """End-to-end check that update_saas_configs upgrades existing SaaS
    connector instances (mailchimp x2, sendgrid) to the registry templates.

    Precondition asserts verify fixtures start at the template versions; the
    mocks are wired, update_saas_configs(db) runs, then validation_function
    checks each refreshed DatasetConfig.
    """
    assert ('mailchimp' in ConnectorRegistry.connector_types())
    # Load the registry templates that the instances should be updated to.
    mailchimp_template_config = load_config_from_string(ConnectorRegistry.get_connector_template('mailchimp').config)
    mailchimp_template_dataset = load_dataset_from_string(ConnectorRegistry.get_connector_template('mailchimp').dataset)
    mailchimp_version = mailchimp_template_config['version']
    sendgrid_template_config = load_config_from_string(ConnectorRegistry.get_connector_template('sendgrid').config)
    sendgrid_template_dataset = load_dataset_from_string(ConnectorRegistry.get_connector_template('sendgrid').dataset)
    sendgrid_version = sendgrid_template_config['version']
    # Sanity-check the secondary mailchimp fixture against the template.
    (secondary_mailchimp_config, secondary_mailchimp_dataset) = secondary_mailchimp_instance
    secondary_mailchimp_saas_config = secondary_mailchimp_config.saas_config
    secondary_mailchimp_dataset.ctl_dataset.description = mailchimp_template_dataset['description']
    assert (secondary_mailchimp_saas_config['version'] == mailchimp_version)
    assert (secondary_mailchimp_saas_config['description'] == mailchimp_template_config['description'])
    # Sanity-check the tertiary mailchimp fixture.
    (tertiary_mailchimp_config, tertiary_mailchimp_dataset) = tertiary_mailchimp_instance
    tertiary_mailchimp_saas_config = tertiary_mailchimp_config.saas_config
    tertiary_mailchimp_dataset.ctl_dataset.description = mailchimp_template_dataset['description']
    # NOTE(review): the line below immediately overwrites the value assigned
    # two lines up (reads the saas_config via the dataset's connection_config
    # instead) -- confirm which source is intended.
    tertiary_mailchimp_saas_config = tertiary_mailchimp_dataset.connection_config.saas_config
    assert (tertiary_mailchimp_saas_config['version'] == mailchimp_version)
    assert (tertiary_mailchimp_saas_config['description'] == mailchimp_template_config['description'])
    # Sanity-check the secondary sendgrid fixture.
    (secondary_sendgrid_config, secondary_sendgrid_dataset) = secondary_sendgrid_instance
    secondary_sendgrid_saas_config = secondary_sendgrid_config.saas_config
    secondary_sendgrid_dataset.ctl_dataset.description = sendgrid_template_dataset['description']
    assert (secondary_sendgrid_saas_config['version'] == sendgrid_version)
    assert (secondary_sendgrid_saas_config['description'] == sendgrid_template_config['description'])
    # Route the patched helpers through the supplied replacement functions,
    # then run the upgrade under test.
    load_config_from_string_mock_object.side_effect = load_config_from_string_mock_function
    replace_config_placeholders_mock_object.side_effect = replace_config_placeholders_mock_function
    replace_dataset_placeholders_mock_object.side_effect = replace_dataset_placeholders_mock_function
    update_saas_configs(db)
    # Re-fetch each DatasetConfig from the DB and validate the upgrade.
    secondary_mailchimp_dataset: DatasetConfig = DatasetConfig.filter(db=db, conditions=(DatasetConfig.fides_key == secondary_mailchimp_dataset.fides_key)).first()
    validation_function(secondary_mailchimp_dataset, mailchimp_template_config, mailchimp_template_dataset, secondary_mailchimp_config.key, secondary_mailchimp_dataset.fides_key)
    tertiary_mailchimp_dataset: DatasetConfig = DatasetConfig.filter(db=db, conditions=(DatasetConfig.fides_key == tertiary_mailchimp_dataset.fides_key)).first()
    validation_function(tertiary_mailchimp_dataset, mailchimp_template_config, mailchimp_template_dataset, tertiary_mailchimp_config.key, tertiary_mailchimp_dataset.fides_key)
    secondary_sendgrid_dataset: DatasetConfig = DatasetConfig.filter(db=db, conditions=(DatasetConfig.fides_key == secondary_sendgrid_dataset.fides_key)).first()
    validation_function(secondary_sendgrid_dataset, sendgrid_template_config, sendgrid_template_dataset, secondary_sendgrid_config.key, secondary_sendgrid_dataset.fides_key)
    # Teardown: remove the connection configs created by the fixtures.
    secondary_mailchimp_config.delete(db)
    tertiary_mailchimp_config.delete(db)
    secondary_sendgrid_config.delete(db)
def compile_terminal_form(tensor, prefix, *, tsfc_parameters=None):
    """Compile the UFL form of a terminal Slate tensor with TSFC.

    Restrictions are stripped from the integrands, the integrals are grouped
    by their original integral type, and each group is compiled separately.
    Returns a tuple of ContextKernel objects (groups that produce no kernels
    are dropped).
    """
    assert tensor.terminal, 'Only terminal tensors have forms associated with them!'
    assert (prefix is not None)

    # Strip restrictions from every integrand before regrouping.
    restriction_remover = RemoveRestrictions()
    stripped = map(partial(map_integrand_dags, restriction_remover), tensor.form.integrals())
    grouped = transform_integrals(stripped)

    collected = []
    for (orig_it_type, group) in grouped.items():
        # Each group gets its own kernel-name prefix, e.g. "<prefix>cell_to_".
        form = Form(group)
        kernels = tsfc_compile(form, (prefix + ('%s_to_' % orig_it_type)), parameters=tsfc_parameters, split=False, diagonal=tensor.diagonal)
        if kernels:
            collected.append(ContextKernel(tensor=tensor, coefficients=form.coefficients(), constants=extract_firedrake_constants(form), original_integral_type=orig_it_type, tsfc_kernels=kernels))
    return tuple(collected)
def trackingshuffledobjects(n: int, variant='five_objects'):
    """Return sample `n` of the BIG-bench 'tracking shuffled objects' dataset
    as a MultipleChoiceSample, downloading and caching the JSON on first use."""
    import os
    import json
    # Cache location: ~/.cache/lmql/datasets/shuffled_objects<variant>.json
    path = os.path.join(os.path.expanduser('~'), '.cache', 'lmql', 'datasets', (('shuffled_objects' + variant) + '.json'))
    if (not os.path.exists(path)):
        os.makedirs(os.path.join(os.path.expanduser('~'), '.cache', 'lmql', 'datasets'), exist_ok=True)
        # NOTE(review): the URL literal below was truncated during extraction
        # (the f-string has no closing quote/content) -- restore the original
        # dataset URL from upstream before running this code.
        url = f'
        subprocess.run(['curl', url], stdout=open(path, 'w'), check=True)
    assert os.path.exists(path)
    with open(path, 'r') as f:
        data = json.load(f)
    s = data['examples'][n]
    choices = list(s['target_scores'].items())
    # Answer texts with trailing periods removed.
    answer_choices = [x[0].rstrip('.') for x in choices]
    # The correct answer is the (first) choice with the maximal target score.
    answer = [x[0] for x in choices if (x[1] == max([x[1] for x in choices]))][0]
    choices_line = ('Answer Choices: ' + ', '.join(answer_choices))
    return MultipleChoiceSample(s['input'], answer_choices, answer, choices_line)
class PrimaryDagSchema(BaseDagSchema):
    """Marshmallow schema for a primary workflow definition, adding the
    compatibility-version check and DAG-level argument fields."""
    compatibility_version = fields.String()
    dag_args = fields.Nested(DagArgsSchema)
    default_task_args = fields.Dict()
    # NOTE(review): the bare name below is almost certainly the tail of a
    # mangled marshmallow decorator (e.g. '@validates_schema') lost during
    # extraction; as written it raises NameError at class-creation time.
    # Restore the decorator from the original source.
    _schema
    def validate_compatibility_version(self, data):
        """Reject workflows whose compatibility_version is not a valid SemVer,
        requires a newer boundary_layer, or predates MIN_SUPPORTED_VERSION."""
        if (not data.get('compatibility_version')):
            return
        version = None
        try:
            version = semver.VersionInfo.parse(data['compatibility_version'])
        except ValueError:
            raise ValidationError('Must be a valid SemVer', ['compatibility_version'])
        if (VERSION < version):
            raise ValidationError('Incompatible boundary_layer version: This workflow requires boundary_layer version {} or higher! Current version is {}'.format(version, VERSION), ['compatibility_version'])
        if (version < MIN_SUPPORTED_VERSION):
            raise ValidationError('Incompatible boundary_layer version: This workflow is for the incompatible prior version {}. Use the migrate-workflow script to update it.'.format(version), ['compatibility_version'])
class RemoveOnlyCVEForUpdateEquals2011MongoDbDriver(MongoDbDriver):
    """Test double for MongoDbDriver: only the 'cve' collection exists, the
    newest stored CVE year is 2012, and remove operations report success."""

    def __init__(self):
        self.client = Mock(spec=pymongo.MongoClient)
        self.db = Mock()
        # Fake database surface: a single 'cve' collection with 10 documents.
        self.db.collection_names.return_value = ['cve']
        self.db.cve.count.return_value = 10
        # find().sort().limit() yields one document dated 2012.
        self.db.cve.find.return_value.sort.return_value.limit.return_value = [{'year': 2012}]
        # Both remove calls succeed.
        self.db.cve.remove.return_value = True
        self.db.cve_info.remove.return_value = True
def pytest_addoption(parser):
    """Register command-line options controlling API, precision and
    device/platform selection for the test run."""
    parser.addoption('--api', action='store', help='API: cuda/ocl/supported',
                     default='supported', choices=['cuda', 'ocl', 'supported'])
    parser.addoption('--double', action='store', help='Use doubles: no/yes/supported',
                     default='supported', choices=['no', 'yes', 'supported'])
    parser.addoption('--fast-math', dest='fast_math', action='store',
                     help='Use fast math (where applicable): no/yes/both',
                     default='no', choices=['no', 'yes', 'both'])
    # The four mask options share the same signature; register them in a loop.
    for flag, description in (
            ('--device-include-mask', 'Run tests on matching devices only'),
            ('--device-exclude-mask', 'Run tests on matching devices only'),
            ('--platform-include-mask', 'Run tests on matching platforms only'),
            ('--platform-exclude-mask', 'Run tests on matching platforms only')):
        parser.addoption(flag, action='append', help=description, default=[])
    parser.addoption('--include-duplicate-devices', action='store_true',
                     help='Run tests on all available devices and not only on uniquely named ones',
                     default=False)
def extractHalfbakedtranslationsBlogspotCom(item):
    """Parse a release item from halfbakedtranslations.blogspot.com.

    Returns a release message for recognised tags, None for preview posts or
    titles with no volume/chapter information, and False when no tag matches.
    """
    title = item['title']
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(title)
    # Skip items without chapter/volume info, and skip preview posts.
    if not (chp or vol) or 'preview' in title.lower():
        return None
    # (tag to look for, release name, translation type)
    for tagname, name, tl_type in (('PRC', 'PRC', 'translated'),
                                   ('Loiterous', 'Loiterous', 'oel')):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag,
                                               postfix=postfix, tl_type=tl_type)
    return False
def test_remove():
    """hicAdjustMatrix --action remove: output must match the reference matrix."""
    outfile = NamedTemporaryFile(suffix='.h5', prefix='test_matrix', delete=True)
    outfile.close()
    args = '--matrix {} --outFileName {} --regions {} --action {}'.format(
        (ROOT + 'small_test_matrix_50kb_res.h5'), outfile.name,
        (ROOT + 'hicAdjustMatrix/remove.bed'), 'remove').split()
    compute(hicAdjustMatrix.main, args, 5)
    test = hm.hiCMatrix((ROOT + 'hicAdjustMatrix/small_test_matrix_50kb_res_remove.h5'))
    new = hm.hiCMatrix(outfile.name)
    # BUG FIX: numpy exposes no top-level assert_* helpers; they live in
    # numpy.testing (np.assert_almost_equal raised AttributeError).
    np.testing.assert_almost_equal(test.matrix.data, new.matrix.data, decimal=5)
    np.testing.assert_equal(test.cut_intervals, new.cut_intervals)
    os.unlink(outfile.name)
@patch('evennia.server.service.LoopingCall', new=MagicMock())
class TestServer(TestCase):
    """Unit tests for the Evennia server service's maintenance loop and
    initial-setup hooks.

    NOTE(review): decorators were mangled in extraction — a leading
    `('evennia...')` restored as `@patch(...)` and `_settings(...)` restored
    as `@override_settings(...)`; confirm against upstream.
    """

    def setUp(self):
        # Imported for side effects before grabbing the service singleton.
        from evennia.server import server
        self.server = evennia.EVENNIA_SERVER_SERVICE

    @override_settings(IDMAPPER_CACHE_MAXSIZE=1000)
    def test__server_maintenance_reset(self):
        """maintenance_count == 0: runtime total is persisted to ServerConfig."""
        with patch.object(self.server, '_flush_cache', new=MagicMock()) as mockflush, \
                patch.object(evennia, 'ServerConfig', new=MagicMock()) as mockconf, \
                patch.multiple('evennia.server.service',
                               LoopingCall=DEFAULT, connection=DEFAULT) as mocks:
            self.server.maintenance_count = 0
            mocks['connection'].close = MagicMock()
            mockconf.objects.conf = MagicMock(return_value=456)
            self.server.server_maintenance()
            mockconf.objects.conf.assert_called_with('runtime', 456)

    @override_settings(IDMAPPER_CACHE_MAXSIZE=1000)
    def test__server_maintenance_flush(self):
        """Every 5th tick the idmapper cache is flushed down to the cap."""
        with patch.multiple('evennia.server.service',
                            LoopingCall=DEFAULT, connection=DEFAULT) as mocks, \
                patch.object(evennia, 'ServerConfig', new=MagicMock()) as mockconf, \
                patch.object(self.server, '_flush_cache', new=MagicMock()) as mockflush:
            mocks['connection'].close = MagicMock()
            mockconf.objects.conf = MagicMock(return_value=100)
            self.server.maintenance_count = (5 - 1)
            self.server.server_maintenance()
            self.server._flush_cache.assert_called_with(1000)

    @override_settings(IDMAPPER_CACHE_MAXSIZE=1000)
    def test__server_maintenance_close_connection(self):
        """Every (60*7)th tick the stale DB connection is closed."""
        with patch.multiple('evennia.server.service',
                            LoopingCall=DEFAULT, connection=DEFAULT) as mocks, \
                patch.object(evennia, 'ServerConfig', new=MagicMock()) as mockconf:
            self.server._flush_cache = MagicMock()
            self.server.maintenance_count = ((60 * 7) - 1)
            self.server._last_server_time_snapshot = 0
            mocks['connection'].close = MagicMock()
            mockconf.objects.conf = MagicMock(return_value=100)
            self.server.server_maintenance()
            mocks['connection'].close.assert_called()

    @override_settings(IDLE_TIMEOUT=10)
    def test__server_maintenance_idle_time(self):
        """Sessions idle past IDLE_TIMEOUT are disconnected; recently active
        sessions (sess2) and accounts passing the access check (sess3) are
        spared."""
        with patch.multiple('evennia.server.service',
                            LoopingCall=DEFAULT, connection=DEFAULT,
                            time=DEFAULT) as mocks, \
                patch.object(evennia, 'ServerConfig', new=MagicMock()) as mockconf, \
                patch.object(evennia, 'SESSION_HANDLER', new=MagicMock()) as mocksess:
            self.server.maintenance_count = ((3600 * 7) - 1)
            self.server._last_server_time_snapshot = 0
            sess1 = MagicMock()
            sess2 = MagicMock()
            sess3 = MagicMock()
            sess4 = MagicMock()
            sess1.cmd_last = 100
            sess2.cmd_last = 999
            sess3.cmd_last = 100
            sess4.cmd_last = 100
            sess1.account = None
            sess2.account = None
            sess3.account = MagicMock()
            # NOTE(review): this repeats sess3 — probably meant to be
            # `sess4.account = MagicMock()`. Harmless as-is: assigning
            # sess4.account.access below auto-creates sess4.account. Kept.
            sess3.account = MagicMock()
            sess4.account.access = MagicMock(return_value=False)
            mocks['time'].time = MagicMock(return_value=1000)
            mockconf.objects.conf = MagicMock(return_value=100)
            mocksess.values = MagicMock(return_value=[sess1, sess2, sess3, sess4])
            mocksess.disconnect = MagicMock()
            self.server.server_maintenance()
            reason = 'idle timeout exceeded'
            calls = [call(sess1, reason=reason), call(sess4, reason=reason)]
            mocksess.disconnect.assert_has_calls(calls, any_order=True)

    def test_update_defaults(self):
        """update_defaults() queries every entity manager when the stored
        typeclass/cmdset paths differ from settings."""
        with patch.object(evennia, 'ObjectDB', new=MagicMock()) as mockobj, \
                patch.object(evennia, 'AccountDB', new=MagicMock()) as mockacc, \
                patch.object(evennia, 'ScriptDB', new=MagicMock()) as mockscr, \
                patch.object(evennia, 'ChannelDB', new=MagicMock()) as mockchan, \
                patch.object(evennia, 'ServerConfig', new=MagicMock()) as mockconf:
            for m in (mockscr, mockobj, mockacc, mockchan):
                m.objects.filter = MagicMock()
            settings_names = ('CMDSET_CHARACTER', 'CMDSET_ACCOUNT',
                              'BASE_ACCOUNT_TYPECLASS', 'BASE_OBJECT_TYPECLASS',
                              'BASE_CHARACTER_TYPECLASS', 'BASE_ROOM_TYPECLASS',
                              'BASE_EXIT_TYPECLASS', 'BASE_SCRIPT_TYPECLASS',
                              'BASE_CHANNEL_TYPECLASS')
            fakes = {name: 'Dummy.path' for name in settings_names}

            def _mock_conf(key, *args):
                return fakes[key]

            mockconf.objects.conf = _mock_conf
            self.server.update_defaults()
            for m in (mockscr, mockobj, mockacc, mockchan):
                m.objects.filter.assert_called()

    @override_settings(TEST_ENVIRONMENT=True)
    def test_initial_setup(self):
        """run_initial_setup() completes with a pre-created superuser."""
        from evennia.utils.create import create_account
        acct = create_account('TestSuperuser', '', 'testpassword', is_superuser=True)
        with patch.multiple('evennia.server.initial_setup',
                            reset_server=DEFAULT, AccountDB=DEFAULT) as mocks:
            mocks['AccountDB'].objects.get = MagicMock(return_value=acct)
            self.server.run_initial_setup()
            acct.delete()

    @override_settings(TEST_ENVIRONMENT=True)
    def test_initial_setup_retry(self):
        """run_initial_setup() resumes from a stored setup step (here 4)."""
        from evennia.utils.create import create_account
        acct = create_account('TestSuperuser2', '', 'testpassword', is_superuser=True)
        with patch.multiple('evennia.server.initial_setup',
                            ServerConfig=DEFAULT, reset_server=DEFAULT,
                            AccountDB=DEFAULT) as mocks:
            mocks['AccountDB'].objects.get = MagicMock(return_value=acct)
            mocks['ServerConfig'].objects.conf = MagicMock(return_value=4)
            self.server.run_initial_setup()
            acct.delete()

    def test_get_info_dict(self):
        with patch.object(self.server, 'get_info_dict',
                          return_value={'test': 'foo'}) as mocks:
            self.assertEqual(self.server.get_info_dict(), {'test': 'foo'})
def summarise_projects(illumina_data):
    """Return a one-line summary of the projects in an Illumina data object.

    Example output: "Paired end: ProjA (2 samples); ProjB (1 sample)".
    """
    prefix = 'Paired end: ' if illumina_data.paired_end else ''
    descriptions = []
    for project in illumina_data.projects:
        count = len(project.samples)
        plural = 's' if count != 1 else ''
        descriptions.append('%s (%d sample%s)' % (project.name, count, plural))
    return prefix + '; '.join(descriptions)
class HyperlinkedIdentityField(HyperlinkedRelatedField):
    """A read-only hyperlinked field that points at the serialized object
    itself rather than at a related object."""

    def __init__(self, view_name=None, **kwargs):
        assert (view_name is not None), 'The `view_name` argument is required.'
        # Identity links always serialize the whole instance, read-only.
        kwargs.update(read_only=True, source='*')
        super().__init__(view_name, **kwargs)

    def use_pk_only_optimization(self):
        # The field's source is the whole object, so a pk-only stub would
        # not carry enough information to build the URL.
        return False
def get_fake_device():
    """Build a localhost-backed test device from the default context, with
    its output directory redirected to ~/temp."""
    ctx_values = import_by_filename('default_context.py').CONTEXT
    ctx_values['output_directory'] = os.path.expanduser('~/temp')
    discoverer = LocalhostDiscoverer(TestRunContext(ctx_values), {})
    return discoverer.discover_device({'device_id': 'localhost'})
class TestSolidRunDefinition(unittest.TestCase):
    """Exercise SolidRunDefinition against a generated run-definition file."""

    def setUp(self):
        # Fresh definition file for every test; removed again in tearDown.
        self.tmp_defn_file = TestUtils().make_run_definition_file()
        self.run_defn = SolidRunDefinition(self.tmp_defn_file)

    def tearDown(self):
        os.remove(self.tmp_defn_file)

    def test_solid_run_definition(self):
        self.assertTrue(isinstance(self.run_defn, SolidRunDefinition))
        self.assertTrue(self.run_defn)

    def test_nsamples(self):
        self.assertEqual(12, self.run_defn.nSamples())

    def test_attributes(self):
        # Header attributes read from the definition file.
        for attr, value in (('version', 'v0.0'),
                            ('userId', 'user'),
                            ('runType', 'FRAGMENT'),
                            ('isMultiplexing', 'TRUE'),
                            ('runName', 'solid0123__FRAG_BC_2'),
                            ('runDesc', ''),
                            ('mask', '1_spot_mask_sf'),
                            ('protocol', 'SOLiD4 Multiplex')):
            self.assertEqual(getattr(self.run_defn, attr), value)

    def test_fields(self):
        self.assertEqual(['sampleName', 'sampleDesc', 'spotAssignments',
                          'primarySetting', 'library', 'application',
                          'secondaryAnalysis', 'multiplexingSeries',
                          'barcodes'], self.run_defn.fields())

    def test_get_data_item(self):
        # Field lookups for existing (field, sample index) pairs.
        for field, index, value in (('sampleName', 0, 'AB_CD_EF_pool'),
                                    ('library', 0, 'CD_UV5'),
                                    ('secondaryAnalysis', 0, 'mm9'),
                                    ('sampleName', 4, 'AB_CD_EF_pool'),
                                    ('library', 4, 'EF12'),
                                    ('secondaryAnalysis', 4, 'dm5')):
            self.assertEqual(value, self.run_defn.getDataItem(field, index))
        # Out-of-range index raises; unknown fields come back as None.
        self.assertRaises(IndexError, self.run_defn.getDataItem, 'sampleName', 12)
        self.assertEqual(None, self.run_defn.getDataItem('tertiaryAnalysis', 0))
        self.assertEqual(None, self.run_defn.getDataItem('tertiaryAnalysis', 12))

    def test_nonexistent_run_definition_file(self):
        # A missing definition file yields a falsy object.
        self.assertFalse(SolidRunDefinition('i_dont_exist'))
class TestMemPhi():
    """Tests for MemPhi: a phi function over memory versions (mem#N)."""

    def test_string(self):
        # String form lists the target memory version and its sources.
        i = MemPhi(mem6, [mem3, mem4, mem5])
        assert (str(i) == 'mem#6 = (mem#3,mem#4,mem#5)')

    def test_substitute_does_nothing(self):
        # Substitution leaves a MemPhi untouched: its operands are memory
        # versions, not ordinary variables.
        i = MemPhi(mem6, [mem3, mem4, mem5])
        i.substitute(mem3, a)
        assert (str(i) == 'mem#6 = (mem#3,mem#4,mem#5)')

    def test_create_phi_for_variables(self):
        """Expanding a MemPhi yields one Phi per requested aliased variable,
        with SSA labels taken from the memory versions (target 6, sources
        3, 4, 5)."""
        i = MemPhi(mem6, [mem3, mem4, mem5])
        phis = i.create_phi_functions_for_variables({a, b, c})
        ssa_labels = (6, 3, 4, 5)
        # The genexp variable `i` below is local to each generator expression
        # and does not clobber the MemPhi bound to `i` above.
        (a6, a3, a4, a5) = (Variable(a.name, a.type, i, is_aliased=True) for i in ssa_labels)
        (b6, b3, b4, b5) = (Variable(b.name, b.type, i, is_aliased=True) for i in ssa_labels)
        (c6, c3, c4, c5) = (Variable(c.name, c.type, i, is_aliased=True) for i in ssa_labels)
        (g6, g3, g4, g5) = (GlobalVariable('g', Integer.char(), i, initial_value=42) for i in ssa_labels)
        # Local (non-global) lookalikes of 'g' without SSA labels, used only
        # for the inequality check at the end.
        (g6_loc, g3_loc, g4_loc, g5_loc) = (Variable('g', Integer.char(), is_aliased=True) for i in ssa_labels)
        phi_a = Phi(a6, [a3, a4, a5])
        phi_b = Phi(b6, [b3, b4, b5])
        phi_c = Phi(c6, [c3, c4, c5])
        g = GlobalVariable('g', Integer.char(), initial_value=42)
        phi_g = Phi(g6, [g3, g4, g5])
        phi_g_loc = Phi(g6_loc, [g3_loc, g4_loc, g5_loc])
        assert (set(phis) == {phi_a, phi_b, phi_c})
        # An empty variable set yields no phi functions.
        i2 = MemPhi(mem6, [mem3, mem4, mem5])
        phis = i2.create_phi_functions_for_variables(set())
        assert (phis == [])
        i3 = MemPhi(mem6, [mem3, mem4, mem5])
        phis = i3.create_phi_functions_for_variables({a, b})
        assert (set(phis) == {phi_a, phi_b})
        # NOTE(review): `i4` is assigned twice below and the empty-set case
        # duplicates i2 — looks like a copy/paste leftover; kept as-is.
        i4 = MemPhi(mem6, [mem3, mem4, mem5])
        phis = i4.create_phi_functions_for_variables(set())
        assert (phis == [])
        i4 = MemPhi(mem6, [mem3, mem4, mem5])
        phis = i4.create_phi_functions_for_variables({b, c})
        assert (set(phis) == {phi_b, phi_c})
        # Global variables keep their global identity in the generated phis:
        # equal to the GlobalVariable-based phi, unequal to the local variant.
        i5 = MemPhi(mem6, [mem3, mem4, mem5])
        phis = i5.create_phi_functions_for_variables({g})
        assert (set(phis) == {phi_g})
        assert (set(phis) != {phi_g_loc})
class HexSurface(SuperEnum):
    """Surface material of a map hex.

    Member values are tuples whose fields are named by ``__keys__``:
    (id, specific_heat, albedo).

    NOTE(review): every albedo is 0.0 — presumably placeholders to be
    filled in later; confirm before relying on them.
    """
    # Field names for the tuple values, in order.
    __keys__ = ['id', 'specific_heat', 'albedo']
    water_fresh = (1, 1.0, 0.0)
    water_sea = (2, 0.94, 0.0)
    granite = (3, 0.19, 0.0)
    basalt = (4, 0.2, 0.0)
    soil_wet = (5, 0.35, 0.0)
    soil_dry = (6, 0.19, 0.0)
    soil_barren = (7, 0.1, 0.0)
    ice_warm = (8, 0.5, 0.0)
    ice_cold = (9, 0.4, 0.0)
class OptionSeriesPieDatalabels(Options):
    """Highcharts ``series.pie.dataLabels`` option wrapper.

    Each option is a property pair: the getter reports the Highcharts
    default, the setter records the value via ``self._config``. Nested option
    groups (animation, filter, textPath) are getter-only sub-data objects.

    NOTE(review): the extracted text had lost every ``@property`` /
    ``@<name>.setter`` decorator, so each setter silently shadowed its
    getter; restored here — confirm against the file's generator.
    """

    @property
    def alignTo(self):
        return self._config_get(None)

    @alignTo.setter
    def alignTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def animation(self) -> 'OptionSeriesPieDatalabelsAnimation':
        return self._config_sub_data('animation', OptionSeriesPieDatalabelsAnimation)

    @property
    def backgroundColor(self):
        return self._config_get(None)

    @backgroundColor.setter
    def backgroundColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderColor(self):
        return self._config_get(None)

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def borderRadius(self):
        return self._config_get(0)

    @borderRadius.setter
    def borderRadius(self, num: float):
        self._config(num, js_type=False)

    @property
    def borderWidth(self):
        return self._config_get(0)

    @borderWidth.setter
    def borderWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def connectorColor(self):
        return self._config_get(None)

    @connectorColor.setter
    def connectorColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def connectorPadding(self):
        return self._config_get(5)

    @connectorPadding.setter
    def connectorPadding(self, num: float):
        self._config(num, js_type=False)

    @property
    def connectorShape(self):
        return self._config_get('crookedLine')

    @connectorShape.setter
    def connectorShape(self, text: str):
        self._config(text, js_type=False)

    @property
    def connectorWidth(self):
        return self._config_get(1)

    @connectorWidth.setter
    def connectorWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def crookDistance(self):
        return self._config_get('undefined')

    @crookDistance.setter
    def crookDistance(self, text: str):
        self._config(text, js_type=False)

    @property
    def crop(self):
        return self._config_get(True)

    @crop.setter
    def crop(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def defer(self):
        return self._config_get(True)

    @defer.setter
    def defer(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def distance(self):
        return self._config_get(30)

    @distance.setter
    def distance(self, num: float):
        self._config(num, js_type=False)

    @property
    def enabled(self):
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def filter(self) -> 'OptionSeriesPieDatalabelsFilter':
        return self._config_sub_data('filter', OptionSeriesPieDatalabelsFilter)

    @property
    def format(self):
        return self._config_get('undefined')

    @format.setter
    def format(self, text: str):
        self._config(text, js_type=False)

    @property
    def formatter(self):
        return self._config_get('function () { return this.point.isNull ? void 0 : this.point.name; }')

    @formatter.setter
    def formatter(self, text: str):
        # Stored as raw JavaScript, not as a string literal.
        self._config(text, js_type=True)

    @property
    def nullFormat(self):
        return self._config_get(None)

    @nullFormat.setter
    def nullFormat(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def nullFormatter(self):
        return self._config_get(None)

    @nullFormatter.setter
    def nullFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def overflow(self):
        return self._config_get('justify')

    @overflow.setter
    def overflow(self, text: str):
        self._config(text, js_type=False)

    @property
    def padding(self):
        return self._config_get(5)

    @padding.setter
    def padding(self, num: float):
        self._config(num, js_type=False)

    @property
    def position(self):
        return self._config_get('center')

    @position.setter
    def position(self, text: str):
        self._config(text, js_type=False)

    @property
    def rotation(self):
        return self._config_get(0)

    @rotation.setter
    def rotation(self, num: float):
        self._config(num, js_type=False)

    @property
    def shadow(self):
        return self._config_get(False)

    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def shape(self):
        return self._config_get('square')

    @shape.setter
    def shape(self, text: str):
        self._config(text, js_type=False)

    @property
    def softConnector(self):
        return self._config_get(True)

    @softConnector.setter
    def softConnector(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def style(self):
        return self._config_get(None)

    @style.setter
    def style(self, value: Any):
        self._config(value, js_type=False)

    @property
    def textPath(self) -> 'OptionSeriesPieDatalabelsTextpath':
        return self._config_sub_data('textPath', OptionSeriesPieDatalabelsTextpath)

    @property
    def useHTML(self):
        return self._config_get(False)

    @useHTML.setter
    def useHTML(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def verticalAlign(self):
        return self._config_get('bottom')

    @verticalAlign.setter
    def verticalAlign(self, text: str):
        self._config(text, js_type=False)

    @property
    def x(self):
        return self._config_get(0)

    @x.setter
    def x(self, num: float):
        self._config(num, js_type=False)

    @property
    def y(self):
        return self._config_get(0)

    @y.setter
    def y(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        return self._config_get(6)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
class FaucetMetrics(PromClient):
    """Container for all Prometheus metrics exported by FAUCET.

    Metrics are created once in __init__; DP-scoped counters/gauges are also
    remembered in _dpid_counters/_dpid_gauges so they can be zeroed for a
    single datapath via reset_dpid().
    """

    # Maps metric name -> Counter for per-DP counters (see _dpid_counter).
    _dpid_counters = None
    # Maps metric name -> Gauge for per-DP gauges (see _dpid_gauge).
    _dpid_gauges = None

    def __init__(self, reg=None):
        super().__init__(reg=reg)
        # Port-scoped metrics carry the DP labels plus port identification.
        self.port_required_labels = (self.REQUIRED_LABELS + ['port', 'port_description'])
        self._dpid_counters = {}
        self._dpid_gauges = {}
        # --- configuration / reload metrics ---
        self.ryu_config = self._gauge('ryu_config', 'ryu configuration option', ['param'])
        self.faucet_stack_root_dpid = self._gauge('faucet_stack_root_dpid', 'set to current stack root DPID', [])
        self.faucet_config_reload_requests = self._counter('faucet_config_reload_requests', 'number of config reload requests', [])
        self.faucet_config_load_error = self._gauge('faucet_config_load_error', '1 if last attempt to re/load config failed', [])
        self.faucet_config_hash = self._info('faucet_config_hash', 'file hashes for last successful config')
        self.faucet_config_hash_func = self._gauge('faucet_config_hash_func', 'algorithm used to compute config hashes', ['algorithm'])
        self.faucet_config_applied = self._gauge('faucet_config_applied', 'fraction of DPs that we have tried to apply config to', [])
        self.faucet_event_id = self._gauge('faucet_event_id', 'highest/most recent event ID to be sent', [])
        self.faucet_config_reload_warm = self._dpid_counter('faucet_config_reload_warm', 'number of warm, differences only config reloads executed')
        self.faucet_config_reload_cold = self._dpid_counter('faucet_config_reload_cold', 'number of cold, complete reprovision config reloads executed')
        # --- OpenFlow message counters (per DP) ---
        self.of_ignored_packet_ins = self._dpid_counter('of_ignored_packet_ins', 'number of OF packet_ins received but ignored from DP (due to rate limiting)')
        self.of_unexpected_packet_ins = self._dpid_counter('of_unexpected_packet_ins', 'number of OF packet_ins received that are unexpected from DP (e.g. for unknown VLAN)')
        self.of_packet_ins = self._dpid_counter('of_packet_ins', 'number of OF packet_ins received from DP')
        self.of_non_vlan_packet_ins = self._dpid_counter('of_non_vlan_packet_ins', 'number of OF packet_ins received from DP, not associated with a FAUCET VLAN')
        self.of_vlan_packet_ins = self._dpid_counter('of_vlan_packet_ins', 'number of OF packet_ins received from DP, associated with a FAUCET VLAN')
        self.of_flowmsgs_sent = self._dpid_counter('of_flowmsgs_sent', 'number of OF flow messages (and packet outs) sent to DP')
        self.of_errors = self._dpid_counter('of_errors', 'number of OF errors received from DP')
        self.of_dp_connections = self._dpid_counter('of_dp_connections', 'number of OF connections from a DP')
        # NOTE(review): help text duplicates of_dp_connections; presumably
        # meant to read "disconnections" — confirm before changing.
        self.of_dp_disconnections = self._dpid_counter('of_dp_disconnections', 'number of OF connections from a DP')
        # --- host learning / VLAN metrics ---
        self.vlan_hosts_learned = self._gauge('vlan_hosts_learned', 'number of hosts learned on a VLAN', (self.REQUIRED_LABELS + ['vlan']))
        self.port_vlan_hosts_learned = self._gauge('port_vlan_hosts_learned', 'number of hosts learned on a port and VLAN', (self.port_required_labels + ['vlan']))
        self.vlan_neighbors = self._gauge('vlan_neighbors', 'number of L3 neighbors on a VLAN (whether resolved to L2 addresses, or not)', (self.REQUIRED_LABELS + ['vlan', 'ipv']))
        self.vlan_learn_bans = self._gauge('vlan_learn_bans', 'number of times learning was banned on a VLAN', (self.REQUIRED_LABELS + ['vlan']))
        self.faucet_config_table_names = self._gauge('faucet_config_table_names', 'number to names map of FAUCET pipeline tables', (self.REQUIRED_LABELS + ['table_name', 'next_tables']))
        # --- processing-time histograms ---
        self.faucet_packet_in_secs = self._histogram('faucet_packet_in_secs', 'FAUCET packet in processing time', self.REQUIRED_LABELS, (0.0001, 0.001, 0.01, 0.1, 1))
        self.faucet_valve_service_secs = self._histogram('faucet_valve_service_secs', 'FAUCET valve service processing time', (self.REQUIRED_LABELS + ['valve_service']), (0.0001, 0.001, 0.01, 0.1, 1))
        # --- BGP metrics ---
        self.bgp_neighbor_uptime_seconds = self._gauge('bgp_neighbor_uptime', 'BGP neighbor uptime in seconds', (self.REQUIRED_LABELS + ['vlan', 'neighbor']))
        self.bgp_neighbor_routes = self._gauge('bgp_neighbor_routes', 'BGP neighbor route count', (self.REQUIRED_LABELS + ['vlan', 'neighbor', 'ipv']))
        # --- per-port metrics ---
        self.learned_macs = self._gauge('learned_macs', 'MAC address stored as 64bit number to DP ID, port, VLAN, and n (discrete index)', (self.port_required_labels + ['vlan', 'n']))
        self.port_status = self._gauge('port_status', 'status of switch ports', self.port_required_labels)
        self.port_stack_state = self._gauge('port_stack_state', 'state of stacking on a port', self.port_required_labels)
        self.port_learn_bans = self._gauge('port_learn_bans', 'number of times learning was banned on a port', self.port_required_labels)
        self.learned_l2_port = self._gauge('learned_l2_port', 'learned port of l2 entries', (self.REQUIRED_LABELS + ['vid', 'eth_src']))
        self.port_lacp_role = self._gauge('port_lacp_role', 'LACP role of a port', self.port_required_labels)
        self.port_lacp_state = self._gauge('port_lacp_state', 'state of LACP on a port', self.port_required_labels)
        # --- per-DP status / stacking metrics ---
        self.dp_status = self._dpid_gauge('dp_status', 'status of datapaths')
        self.dp_root_hop_port = self._gauge('dp_root_hop_port', 'port that leads to stack root DP', self.REQUIRED_LABELS)
        self.of_dp_desc_stats = self._gauge('of_dp_desc_stats', 'DP description (OFPDescStatsReply)', (self.REQUIRED_LABELS + ['mfr_desc', 'hw_desc', 'sw_desc', 'serial_num', 'dp_desc']))
        self.stack_cabling_errors = self._dpid_counter('stack_cabling_errors', 'number of cabling errors detected in all FAUCET stacks')
        self.stack_probes_received = self._dpid_counter('stack_probes_received', 'number of stacking messages received')
        self.is_dp_stack_root = self._dpid_gauge('is_dp_stack_root', 'bool indicating if dp is stack root')
        # --- 802.1X (dot1x) metrics ---
        self.dp_dot1x_success = self._dpid_counter('dp_dot1x_success', 'number of successful authentications on dp')
        self.dp_dot1x_failure = self._dpid_counter('dp_dot1x_failure', 'number of authentications attempts failed on dp')
        self.dp_dot1x_logoff = self._dpid_counter('dp_dot1x_logoff', 'number of eap-logoff events on dp')
        self.port_dot1x_success = self._counter('port_dot1x_success', 'number of successful authentications on port', self.port_required_labels)
        self.port_dot1x_failure = self._counter('port_dot1x_failure', 'number of authentications attempts failed on port', self.port_required_labels)
        self.port_dot1x_logoff = self._counter('port_dot1x_logoff', 'number of eap-logoff events on port', self.port_required_labels)
        # NOTE(review): "for for" typo in the help string below; kept verbatim.
        self.lacp_port_id = self._gauge('lacp_port_id', 'lacp port ID for for port', self.port_required_labels)
        # --- state-change counters ---
        self.port_stack_state_change_count = self._counter('port_stack_state_change_count', 'number of changes in port stack state', self.port_required_labels)
        self.port_lacp_state_change_count = self._counter('port_lacp_state_change_count', 'number of changes in port lacp state', self.port_required_labels)
        self.stack_root_change_count = self._counter('stack_root_change_count', 'number of changes in stack root', [])

    def _counter(self, var, var_help, labels):
        """Create a plain Counter registered on this client's registry."""
        return Counter(var, var_help, labels, registry=self._reg)

    def _gauge(self, var, var_help, labels):
        """Create a PromGauge registered on this client's registry."""
        return PromGauge(var, var_help, labels, registry=self._reg)

    def _info(self, var, var_help):
        """Create an Info metric registered on this client's registry."""
        return Info(var, var_help, registry=self._reg)

    def _histogram(self, var, var_help, labels, buckets):
        """Create a Histogram with explicit buckets."""
        return Histogram(var, var_help, labels, buckets=buckets, registry=self._reg)

    def _dpid_counter(self, var, var_help):
        """Counter keyed only by the DP labels; remembered for reset_dpid()."""
        counter = self._counter(var, var_help, self.REQUIRED_LABELS)
        self._dpid_counters[var] = counter
        return counter

    def _dpid_gauge(self, var, var_help):
        """Gauge keyed only by the DP labels; remembered for reset_dpid()."""
        gauge = self._gauge(var, var_help, self.REQUIRED_LABELS)
        self._dpid_gauges[var] = gauge
        return gauge

    def reset_dpid(self, dp_labels):
        """Zero all per-DP metrics for the DP identified by dp_labels."""
        for counter in self._dpid_counters.values():
            counter.labels(**dp_labels).inc(0)
        for gauge in self._dpid_gauges.values():
            gauge.labels(**dp_labels).set(0)

    def inc_var(self, var, labels, val=1):
        """Increment the metric attribute named `var` for `labels` by `val`."""
        assert (labels is not None)
        metrics_var = getattr(self, var)
        metrics_var.labels(**labels).inc(val)
def _parse_raw_output(raw_output, temp_dir): output = [] for line in raw_output.splitlines(): if line.endswith(' FOUND'): m = {} splitted_info = line.split(' ') m['file'] = splitted_info[0][:(- 1)].replace(temp_dir, '') m['malware'] = splitted_info[1] output.append(m) return output