code
stringlengths
281
23.7M
def load_models(
    app_config: AppConfig,
    app_context: AppContext,
    fulltext_processor_config: FullTextProcessorConfig,
) -> FullTextModels:
    """Load every sequence-labelling model plus the optional CV / OCR models.

    The CV and OCR models are only enabled when the processor config asks
    for them; everything else is loaded unconditionally.
    """
    # (config key, model class) pairs, in the original load order.
    model_specs = [
        ('segmentation', SegmentationModel),
        ('header', HeaderModel),
        ('name_header', NameModel),
        ('name_citation', NameModel),
        ('affiliation_address', AffiliationAddressModel),
        ('fulltext', FullTextModel),
        ('figure', FigureModel),
        ('table', TableModel),
        ('reference_segmenter', ReferenceSegmenterModel),
        ('citation', CitationModel),
    ]
    loaded = {
        key: load_model(app_config, app_context, key, model_class)
        for key, model_class in model_specs
    }
    return FullTextModels(
        segmentation_model=loaded['segmentation'],
        header_model=loaded['header'],
        name_header_model=loaded['name_header'],
        name_citation_model=loaded['name_citation'],
        affiliation_address_model=loaded['affiliation_address'],
        fulltext_model=loaded['fulltext'],
        figure_model=loaded['figure'],
        table_model=loaded['table'],
        reference_segmenter_model=loaded['reference_segmenter'],
        citation_model=loaded['citation'],
        cv_model=get_cv_model_for_app_config(
            app_config, enabled=fulltext_processor_config.use_cv_model
        ),
        ocr_model=get_ocr_model_for_app_config(
            app_config, enabled=fulltext_processor_config.use_ocr_model
        ),
    )
class Gpu_nvidia(Gpu_interface):
    # Set by check_primeoffload(); NOTE(review): it is assigned the raw
    # exec_bash() return value (a truthy string) on success, not a strict bool.
    primeoffload: bool

    def __init__(self, os, model):
        super().__init__(os, GPU_VENDOR, model)

    # NOTE(review): the bare expression below looks like a mangled
    # `@Gpu_interface.model.setter` decorator for the setter that follows;
    # as written it raises NameError when the class body executes — confirm
    # against the original source.
    _interface.model.setter
    def model(self, value: str):
        self._model = value

    def get_temp(self):
        """Return the GPU temperature string on Linux; raise elsewhere.

        On Linux the value comes from `nvidia-smi`, wrapped as "[NNC]".
        If the shell pipeline fails, the error is swallowed and the method
        falls through, implicitly returning None.
        """
        if (self.os == 'windows'):
            raise NotImplementedError("Temperature report for Nvidia GPU's is not supported on Windows yet.")
        elif (self.os == 'macos'):
            raise NotImplementedError("Temperature report for Nvidia GPU's is not supported on MacOS yet.")
        elif (self.os == 'linux'):
            try:
                # awk extracts the "GPU Current Temp" value; sed wraps it in "[...C]".
                return exec_bash("nvidia-smi -q | awk '/GPU Current Temp/{print $5}' | sed 's/^/[/;s/$/C]/'")
            except BashError:
                pass
        else:
            raise NotImplementedError('Unknown OS, no GPU temperature report.')

    def check_primeoffload(self):
        """Probe xrandr for an NVIDIA provider; True on success, False on shell error."""
        self.primeoffload = False
        try:
            self.primeoffload = exec_bash('xrandr --listproviders | grep -o "NVIDIA-0"')
            return True
        except BashError:
            return False
class ChatHistoryStorageOperator(MapOperator[(ChatContext, ChatContext)]):
    """Operator that attaches a chat-history storage backend to a ChatContext."""

    def __init__(self, history: BaseChatHistoryMemory=None, **kwargs):
        super().__init__(**kwargs)
        self._history = history

    async def map(self, input_value: ChatContext) -> ChatContext:
        """Return the injected history if any, else wire up a per-session store."""
        # NOTE(review): when a history was injected, this returns the memory
        # object itself rather than a ChatContext, disagreeing with the return
        # annotation — confirm against callers before changing.
        if self._history:
            return self._history
        factory = ChatHistory()
        store = factory.get_store_instance(input_value.chat_session_id)
        input_value.history_storage = store
        return input_value
.xfail(reason='Need to properly add authorization as of 8/10/2022') def test_pm_release_package(empty_sol_registry, w3): w3.pm.registry = empty_sol_registry w3.pm.release_package('escrow', '1.0.0', 'ipfs://QmTpYHEog4yfmgx5GgvNCRQyDeQyBD4FWxTkiUP64AH1QC') w3.pm.release_package('owned', '1.0.0', 'ipfs://QmcxvhkJJVpbxEAa6cgW3B6XwPJb79w9GpNUv2P2THUzZR') release_id_1 = w3.pm.get_release_id('escrow', '1.0.0') release_id_2 = w3.pm.get_release_id('owned', '1.0.0') package_data_1 = w3.pm.get_release_id_data(release_id_1) package_data_2 = w3.pm.get_release_id_data(release_id_2) assert (package_data_1[0] == 'escrow') assert (package_data_1[1] == '1.0.0') assert (package_data_1[2] == 'ipfs://QmTpYHEog4yfmgx5GgvNCRQyDeQyBD4FWxTkiUP64AH1QC') assert (package_data_2[0] == 'owned') assert (package_data_2[1] == '1.0.0') assert (package_data_2[2] == 'ipfs://QmcxvhkJJVpbxEAa6cgW3B6XwPJb79w9GpNUv2P2THUzZR')
.longrun .parametrize('target_reward, hydra_overrides', trainings) def test_train(hydra_overrides: Dict[(str, str)], target_reward: float): with Timeout(seconds=300): run_maze_job(hydra_overrides, config_module='maze.conf', config_name='conf_train') tf_summary_files = glob.glob('*events.out.tfevents*') assert (len(tf_summary_files) == 1), f'expected exactly 1 tensorflow summary file {tf_summary_files}' events_df = tensorboard_to_pandas(tf_summary_files[0]) max_mean_reward = np.nanmax(np.asarray(events_df.loc['train_BaseEnvEvents/reward/mean'])) assert (max_mean_reward >= target_reward)
class AITSimpleModel(nn.Module):
    """Transformer-style feed-forward block.

    Linear(hidden -> 4*hidden, fused fast_gelu) -> Linear(4*hidden -> hidden)
    -> residual add -> LayerNorm.
    """

    def __init__(self, hidden, eps: float = 1e-05):
        super().__init__()
        expanded = 4 * hidden
        self.dense1 = nn.Linear(hidden, expanded, specialization='fast_gelu')
        self.dense2 = nn.Linear(expanded, hidden)
        self.layernorm = nn.LayerNorm(hidden, eps=eps)

    def forward(self, input):
        projected = self.dense2(self.dense1(input))
        # Residual connection followed by normalization.
        return self.layernorm(projected + input)
.integrationtest .skipif((pymongo.version_tuple < (2, 7)), reason='New in 2.7') .skipif((pymongo.version_tuple >= (4, 0)), reason='Removed in 4.0') def test_bulk_execute(instrument, elasticapm_client, mongo_database): elasticapm_client.begin_transaction('transaction.test') bulk = mongo_database.test_bulk.initialize_ordered_bulk_op() bulk.insert({'x': 'y'}) bulk.insert({'z': 'x'}) bulk.find({'x': 'y'}).replace_one({'x': 'z'}) bulk.execute() elasticapm_client.end_transaction('transaction.test') transactions = elasticapm_client.events[TRANSACTION] span = _get_pymongo_span(elasticapm_client.spans_for_transaction(transactions[0])) assert (span['type'] == 'db') assert (span['subtype'] == 'mongodb') assert (span['action'] == 'query') assert (span['name'] == 'elasticapm_test.test_bulk.bulk.execute')
def sqrt_c_c(gen, t, srcs):
    """Emit instructions computing the complex square root of srcs[0].

    Uses the standard two-branch formulation: a special case for re == 0,
    otherwise temp = sqrt(2 * (|z| + |re|)) and u = temp / 2, with the other
    component derived from im / temp and signs chosen so the result lies in
    the right half-plane. Returns a ComplexArg referencing the destination
    temporaries (dst_re, dst_im).
    """
    xnonzero = gen.symbols.newLabel()
    done = gen.symbols.newLabel()
    dst_re = gen.newTemp(Float)
    dst_im = gen.newTemp(Float)
    # Branch to the general case when re != 0.
    gen.emit_cjump(srcs[0].re, xnonzero)
    # --- re == 0: both components have magnitude sqrt(|im| / 2). ---
    temp = sqrt_f_f(gen, t, [abs_f_f(gen, t, [gen.emit_binop('/', [srcs[0].im, ConstFloatArg(2.0)], Float)])])
    gen.emit_move(temp, dst_re)
    ypos = gen.emit_binop('>=', [srcs[0].im, ConstFloatArg(0.0)], Float)
    ygtzero = gen.symbols.newLabel()
    gen.emit_cjump(ypos, ygtzero)
    # im < 0: negate the imaginary component of the root.
    nt = neg_f_f(gen, t, [temp])
    gen.emit_move(nt, temp)
    gen.emit_label(ygtzero)
    gen.emit_move(temp, dst_im)
    gen.emit_jump(done)
    # --- general case: temp = sqrt(2 * (|z| + |re|)), u = temp / 2. ---
    gen.emit_label(xnonzero)
    temp = sqrt_f_f(gen, t, [gen.emit_binop('*', [ConstFloatArg(2.0), gen.emit_binop('+', [cabs_c_f(gen, t, [srcs[0]]), abs_f_f(gen, t, [srcs[0].re])], Float)], Float)])
    u = gen.emit_binop('/', [temp, ConstFloatArg(2.0)], Float)
    xpos = gen.emit_binop('>', [srcs[0].re, ConstFloatArg(0.0)], Float)
    xgtzero = gen.symbols.newLabel()
    gen.emit_cjump(xpos, xgtzero)
    # re < 0: real part is |im| / temp; imaginary part is ±u by sign of im.
    gen.emit_move(gen.emit_binop('/', [abs_f_f(gen, t, [srcs[0].im]), temp], Float), dst_re)
    ypos2 = gen.emit_binop('>=', [srcs[0].im, ConstFloatArg(0.0)], Float)
    ygtzero2 = gen.symbols.newLabel()
    gen.emit_cjump(ypos2, ygtzero2)
    gen.emit_move(neg_f_f(gen, t, [u]), dst_im)
    gen.emit_jump(done)
    gen.emit_label(ygtzero2)
    gen.emit_move(u, dst_im)
    gen.emit_jump(done)
    # re > 0: real part is u; imaginary part is im / temp.
    gen.emit_label(xgtzero)
    gen.emit_move(u, dst_re)
    gen.emit_move(gen.emit_binop('/', [srcs[0].im, temp], Float), dst_im)
    gen.emit_label(done)
    return ComplexArg(dst_re, dst_im)
class TestHCT(util.ColorAssertsPyTest):
    """Conversion checks for the HCT color space.

    Each entry maps an input color to its expected serialization after
    converting to HCT.
    """

    COLORS = [
        ('red', 'color(--hct 27.41 113.36 53.237)'),
        ('orange', 'color(--hct 71.257 60.528 74.934)'),
        ('yellow', 'color(--hct 111.05 75.504 97.139)'),
        ('green', 'color(--hct 142.23 71.136 46.228)'),
        ('blue', 'color(--hct 282.76 87.228 32.301)'),
        ('indigo', 'color(--hct 310.96 60.765 20.47)'),
        ('violet', 'color(--hct 331.49 65.001 69.695)'),
        ('white', 'color(--hct 209.54 2.8716 100)'),
        ('gray', 'color(--hct 209.54 1.8977 53.585)'),
        ('black', 'color(--hct 209.55 0 0)'),
        # Wide-gamut inputs.
        ('color(--acescg 1 0 1)', 'color(--hct 342.46 146.18 63.808 / 1)'),
        ('color(--acescg 1 0 0)', 'color(--hct 25.381 186.54 58.758 / 1)'),
        # Identity / alpha / percent / none handling.
        ('color(--hct 270 30 100)', 'color(--hct 270 30 100)'),
        ('color(--hct 270 30 100 / 0.5)', 'color(--hct 270 30 100 / 0.5)'),
        ('color(--hct 50% 50% 50% / 50%)', 'color(--hct 180 72.5 50 / 0.5)'),
        ('color(--hct none none none / none)', 'color(--hct none none none / none)'),
        ('color(--hct 0% 0% 0%)', 'color(--hct 0 0 none)'),
        ('color(--hct 100% 100% 100%)', 'color(--hct 360 145 100 / 1)'),
        ('color(--hct -100% -100% -100%)', 'color(--hct -360 -145 -100 / 1)')]

    # NOTE(review): the leading `.parametrize` looks like a mangled
    # `@pytest.mark.parametrize` decorator — confirm against the original module.
    .parametrize('color1,color2', COLORS)
    def test_colors(self, color1, color2):
        """Each input converts to the expected HCT serialization."""
        self.assertColorEqual(Color(color1).convert('hct'), Color(color2))
('flytekitplugins.identity_aware_proxy.cli.id_token.fetch_id_token')
('keyring.get_password')
('keyring.set_password')
def test_sa_id_token_no_token_in_keyring(kr_set_password, kr_get_password, mock_fetch_id_token):
    """With an empty keyring, a token is fetched once and then served from cache."""
    # NOTE(review): the three bare strings above look like mangled
    # `@mock.patch(...)` decorators (applied bottom-up, matching the reversed
    # parameter order) — confirm against the original module.
    test_audience = 'test_audience'
    service_account_email = 'default'
    # In-memory stand-in for the system keyring.
    tmp_test_keyring_store = {}
    kr_get_password.side_effect = (lambda service, user: tmp_test_keyring_store.get(service, {}).get(user, None))
    kr_set_password.side_effect = (lambda service, user, pwd: tmp_test_keyring_store.update({service: {user: pwd}}))
    # Each fetch mints a fresh, non-expired mock token.
    mock_fetch_id_token.side_effect = (lambda _, aud: create_mock_token(aud, expires_in=timedelta(hours=1)))
    token = get_service_account_id_token(test_audience, service_account_email)
    assert (jwt.decode(token.encode('utf-8'), options={'verify_signature': False})['aud'] == test_audience)
    assert jwt.decode(token.encode('utf-8'), options={'verify_signature': False})['jti'].startswith('test_token')
    # A second call must hit the keyring cache and return the identical token.
    second_token = get_service_account_id_token(test_audience, service_account_email)
    assert (token == second_token)
class StarredFileRequest(DatClass):
    """Request body for starring / unstarring a drive file.

    The custom index key is always derived from the `starred` flag, overriding
    any caller-supplied value.
    """

    drive_id: str = None
    file_id: str = None
    starred: bool = True
    custom_index_key: str = None

    def __post_init__(self):
        # 'starred_yes' marks starred files; unstarred files get an empty key.
        self.custom_index_key = 'starred_yes' if self.starred else ''
        super().__post_init__()
('validate-manifest', 'parse a manifest and validate that it is correct')
class ValidateManifest(SubCmd):
    """Sub-command that parses a manifest file and reports whether it is valid."""
    # NOTE(review): the bare tuple above looks like a mangled registration
    # decorator (e.g. `@subcmd(...)`) — confirm against the original module.

    def run(self, args):
        """Return 0 and print OK on success; 1 with the parse error otherwise."""
        try:
            ManifestParser(file_name=args.file_name)
            print('OK', file=sys.stderr)
            return 0
        except Exception as exc:
            print('ERROR: %s' % str(exc), file=sys.stderr)
            return 1

    def setup_parser(self, parser):
        parser.add_argument('file_name', help='path to the manifest file')
class TestProtectedRequest():
    """Tests for security.ProtectedRequest's selective attribute copying."""

    def test___init__(self):
        request = testing.DummyRequest()
        request.buildinfo = mock.MagicMock()
        request.db = mock.MagicMock()
        request.validated = mock.MagicMock()
        request.dontcopy = mock.MagicMock()
        pr = security.ProtectedRequest(request)
        # These attributes must be copied through from the wrapped request.
        for attr in ('db', 'registry', 'validated', 'buildinfo', 'identity'):
            assert getattr(pr, attr) == getattr(request, attr)
        assert isinstance(pr.errors, errors.Errors)
        assert pr.real_request is request
        # Arbitrary extra attributes must not leak through.
        assert not hasattr(pr, 'dontcopy')
.asyncio .workspace_host class TestUpdateUserField(): async def test_unauthorized(self, unauthorized_api_assertions: HTTPXResponseAssertion, test_client_api: test_data: TestData): user_field = test_data['user_fields']['given_name'] response = (await test_client_api.patch(f'/user-fields/{user_field.id}', json={})) unauthorized_api_assertions(response) .authenticated_admin async def test_not_existing(self, test_client_api: not_existing_uuid: uuid.UUID): response = (await test_client_api.patch(f'/user-fields/{not_existing_uuid}', json={})) assert (response.status_code == status.HTTP_404_NOT_FOUND) .parametrize('user_field_alias,default', [('phone_number_verified', 'INVALID_VALUE')]) .authenticated_admin async def test_invalid_default(self, user_field_alias: str, default: str, test_client_api: test_data: TestData): user_field = test_data['user_fields'][user_field_alias] response = (await test_client_api.patch(f'/user-fields/{user_field.id}', json={'configuration': {**user_field.configuration, 'default': default}})) assert (response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY) json = response.json() assert (json['detail'][0]['loc'] == ['body', 'configuration', 'default']) .authenticated_admin async def test_already_existing_slug(self, test_client_api: test_data: TestData): user_field = test_data['user_fields']['given_name'] response = (await test_client_api.patch(f'/user-fields/{user_field.id}', json={'slug': 'gender'})) assert (response.status_code == status.HTTP_400_BAD_REQUEST) json = response.json() assert (json['detail'] == APIErrorCode.USER_FIELD_SLUG_ALREADY_EXISTS) .authenticated_admin async def test_valid(self, test_client_api: test_data: TestData): user_field = test_data['user_fields']['given_name'] response = (await test_client_api.patch(f'/user-fields/{user_field.id}', json={'name': 'Updated name', 'slug': 'given_name'})) assert (response.status_code == status.HTTP_200_OK) json = response.json() assert (json['name'] == 'Updated name')
def test_is_valid_opcode_invalidates_bytes_after_PUSHXX_opcodes():
    """Bytes that are PUSH-immediate data (here after 0x60 = PUSH1) are not opcodes."""
    stream = CodeStream(b'\x02`\x02\x04')
    # position -> expected validity; positions 2 and 4 sit inside PUSH data.
    expectations = ((0, True), (1, True), (2, False), (3, True), (4, False))
    for position, valid in expectations:
        assert stream.is_valid_opcode(position) is valid
.parametrize('method,expected', (('test_endpoint', 'value-a'), ('not_implemented', NotImplementedError))) def test_error_middleware(w3, method, expected): def _callback(method, params): return params[0] w3.middleware_onion.add(construct_error_generator_middleware({'test_endpoint': _callback})) if (isinstance(expected, type) and issubclass(expected, Exception)): with pytest.raises(expected): w3.manager.request_blocking(method, [expected]) else: with pytest.raises(ValueError) as err: w3.manager.request_blocking(method, [expected]) assert (expected in str(err))
def add_datafiles_tests(klass, regex, ofp, pyversion=3):
    """Attach one test method to *klass* per data file whose name matches *regex*."""
    for filename in test_data.list_files():
        if re.match(regex, filename) is None:
            continue

        def make_test(filename):
            # Factory binds `filename` immediately, avoiding the
            # late-binding-closure trap of defining `fn` directly in the loop.
            def fn(self):
                test_datafile(filename, ofp, pyversion)
            return fn

        test_name = 'test_' + os.path.splitext(filename)[0]
        setattr(klass, test_name, make_test(filename))
def file_is_empty(file_path: Path) -> bool:
    """Return True if *file_path* exists and has size 0, else False.

    Symlinks are resolved first so the target's size (not the link's) is
    checked. Any OS-level failure (missing file, permission denied, broken
    symlink) is treated as "not empty". Unexpected errors are logged and
    also yield False.
    """
    try:
        if file_path.is_symlink():
            file_path = file_path.resolve()
        return file_path.lstat().st_size == 0
    except OSError:
        # Covers FileNotFoundError and PermissionError (both OSError subclasses,
        # which the original listed redundantly).
        return False
    except Exception as exception:
        logging.error(f'Unexpected Exception: {type(exception)} {exception!s}')
        # bug fix: the original fell through here, implicitly returning None
        # despite the declared `-> bool` return type.
        return False
def get_dataset_drift(drift_metrics: Dict[(str, ColumnDataDriftMetrics)], drift_share=0.5) -> DatasetDrift:
    """Aggregate per-column drift results into a dataset-level verdict.

    The dataset is considered drifted when at least *drift_share* of the
    columns individually drifted.
    """
    drifted_count = sum(1 for metric in drift_metrics.values() if metric.drift_detected)
    drifted_share = drifted_count / len(drift_metrics)
    return DatasetDrift(
        number_of_drifted_columns=drifted_count,
        dataset_drift_score=drifted_share,
        dataset_drift=bool(drifted_share >= drift_share),
    )
class Dunofausto(Sprite):
    """Player sprite: moves with the arrow keys and throws toasts (torradas)."""

    def __init__(self, torradas):
        super().__init__()
        self.image = load('images/dunofausto_small.png')
        self.rect = self.image.get_rect()
        self.torradas = torradas
        self.velocidade = 2

    def tacar_torradas(self):
        # Cap the number of simultaneous toasts at 15.
        if len(self.torradas) < 15:
            self.torradas.add(Torrada(*self.rect.center))

    def update(self):
        keys = pygame.key.get_pressed()
        # HUD: remaining toast count (relies on module-level `fonte`/`superficie`).
        torradas_fonte = fonte.render(f'Torradas: {15 - len(self.torradas)}', True, (255, 255, 255))
        superficie.blit(torradas_fonte, (20, 20))
        # Arrow-key movement: key states are 0/1, so the differences give the
        # direction on each axis.
        self.rect.x += (keys[pygame.K_RIGHT] - keys[pygame.K_LEFT]) * self.velocidade
        self.rect.y += (keys[pygame.K_DOWN] - keys[pygame.K_UP]) * self.velocidade
def find_functions_by_identifier(contract_abi: ABI, w3: Union[('Web3', 'AsyncWeb3')], address: ChecksumAddress, callable_check: Callable[(..., Any)], function_type: Type[TContractFn]) -> List[TContractFn]:
    """Build a contract-function object for every function ABI that passes the check."""
    accepted = (
        fn_abi
        for fn_abi in filter_by_type('function', contract_abi)
        if callable_check(fn_abi)
    )
    return [
        function_type.factory(
            fn_abi['name'],
            w3=w3,
            contract_abi=contract_abi,
            address=address,
            function_identifier=fn_abi['name'],
            abi=fn_abi,
        )
        for fn_abi in accepted
    ]
def test_level_user_error():
    """level() rejects non-numeric arguments and unknown channel names."""
    with Image(width=100, height=100, pseudo='gradient:') as img:
        # Each numeric keyword must raise TypeError when given a string.
        for keyword in ('black', 'white', 'gamma'):
            with raises(TypeError):
                img.level(**{keyword: 'NaN'})
        # An unrecognized channel name raises ValueError instead.
        with raises(ValueError):
            img.level(channel='404')
def test_operator_registry(valid_operator_registry):
    """A registered base operator resolves its default properties."""
    operator = valid_operator_registry.get({'name': 'my-operator', 'type': 'base'})
    operator.resolve_properties(
        execution_context=ExecutionContext(None, {}),
        default_task_args={},
        base_operator_loader=valid_operator_registry.get,
        preprocessor_loader=None,
    )
    expected = {'dag': '<<dag>>', 'task_id': 'my_operator'}
    assert operator.resolved_properties.values == expected
class TerminusCheckin(AnswerBotCheckin):
    # NOTE(review): several string literals below are empty — non-ASCII text
    # (name, ignore/keyword strings, log messages, likely Chinese) appears to
    # have been stripped during extraction; restore from the original source.
    name = ''
    bot_username = 'EmbyPublicBot'
    # Cancel any pending conversation first, then start the check-in flow.
    bot_checkin_cmd = ['/cancel', '/checkin']
    bot_text_ignore = ['', '']
    bot_checked_keywords = ['']
    # Requires the remote "visual" captcha-recognition service.
    additional_auth = ['visual']
    max_retries = 1

    async def on_photo(self, message: Message):
        """Solve an inline-keyboard photo captcha via the visual recognizer.

        Cleans the button texts, asks the recognition service (up to three
        attempts), then clicks the matching original button.
        """
        if message.reply_markup:
            # Strip emoji and spaces so button texts match the recognizer output.
            clean = (lambda o: emoji.replace_emoji(o, '').replace(' ', ''))
            keys = [k for r in message.reply_markup.inline_keyboard for k in r]
            options = [k.text for k in keys]
            options_cleaned = [clean(o) for o in options]
            # Not a multiple-choice captcha; nothing to solve.
            if (len(options) < 2):
                return
            for i in range(3):
                (result, by) = (await Link(self.client).visual(message.photo.file_id, options_cleaned))
                if result:
                    self.log.debug(f' ({by}) : {result}.')
                    break
                else:
                    self.log.warning(f', ({(i + 1)}/3).')
            # Map the cleaned answer back to the original button text.
            # NOTE(review): if all three attempts fail, `result` is falsy and
            # .index() below will raise — confirm intended behavior upstream.
            result = options[options_cleaned.index(result)]
            try:
                (await message.click(result))
            except RPCError:
                self.log.warning('.')
class TestStatusNameStability(unittest.TestCase):
    """Guards that every status name recorded in the checked-in fixture files
    still maps to a PrivateComputationInstanceStatus enum member."""

    def _check_status_names(self, path: pathlib.Path) -> None:
        with open(path) as f:
            for raw_line in f:
                candidate = raw_line.strip()
                # Skip comment lines and blank lines.
                if candidate.startswith('#') or not candidate:
                    continue
                status_name = candidate
                try:
                    PrivateComputationInstanceStatus(status_name)
                except ValueError:
                    self.fail(f'Failed to find status name {status_name}')

    def test_status_name_stability(self) -> None:
        for fn in os.listdir(OUTPUT_DIR):
            path = OUTPUT_DIR / fn
            if os.path.isfile(path) and fn.endswith(STATUS_NAME_SUFFIX):
                with self.subTest(fn):
                    self._check_status_names(path)
class DataProperties():
    """Thin accessor over a shared page context holding data sources and schemas.

    The context dict must provide two mapping entries, 'sources' and 'schema',
    both keyed by the integer data id this class hands out.
    """

    def __init__(self, context: dict):
        self._context = context

    def add(self, records, js_funcs: list = None, profile=None):
        """Register *records* and return its newly assigned data id."""
        sources = self._context['sources']
        # Ids are assigned sequentially from the current source count.
        data_id = len(sources)
        sources[data_id] = records
        self._context['schema'][data_id] = {
            'containers': {},
            'fncs': js_funcs if js_funcs else [],
            'profile': profile,
        }
        return data_id

    def get_schema(self, data_id: int):
        """Return the schema entry registered under *data_id*."""
        return self._context['schema'][data_id]

    def get_records(self, data_id: int):
        """Return the records registered under *data_id*."""
        return self._context['sources'][data_id]
.gui()
def test_frame_count_progresses(monkeypatch, qtbot):
    """The loading indicator's frame counter advances while it is shown."""
    # NOTE(review): the leading `.gui()` looks like a mangled `@pytest.mark.gui()`
    # decorator — confirm against the original module.
    counter_values = []

    def mocked_paint_event(cls, event):
        # Record the counter on every repaint instead of actually drawing.
        counter_values.append(cls.counter)

    monkeypatch.setattr(LoadingIndicator, 'paintEvent', mocked_paint_event)
    window = QtWidgets.QMainWindow()
    indicator = LoadingIndicator(parent=window)
    qtbot.addWidget(window)
    assert (indicator.counter == 0)
    # High framerate so multiple frames elapse within the 500 ms wait below.
    indicator.framerate = 120
    window.show()
    indicator.show()
    qtbot.wait(500)
    assert counter_values
    # The counter wraps at dot_count, so its maximum observed value is dot_count - 1.
    assert (max(counter_values) == (indicator.dot_count - 1))
('deploy', cls=FandoghCommand)
('--image', help='The image name', default=None)
('--version', '-v', prompt='The image version', help='The image version you want to deploy')
('--name', prompt='Your service name', help='Choose a unique name for your service')
('--env', '-e', 'envs', help='Environment variables (format: VARIABLE_NAME=VARIABLE_VALUE)', multiple=True)
('--internal-port', '-m', 'internal_ports', help='Expose other ports internally', multiple=True)
('--hosts', '-h', 'hosts', help='Custom hosts that service should be accessible through', multiple=True)
('--port', '-p', 'port', help='The service port that will be exposed on port 80 to worldwide', default=80)
('--registry-secret', '-s', 'registry_secret', help='Name of the secret containing require credentials to access registry', default=None)
('--internal', help='This is an internal service like a DB and the port should not be exposed publicly', default=False, is_flag=True)
('--image-pull-policy', 'image_pull_policy', default='IfNotPresent')
('-d', 'detach', is_flag=True, default=False, help='detach terminal.')
def deploy(image, version, name, port, envs, hosts, internal, registry_secret, image_pull_policy, internal_ports, detach):
    """Deploy a service image on Fandogh; optionally monitor it until RUNNING.

    NOTE(review): the bare tuples above look like mangled `@click.command` /
    `@click.option` decorators — confirm against the original module.
    """
    # Resolve the image name: CLI flag -> project config -> interactive prompt.
    if (not image):
        image = get_project_config().get('image.name')
    if (not image):
        click.echo(format_text("You're not in a fandogh directory and didn't provide an image name using --image, please provide an image name, it could \nbe a full image name tagged with a repository or simply name of one of the images you already published.\nfor example:\n- myprivate.repository.com:5000/mynamespace/imagename <-- full image name\n- some-image-name <-- image you already published on fandogh\n- myusername/image-name <-- image from docker hub\n", TextStyle.OKBLUE))
        image = click.prompt('Image name: ').strip()
    if (not image):
        # NOTE(review): "withou" typo is in the original message string.
        click.echo(format_text("It's not possible to perform deploy operation withou image name", TextStyle.FAIL), err=True)
        sys.exit(301)
    deployment_result = deploy_service(image, version, name, envs, hosts, port, internal, registry_secret, image_pull_policy, internal_ports)
    if detach:
        # Detached mode: print the access information once and return.
        message = '\nCongratulation, Your service is running ^_^\n\n'
        if (str(deployment_result['service_type']).lower() == 'external'):
            message += 'Your service is accessible using the following URLs:\n{}'.format('\n'.join([' - {}'.format(url) for url in deployment_result['urls']]))
            message += '\n'
            click.echo(message)
        else:
            message += "\n Since your service is internal, it's not accessible from outside your fandogh private network, \n but other services inside your private network will be able to find it using it's name: '{}'\n ".strip().format(deployment_result['name'])
            message += '\n'
            click.secho(message, bold=True, fg='yellow')
    else:
        # Attached mode: poll the service state until RUNNING (exit 0),
        # keep waiting while UNSTABLE, and bail out otherwise.
        while True:
            details = get_details(name)
            if (not details):
                sys.exit(302)
            click.clear()
            if (details.get('state') == 'RUNNING'):
                present_service_detail(details)
                message = '\nCongratulation, Your service is running ^_^\n\n'
                if (str(deployment_result['service_type']).lower() == 'external'):
                    message += 'Your service is accessible using the following URLs:\n{}'.format('\n'.join([' - {}'.format(url) for url in deployment_result['urls']]))
                    message += '\n'
                    click.echo(message)
                else:
                    message += "\n Since your service is internal, it's not accessible from outside your fandogh private network, \n but other services inside your private network will be able to find it using it's name: '{}'\n ".strip().format(deployment_result['name'])
                    message += '\n'
                    click.secho(message, bold=True, fg='yellow')
                sys.exit(0)
            elif (details.get('state') == 'UNSTABLE'):
                present_service_detail(details)
                click.echo('You can press ctrl + C to exit details service state monitoring')
                sleep(3)
            else:
                sys.exit(303)
.register_header_type(inet.IPPROTO_FRAGMENT) class fragment(header): TYPE = inet.IPPROTO_FRAGMENT _PACK_STR = '!BxHI' _MIN_LEN = struct.calcsize(_PACK_STR) def __init__(self, nxt=inet.IPPROTO_TCP, offset=0, more=0, id_=0): super(fragment, self).__init__(nxt) self.offset = offset self.more = more self.id_ = id_ def parser(cls, buf): (nxt, off_m, id_) = struct.unpack_from(cls._PACK_STR, buf) offset = (off_m >> 3) more = (off_m & 1) return cls(nxt, offset, more, id_) def serialize(self): off_m = ((self.offset << 3) | self.more) buf = struct.pack(self._PACK_STR, self.nxt, off_m, self.id_) return buf def __len__(self): return self._MIN_LEN
def highlight_genes(axis, genes, y_posn):
    """Shade each gene's span on *axis* and label it at height *y_posn*.

    Label size and rotation scale with the number of genes so the labels
    stay readable as the region gets crowded.
    """
    ngenes = len(genes)
    text_size = 'small' if ngenes <= 6 else 'x-small'
    # Progressively steeper rotation as labels get denser.
    if ngenes <= 3:
        text_rot = 'horizontal'
    elif ngenes <= 6:
        text_rot = 30
    elif ngenes <= 10:
        text_rot = 45
    elif ngenes <= 20:
        text_rot = 60
    else:
        text_rot = 'vertical'
    for gene_start, gene_end, gene_name in genes:
        axis.axvspan(gene_start * MB, gene_end * MB, alpha=0.5,
                     color=HIGHLIGHT_COLOR, zorder=(- 1))
        midpoint = (0.5 * (gene_start + gene_end)) * MB
        axis.text(midpoint, y_posn, gene_name, horizontalalignment='center',
                  rotation=text_rot, size=text_size)
def test_read16(la: LogicAnalyzer, slave: SPISlave):
    """A 16-bit read clocks the expected edges and carries the value on SDI."""
    la.capture(4, block=False)
    value = slave.read16()
    la.stop()
    (sck, sdo, cs, sdi) = la.fetch_data()
    sdi_initstate = la.get_initial_states()[SDI[0]]
    # CS toggles once around the transaction; SDO stays quiet during a read.
    assert len(cs) == CS_START + CS_STOP
    assert len(sck) == SCK_WRITE16
    assert not sdo
    # The returned value must match the bits reconstructed from the capture.
    assert verify_value(value, sck, sdi_initstate, sdi)
class TestApp(unittest.TestCase):
    """End-to-end tests for the browsepy Flask app over a temp directory tree.

    ``setUp`` builds a sandbox with four subdirectories (start/remove/upload/
    exclude), wires them into the app config, and precomputes the URLs the
    assertions compare against. ``get``/``post`` wrap the Flask test client
    and translate unexpected status codes into the exceptions in
    ``page_exceptions``.
    """

    # Module under test and the page-scraper classes used to parse responses.
    module = browsepy
    generic_page_class = Page
    list_page_class = ListPage
    confirm_page_class = ConfirmPage
    # Status code -> exception raised by get()/post() on mismatch;
    # None is the fallback for any other unexpected code.
    page_exceptions = {404: Page404Exception, 400: Page400Exception, 302: Page302Exception, None: PageException}

    def setUp(self):
        """Create the sandbox tree, seed test files, and configure the app."""
        self.app = self.module.app
        self.base = tempfile.mkdtemp()
        self.start = os.path.join(self.base, 'start')
        self.remove = os.path.join(self.base, 'remove')
        self.upload = os.path.join(self.base, 'upload')
        self.exclude = os.path.join(self.base, 'exclude')
        os.mkdir(self.start)
        os.mkdir(self.remove)
        os.mkdir(self.upload)
        os.mkdir(self.exclude)
        open(os.path.join(self.start, 'testfile.txt'), 'w').close()
        open(os.path.join(self.remove, 'testfile.txt'), 'w').close()
        open(os.path.join(self.exclude, 'testfile.txt'), 'w').close()

        def exclude_fnc(path):
            # Hide the 'exclude' directory and everything beneath it.
            return ((path == self.exclude) or path.startswith((self.exclude + os.sep)))
        self.app.config.update(directory_base=self.base, directory_start=self.start, directory_remove=self.remove, directory_upload=self.upload, exclude_fnc=exclude_fnc, SERVER_NAME='test')
        # Expected URLs, in the order the app is expected to list them.
        self.base_directories = [self.url_for('browse', path='remove'), self.url_for('browse', path='start'), self.url_for('browse', path='upload')]
        self.start_files = [self.url_for('open', path='start/testfile.txt')]
        self.remove_files = [self.url_for('open', path='remove/testfile.txt')]
        self.upload_files = []

    def clear(self, path):
        """Delete everything under *path*; refuse paths outside the sandbox."""
        assert path.startswith((self.base + os.sep)), 'Cannot clear directories out of base'
        for sub in os.listdir(path):
            sub = os.path.join(path, sub)
            if os.path.isdir(sub):
                shutil.rmtree(sub)
            else:
                os.remove(sub)

    def tearDown(self):
        shutil.rmtree(self.base)
        test_utils.clear_flask_context()

    def get(self, endpoint, **kwargs):
        """GET *endpoint* and parse the response with the right page class.

        Extra kwargs: ``status_code`` (expected, default 200),
        ``follow_redirects``, ``client`` (reuse an existing test client);
        anything else is forwarded to ``url_for``. Raises the matching
        ``page_exceptions`` entry when the status code differs.
        """
        status_code = kwargs.pop('status_code', 200)
        follow_redirects = kwargs.pop('follow_redirects', False)
        # Pick the scraper matching what the endpoint is expected to render.
        if (endpoint in ('index', 'browse')):
            page_class = self.list_page_class
        elif (endpoint == 'remove'):
            page_class = self.confirm_page_class
        elif ((endpoint == 'sort') and follow_redirects):
            page_class = self.list_page_class
        else:
            page_class = self.generic_page_class
        with (kwargs.pop('client', None) or self.app.test_client()) as client:
            response = client.get(self.url_for(endpoint, **kwargs), follow_redirects=follow_redirects)
            if (response.status_code != status_code):
                raise self.page_exceptions.get(response.status_code, self.page_exceptions[None])(response.status_code)
            result = page_class.from_source(response.data, response)
            response.close()
        test_utils.clear_flask_context()
        return result

    def post(self, endpoint, **kwargs):
        """POST ``data`` to *endpoint* (redirects followed) and parse the
        resulting listing page; raises like :meth:`get` on status mismatch."""
        status_code = kwargs.pop('status_code', 200)
        data = (kwargs.pop('data') if ('data' in kwargs) else {})
        with (kwargs.pop('client', None) or self.app.test_client()) as client:
            response = client.post(self.url_for(endpoint, **kwargs), data=data, follow_redirects=True)
            if (response.status_code != status_code):
                raise self.page_exceptions.get(response.status_code, self.page_exceptions[None])(response.status_code)
            result = self.list_page_class.from_source(response.data, response)
        test_utils.clear_flask_context()
        return result

    def url_for(self, endpoint, **kwargs):
        # url_for needs an app context; SERVER_NAME='test' makes it resolvable.
        with self.app.app_context():
            return flask.url_for(endpoint, _external=False, **kwargs)

    def test_index(self):
        """Index redirects into directory_start; outside-base start is a 404."""
        page = self.get('index')
        self.assertEqual(page.path, ('%s/start' % os.path.basename(self.base)))
        start = os.path.abspath(os.path.join(self.base, '..'))
        self.app.config['directory_start'] = start
        self.assertRaises(Page404Exception, self.get, 'index')
        self.app.config['directory_start'] = self.start

    def test_browse(self):
        """Listing shows the right entries and removable/upload/tarfile flags."""
        basename = os.path.basename(self.base)
        page = self.get('browse')
        self.assertEqual(page.path, basename)
        self.assertEqual(page.directories, self.base_directories)
        self.assertFalse(page.removable)
        self.assertFalse(page.upload)
        page = self.get('browse', path='start')
        self.assertEqual(page.path, ('%s/start' % basename))
        self.assertEqual(page.files, self.start_files)
        self.assertFalse(page.removable)
        self.assertFalse(page.upload)
        page = self.get('browse', path='remove')
        self.assertEqual(page.path, ('%s/remove' % basename))
        self.assertEqual(page.files, self.remove_files)
        self.assertTrue(page.removable)
        self.assertFalse(page.upload)
        page = self.get('browse', path='upload')
        self.assertEqual(page.path, ('%s/upload' % basename))
        self.assertEqual(page.files, self.upload_files)
        self.assertFalse(page.removable)
        self.assertTrue(page.upload)
        # Traversal, files-as-directories and excluded paths must 404.
        self.assertRaises(Page404Exception, self.get, 'browse', path='..')
        self.assertRaises(Page404Exception, self.get, 'browse', path='start/testfile.txt')
        self.assertRaises(Page404Exception, self.get, 'browse', path='exclude')
        self.app.config['directory_downloadable'] = True
        page = self.get('browse')
        self.assertTrue(page.tarfile)
        self.app.config['directory_downloadable'] = False
        page = self.get('browse')
        self.assertFalse(page.tarfile)

    def test_open(self):
        """Open returns the raw file bytes; traversal attempts 404."""
        content = b'hello world'
        with open(os.path.join(self.start, 'testfile3.txt'), 'wb') as f:
            f.write(content)
        page = self.get('open', path='start/testfile3.txt')
        self.assertEqual(page.data, content)
        self.assertRaises(Page404Exception, self.get, 'open', path='../shall_not_pass.txt')

    def test_remove(self):
        """Remove confirms via GET, deletes via POST, and 404s elsewhere."""
        open(os.path.join(self.remove, 'testfile2.txt'), 'w').close()
        page = self.get('remove', path='remove/testfile2.txt')
        self.assertEqual(page.name, 'testfile2.txt')
        self.assertEqual(page.path, 'remove/testfile2.txt')
        self.assertEqual(page.back, self.url_for('browse', path='remove'))
        basename = os.path.basename(self.base)
        page = self.post('remove', path='remove/testfile2.txt')
        self.assertEqual(page.path, ('%s/remove' % basename))
        self.assertEqual(page.files, self.remove_files)
        # Directories can be removed too.
        os.mkdir(os.path.join(self.remove, 'directory'))
        page = self.post('remove', path='remove/directory')
        self.assertEqual(page.path, ('%s/remove' % basename))
        self.assertEqual(page.files, self.remove_files)
        # Paths outside directory_remove, or with removal disabled, must 404.
        self.assertRaises(Page404Exception, self.get, 'remove', path='start/testfile.txt')
        self.assertRaises(Page404Exception, self.post, 'remove', path='start/testfile.txt')
        self.app.config['directory_remove'] = None
        self.assertRaises(Page404Exception, self.get, 'remove', path='remove/testfile.txt')
        self.app.config['directory_remove'] = self.remove
        self.assertRaises(Page404Exception, self.get, 'remove', path='../shall_not_pass.txt')
        self.assertRaises(Page404Exception, self.get, 'remove', path='exclude/testfile.txt')

    def test_download_file(self):
        """download_file streams exact bytes; traversal/dirs/excluded 404."""
        binfile = os.path.join(self.base, 'testfile.bin')
        bindata = bytes(range(256))
        with open(binfile, 'wb') as f:
            f.write(bindata)
        page = self.get('download_file', path='testfile.bin')
        os.remove(binfile)
        self.assertEqual(page.data, bindata)
        self.assertRaises(Page404Exception, self.get, 'download_file', path='../shall_not_pass.txt')
        self.assertRaises(Page404Exception, self.get, 'download_file', path='start')
        self.assertRaises(Page404Exception, self.get, 'download_file', path='exclude/testfile.txt')

    def test_download_directory(self):
        """Directory tarball honors exclude_fnc and rejects bad paths."""
        binfile = os.path.join(self.start, 'testfile.bin')
        excfile = os.path.join(self.start, 'testfile.exc')
        bindata = bytes(range(256))
        exclude = self.app.config['exclude_fnc']

        def tarball_files(path):
            # Download *path* as a tgz and return its sorted member names.
            page = self.get('download_directory', path=path)
            iodata = io.BytesIO(page.data)
            with tarfile.open('p.tgz', mode='r:gz', fileobj=iodata) as tgz:
                tgz_files = [member.name for member in tgz.getmembers() if member.name]
            tgz_files.sort()
            return tgz_files
        for path in (binfile, excfile):
            with open(path, 'wb') as f:
                f.write(bindata)
        self.app.config['exclude_fnc'] = None
        self.assertEqual(tarball_files('start'), [('testfile.%s' % x) for x in ('bin', 'exc', 'txt')])
        self.app.config['exclude_fnc'] = (lambda p: p.endswith('.exc'))
        self.assertEqual(tarball_files('start'), [('testfile.%s' % x) for x in ('bin', 'txt')])
        self.app.config['exclude_fnc'] = exclude
        self.assertRaises(Page404Exception, self.get, 'download_directory', path='../../shall_not_pass')
        self.assertRaises(Page404Exception, self.get, 'download_directory', path='exclude')

    def test_upload(self):
        """Multi-file upload lands in directory_upload; elsewhere 404s."""
        def genbytesio(nbytes, encoding):
            # In-memory file of the first *nbytes* code points, encoded.
            c = (unichr if PY_LEGACY else chr)
            return io.BytesIO(''.join(map(c, range(nbytes))).encode(encoding))
        files = {'testfile.txt': genbytesio(127, 'ascii'), 'testfile.bin': genbytesio(255, 'utf-8')}
        output = self.post('upload', path='upload', data={('file%d' % n): (data, name) for (n, (name, data)) in enumerate(files.items())})
        expected_links = sorted((self.url_for('open', path=('upload/%s' % i)) for i in files))
        self.assertEqual(sorted(output.files), expected_links)
        self.clear(self.upload)
        self.assertRaises(Page404Exception, self.post, 'upload', path='start', data={'file': (genbytesio(127, 'ascii'), 'testfile.txt')})

    def test_upload_duplicate(self):
        """Uploading the same filename twice keeps both payloads on disk."""
        c = (unichr if PY_LEGACY else chr)
        files = (('testfile.txt', 'something'), ('testfile.txt', 'something_new'))
        output = self.post('upload', path='upload', data={('file%d' % n): (io.BytesIO(data.encode('ascii')), name) for (n, (name, data)) in enumerate(files)})
        self.assertEqual(len(files), len(output.files))
        first_file_url = self.url_for('open', path=('upload/%s' % files[0][0]))
        self.assertIn(first_file_url, output.files)
        file_contents = []
        for filename in os.listdir(self.upload):
            with open(os.path.join(self.upload, filename), 'r') as f:
                file_contents.append(f.read())
        file_contents.sort()
        expected_file_contents = sorted((content for (filename, content) in files))
        self.assertEqual(file_contents, expected_file_contents)
        self.clear(self.upload)

    def test_upload_restrictions(self):
        """Filenames/paths beyond the filesystem limits are rejected with 400."""
        pathconf = browsepy.compat.pathconf(self.upload)
        maxname = pathconf['PC_NAME_MAX']
        maxpath = pathconf['PC_PATH_MAX']
        longname = ''.join((('a',) * maxname))
        # One character over NAME_MAX.
        self.assertRaises(Page400Exception, self.post, 'upload', path='upload', data={'file': (io.BytesIO('test'.encode('ascii')), (longname + 'a'))})
        # Build a directory chain deep enough that adding one more name
        # component exceeds PATH_MAX.
        subdirs = ([longname] * (((maxpath - len(self.upload)) + len(os.sep)) // (maxname + len(os.sep))))
        longpath = os.path.join(self.upload, *subdirs)
        os.makedirs(longpath)
        self.assertRaises(Page400Exception, self.post, 'upload', path=('upload/' + '/'.join(subdirs)), data={'file': (io.BytesIO('test'.encode('ascii')), longname)})
        # '..' as an upload filename must be rejected outright.
        self.assertRaises(Page400Exception, self.post, 'upload', path='upload', data={'file': (io.BytesIO('test'.encode('ascii')), '..')})

    def test_sort(self):
        """Sorting by name/type/size (and reversed) reorders the listing;
        the bare sort endpoint itself answers with a 302 redirect."""
        self.assertRaises(Page404Exception, self.get, 'sort', property='text', path='exclude')
        files = {'a.txt': 'aaa', 'b.png': 'aa', 'c.zip': 'a'}
        by_name = [self.url_for('open', path=name) for name in sorted(files)]
        by_name_desc = list(reversed(by_name))
        by_type = [self.url_for('open', path=name) for name in sorted(files, key=(lambda x: mimetypes.guess_type(x)[0]))]
        by_type_desc = list(reversed(by_type))
        by_size = [self.url_for('open', path=name) for name in sorted(files, key=(lambda x: len(files[x])))]
        by_size_desc = list(reversed(by_size))
        for (name, content) in files.items():
            path = os.path.join(self.base, name)
            with open(path, 'wb') as f:
                f.write(content.encode('ascii'))
        # Single client so the sorting cookie persists across requests.
        client = self.app.test_client()
        page = self.get('browse', client=client)
        self.assertListEqual(page.files, by_name)
        self.assertRaises(Page302Exception, self.get, 'sort', property='text', client=client)
        page = self.get('browse', client=client)
        self.assertListEqual(page.files, by_name)
        page = self.get('sort', property='-text', client=client, follow_redirects=True)
        self.assertListEqual(page.files, by_name_desc)
        page = self.get('sort', property='type', client=client, follow_redirects=True)
        self.assertListEqual(page.files, by_type)
        page = self.get('sort', property='-type', client=client, follow_redirects=True)
        self.assertListEqual(page.files, by_type_desc)
        page = self.get('sort', property='size', client=client, follow_redirects=True)
        self.assertListEqual(page.files, by_size)
        page = self.get('sort', property='-size', client=client, follow_redirects=True)
        self.assertListEqual(page.files, by_size_desc)
        # Modified-time order depends on the filesystem clock, so only check
        # that these requests succeed.
        page = self.get('sort', property='modified', client=client, follow_redirects=True)
        page = self.get('sort', property='-modified', client=client, follow_redirects=True)

    def test_sort_cookie_size(self):
        """The sorting cookie must stay under the 4000-byte browser limit
        even after sorting many directories with long names."""
        files = [(chr(i) * 150) for i in range(97, 123)]
        for name in files:
            path = os.path.join(self.base, name)
            os.mkdir(path)
        client = self.app.test_client()
        for name in files:
            page = self.get('sort', property='modified', path=name, client=client, status_code=302)
            for cookie in page.response.headers.getlist('set-cookie'):
                if cookie.startswith('browse-sorting='):
                    self.assertLessEqual(len(cookie), 4000)

    def test_endpoints(self):
        """Calling the view functions directly with traversal paths yields
        NotFound return values (not raised exceptions)."""
        with self.app.app_context():
            self.assertIsInstance(self.module.sort(property='name', path='..'), NotFound)
            self.assertIsInstance(self.module.browse(path='..'), NotFound)
            self.assertIsInstance(self.module.open_file(path='../something'), NotFound)
            self.assertIsInstance(self.module.download_file(path='../something'), NotFound)
            self.assertIsInstance(self.module.download_directory(path='..'), NotFound)
            self.assertIsInstance(self.module.remove(path='../something'), NotFound)
            self.assertIsInstance(self.module.upload(path='..'), NotFound)
.parametrize('capacity', [1, 2, 4, 10])
def test_consume_multiple_tokens_at_a_time(capacity):
    """Consuming in batches of *capacity* tokens succeeds at the refill rate.

    NOTE(review): the line above is a truncated decorator fragment — the
    leading target (presumably ``@pytest.mark.parametrize``) was lost in
    extraction; confirm against the original file.
    """
    rate = 100
    num_tokens = capacity
    key = 'key'
    storage = token_bucket.MemoryStorage()
    limiter = token_bucket.Limiter(rate, capacity, storage)
    # Asking for more than the bucket can ever hold must fail immediately.
    assert (not limiter.consume(key, num_tokens=(capacity + 1)))
    # Consume for ~5 seconds' worth of tokens, sleeping just long enough
    # between batches for the bucket to refill num_tokens.
    for __ in range(int(((rate * 5) / num_tokens))):
        assert limiter.consume(key, num_tokens=num_tokens)
        # Each batch drains the bucket below a single token.
        assert (storage.get_token_count(key) < 1.0)
        time.sleep(((1.0 / rate) * num_tokens))
def _strip_award_id(api_dict): field_mapping = {'broker_subaward_id': 'id', 'subaward_number': 'subaward_number', 'subaward_description': 'description', 'sub_action_date': 'action_date', 'subaward_amount': 'amount', 'sub_awardee_or_recipient_legal': 'recipient_name'} return {field_mapping[k]: v for (k, v) in api_dict.items() if (k not in ('award_id', 'unique_award_key'))}
(name='neighbor.delete', req_args=[neighbors.IP_ADDRESS])
def delete_neighbor(neigh_ip_address):
    """Disable and unregister the BGP neighbor at *neigh_ip_address*.

    NOTE(review): the line above is a truncated decorator fragment — the
    leading target (an API-registration decorator such as
    ``@RegisterWithArgChecks``) was lost in extraction; confirm against the
    original file.

    :param neigh_ip_address: IP address string identifying the neighbor.
    :return: True when a matching neighbor configuration existed and was
        removed, False otherwise.
    """
    neigh_conf = _get_neighbor_conf(neigh_ip_address)
    if neigh_conf:
        # Disable first so the session is brought down before removal.
        neigh_conf.enabled = False
        CORE_MANAGER.neighbors_conf.remove_neighbor_conf(neigh_ip_address)
        return True
    return False
class TestGetreadsFunction(unittest.TestCase):
    """Tests for ``getreads`` over FASTQ (plain and gzipped), csfasta and qual.

    FASTQ records are 4 lines each; csfasta/qual records are 2 lines each and
    follow a 2-line '#' comment header, which is why their reference slices
    start at index 2.
    """

    def setUp(self):
        # Scratch directory for the example files written by each test.
        self.wd = tempfile.mkdtemp()
        self.example_fastq_data = u':43:HL3LWBBXX:8:1101:21440:1121 1:N:0:CNATGT\nGCCNGACAGCAGAAAT\n+\nAAF#FJJJJJJJJJJJ\:43:HL3LWBBXX:8:1101:21460:1121 1:N:0:CNATGT\nGGGNGTCATTGATCAT\n+\nAAF#FJJJJJJJJJJJ\:43:HL3LWBBXX:8:1101:21805:1121 1:N:0:CNATGT\nCCCNACCCTTGCCTAC\n+\nAAF#FJJJJJJJJJJJ\n'
        self.example_csfasta_data = u'# Cwd: /home/pipeline\n# Title: solid0127__FRAG_BC_Run_56_pool_LC_CK\n>1_51_38_F3\nT3..3.213.12211.01..000..111...0\n>1_51_301_F3\nT0..3.222.21233.00..022..110...2\n>1_52_339_F3\nT1....2\n'
        self.example_qual_data = u'# Cwd: /home/pipeline\n# Title: solid0127__FRAG_BC_Run_56_pool_LC_CK\n>1_51_38_F3\n16 -1 -1 5 -1 24 15 12 -1 21 12 16 22 19 -1 26 13 -1 -1 4 21 4 -1 -1 4 7 9 -1 4 5 4 4 4 4 4 13 4 4 4 5 4 4 10 4 4 4 4 -1 -1 4 \n>1_51_301_F3\n22 -1 -1 4 -1 24 30 7 -1 4 9 26 6 16 -1 25 25 -1 -1 17 18 13 -1 -1 4 14 24 -1 4 14 17 32 4 7 13 13 22 4 12 19 4 24 6 9 8 4 4 -1 -1 9 \n>1_52_339_F3\n27 -1 33 24 28 32 29 17 25 27 26 30 30 31 -1 28 33 19 19 13 4 20 21 13 5 4 12 -1 4 23 13 8 4 10 4 6 5 7 4 8 4 8 12 5 12 10 8 7 -1 4\n'

    def tearDown(self):
        shutil.rmtree(self.wd)

    def test_getreads_fastq(self):
        """Plain FASTQ is yielded as 4-line records."""
        example_fastq = os.path.join(self.wd, 'example.fastq')
        with io.open(example_fastq, 'wt') as fp:
            fp.write(self.example_fastq_data)
        fastq_reads = getreads(example_fastq)
        reference_reads = [self.example_fastq_data.split('\n')[i:(i + 4)] for i in range(0, len(self.example_fastq_data.split('\n')), 4)]
        for (r1, r2) in zip(reference_reads, fastq_reads):
            self.assertEqual(r1, r2)

    def test_getreads_gzipped_fastq(self):
        """Gzipped FASTQ is transparently decompressed."""
        example_fastq = os.path.join(self.wd, 'example.fastq.gz')
        with gzip.open(example_fastq, 'wt') as fp:
            fp.write(self.example_fastq_data)
        fastq_reads = getreads(example_fastq)
        reference_reads = [self.example_fastq_data.split('\n')[i:(i + 4)] for i in range(0, len(self.example_fastq_data.split('\n')), 4)]
        for (r1, r2) in zip(reference_reads, fastq_reads):
            self.assertEqual(r1, r2)

    def test_getreads_csfasta(self):
        """csfasta is yielded as 2-line records with the header skipped."""
        example_csfasta = os.path.join(self.wd, 'example.csfasta')
        with io.open(example_csfasta, 'wt') as fp:
            fp.write(self.example_csfasta_data)
        csfasta_reads = getreads(example_csfasta)
        # BUGFIX: the reference range previously measured example_fastq_data,
        # a copy-paste slip that produced a wrong-length reference list
        # (masked by zip truncation); measure the csfasta data itself.
        reference_reads = [self.example_csfasta_data.split('\n')[i:(i + 2)] for i in range(2, len(self.example_csfasta_data.split('\n')), 2)]
        for (r1, r2) in zip(reference_reads, csfasta_reads):
            self.assertEqual(r1, r2)

    def test_getreads_qual(self):
        """qual files are yielded as 2-line records with the header skipped."""
        example_qual = os.path.join(self.wd, 'example.qual')
        with io.open(example_qual, 'wt') as fp:
            fp.write(self.example_qual_data)
        qual_reads = getreads(example_qual)
        reference_reads = [self.example_qual_data.split('\n')[i:(i + 2)] for i in range(2, len(self.example_qual_data.split('\n')), 2)]
        for (r1, r2) in zip(reference_reads, qual_reads):
            self.assertEqual(r1, r2)
def remove_view_op_from_sorted_graph(op: Operator) -> None:
    """Remove a view-style op from the graph, rewiring consumers past it.

    Non-reshape ops are delegated to the single-tensor removal helper.
    For a reshape, every consumer of its output tensor is re-pointed at its
    input tensor, output-ness (flag, name, shape) is transferred when the
    output was a graph output, and any shape-defining input tensors drop
    their back-reference to *op* before the output tensor is discarded.
    """
    if op._attrs['op'] != 'reshape':
        remove_single_tensor_op_from_sorted_graph(op)
        return
    src_tensor = op._attrs['inputs'][0]
    dst_tensor = op._attrs['outputs'][0]
    # The source no longer feeds this reshape; it inherits the reshape
    # output's consumers instead.
    src_tensor._attrs['dst_ops'].remove(op)
    src_tensor._attrs['dst_ops'].update(dst_tensor._attrs['dst_ops'])
    for consumer in dst_tensor._attrs['dst_ops']:
        consumer.replace_input_tensor(dst_tensor, src_tensor)
    if dst_tensor._attrs['is_output']:
        # Preserve the graph-output identity on the surviving tensor.
        src_tensor._attrs['is_output'] = True
        src_tensor._attrs['name'] = dst_tensor._attrs['name']
        src_tensor._attrs['shape'] = dst_tensor._attrs['shape']
    # Dynamic-shape inputs (beyond the data tensor) may repeat; detach once each.
    for dim_tensor in set(op._attrs['inputs'][1:]):
        dim_tensor._attrs['dst_ops'].remove(op)
    remove_tensor_from_sorted_graph(dst_tensor)
def test_saveload_lmer(df, tmp_path):
    """Round-trip an Lmer model through save_model/load_model and check that
    the restored model's coefficients and data match the fitted original."""
    fitted = Lmer('DV ~ IV3 + IV2 + (IV2|Group) + (1|IV3)', data=df)
    fitted.fit(summarize=False)
    joblib_path = tmp_path / 'model.joblib'
    rds_path = tmp_path / 'model.rds'
    save_model(fitted, joblib_path)
    # Saving an R-backed model writes a companion .rds next to the joblib.
    assert joblib_path.exists()
    assert rds_path.exists()
    restored = load_model(joblib_path)
    assert restored.coefs.equals(fitted.coefs)
    assert restored.data.equals(fitted.data)
class RepresentationGenerator(object): def __init__(self, version=None): local_session = LocalSession() self.logged_in_user = local_session.logged_in_user if (not self.logged_in_user): raise RuntimeError('Please login first!') from anima.dcc.mayaEnv import Maya self.maya_env = Maya() self.base_take_name = None self.version = version def get_local_root_nodes(cls): return [node for node in auxiliary.get_root_nodes() if (node.referenceFile() is None)] def get_latest_repr_version(self, take_name): from stalker import Version v = Version.query.filter((Version.task == self.version.task)).filter((Version.take_name == take_name)).filter((Version.is_published == True)).order_by(Version.version_number.desc()).first() if (v is None): v = Version(created_by=self.logged_in_user, task=self.version.task, take_name=take_name) v.is_published = True else: v.updated_by = self.logged_in_user return v def is_model_task(cls, task): task_type = task.type if task_type: if (task_type.name.lower() in ['model']): return True elif (task.name.lower() in ['model']): return True return False def is_look_dev_task(cls, task): task_type = task.type if task_type: if (task_type.name.lower() in ['look development']): return True elif (task.name.lower() in ['look_dev', 'lookdev', 'look dev']): return True return False def is_scene_assembly_task(cls, task): task_type = task.type if task_type: if (task_type.name.lower() in ['scene assembly']): return True elif (task.name.lower() in ['scene assembly']): return True return False def is_vegetation_task(cls, task): task_type = task.type if task_type: if (task_type.name.lower() in ['vegetation']): return True elif (task.name.lower() in ['vegetation']): return True return False def is_exterior_or_interior_task(cls, task): if (task.type and (task.type.name.lower() == 'layout')): parent = task.parent if (task.name.lower() == 'hires'): if (parent and parent.parent and parent.parent.type and (parent.parent.type.name.lower() in ['exterior', 'interior'])): return 
True elif (task.name.lower() == 'layout'): if (parent and parent.type and (parent.type.name.lower() in ['exterior', 'interior'])): return True return False def _validate_version(self, version): if (not version): raise RuntimeError('Please supply a valid Stalker Version object!') from stalker import Version if (not isinstance(version, Version)): raise TypeError('version should be a stalker.models.version.Version instance') r = Representation(version=version) self.base_take_name = r.get_base_take_name(version) if (not r.is_base()): raise RuntimeError(('This is not a Base take for this representation series, please open the base (%s) take!!!' % self.base_take_name)) return version def open_version(self, version=None): current_v = self.maya_env.get_current_version() if (current_v is not version): self.maya_env.open(version, force=True, skip_update_check=True, reference_depth=3) def make_unique(cls, filename, force=True): import uuid def generate_filename(): random_part = uuid.uuid4().hex[(- 4):] data = os.path.splitext(filename) return ('%s_%s%s' % (data[0], random_part, data[1])) if (not force): if os.path.exists(filename): new_filename = generate_filename() while os.path.exists(new_filename): new_filename = generate_filename() return new_filename else: return filename else: return generate_filename() def generate_all(self): self.generate_rs() def generate_bbox(self): self.version = self._validate_version(self.version) if (not os.path.exists(self.version.absolute_full_path)): raise RuntimeError(("Path doesn't exists: %s" % self.version.absolute_full_path)) self.open_version(self.version) task = self.version.task refs_with_no_bbox_repr = [] for ref in pm.listReferences(): if (ref.version and (not ref.has_repr('BBOX'))): refs_with_no_bbox_repr.append(ref) if len(refs_with_no_bbox_repr): raise RuntimeError(('Please generate the BBOX Representation of the references first!!!\n%s' % '\n'.join(map((lambda x: str(x.path)), refs_with_no_bbox_repr)))) if 
self.is_vegetation_task(task): for ref in pm.listReferences(): ref.load() pfx_polygons_node = pm.PyNode('kks___vegetation_pfxPolygons') all_children = [] for node in pfx_polygons_node.getChildren(): for child_node in node.getChildren(): all_children.append(child_node) auxiliary.replace_with_bbox(all_children) pm.delete('kks___vegetation_pfxStrokes') pm.delete('kks___vegetation_paintableGeos') elif self.is_scene_assembly_task(task): for ref in pm.listReferences(): ref.to_repr('BBOX') else: root_nodes = self.get_local_root_nodes() if len(root_nodes): all_children = [] for root_node in root_nodes: for child in root_node.getChildren(): all_children.append(child) auxiliary.replace_with_bbox(all_children) for ref in pm.listReferences(): ref.to_repr('BBOX') if self.is_exterior_or_interior_task(task): all_refs = pm.listReferences() while (len(all_refs) != 0): for ref in all_refs: if (not ref.isLoaded()): ref.load() ref.importContents() all_refs = pm.listReferences() take_name = ('%s%s%s' % (self.base_take_name, Representation.repr_separator, 'BBOX')) v = self.get_latest_repr_version(take_name) self.maya_env.save_as(v) pm.newFile(force=True) def generate_proxy(self): pass def generate_gpu(self): self.version = self._validate_version(self.version) if (not os.path.exists(self.version.absolute_full_path)): raise RuntimeError(("Path doesn't exists: %s" % self.version.absolute_full_path)) self.open_version(self.version) pm.loadPlugin('gpuCache') pm.loadPlugin('AbcExport') pm.loadPlugin('AbcImport') refs_with_no_gpu_repr = [] for ref in pm.listReferences(): if (ref.version and (not ref.has_repr('GPU'))): refs_with_no_gpu_repr.append(ref) if len(refs_with_no_gpu_repr): raise RuntimeError(('Please generate the GPU Representation of the references first!!!\n%s' % '\n'.join(map((lambda x: str(x.path)), refs_with_no_gpu_repr)))) for ref in pm.listReferences(): ref.unload() output_path = os.path.join(self.version.absolute_path, 'Outputs/alembic/').replace('\\', '/') gpu_command = 
'gpuCache -startTime %(start_frame)s -endTime %(end_frame)s -optimize -optimizationThreshold 40000 -writeMaterials -dataFormat ogawa -directory "%(path)s" -fileName "%(filename)s" %(node)s;' if (pm.versions.current() < 201450): gpu_command = 'gpuCache -startTime %(start_frame)s -endTime %(end_frame)s -optimize -optimizationThreshold 40000 -writeMaterials -directory "%(path)s" -fileName "%(filename)s" %(node)s;' start_frame = end_frame = int(pm.currentTime(q=1)) if (not self.is_scene_assembly_task(self.version.task)): if self.is_vegetation_task(self.version.task): for ref in pm.listReferences(): ref.load() try: pfx_polygons_node = pm.PyNode('kks___vegetation_pfxPolygons') except pm.MayaNodeError: pfx_polygons_node = None try: pfx_polygons_node = pm.PyNode('kks___vegetation_pfxPolygons') pfx_polygons_node_children = pfx_polygons_node.getChildren() except pm.MayaNodeError: pfx_polygons_node_children = [] for node in pfx_polygons_node_children: for child_node in node.getChildren(): child_node_name = child_node.name().split('___')[(- 1)] child_node_shape = child_node.getShape() child_node_shape_name = None if child_node_shape: child_node_shape_name = child_node_shape.name() pm.select(child_node) temp_output_fullpath = tempfile.mktemp().replace('\\', '/') (temp_output_path, temp_output_filename) = os.path.split(temp_output_fullpath) output_filename = ('%s_%s' % (self.version.nice_name, child_node_name.split(':')[(- 1)].replace(':', '_').replace('|', '_'))) pm.mel.eval((gpu_command % {'start_frame': start_frame, 'end_frame': start_frame, 'node': child_node.fullPath(), 'path': temp_output_path, 'filename': temp_output_filename})) cache_file_full_path = os.path.join(output_path, (output_filename + '.abc')).replace('\\', '/') try: os.makedirs(os.path.dirname(cache_file_full_path)) except OSError: pass shutil.move((temp_output_fullpath + '.abc'), cache_file_full_path) rp = pm.xform(child_node, q=1, ws=1, rp=1) sp = pm.xform(child_node, q=1, ws=1, sp=1) pm.delete(child_node) 
if os.path.exists(cache_file_full_path): gpu_node = pm.createNode('gpuCache') gpu_node_tra = gpu_node.getParent() pm.parent(gpu_node_tra, node) gpu_node_tra.rename(child_node_name) if (child_node_shape_name is not None): gpu_node.rename(child_node_shape_name) pm.xform(gpu_node_tra, ws=1, rp=rp) pm.xform(gpu_node_tra, ws=1, sp=sp) gpu_node.setAttr('cacheFileName', cache_file_full_path, type='string') else: print(('File not found!: %s' % cache_file_full_path)) try: pm.delete('kks___vegetation_pfxStrokes') except pm.MayaNodeError: pass try: pm.delete('kks___vegetation_paintableGeos') except pm.MayaNodeError: pass rs_proxy_lut = {} rs_proxy_meshes = pm.ls(type='RedshiftProxyMesh') for rs_proxy_mesh in rs_proxy_meshes: rs_proxy_mesh_file_path = rs_proxy_mesh.fileName.get() rs_proxy_mesh_file_name = os.path.basename(rs_proxy_mesh_file_path) cache_file_full_path = None if (rs_proxy_mesh_file_path in rs_proxy_lut): cache_file_full_path = rs_proxy_lut[rs_proxy_mesh_file_path] child_node = rs_proxy_mesh.outputs(type='mesh')[0] child_shape = child_node.getShape() root_node = child_node.getParent() child_name = child_node.name() child_shape_name = None if child_shape: child_shape_name = child_shape.name() tra = child_node.t.get() rot = child_node.r.get() sca = child_node.s.get() if (cache_file_full_path is None): temp_output_fullpath = tempfile.mktemp().replace('\\', '/') (temp_output_path, temp_output_filename) = os.path.split(temp_output_fullpath) output_filename = ('%s.abc' % os.path.splitext(os.path.basename(rs_proxy_mesh_file_name))[0]) pm.parent(child_node, world=1) child_node.t.set(0, 0, 0) child_node.r.set(0, 0, 0) child_node.s.set(1, 1, 1) rs_proxy_mesh.displayMode.set(1) pm.mel.eval((gpu_command % {'start_frame': start_frame, 'end_frame': end_frame, 'node': child_node.fullPath(), 'path': temp_output_path, 'filename': temp_output_filename})) pm.parent(child_node, root_node) child_node.t.set(tra) child_node.r.set(rot) child_node.s.set(sca) cache_file_full_path = 
os.path.join(output_path, output_filename).replace('\\', '/') rs_proxy_lut[rs_proxy_mesh_file_path] = cache_file_full_path try: os.makedirs(os.path.dirname(cache_file_full_path)) except OSError: pass shutil.move((temp_output_fullpath + '.abc'), cache_file_full_path) pm.delete(child_node) if os.path.exists(cache_file_full_path): gpu_node = pm.createNode('gpuCache') gpu_node_tra = gpu_node.getParent() pm.parent(gpu_node_tra, root_node) gpu_node_tra.rename(child_name) if (child_shape_name is not None): gpu_node.rename(child_shape_name) gpu_node_tra.t.set(tra) gpu_node_tra.r.set(rot) gpu_node_tra.s.set(sca) gpu_node.setAttr('cacheFileName', cache_file_full_path, type='string') else: root_nodes = self.get_local_root_nodes() if len(root_nodes): for root_node in root_nodes: for child_node in root_node.getChildren(): if (not isinstance(child_node, pm.nt.Transform)): continue if (not auxiliary.has_shape(child_node)): continue child_name = child_node.name() child_shape = child_node.getShape() child_shape_name = None if child_shape: child_shape_name = child_shape.name() child_full_path = child_node.fullPath()[1:].replace('|', '_') temp_output_fullpath = tempfile.mktemp().replace('\\', '/') (temp_output_path, temp_output_filename) = os.path.split(temp_output_fullpath) output_filename = ('%s_%s' % (self.version.nice_name, child_full_path)) for node in child_node.listRelatives(ad=1, type='mesh'): if (node.getAttr('aiSubdivType') != 0): node.displaySmoothMesh.set(2) node.smoothLevel.set(node.aiSubdivIterations.get()) pm.mel.eval((gpu_command % {'start_frame': start_frame, 'end_frame': end_frame, 'node': child_node.fullPath(), 'path': temp_output_path, 'filename': temp_output_filename})) cache_file_full_path = os.path.join(output_path, ('%s.abc' % output_filename)).replace('\\', '/') try: os.makedirs(os.path.dirname(cache_file_full_path)) except OSError: pass shutil.move((temp_output_fullpath + '.abc'), cache_file_full_path) rp = pm.xform(child_node, q=1, ws=1, rp=1) sp = 
pm.xform(child_node, q=1, ws=1, sp=1) pm.delete(child_node) if os.path.exists(cache_file_full_path): gpu_node = pm.createNode('gpuCache') gpu_node_tra = gpu_node.getParent() pm.parent(gpu_node_tra, root_node) gpu_node_tra.rename(child_name) if (child_shape_name is not None): gpu_node.rename(child_shape_name) pm.xform(gpu_node_tra, ws=1, rp=rp) pm.xform(gpu_node_tra, ws=1, sp=sp) gpu_node.setAttr('cacheFileName', cache_file_full_path, type='string') logger.debug('converting all references to GPU') for ref in pm.listReferences(): ref.to_repr('GPU') ref.load() task = self.version.task is_exterior_or_interior_task = self.is_exterior_or_interior_task(task) if is_exterior_or_interior_task: logger.debug('importing all references') all_refs = pm.listReferences() while (len(all_refs) != 0): for ref in all_refs: if (not ref.isLoaded()): ref.load() ref.importContents() all_refs = pm.listReferences() pm.sets('initialShadingGroup', e=1, fe=auxiliary.get_root_nodes()) self.clean_up() take_name = ('%s%s%s' % (self.base_take_name, Representation.repr_separator, 'GPU')) v = self.get_latest_repr_version(take_name) self.maya_env.save_as(v) if is_exterior_or_interior_task: logger.debug('exporting root nodes') pm.select(auxiliary.get_root_nodes()) pm.exportSelected(v.absolute_full_path, type='mayaAscii', force=True) logger.debug('renewing scene') pm.newFile(force=True) def make_tx(self, texture_path): tile_path = texture_path orig_path_as_tx = ''.join([os.path.splitext(texture_path)[0], '.tx']) if ('<' in tile_path): tile_path = tile_path.replace('<U>', '*') tile_path = tile_path.replace('<V>', '*') tile_path = tile_path.replace('<UDIM>', '*') import glob files_to_process = glob.glob(tile_path) for tile_path in files_to_process: tx_path = ''.join([os.path.splitext(tile_path)[0], '.tx']) if (not os.path.exists(tx_path)): cmd = ('maketx -o "%s" -u --oiio %s' % (tx_path, tile_path)) if (os.name == 'nt'): proc = subprocess.Popen(cmd, creationflags=subprocess.SW_HIDE, shell=True) else: proc 
= subprocess.Popen(cmd, shell=True) proc.wait() return orig_path_as_tx def clean_up(cls): pm.mel.eval('MLdeleteUnused') logger.debug('deleting unknown references') delete_nodes_types = ['reference', 'unknown'] for node in pm.ls(type=delete_nodes_types): node.unlock() logger.debug('deleting "delete_nodes_types"') try: pm.delete(pm.ls(type=delete_nodes_types)) except RuntimeError: pass def generate_ass(self): pm.loadPlugin('mtoa') active_panel = auxiliary.Playblaster.get_active_panel() show_plugin_shapes = pm.modelEditor(active_panel, q=1, pluginShapes=1) pm.modelEditor(active_panel, e=1, pluginShapes=False) self.version = self._validate_version(self.version) if (not os.path.exists(self.version.absolute_full_path)): raise RuntimeError(("Path doesn't exists: %s" % self.version.absolute_full_path)) self.open_version(self.version) task = self.version.task export_command = 'arnoldExportAss -f "%(path)s" -s -mask 60-lightLinks 1 -compressed -boundingBox -shadowLinks 1 -cam perspShape;' output_path = os.path.join(self.version.absolute_path, 'Outputs/ass/').replace('\\', '/') refs_with_no_ass_repr = [] for ref in pm.listReferences(): if (ref.version and (not ref.has_repr('ASS'))): refs_with_no_ass_repr.append(ref) if len(refs_with_no_ass_repr): raise RuntimeError(('Please generate the ASS Representation of the references first!!!\n%s' % '\n'.join(map((lambda x: str(x.path)), refs_with_no_ass_repr)))) if self.is_look_dev_task(task): for ref in pm.listReferences(): v = ref.version load_ref = False if v: ref_task = v.task if self.is_model_task(ref_task): load_ref = True if load_ref: ref.load() ref.importContents() maya_version = int(pm.about(v=1)) if (maya_version == 2014): types_and_attrs = {'aiImage': 'filename', 'file': 'fileTextureName', 'imagePlane': 'imageName'} else: types_and_attrs = {'aiImage': 'filename', 'file': ('computedFileTextureNamePattern', 'fileTextureName'), 'imagePlane': 'imageName'} for node_type in types_and_attrs.keys(): attr_name = 
types_and_attrs[node_type] set_attr_name = attr_name if isinstance(attr_name, tuple): set_attr_name = attr_name[1] attr_name = attr_name[0] for node in pm.ls(type=node_type): orig_path = node.getAttr(attr_name).replace('\\', '/') path = re.sub('(\\$REPO[0-9/]+)', '', orig_path) tx_path = self.make_tx(path) inputs = node.attr(set_attr_name).inputs(p=1) if len(inputs): for input_node_attr in inputs: input_node_attr.set(tx_path) else: node.setAttr(set_attr_name, tx_path) all_render_related_nodes = [node for node in pm.ls() if (node.type() in RENDER_RELATED_NODE_TYPES)] for node in all_render_related_nodes: if ((node.referenceFile() is None) and (node.name() not in READ_ONLY_NODE_NAMES)): node.rename(('%s_%s' % (node.name(), uuid.uuid4().hex))) nodes_to_ass_files = {} for root_node in auxiliary.get_root_nodes(): for child_node in root_node.getChildren(): if (not isinstance(child_node, pm.nt.Transform)): continue if (not auxiliary.has_shape(child_node)): continue child_node_name = child_node.fullPath().replace('|', '_').split(':')[(- 1)] child_node_full_path = child_node.fullPath() pm.select(child_node) child_node.rename(('%s_%s' % (child_node.name(), uuid.uuid4().hex))) output_filename = ('%s_%s.ass' % (self.version.nice_name, child_node_name)) output_full_path = os.path.join(output_path, output_filename) pm.mel.eval((export_command % {'path': output_full_path.replace('\\', '/')})) nodes_to_ass_files[child_node_full_path] = ('%s.gz' % output_full_path) pm.newFile(force=True) self.open_version(self.version) for ref in pm.listReferences(): ref.to_repr('ASS') all_stand_ins = pm.ls(type='aiStandIn') for ass_node in all_stand_ins: ass_tra = ass_node.getParent() full_path = ass_tra.fullPath() if (full_path in nodes_to_ass_files): ass_file_path = Repository.to_os_independent_path(nodes_to_ass_files[full_path]) ass_node.setAttr('dso', ass_file_path) elif self.is_vegetation_task(task): for ref in pm.listReferences(): ref.load() maya_version = int(pm.about(v=1)) if (maya_version 
== 2014): types_and_attrs = {'aiImage': 'filename', 'file': 'fileTextureName', 'imagePlane': 'imageName'} else: types_and_attrs = {'aiImage': 'filename', 'file': ('computedFileTextureNamePattern', 'fileTextureName'), 'imagePlane': 'imageName'} for node_type in types_and_attrs.keys(): attr_name = types_and_attrs[node_type] set_attr_name = attr_name if isinstance(attr_name, tuple): set_attr_name = attr_name[1] attr_name = attr_name[0] for node in pm.ls(type=node_type): orig_path = node.getAttr(attr_name).replace('\\', '/') path = re.sub('(\\$REPO[0-9/]+)', '', orig_path) tx_path = self.make_tx(path) inputs = node.attr(set_attr_name).inputs(p=1) if len(inputs): for input_node_attr in inputs: input_node_attr.set(tx_path) else: node.setAttr(set_attr_name, tx_path) for ref in pm.listReferences(): ref.importContents() all_render_related_nodes = [node for node in pm.ls() if (node.type() in RENDER_RELATED_NODE_TYPES)] for node in all_render_related_nodes: if ((node.referenceFile() is None) and (node.name() not in READ_ONLY_NODE_NAMES)): node.rename(('%s_%s' % (node.name(), uuid.uuid4().hex))) pfx_polygons_node = pm.PyNode('kks___vegetation_pfxPolygons') for node in pfx_polygons_node.getChildren(): for child_node in node.getChildren(): child_node_name = child_node.name().split('___')[(- 1)] pm.select(child_node) output_filename = ('%s_%s.ass' % (self.version.nice_name, child_node_name.replace(':', '_').replace('|', '_'))) output_full_path = os.path.join(output_path, output_filename) pm.mel.eval((export_command % {'path': output_full_path.replace('\\', '/')})) ass_node = auxiliary.create_arnold_stand_in(path=('%s.gz' % output_full_path)) ass_tra = ass_node.getParent() pm.parent(ass_tra, node) rp = pm.xform(child_node, q=1, ws=1, rp=1) sp = pm.xform(child_node, q=1, ws=1, sp=1) pm.xform(ass_tra, ws=1, rp=rp) pm.xform(ass_tra, ws=1, sp=sp) pm.delete(child_node) ass_tra.rename(('%s' % child_node_name)) pm.delete('kks___vegetation_pfxStrokes') 
pm.delete('kks___vegetation_paintableGeos') elif self.is_model_task(task): root_nodes = self.get_local_root_nodes() for root_node in root_nodes: for child_node in root_node.getChildren(type=pm.nt.Transform): child_node_name = child_node.name() rp = pm.xform(child_node, q=1, ws=1, rp=1) sp = pm.xform(child_node, q=1, ws=1, sp=1) pm.delete(child_node) ass_node = auxiliary.create_arnold_stand_in(path='') ass_tra = ass_node.getParent() pm.parent(ass_tra, root_node) ass_tra.rename(child_node_name) pm.xform(ass_tra, ws=1, rp=rp) pm.xform(ass_tra, ws=1, sp=sp) ass_node.setAttr('overrideShaders', False) ass_node.setAttr('overrideLightLinking', False) for ref in pm.listReferences(): ref.to_repr('ASS') ref.load() for node_name in ['initialShadingGroup', 'initialParticleSE']: node = pm.PyNode(node_name) node.setAttr('ai_surface_shader', (0, 0, 0), type='float3') node.setAttr('ai_volume_shader', (0, 0, 0), type='float3') is_exterior_or_interior_task = self.is_exterior_or_interior_task(task) if is_exterior_or_interior_task: all_refs = pm.listReferences() while (len(all_refs) != 0): for ref in all_refs: if (not ref.isLoaded()): ref.load() ref.importContents() all_refs = pm.listReferences() pm.sets('initialShadingGroup', e=1, fe=auxiliary.get_root_nodes()) pm.sets('initialShadingGroup', e=1, rm=pm.ls()) [node.setAttr('overrideLightLinking', False) for node in pm.ls(type='aiStandIn')] [node.setAttr('motionBlur', False) for node in pm.ls(type='aiStandIn')] self.clean_up() try: arnold_stand_in_default_light_set = pm.PyNode('ArnoldStandInDefaultLightSet') except pm.MayaNodeError: arnold_stand_in_default_light_set = pm.createNode('objectSet', name='ArnoldStandInDefaultLightSet') pm.select(None) pm.sets(arnold_stand_in_default_light_set, fe=pm.ls(type='aiStandIn')) take_name = ('%s%s%s' % (self.base_take_name, Representation.repr_separator, 'ASS')) v = self.get_latest_repr_version(take_name) self.maya_env.save_as(v) if is_exterior_or_interior_task: pm.select(auxiliary.get_root_nodes()) 
pm.exportSelected(v.absolute_full_path, type='mayaAscii', force=True) pm.newFile(force=True) active_panel = auxiliary.Playblaster.get_active_panel() pm.modelEditor(active_panel, e=1, pluginShapes=show_plugin_shapes) def generate_rs(self): try: pm.loadPlugin('redshift4maya') except RuntimeError: return try: pm.pluginInfo('redshift4maya', query=True, vendor=True) except RuntimeError: return self.version = self._validate_version(self.version) if (not os.path.exists(self.version.absolute_full_path)): raise RuntimeError(("Path doesn't exists: %s" % self.version.absolute_full_path)) self.open_version(self.version) task = self.version.task export_command = 'rsProxy -fp "%(path)s" -c -z -sl;' output_path = os.path.join(self.version.absolute_path, 'Outputs/rs/').replace('\\', '/') refs_with_no_ass_repr = [] for ref in pm.listReferences(): if (ref.version and (not ref.has_repr('RS'))): refs_with_no_ass_repr.append(ref) if len(refs_with_no_ass_repr): raise RuntimeError(('Please generate the RS Representation of the references first!!!\n%s' % '\n'.join(map((lambda x: str(x.path)), refs_with_no_ass_repr)))) if self.is_model_task(task): root_nodes = self.get_local_root_nodes() for root_node in root_nodes: for child_node in root_node.getChildren(type=pm.nt.Transform): child_node_name = child_node.name() rp = pm.xform(child_node, q=1, ws=1, rp=1) sp = pm.xform(child_node, q=1, ws=1, sp=1) pm.delete(child_node) (rs_proxy_node, rs_proxy_mesh) = auxiliary.create_rs_proxy_node(path='') rs_proxy_tra = rs_proxy_mesh.getParent() pm.parent(rs_proxy_tra, root_node) rs_proxy_tra.rename(child_node_name) pm.xform(rs_proxy_tra, ws=1, rp=rp) pm.xform(rs_proxy_tra, ws=1, sp=sp) rs_proxy_tra.overrideEnabled.set(1) rs_proxy_tra.overrideShading.set(0) elif self.is_look_dev_task(task): for ref in pm.listReferences(): v = ref.version load_ref = False if v: ref_task = v.task if self.is_model_task(ref_task): load_ref = True if load_ref: ref.load() ref.importContents() nodes_to_rs_files = {} for 
root_node in auxiliary.get_root_nodes(): for child_node in root_node.getChildren(): if (not isinstance(child_node, pm.nt.Transform)): continue if (not auxiliary.has_shape(child_node)): continue child_node_name = child_node.fullPath().replace('|', '_').split(':')[(- 1)] child_node_full_path = child_node.fullPath() pm.select(child_node) child_node.rename(('%s_%s' % (child_node.name(), uuid.uuid4().hex))) output_filename = ('%s_%s.rs' % (self.version.nice_name, child_node_name)) output_full_path = os.path.join(output_path, output_filename) temp_full_path = tempfile.mktemp(suffix='.rs') try: os.makedirs(output_path) except OSError: pass try: pm.mel.eval((export_command % {'path': temp_full_path.replace('\\', '/')})) try: shutil.move(temp_full_path, output_full_path) except OSError: if (os.name == 'posix'): os.remove(output_full_path) shutil.move(temp_full_path, output_full_path) nodes_to_rs_files[child_node_full_path] = output_full_path except pm.MelError: pass pm.newFile(force=True) self.open_version(self.version) for ref in pm.listReferences(): ref.to_repr('RS') all_proxies = pm.ls(type='RedshiftProxyMesh') for rs_proxy_node in all_proxies: rs_proxy_tra = rs_proxy_node.outMesh.outputs()[0] full_path = rs_proxy_tra.fullPath() if (full_path in nodes_to_rs_files): proxy_file_path = nodes_to_rs_files[full_path] rs_proxy_node.setAttr('fileName', proxy_file_path) elif self.is_vegetation_task(task): for ref in pm.listReferences(): ref.load() for ref in pm.listReferences(): ref.importContents() try: pfx_polygons_node = pm.PyNode('kks___vegetation_pfxPolygons') pfx_polygons_node_children = pfx_polygons_node.getChildren() except pm.MayaNodeError: pfx_polygons_node_children = [] for node in pfx_polygons_node_children: for child_node in node.getChildren(): child_node_name = child_node.name().split('___')[(- 1)] pm.select(child_node) output_filename = ('%s_%s.rs' % (self.version.nice_name, child_node_name.replace(':', '_').replace('|', '_'))) output_full_path = 
os.path.join(output_path, output_filename) try: os.makedirs(os.path.dirname(output_full_path)) except OSError: pass pm.mel.eval((export_command % {'path': output_full_path.replace('\\', '/')})) (rs_proxy_mesh_node, rs_proxy_mesh_shape) = auxiliary.create_rs_proxy_node(path=output_full_path) rs_proxy_tra = rs_proxy_mesh_shape.getParent() pm.parent(rs_proxy_tra, node) rp = pm.xform(child_node, q=1, ws=1, rp=1) sp = pm.xform(child_node, q=1, ws=1, sp=1) pm.xform(rs_proxy_tra, ws=1, rp=rp) pm.xform(rs_proxy_tra, ws=1, sp=sp) pm.delete(child_node) rs_proxy_tra.rename(('%s' % child_node_name)) rs_proxy_tra.overrideEnabled.set(1) rs_proxy_tra.overrideShading.set(0) try: pm.delete('kks___vegetation_pfxStrokes') except pm.MayaNodeError: pass try: pm.delete('kks___vegetation_paintableGeos') except pm.MayaNodeError: pass for ref in pm.listReferences(): ref.to_repr('RS') ref.load() is_exterior_or_interior_task = self.is_exterior_or_interior_task(task) if is_exterior_or_interior_task: all_refs = pm.listReferences() while (len(all_refs) != 0): for ref in all_refs: if (not ref.isLoaded()): ref.load() ref.importContents() all_refs = pm.listReferences() self.clean_up() take_name = ('%s%s%s' % (self.base_take_name, Representation.repr_separator, 'RS')) v = self.get_latest_repr_version(take_name) self.maya_env.save_as(v) if is_exterior_or_interior_task: pm.select(auxiliary.get_root_nodes()) pm.exportSelected(v.absolute_full_path, type='mayaAscii', force=True) pm.newFile(force=True)
def load_cfda(original_df, new_df):
    """Upsert CFDA rows from *new_df* into the Cfda table.

    Returns False (doing nothing) when *new_df* equals *original_df*;
    otherwise upserts every row keyed on ``program_number`` and returns True.
    Side effect: resets and maintains the module-level ``Reporter`` counters
    for new vs. updated records.
    """
    if new_df.equals(original_df):
        logger.info('Skipping CFDA load, no new data')
        return False
    Reporter['new_record_count'] = 0
    Reporter['updated_record_count'] = 0
    logger.info('Inserting new CFDA data')
    for row in new_df.itertuples():
        fields = row._asdict()
        fields.pop('Index')  # itertuples() includes the index; not a model field
        _, was_created = Cfda.objects.update_or_create(
            program_number=fields['program_number'], defaults=fields
        )
        counter_key = 'new_record_count' if was_created else 'updated_record_count'
        Reporter[counter_key] += 1
    logger.info('Completed data load')
    return True
def find_conn_info() -> ConnInfo:
    """Locate and parse the storage-server connection configuration.

    Resolution order: the process-wide cache, the ``ENV_VAR`` environment
    variable, then a ``storage_server.json`` file searched upward from the
    current working directory to the filesystem root.

    Raises:
        RuntimeError: if no configuration is found, or it fails JSON parsing
            or pydantic validation.
    """
    global _CACHED_CONN_INFO
    if _CACHED_CONN_INFO is not None:
        return _CACHED_CONN_INFO
    conn_str = os.environ.get(ENV_VAR)
    if not conn_str:
        # Walk up from cwd looking for a storage_server.json file.
        root = Path('/')
        path = Path.cwd()
        while path != root:
            try:
                conn_str = (path / 'storage_server.json').read_text()
                break
            except FileNotFoundError:
                path = path.parent
    if not conn_str:
        raise RuntimeError('No Storage connection configuration found')
    try:
        # Fixed typo in the error message ("conneciton") and merged the two
        # identical except clauses.
        conn_info = ConnInfo.parse_obj(json.loads(conn_str))
    except (json.JSONDecodeError, ValidationError) as e:
        raise RuntimeError('Invalid storage connection configuration') from e
    _CACHED_CONN_INFO = conn_info
    return conn_info
class role_request(message):
    """OpenFlow role-request message (wire version 5, message type 24).

    Generated-style serialization code.  The role field uses the OFPCR_ROLE_*
    values.  String-based packing with zero-byte padding is Python-2-era byte
    handling -- presumably this module targets Python 2.
    """
    version = 5
    type = 24

    def __init__(self, xid=None, role=None, generation_id=None):
        # xid is left as None so the caller/framework can assign one later;
        # role and generation_id default to 0.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (role != None):
            self.role = role
        else:
            self.role = 0
        if (generation_id != None):
            self.generation_id = generation_id
        else:
            self.generation_id = 0
        return

    def pack(self):
        """Serialize to the wire format; the length field is patched in last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, fixed below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.role))
        packed.append(('\x00' * 4))  # pad
        packed.append(struct.pack('!Q', self.generation_id))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse a role_request from *reader*; asserts fixed header fields.

        NOTE(review): *reader* is the first parameter but there is no
        @staticmethod decorator -- loxi normally generates this as a
        staticmethod; confirm the decorator was not lost in extraction.
        """
        obj = role_request()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 24)
        _length = reader.read('!H')[0]
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)  # bound reads to this message
        obj.xid = reader.read('!L')[0]
        obj.role = reader.read('!L')[0]
        reader.skip(4)  # pad
        obj.generation_id = reader.read('!Q')[0]
        return obj

    def __eq__(self, other):
        # Field-wise equality; requires exact type match (not isinstance).
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.role != other.role):
            return False
        if (self.generation_id != other.generation_id):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump through the pretty-printer *q*."""
        q.text('role_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('role = ')
                # Show the symbolic OFPCR role name when the value is known.
                value_name_map = {0: 'OFPCR_ROLE_NOCHANGE', 1: 'OFPCR_ROLE_EQUAL', 2: 'OFPCR_ROLE_MASTER', 3: 'OFPCR_ROLE_SLAVE'}
                if (self.role in value_name_map):
                    q.text(('%s(%d)' % (value_name_map[self.role], self.role)))
                else:
                    q.text(('%#x' % self.role))
                q.text(',')
                q.breakable()
                q.text('generation_id = ')
                q.text(('%#x' % self.generation_id))
            q.breakable()
        q.text('}')
# NOTE(review): `_meta(basic.UseAttack)` appeared without the leading '@' in
# the pasted source, which is a syntax error; restored as a decorator
# application -- confirm against the original.
@_meta(basic.UseAttack)
class UseAttack:
    """UI/meta adapter for the basic.UseAttack action."""

    def choose_card_text(self, act, cards):
        """Return (valid, prompt) for the *cards* chosen for action *act*."""
        if act.cond(cards):
            return True, '?!'
        return False, '...'

    def effect_string(self, act):
        """One-line description of a succeeded use; None when it failed."""
        if not act.succeeded:
            return None
        t = act.target
        return f'{N.char(t)}使用了{N.card(act.card)}'
class SyncSQServer(SyncServer):
    """Synchronous FL server whose channel applies scalar quantization.

    Tracks the per-parameter quantization params (scale, zero_point) of the
    last aggregated model and attaches them to incoming client messages so
    the channel can quantize consistently.
    """

    def __init__(self, *, global_model: IFLModel, channel: Optional[ScalarQuantizationChannel] = None, **kwargs):
        init_self_cfg(self, component_class=__class__, config_class=SyncSQServerConfig, **kwargs)
        super().__init__(global_model=global_model, channel=channel, **kwargs)
        if not isinstance(self._channel, ScalarQuantizationChannel):
            # Fixed: previously two separate string args were passed to
            # TypeError, producing a tuple-valued message.
            raise TypeError(
                'SyncSQServer expects channel of type ScalarQuantizationChannel,'
                f' {type(self._channel)} given.'
            )
        # name -> (scale, zero_point) from the most recent aggregation
        self._global_qparams: Dict[str, Tuple[Tensor, Tensor]] = {}

    # NOTE(review): @property restored -- receive_update_from_client reads
    # self.global_qparams without calling it, so this was clearly intended
    # as a property (the decorator was lost in extraction).
    @property
    def global_qparams(self):
        """Latest per-parameter qparams; empty until update_qparams() runs."""
        return self._global_qparams

    def update_qparams(self, aggregated_model: nn.Module):
        """Recompute qparams for every tensor in *aggregated_model*'s state dict."""
        observer, _ = self._channel.get_observers_and_quantizers()
        for name, param in aggregated_model.state_dict().items():
            observer.reset_min_max_vals()  # observers accumulate; reset per tensor
            _ = observer(param.data)
            self._global_qparams[name] = observer.calculate_qparams()

    def receive_update_from_client(self, message: Message):
        # Attach the current global qparams before the normal handling.
        message.qparams = self.global_qparams
        super().receive_update_from_client(message)
class bsn_vlan_counter_stats_reply(bsn_stats_reply):
    """Big Switch (BSN) experimenter stats reply carrying per-VLAN counters.

    Wire version 4, message type 19, experimenter stats (stats_type 0xffff)
    with experimenter id 6035143 and subtype 9.  Generated-style
    serialization; string-based packing suggests a Python 2 target.
    """
    version = 4
    type = 19
    stats_type = 65535
    experimenter = 6035143
    subtype = 9

    def __init__(self, xid=None, flags=None, entries=None):
        # xid stays None so the framework can assign it; flags default to 0,
        # entries to an empty list of per-VLAN counter entries.
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (entries != None):
            self.entries = entries
        else:
            self.entries = []
        return

    def pack(self):
        """Serialize to wire bytes; the length field is patched in last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, fixed below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # pad
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        packed.append(loxi.generic_util.pack_list(self.entries))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Parse from *reader*; asserts the fixed header fields match.

        NOTE(review): no @staticmethod decorator although *reader* is the
        first parameter -- loxi normally generates this as a staticmethod;
        confirm the decorator was not lost in extraction.
        """
        obj = bsn_vlan_counter_stats_reply()
        _version = reader.read('!B')[0]
        assert (_version == 4)
        _type = reader.read('!B')[0]
        assert (_type == 19)
        _length = reader.read('!H')[0]
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)  # bound reads to this message
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 65535)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)  # pad
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 9)
        obj.entries = loxi.generic_util.unpack_list(reader, ofp.common.bsn_vlan_counter_stats_entry.unpack)
        return obj

    def __eq__(self, other):
        # Field-wise equality; requires exact type match (not isinstance).
        if (type(self) != type(other)):
            return False
        if (self.xid != other.xid):
            return False
        if (self.flags != other.flags):
            return False
        if (self.entries != other.entries):
            return False
        return True

    def pretty_print(self, q):
        """Render a human-readable dump through the pretty-printer *q*."""
        q.text('bsn_vlan_counter_stats_reply {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REPLY_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('entries = ')
                q.pp(self.entries)
            q.breakable()
        q.text('}')
class ListeningAnimator(Animator):
    """Animated "listening" indicator: a row of vertical bars whose heights
    oscillate sinusoidally, rendered with cairo."""

    def __init__(self, window, **properties):
        super().__init__(**properties)
        self.window = window
        self.tc = 0  # animation phase, advanced on every draw

    def draw(self, ctx, width, height):
        # Advance the phase and wrap it within one full period.
        self.tc = (self.tc + 0.2) % (2 * math.pi)
        cx = width / 2
        cy = height / 2
        for i in range(-4, 5):
            ctx.set_source_rgb(0.2, 0.5, 1)
            ctx.set_line_width(6)
            ctx.set_line_cap(cairo.LINE_CAP_ROUND)
            x = cx + i * 10
            if i % 2 == 0:
                half = 3 - 8 * math.sin(self.tc + i)
            else:
                ctx.set_source_rgb(0.2, 0.7, 1)  # alternate bars are lighter
                half = 3 - 8 * math.cos(self.tc - i)
            # Bar is symmetric about the vertical centre line.
            ctx.move_to(x, cy + half)
            ctx.line_to(x, cy - half)
            # Stroke each bar individually so its own colour applies.
            # NOTE(review): the stroke's loop placement was ambiguous in the
            # pasted source; per-bar stroking matches the two-colour intent.
            ctx.stroke()
# NOTE(review): the bare parenthesized tuple below was almost certainly a
# decorator whose name and leading '@' were lost in extraction -- the keywords
# match Cython compiler directives (boundscheck/wraparound/cdivision/
# nonecheck).  As written this line is not valid Python; confirm against the
# original source.
(boundscheck=False, wraparound=False, cdivision=True, nonecheck=False)
def row_sum_loops(arr: V2d, columns: V1d_i):
    """Sum, for each row of *arr*, the entries at the given *columns*.

    Cython-style pure-Python code: V2d/V1d/V1d_i/T/T_index are presumably
    project type aliases (confirm).  Returns a 1-D array of length
    arr.shape[0] holding the per-row sums.
    """
    i: T_index
    j: T_index
    sum_: T
    # The scalar type of the first element fixes the accumulator and output
    # dtype, so the result matches the input's element type.
    dtype = type(arr[(0, 0)])
    res: V1d = np.empty(arr.shape[0], dtype=dtype)
    for i in range(arr.shape[0]):
        sum_ = dtype(0)
        for j in range(columns.shape[0]):
            sum_ += arr[(i, columns[j])]
        res[i] = sum_
    return res
def _prepare_outputs(output_tensors):
    """Allocate one fp16 CUDA buffer per output tensor descriptor.

    Every entry of *output_tensors* must carry a static shape whose dims are
    IntImm; the concrete extents are read from ``d._attrs['values'][0]``.
    """
    def _static_extents(shape):
        extents = []
        for dim in shape:
            assert isinstance(dim, IntImm)  # dynamic dims are not supported here
            extents.append(dim._attrs['values'][0])
        return extents

    return [
        torch.empty(_static_extents(tensor._attrs['shape'])).half().cuda()
        for tensor in output_tensors
    ]
def migrate_comparisons(mongo: MigrationMongoInterface):
    """Copy firmware-comparison results from MongoDB into the comparison DB.

    Already-migrated comparisons are left untouched; entries referencing
    firmware UIDs missing from the DB are skipped with a warning.
    """
    compare_db = ComparisonDbInterface()
    count = 0
    for entry in mongo.compare_results.find({}):
        comparison_id = entry['_id']
        if compare_db.comparison_exists(comparison_id):
            continue
        # The id is a ';'-joined list of firmware UIDs; all must exist.
        if not compare_db.all_uids_found_in_database(comparison_id.split(';')):
            logging.warning(f'Could not migrate comparison {comparison_id}: not all firmwares found in the DB')
            continue
        results = {key: value for key, value in entry.items() if key not in ['_id', 'submission_date']}
        compare_db.insert_comparison(comparison_id, results)
        count += 1
    if not count:
        print('No firmware comparison entries to migrate')
    else:
        print(f'Migrated {count} comparison DB entries')
def _import_irap_ascii(mfile):
    """Read an Irap ASCII surface via the C backend; return surface kwargs."""
    logger.debug('Enter function...')
    cfhandle = mfile.get_cfhandle()
    # First pass reads only the header so we learn the grid dimensions.
    xlist = _cxtgeo.surf_import_irap_ascii(cfhandle, 0, 1, 0)
    nvn = xlist[1] * xlist[2]  # ncol * nrow
    # Second pass reads the full value array.
    xlist = _cxtgeo.surf_import_irap_ascii(cfhandle, 1, nvn, 0)
    ier, ncol, nrow, _, xori, yori, xinc, yinc, rot, val = xlist
    if ier != 0:
        mfile.cfclose()
        raise RuntimeError(f'Problem in {__name__}, code {ier}')
    val = np.reshape(val, (ncol, nrow), order='C')
    # Values above the UNDEF limit are the file's "undefined" sentinel.
    val = ma.masked_greater(val, xtgeo.UNDEF_LIMIT)
    if np.isnan(val).any():
        logger.info('NaN values are found, will mask...')
        val = ma.masked_invalid(val)
    # A negative yinc encodes a flipped Y axis; normalize and record the flip.
    yflip = 1
    if yinc < 0.0:
        yinc = -yinc
        yflip = -1
    mfile.cfclose()
    return {
        'ncol': ncol,
        'nrow': nrow,
        'xori': xori,
        'yori': yori,
        'xinc': xinc,
        'yinc': yinc,
        'yflip': yflip,
        'rotation': rot,
        'values': val,
    }
class MyDialog(HasTraits):
    """Two embedded Mayavi scenes side by side, each with a Redraw button."""

    scene1 = Instance(MlabSceneModel, ())
    scene2 = Instance(MlabSceneModel, ())
    button1 = Button('Redraw')
    button2 = Button('Redraw')

    # NOTE(review): both decorators below appeared without a leading '@' and
    # with a mangled name ('_trait_change') in the pasted source; restored as
    # the conventional traits change handlers -- confirm `on_trait_change` is
    # imported from traits.api at the top of the file.
    @on_trait_change('button1')
    def redraw_scene1(self):
        self.redraw_scene(self.scene1)

    @on_trait_change('button2')
    def redraw_scene2(self):
        self.redraw_scene(self.scene2)

    def redraw_scene(self, scene):
        # Target the right figure explicitly: clear it, then scatter
        # 100 random points with random scalars.
        mlab.clf(figure=scene.mayavi_scene)
        x, y, z, s = np.random.random((4, 100))
        mlab.points3d(x, y, z, s, figure=scene.mayavi_scene)

    # Layout: the two scenes split horizontally, each with its button below.
    view = View(
        HSplit(
            Group(
                Item('scene1', editor=SceneEditor(), height=250, width=300),
                'button1',
                show_labels=False,
            ),
            Group(
                Item('scene2', editor=SceneEditor(), height=250, width=300, show_label=False),
                'button2',
                show_labels=False,
            ),
        ),
        resizable=True,
    )
class CpuLoad(SensorInterface):
    """Sensor reporting per-core CPU load as a diagnostic status message
    (DiagnosticStatus/KeyValue), sampled via psutil."""

    def __init__(self, hostname='', interval=5.0, warn_level=0.9):
        # fraction (0..1) of load above which a core counts toward a warning
        self._cpu_load_warn = warn_level
        # how many of the busiest processes to list in the diagnostics
        self._count_processes = 3
        SensorInterface.__init__(self, hostname, sensorname='CPU Load', interval=interval)

    def reload_parameter(self, settings):
        # Refresh thresholds from the settings store, keeping current values
        # as defaults.
        self._cpu_load_warn = settings.param('sysmon/CPU/load_warn_level', self._cpu_load_warn)
        self._count_processes = settings.param('sysmon/CPU/count_processes', 3)

    def check_sensor(self):
        """Sample per-core load and update the shared status message."""
        cpu_percents = psutil.cpu_percent(interval=None, percpu=True)
        diag_level = 0
        diag_vals = []
        diag_msg = ('warn at >%.2f%%' % (self._cpu_load_warn * 100.0))
        warn_level = self._cpu_load_warn
        # NOTE(review): diag_level was just set to 0, so this hysteresis
        # branch (lowering the threshold while already warning) can never
        # trigger here; it presumably predates a refactor -- confirm intent.
        if (diag_level == DiagnosticStatus.WARN):
            warn_level = (warn_level * 0.9)
        count_warn_cpu = 0
        cpu_max_percent = 0
        cpu_percent_total = 0
        for cpu_idx in range(len(cpu_percents)):
            cpu_percent = cpu_percents[cpu_idx]
            if (cpu_percent > cpu_max_percent):
                cpu_max_percent = cpu_percent
            if ((cpu_percents[cpu_idx] / 100.0) >= warn_level):
                count_warn_cpu += 1
            cpu_percent_total += cpu_percent
        # NOTE(review): 'Max [%]' is passed as a number while 'Avg [%]' is
        # pre-formatted to str; verify KeyValue.value accepts non-strings.
        diag_vals.append(KeyValue(key='Max [%]', value=cpu_max_percent))
        diag_vals.append(KeyValue(key='Avg [%]', value=('%.2f' % (cpu_percent_total / len(cpu_percents)))))
        if (count_warn_cpu > 1):
            # Warn only when more than one core exceeds the threshold.
            diag_level = DiagnosticStatus.WARN
            diag_msg = ('CPU load of %d cores is >%.0f%%' % (count_warn_cpu, (self._cpu_load_warn * 100)))
        try:
            # Best-effort: list the busiest processes above the threshold,
            # sorted by CPU usage, capped at self._count_processes.
            processes = []
            for pi in sorted(psutil.process_iter(attrs=['name', 'cpu_percent']), key=(lambda pi: pi.info['cpu_percent']), reverse=True):
                if ((pi.info['cpu_percent'] / 100.0) >= warn_level):
                    phlmsg = ('%.2f%% %s [%d]' % (pi.info['cpu_percent'], pi.info['name'], pi.pid))
                    processes.append(phlmsg)
                if (len(processes) >= self._count_processes):
                    break
            for msg in processes:
                diag_vals.append(KeyValue(key='Process load', value=msg))
        except Exception:
            # process iteration can race with exiting processes; ignore
            pass
        with self.mutex:
            # Publish the results under the lock shared with readers.
            self._ts_last = time.time()
            self._stat_msg.level = diag_level
            self._stat_msg.values = diag_vals
            self._stat_msg.message = diag_msg
def _detect_unpack_loss(binary: bytes, extracted_files: List[Path], meta_data: Dict, header_overhead: int):
    """Compare packed vs. unpacked sizes and record whether data was lost.

    Mutates *meta_data* in place, setting 'size_packed', 'size_unpacked' and
    a one-element 'summary' list; returns nothing.
    """
    # Discount the encoding-overhead fraction and the container header from
    # the padding-stripped binary size to get the effective payload size.
    payload_factor = 1 - meta_data.get('encoding_overhead', 0)
    packed_size = get_binary_size_without_padding(binary) * payload_factor - header_overhead
    unpacked_size = _total_size_of_extracted_files(extracted_files)
    meta_data['size_packed'] = packed_size
    meta_data['size_unpacked'] = unpacked_size
    if packed_size > unpacked_size:
        meta_data['summary'] = ['data lost']
    else:
        meta_data['summary'] = ['no data lost']
class TestDownsampleParamSource:
    """Parameter-resolution tests for params.DownsampleParamSource."""

    @staticmethod
    def _resolve(test_track, raw_params):
        # Build the param source and return one resolved params() dict.
        return params.DownsampleParamSource(test_track, params=raw_params).params()

    def test_downsample_all_params(self):
        # All three parameters given explicitly.
        resolved = self._resolve(
            track.Track(name='unit-test'),
            {'source-index': 'test-source-index', 'target-index': 'test-target-index', 'fixed-interval': '1m'},
        )
        assert resolved['fixed-interval'] == '1m'
        assert resolved['source-index'] == 'test-source-index'
        assert resolved['target-index'] == 'test-target-index'

    def test_downsample_default_index_param(self):
        # source-index falls back to the track's index name.
        resolved = self._resolve(
            track.Track(name='unit-test', indices=[track.Index(name='test-source-index', body='index.json')]),
            {'fixed-interval': '1m', 'target-index': 'test-target-index'},
        )
        assert resolved['fixed-interval'] == '1m'
        assert resolved['source-index'] == 'test-source-index'
        assert resolved['target-index'] == 'test-target-index'

    def test_downsample_source_index_override_default_index_param(self):
        # An explicit source-index beats the track's index name.
        resolved = self._resolve(
            track.Track(name='unit-test', indices=[track.Index(name='test-source-index', body='index.json')]),
            {'source-index': 'another-index', 'fixed-interval': '1m', 'target-index': 'test-target-index'},
        )
        assert resolved['fixed-interval'] == '1m'
        assert resolved['source-index'] == 'another-index'
        assert resolved['target-index'] == 'test-target-index'

    def test_downsample_empty_params(self):
        # With no params at all, interval defaults to 1h and the target
        # index is derived from source index + interval.
        resolved = self._resolve(track.Track(name='unit-test'), {})
        assert resolved['fixed-interval'] == '1h'
        assert resolved['target-index'] == f"{resolved['source-index']}-{resolved['fixed-interval']}"
class BlockCursor(StmtCursorPrototype, ListCursorPrototype):
    """Cursor over a contiguous block of statements within a procedure."""

    def as_block(self):
        # Already a block cursor; identity conversion.
        return self

    def expand(self, delta_lo=None, delta_hi=None):
        """Return a new BlockCursor widened downward/upward by the given
        (non-negative) statement counts."""
        for arg_name, delta in (('delta_lo', delta_lo), ('delta_hi', delta_hi)):
            if delta is not None and delta < 0:
                raise ValueError(f'{arg_name} must be non-negative')
        assert isinstance(self._impl, C.Block)
        return BlockCursor(self._impl.expand(delta_lo, delta_hi), self._proc)

    def anchor(self) -> StmtCursor:
        """Cursor to the statement enclosing this block."""
        assert isinstance(self._impl, C.Block)
        return lift_cursor(self._impl.parent(), self._proc)

    def before(self) -> GapCursor:
        """Gap immediately preceding the block."""
        assert isinstance(self._impl, C.Block)
        return lift_cursor(self._impl.before(), self._proc)

    def after(self) -> GapCursor:
        """Gap immediately following the block."""
        assert isinstance(self._impl, C.Block)
        return lift_cursor(self._impl.after(), self._proc)
def install_activate(env_dir, args):
    """Write the activate/deactivate scripts and node shim into *env_dir*.

    Chooses per-platform script names and bin directory, substitutes the
    template placeholders, and finally creates a ``nodejs`` alias for the
    ``node`` executable (symlink on POSIX, mklink on Windows).
    """
    if is_WIN:
        files = {'activate.bat': ACTIVATE_BAT, 'deactivate.bat': DEACTIVATE_BAT, 'Activate.ps1': ACTIVATE_PS1}
        bin_dir = join(env_dir, 'Scripts')
        shim_node = join(bin_dir, 'node.exe')
        shim_nodejs = join(bin_dir, 'nodejs.exe')
    else:
        files = {'activate': ACTIVATE_SH, 'activate.fish': ACTIVATE_FISH, 'shim': SHIM}
        bin_dir = join(env_dir, 'bin')
        shim_node = join(bin_dir, 'node')
        shim_nodejs = join(bin_dir, 'nodejs')
    if is_CYGWIN:
        mkdir(bin_dir)
    if (args.node == 'system'):
        # System node: ship a shim script named 'node' instead of a binary.
        files['node'] = SHIM
    mod_dir = join('lib', 'node_modules')
    prompt = (args.prompt or ('(%s)' % os.path.basename(os.path.abspath(env_dir))))
    if (args.node == 'system'):
        # Locate the system node/nodejs, searching PATH with the env's own
        # bin dir removed so we do not find our shim.
        env = os.environ.copy()
        env.update({'PATH': remove_env_bin_from_path(env['PATH'], bin_dir)})
        for candidate in ('nodejs', 'node'):
            (which_node_output, _) = subprocess.Popen(['which', candidate], stdout=subprocess.PIPE, env=env).communicate()
            shim_node = clear_output(which_node_output)
            if shim_node:
                break
        assert shim_node, 'Did not find nodejs or node system executable'
    for (name, content) in files.items():
        file_path = join(bin_dir, name)
        # Substitute the template placeholders for this environment.
        content = content.replace('__NODE_VIRTUAL_PROMPT__', prompt)
        content = content.replace('__NODE_VIRTUAL_ENV__', os.path.abspath(env_dir))
        content = content.replace('__SHIM_NODE__', shim_node)
        content = content.replace('__BIN_NAME__', os.path.basename(bin_dir))
        content = content.replace('__MOD_NAME__', mod_dir)
        if is_CYGWIN:
            # Cygwin npm needs a Windows-style prefix path.
            (_, cyg_bin_dir) = callit(['cygpath', '-w', os.path.abspath(bin_dir)], show_stdout=False, in_shell=False)
            content = content.replace('__NPM_CONFIG_PREFIX__', cyg_bin_dir[0])
        else:
            content = content.replace('__NPM_CONFIG_PREFIX__', '$NODE_VIRTUAL_ENV')
        need_append = False
        if args.python_virtualenv:
            # Nested inside a Python virtualenv: wrap with prompt
            # disable/enable snippets so both prompts compose; append when a
            # disable snippet exists for this script.
            disable_prompt = DISABLE_PROMPT.get(name, '')
            enable_prompt = ENABLE_PROMPT.get(name, '')
            content = ((disable_prompt + content) + enable_prompt)
            need_append = bool(disable_prompt)
        writefile(file_path, content, append=need_append)
    if (not os.path.exists(shim_nodejs)):
        if is_WIN:
            try:
                callit(['mklink', shim_nodejs, 'node.exe'], True, True)
            except OSError:
                logger.error('Error: Failed to create nodejs.exe link')
        else:
            os.symlink('node', shim_nodejs)
class EmvistaApi(ProviderInterface, TextInterface):
    """Emvista (Prevyo) NLP provider.

    Implements summarize, syntax analysis, anonymization, sentiment
    analysis and keyword extraction on top of the Emvista REST API.
    All endpoints share the same request shape: JSON body with ``text``
    plus a ``lang`` parameter, authenticated via the ``Poa-Token`` header.
    The API signals over-long input with HTTP 201.
    """

    provider_name = 'emvista'

    def __init__(self, api_keys: Dict = {}):
        # NOTE(review): mutable default kept for interface compatibility;
        # it is only read here, never mutated.
        self.api_settings = load_provider(ProviderDataEnum.KEY, self.provider_name, api_keys=api_keys)
        self.api_key = self.api_settings['api_key']
        # NOTE(review): this literal was truncated in the source under review;
        # restored from the provider's public API — verify against VCS.
        self.base_url = 'https://pss-api.prevyo.com/prevyo/api/v1/'

    def _request_headers(self) -> Dict:
        """Common headers for every Emvista call."""
        return {'Poa-Token': self.api_key, 'Content-Type': 'application/json', 'Accept': 'application/json'}

    def text__summarize(self, text: str, output_sentences: int, language: str, model: str = None) -> ResponseType[SummarizeDataClass]:
        """Summarize ``text`` using the /summarizer endpoint.

        Raises LanguageException when no language is given and
        ProviderException on any non-200 response.
        """
        if not language:
            raise LanguageException('Language not provided')
        payload = {'text': text, 'parameters': [{'name': 'lang', 'value': language}]}
        response = requests.post(f'{self.base_url}summarizer', headers=self._request_headers(), json=payload)
        original_response = response.json()
        status_code = response.status_code
        if status_code == 201:
            # The API (ab)uses 201 to flag over-long input.
            raise ProviderException('Input text is too long', code=status_code)
        if status_code != 200:
            raise ProviderException(original_response['message'], code=status_code)
        sentences = original_response.get('result', {}).get('sentences', [])
        # Level-10 sentences form the most condensed summary tier.
        level_items = [element for element in sentences if element['level'] == 10]
        summary = ''.join(element['value'] for element in level_items)
        return ResponseType[SummarizeDataClass](
            original_response=original_response,
            standardized_response=SummarizeDataClass(result=summary),
        )

    def text__syntax_analysis(self, language: str, text: str) -> ResponseType[SyntaxAnalysisDataClass]:
        """Part-of-speech tagging via the /parser endpoint."""
        if not language:
            raise LanguageException('Language not provided')
        payload = {'text': text, 'parameters': [{'name': 'lang', 'value': language}]}
        response = requests.post(f'{self.base_url}parser', headers=self._request_headers(), json=payload)
        if response.status_code == 201:
            raise ProviderException('Input text is too long', code=response.status_code)
        original_response = response.json()
        items: Sequence[InfosSyntaxAnalysisDataClass] = []
        for sentence in original_response.get('result', {}).get('sentences', []):
            for word in sentence['tokens']:
                # Only keep tokens whose POS tag we can standardize.
                if word['pos'] in tags:
                    other = {
                        'gender': word.get('gender'),
                        'plural': word.get('plural'),
                        'mode': word.get('mode'),
                        'infinitive': None,
                    }
                    items.append(InfosSyntaxAnalysisDataClass(
                        word=word['form'],
                        tag=tags[word['pos']],
                        lemma=word['lemma'],
                        others=other,
                        importance=None,
                    ))
        return ResponseType[SyntaxAnalysisDataClass](
            original_response=original_response,
            standardized_response=SyntaxAnalysisDataClass(items=items),
        )

    def text__anonymization(self, text: str, language: str) -> ResponseType[AnonymizationDataClass]:
        """Mask named entities in ``text`` via the /anonymizer endpoint."""
        payload = {'text': text, 'parameters': [{'name': 'lang', 'value': language}]}
        response = requests.post(f'{self.base_url}anonymizer', headers=self._request_headers(), json=payload)
        original_response = response.json()
        status_code = response.status_code
        if status_code == 201:
            raise ProviderException('Input text is too long', code=status_code)
        if status_code != 200:
            raise ProviderException(original_response['message'], code=status_code)
        entities: Sequence[AnonymizationEntity] = []
        new_text = text
        result = original_response['result'] or {}
        for entity in result.get('namedEntities', []):
            classification = CategoryType.choose_category_subcategory(entity['tags'][0].split('/')[-1])
            # Replace the entity span with '*' of equal length so offsets
            # of later entities stay valid.
            tmp_new_text = new_text[0:entity['start']] + ('*' * (entity['end'] - entity['start']))
            tmp_new_text += new_text[entity['end']:]
            new_text = tmp_new_text
            entities.append(AnonymizationEntity(
                content=entity['value'],
                original_label=entity['tags'][0],
                offset=entity['start'],
                length=len(entity['value']),
                confidence_score=None,
                category=classification['category'],
                subcategory=classification['subcategory'],
            ))
        return ResponseType[AnonymizationDataClass](
            original_response=original_response,
            standardized_response=AnonymizationDataClass(result=new_text, entities=entities),
        )

    def _normalize_sentiment(self, rate: float) -> str:
        """Map Emvista's signed score (or the string 'NaN') to an enum value."""
        if rate == 'NaN':
            return SentimentEnum.NEUTRAL.value
        if rate > 0:
            return SentimentEnum.POSITIVE.value
        if rate < 0:
            return SentimentEnum.NEGATIVE.value
        return SentimentEnum.NEUTRAL.value

    def text__sentiment_analysis(self, language: str, text: str) -> ResponseType[SentimentAnalysisDataClass]:
        """Document-level sentiment via the /opinions endpoint."""
        if not language:
            raise LanguageException('Language not provided')
        payload = {'text': text, 'parameters': [{'name': 'lang', 'value': language}]}
        response = requests.post(f'{self.base_url}opinions', headers=self._request_headers(), json=payload)
        if response.status_code == 201:
            raise ProviderException('Input text is too long', code=response.status_code)
        elif response.status_code != 200:
            # Prefer the API's error message; fall back to the raw body when
            # the response is not JSON. (Was a bare ``except:``; narrowed.)
            try:
                msg = response.json()['message']
                raise ProviderException(msg, code=response.status_code)
            except (ValueError, KeyError):
                raise ProviderException(response.text, code=response.status_code)
        original_response = response.json()
        result = original_response['result'] or {}
        standardized_response = SentimentAnalysisDataClass(
            general_sentiment=self._normalize_sentiment(result.get('globalScore', 0)),
            general_sentiment_rate=(abs(result.get('globalScore', 0)) if result.get('globalScore') != 'NaN' else 0),
        )
        return ResponseType[SentimentAnalysisDataClass](
            original_response=original_response,
            standardized_response=standardized_response,
        )

    def text__keyword_extraction(self, language: str, text: str) -> ResponseType[KeywordExtractionDataClass]:
        """Keyword extraction via the /keywords endpoint."""
        if not language:
            raise LanguageException('Language not provided')
        payload = {'text': text, 'parameters': [{'name': 'lang', 'value': language}]}
        response = requests.post(f'{self.base_url}keywords', headers=self._request_headers(), json=payload)
        original_response = response.json()
        status_code = response.status_code
        if status_code == 201:
            raise ProviderException('Input text is too long', code=status_code)
        if status_code != 200:
            raise ProviderException(original_response['message'], code=status_code)
        items: Sequence[InfosKeywordExtractionDataClass] = []
        keywords = (original_response.get('result', {}) or {}).get('keywords', []) or []
        for keyword in keywords:
            # Provider scores run 0..4; scale to 0..1.
            items.append(InfosKeywordExtractionDataClass(
                keyword=keyword['value'],
                importance=float(keyword['score']) * 0.25,
            ))
        return ResponseType[KeywordExtractionDataClass](
            original_response=original_response,
            standardized_response=KeywordExtractionDataClass(items=items),
        )
# NOTE(review): the decorator head was truncated in the source under review
# (it began with bare ``.parametrize``); restored as a pytest mark — confirm.
@pytest.mark.parametrize('pool', [ThreadPoolExecutor, ProcessPoolExecutor], ids=['threads', 'processes'])
def test_make_local_storage_parallel(pool, monkeypatch):
    """Concurrent make_local_storage calls must not race on directory creation.

    ``os.makedirs`` is slowed down so all four workers overlap; the cache
    directory must exist afterwards and is always cleaned up.
    """
    makedirs = os.makedirs

    def mockmakedirs(path, exist_ok=False):
        # Widen the race window so the workers genuinely overlap.
        time.sleep(1.5)
        makedirs(path, exist_ok=exist_ok)

    monkeypatch.setattr(os, 'makedirs', mockmakedirs)
    data_cache = os.path.join(os.curdir, 'test_parallel_cache')
    assert (not os.path.exists(data_cache))
    try:
        with pool() as executor:
            futures = [executor.submit(make_local_storage, data_cache) for i in range(4)]
            for future in futures:
                # Propagate any exception raised inside a worker.
                future.result()
            assert os.path.exists(data_cache)
    finally:
        if os.path.exists(data_cache):
            shutil.rmtree(data_cache)
class HealthCheckTestCase(TestCase):
    """Smoke tests for the health-check HTTP endpoints."""

    def _get(self, path):
        # Shared helper: perform a GET and stash the response on self.
        self.response = self.client.get(path)
        return self.response

    def test_health_check_url_returns_200_status(self):
        """The liveness endpoint answers with HTTP 200."""
        self._get('/health/ok/')
        self.assertEqual(self.response.status_code, 200)

    def test_version_info_url_returns_200_status(self):
        """The version-info endpoint answers with HTTP 200."""
        self._get('/health/version/')
        self.assertEqual(self.response.status_code, 200)
def yotpo_reviews_runner(db, cache, yotpo_reviews_secrets, yotpo_reviews_external_references, yotpo_reviews_erasure_external_references) -> ConnectorRunner:
    """Build a ConnectorRunner wired up for the yotpo_reviews connector."""
    reference_kwargs = {
        'external_references': yotpo_reviews_external_references,
        'erasure_external_references': yotpo_reviews_erasure_external_references,
    }
    return ConnectorRunner(db, cache, 'yotpo_reviews', yotpo_reviews_secrets, **reference_kwargs)
def tokenize(expr):
    """Split a dotted/indexed path expression into tokens.

    ``'a.b[3].c'`` -> ``['a', 'b', 3, 'c']``: text between dots becomes
    string tokens, ``[n]`` becomes the int ``n``, and a backslash escapes
    the next character (``'a\\.b'`` yields the single token ``'a.b'``).
    Empty segments (consecutive separators) produce no token.
    """
    tokens = []
    escape = False
    cur_token = ''
    for c in expr:
        if escape:
            # Previous char was a backslash: take this one literally.
            cur_token += c
            escape = False
        elif c == '\\':
            escape = True
        elif c == '[':
            if cur_token:
                tokens.append(cur_token)
            cur_token = ''
        elif c == ']':
            if cur_token:
                # Bracketed segments are numeric indices.
                tokens.append(int(cur_token))
            cur_token = ''
        elif c == '.':
            if cur_token:
                tokens.append(cur_token)
            cur_token = ''
        else:
            cur_token += c
    if cur_token:
        tokens.append(cur_token)
    return tokens
class AlertType(str, _enum.Enum):
    """Known Firebase alert type identifiers.

    Mixing in ``str`` lets members compare equal to the raw identifier
    strings carried in alert payloads.
    """

    # Crashlytics alerts.
    CRASHLYTICS_NEW_FATAL_ISSUE = 'crashlytics.newFatalIssue'
    CRASHLYTICS_NEW_NONFATAL_ISSUE = 'crashlytics.newNonfatalIssue'
    CRASHLYTICS_REGRESSION = 'crashlytics.regression'
    CRASHLYTICS_STABILITY_DIGEST = 'crashlytics.stabilityDigest'
    CRASHLYTICS_VELOCITY = 'crashlytics.velocity'
    CRASHLYTICS_NEW_ANR_ISSUE = 'crashlytics.newAnrIssue'
    # Billing alerts.
    BILLING_PLAN_UPDATE = 'billing.planUpdate'
    BILLING_PLAN_AUTOMATED_UPDATE = 'billing.planAutomatedUpdate'
    # App Distribution alerts.
    APP_DISTRIBUTION_NEW_TESTER_IOS_DEVICE = 'appDistribution.newTesterIosDevice'
    APP_DISTRIBUTION_IN_APP_FEEDBACK = 'appDistribution.inAppFeedback'
    # Performance Monitoring alerts.
    PERFORMANCE_THRESHOLD = 'performance.threshold'
def chunked_post(env, start_response):
    """WSGI app echoing the request body, read in path-dependent chunk sizes.

    ``/a`` slurps the body in one read, ``/b`` reads 4096-byte chunks and
    ``/c`` reads byte-by-byte; any other path yields no iterable (None).
    """
    start_response('200 OK', [('Content-type', 'text/plain')])
    body = env['wsgi.input']
    path = env['PATH_INFO']
    if path == '/a':
        return [body.read()]
    if path == '/b':
        # iter(callable, sentinel) stops at the first empty read.
        return list(iter(lambda: body.read(4096), b''))
    if path == '/c':
        return list(iter(lambda: body.read(1), b''))
class TaskExecutionIdentifier(_core_identifier.TaskExecutionIdentifier):
    """Task execution identifier with string (de)serialization helpers.

    String form: ``te:exec_project:exec_domain:exec_name:node_id:
    task_project:task_domain:task_name:task_version:retry``.
    """

    # NOTE(review): @classmethod decorators restored — these methods take
    # ``cls`` and are invoked through the class; confirm against VCS.
    @classmethod
    def promote_from_model(cls, base_model):
        """Wrap a base model object in this richer subclass."""
        return cls(task_id=base_model.task_id, node_execution_id=base_model.node_execution_id, retry_attempt=base_model.retry_attempt)

    @classmethod
    def from_flyte_idl(cls, pb2_object):
        """Build from the protobuf representation."""
        base_model = super().from_flyte_idl(pb2_object)
        return cls.promote_from_model(base_model)

    @classmethod
    def from_python_std(cls, string):
        """Parse the 10-segment ``te:`` identifier string.

        Raises FlyteValueException when the segment count or the leading
        resource-type tag is wrong.
        """
        segments = string.split(':')
        if len(segments) != 10:
            raise _user_exceptions.FlyteValueException(string, 'The provided string was not in a parseable format. The string for an identifier must be in the format te:exec_project:exec_domain:exec_name:node_id:task_project:task_domain:task_name:task_version:retry.')
        (resource_type, ep, ed, en, node_id, tp, td, tn, tv, retry) = segments
        if resource_type != 'te':
            # Fixed message: the expected tag is 'te' (it previously said 'ex').
            raise _user_exceptions.FlyteValueException(resource_type, "The provided string could not be parsed. The first element of an execution identifier must be 'te'.")
        return cls(
            task_id=Identifier(_core_identifier.ResourceType.TASK, tp, td, tn, tv),
            node_execution_id=_core_identifier.NodeExecutionIdentifier(
                node_id=node_id,
                execution_id=_core_identifier.WorkflowExecutionIdentifier(ep, ed, en),
            ),
            retry_attempt=int(retry),
        )

    def __str__(self):
        return 'te:{ep}:{ed}:{en}:{node_id}:{tp}:{td}:{tn}:{tv}:{retry}'.format(ep=self.node_execution_id.execution_id.project, ed=self.node_execution_id.execution_id.domain, en=self.node_execution_id.execution_id.name, node_id=self.node_execution_id.node_id, tp=self.task_id.project, td=self.task_id.domain, tn=self.task_id.name, tv=self.task_id.version, retry=self.retry_attempt)
class ThingsResource():
    """Falcon resource exposing a user's 'things' collection."""

    def __init__(self, db):
        self.db = db
        self.logger = logging.getLogger(('thingsapp.' + __name__))

    def on_get(self, req, resp, user_id):
        """List things with optional ``marker``/``limit`` paging params.

        Raises HTTPServiceUnavailable (with retry_after) when the backing
        store fails for any reason.
        """
        marker = (req.get_param('marker') or '')
        limit = (req.get_param_as_int('limit') or 50)
        try:
            result = self.db.get_things(marker, limit)
        except Exception as ex:
            # Log the real error; surface a friendly 503 to the client.
            self.logger.error(ex)
            description = 'Aliens have attacked our base! We will be back as soon as we fight them off. We appreciate your patience.'
            raise falcon.HTTPServiceUnavailable(title='Service Outage', description=description, retry_after=30)
        # Middleware/serializer is expected to render resp.context.result.
        resp.context.result = result
        resp.set_header('Powered-By', 'Falcon')
        resp.status = falcon.HTTP_200

    # NOTE(review): the line below looks like a mangled before-hook decorator,
    # presumably ``@falcon.before(max_body(64 * 1024))`` — confirm against VCS.
    (max_body((64 * 1024)))
    def on_post(self, req, resp, user_id):
        """Create a thing from the request body; 201 with Location on success."""
        try:
            doc = req.context.doc
        except AttributeError:
            raise falcon.HTTPBadRequest(title='Missing thing', description='A thing must be submitted in the request body.')
        proper_thing = self.db.add_thing(doc)
        resp.status = falcon.HTTP_201
        resp.location = ('/%s/things/%s' % (user_id, proper_thing['id']))
def upgrade():
    """Add ``has_organizer_info`` and rename ``event_url`` on the events tables."""
    op.add_column('events', sa.Column('has_organizer_info', sa.Boolean(), nullable=True))
    # The versioning shadow table mirrors the change (non-autoincrementing).
    op.add_column('events_version', sa.Column('has_organizer_info', sa.Boolean(), autoincrement=False, nullable=True))
    # Rename applies identically to the live table and its version table.
    for table_name in ('events', 'events_version'):
        op.alter_column(table_name, 'event_url', new_column_name='external_event_url')
def _nice(x, round=False): if (x <= 0): import warnings warnings.warn('Invalid (negative) range passed to tick interval calculation') x = abs(x) expv = floor(log10(x)) f = (x / pow(10, expv)) if round: if (f < 1.75): nf = 1.0 elif (f < 3.75): nf = 2.5 elif (f < 7.0): nf = 5.0 else: nf = 10.0 elif (f <= 1.0): nf = 1.0 elif (f <= 2.5): nf = 2.5 elif (f <= 5.0): nf = 5.0 else: nf = 10.0 return (nf * pow(10, expv))
def clip_channels(color: Color, nans: bool=True) -> bool:
    """Clamp each bounded channel of ``color`` into its gamut, in place.

    Returns True when at least one channel actually had to be clamped.
    Unbound channels, angle (hue-like) channels and NaN values are written
    back unchanged.

    NOTE(review): the ``nans`` flag is never consulted in this body —
    confirm whether NaN handling was meant to be switchable.
    """
    space = color._space
    changed = False
    for idx, value in enumerate(space.normalize(color[:(- 1)])):
        channel = space.CHANNELS[idx]
        if (not channel.bound) or math.isnan(value) or (channel.flags & FLG_ANGLE):
            # Nothing to clamp: pass the normalized value straight through.
            color[idx] = value
            continue
        clamped = min(max(value, channel.low), channel.high)
        color[idx] = clamped
        if clamped != value:
            changed = True
    return changed
class SearchResult(Html.Html):
    """Client-side rendered, paginated search-result list (jQuery based).

    Records are rendered entirely in the browser by ``_js__builder__``;
    each record may carry title, url/icon, description, visited date and
    extra links.  Requires the jquery dependency.
    """

    name = 'Search Result'
    tag = 'div'
    requirements = ('jquery',)
    _option_cls = OptText.OptSearchResult

    def __init__(self, page: primitives.PageModel, records, width, height, options, profile):
        super(SearchResult, self).__init__(page, records, options=options, profile=profile, css_attrs={'width': width, 'height': height})
        self.style.css.margin = '5px 10px 5px 10px'

    # JavaScript builder template: renders the records plus a pagination bar.
    # It is a Python %-format string (note the escaped '%%' for CSS width)
    # whose payload is executable JS — keep it byte-identical.
    _js__builder__ = ('\n jHtmlObj = %(jquery)s; jHtmlObj.empty();\n if (typeof options.currPage == \'undefined\'){options.currPage = 0}; var pageNumber = options.pageNumber;\n data.slice(options.currPage * pageNumber).forEach( function(rec){\n var newItem = $(\'<div style="margin:5px 10px 5px 10px;"></div>\') ; \n var title = $(\'<div>\'+ rec[\'title\'] + \'</div>\').css( options.title );\n if (rec[\'urlTitle\'] != undefined){\n title.css({\'cursor\': \'pointer\'}); title.click(function(e){window.open(rec[\'urlTitle\'], \'_blank\')})}\n newItem.append(title);\n if (rec.icon != undefined){\n var item = $(\'<div></div>\').css(options.url);\n item.append( $(\'<i class="\'+ rec[\'icon\'] +\'" style="margin-right:5px"></i>\')).append(rec[\'url\']);\n newItem.append(item)} \n else if(rec.url != undefined) {newItem.append($(\'<div>\'+ rec[\'url\'] +\'</div>\').css(options.url))}\n newItem.append( $(\'<div>\'+ rec[\'dsc\'] +\'</div>\').css(options.dsc));\n if(rec.visited != undefined){newItem.append($(\'<div>\'+ rec.visited +\'</div>\').css(options.visited))}\n if(rec.links != undefined){\n rec.links.forEach(function(link){ \n if (link.url == undefined) {link.url = link.val};\n newItem.append($(\'<a href=\'+ link.url +\' target="_blank">\'+ link.val +\'</a><br>\').css(options.link))})};\n jHtmlObj.append(newItem);\n }); \n if(data.length > 0) {\n var reste = data.length/ pageNumber; var currIndex = options.currPage+1; var roundRest = Math.trunc(reste);\n if (roundRest > reste) {reste ++};\n var paginate = $(\'<div style="display:inline-block;height:35px;padding:0;width:100%%;text-align:center;margin-top:10px" class="py_cssdivpagination"></div>\');\n if (currIndex > 1){\n var href = $(\'<a href="#">&laquo;</a>\');\n href.click({page: options.currPage-1, rec: data}, function(e){options.builder(htmlObj, e.data.rec, options, e.data.page)});\n paginate.append(href)};\n for (var i = 0; i < reste; i++){\n var indexPage = i + 1;\n if (options.currPage == i) { \n var href = $(\'<a href="#" style="background-color:\'+ options.grey +\';padding:5px;color:\'+ options.white +\'">\'+ indexPage +\'</a>\');\n href.click({page: i, rec: data}, function(e) {options.builder(htmlObj, e.data.rec, options, e.data.page)});\n paginate.append(href)}\n else{\n var href = $(\'<a href="#" style="padding:5px;">\'+ indexPage +\'</a>\') ;\n href.click({page: i, rec: data}, function(e){options.builder(htmlObj, e.data.rec, options, e.data.page)});\n paginate.append(href)}}\n if(currIndex < reste){\n var href = $(\'<a href="#">&raquo;</a>\');\n href.click({page: options.currPage+1, rec: data}, function(e){options.builder(htmlObj, e.data.rec, options, e.data.page)});\n paginate.append(href)};\n jHtmlObj.append(paginate)\n } ' % {'jquery': JsQuery.decorate_var('htmlObj', convert_var=False)})

    def __str__(self):
        # Register the client-side builder, then emit the container element.
        self.page.properties.js.add_builders(self.refresh())
        return ('<%s %s></%s> ' % (self.tag, self.get_attrs(css_class_names=self.style.get_classes()), self.tag))
class PartialSnapshot():
    """Accumulates incremental ensemble-state updates.

    Tracks per-realization state, per-forward-model state and overall
    ensemble status, optionally mirroring forward-model updates into a
    backing full ``Snapshot``.  ``from_cloudevent`` is the main entry
    point for applying events; ``_merge``/``to_dict`` support combining
    and serializing partials.
    """

    def __init__(self, snapshot: Optional['Snapshot']=None) -> None:
        # realization id -> {status, start_time, end_time}
        self._realization_states: Dict[(str, Dict[(str, Union[(bool, datetime.datetime, str)])])] = defaultdict(dict)
        # (realization id, forward model id) -> forward-model state fields
        self._forward_model_states: Dict[(Tuple[(str, str)], Dict[(str, Union[(str, datetime.datetime)])])] = defaultdict(dict)
        self._ensemble_state: Optional[str] = None
        self._metadata: Dict[(str, Any)] = defaultdict(dict)
        # Full snapshot kept in lock-step; required by from_cloudevent.
        self._snapshot = snapshot

    def status(self) -> Optional[str]:
        """Overall ensemble status, or None if not yet known."""
        return self._ensemble_state

    def update_metadata(self, metadata: Dict[(str, Any)]) -> None:
        # None values are dropped so they never overwrite earlier data.
        self._metadata.update(_filter_nones(metadata))

    def update_realization(self, real_id: str, status: str, start_time: Optional[datetime.datetime]=None, end_time: Optional[datetime.datetime]=None) -> 'PartialSnapshot':
        """Record a realization status change; returns self for chaining."""
        self._realization_states[real_id].update(_filter_nones({'status': status, 'start_time': start_time, 'end_time': end_time}))
        return self

    def update_forward_model(self, real_id: str, forward_model_id: str, forward_model: 'ForwardModel') -> 'PartialSnapshot':
        """Record a forward-model state change; mirrors into the snapshot."""
        forward_model_update = _filter_nones(forward_model.model_dump())
        self._forward_model_states[(real_id, forward_model_id)].update(forward_model_update)
        if self._snapshot:
            # Keep the backing full snapshot consistent with this partial.
            self._snapshot._my_partial._forward_model_states[(real_id, forward_model_id)].update(forward_model_update)
        return self

    def get_all_forward_models(self) -> Mapping[(Tuple[(str, str)], 'ForwardModel')]:
        # Delegates to the full snapshot; empty without one.
        if self._snapshot:
            return self._snapshot.get_all_forward_models()
        return {}

    def get_forward_model_status_for_all_reals(self) -> Mapping[(Tuple[(str, str)], Union[(str, datetime.datetime)])]:
        # Delegates to the full snapshot; empty without one.
        if self._snapshot:
            return self._snapshot.get_forward_model_status_for_all_reals()
        return {}

    def reals(self) -> Mapping[(str, 'RealizationSnapshot')]:
        """Realization states wrapped as RealizationSnapshot objects."""
        return {real_id: RealizationSnapshot(**real_data) for (real_id, real_data) in self._realization_states.items()}

    def get_real_ids(self) -> Sequence[str]:
        """All realization ids seen in either state map, numerically sorted."""
        real_ids = []
        for idx in self._forward_model_states:
            real_id = idx[0]
            if (real_id not in real_ids):
                real_ids.append(real_id)
        for real_id in self._realization_states:
            if (real_id not in real_ids):
                real_ids.append(real_id)
        # Ids are numeric strings; sort them by integer value.
        return sorted(real_ids, key=int)

    def metadata(self) -> Mapping[(str, Any)]:
        return self._metadata

    def get_real(self, real_id: str) -> 'RealizationSnapshot':
        return RealizationSnapshot(**self._realization_states[real_id])

    def to_dict(self) -> Dict[(str, Any)]:
        """Nested-dict form: metadata / status / reals / forward_models."""
        _dict: Dict[(str, Any)] = {}
        if self._metadata:
            _dict['metadata'] = self._metadata
        if self._ensemble_state:
            _dict['status'] = self._ensemble_state
        if self._realization_states:
            _dict['reals'] = self._realization_states
        for (fm_tuple, fm_values_dict) in self._forward_model_states.items():
            real_id = fm_tuple[0]
            # Build the reals -> real_id -> forward_models nesting on demand.
            if ('reals' not in _dict):
                _dict['reals'] = {}
            if (real_id not in _dict['reals']):
                _dict['reals'][real_id] = {}
            if ('forward_models' not in _dict['reals'][real_id]):
                _dict['reals'][real_id]['forward_models'] = {}
            forward_model_id = fm_tuple[1]
            _dict['reals'][real_id]['forward_models'][forward_model_id] = fm_values_dict
        return _dict

    def data(self) -> Mapping[(str, Any)]:
        # Alias kept for API compatibility.
        return self.to_dict()

    def _merge(self, other: 'PartialSnapshot') -> 'PartialSnapshot':
        """Fold another partial into this one (other's values win)."""
        self._metadata.update(other._metadata)
        if (other._ensemble_state is not None):
            self._ensemble_state = other._ensemble_state
        for (real_id, other_real_data) in other._realization_states.items():
            self._realization_states[real_id].update(other_real_data)
        for (forward_model_id, other_fm_data) in other._forward_model_states.items():
            self._forward_model_states[forward_model_id].update(other_fm_data)
        return self

    def from_cloudevent(self, event: CloudEvent) -> 'PartialSnapshot':
        """Apply a realization / forward-model / ensemble CloudEvent.

        Raises UnsupportedOperationException when no backing snapshot is
        attached, and ValueError for unknown event types.
        """
        e_type = event['type']
        e_source = event['source']
        timestamp = event['time']
        if (self._snapshot is None):
            raise UnsupportedOperationException(f'updating {self.__class__} without a snapshot is not supported')
        if (e_type in ids.EVGROUP_REALIZATION):
            status = _FM_TYPE_EVENT_TO_STATUS[e_type]
            start_time = None
            end_time = None
            if (e_type == ids.EVTYPE_REALIZATION_RUNNING):
                start_time = convert_iso8601_to_datetime(timestamp)
            elif (e_type in {ids.EVTYPE_REALIZATION_SUCCESS, ids.EVTYPE_REALIZATION_FAILURE, ids.EVTYPE_REALIZATION_TIMEOUT}):
                end_time = convert_iso8601_to_datetime(timestamp)
            self.update_realization(_get_real_id(e_source), status, start_time, end_time)
            if (e_type == ids.EVTYPE_REALIZATION_TIMEOUT):
                # On timeout, mark every still-unfinished forward model of
                # the realization as failed with a MAX_RUNTIME message.
                for (forward_model_id, forward_model) in self._snapshot.get_forward_models_for_real(_get_real_id(e_source)).items():
                    if (forward_model.status != state.FORWARD_MODEL_STATE_FINISHED):
                        real_id = _get_real_id(e_source)
                        forward_model_idx = (real_id, forward_model_id)
                        if (forward_model_idx not in self._forward_model_states):
                            self._forward_model_states[forward_model_idx] = {}
                        self._forward_model_states[forward_model_idx].update({'status': state.FORWARD_MODEL_STATE_FAILURE, 'end_time': end_time, 'error': 'The run is cancelled due to reaching MAX_RUNTIME'})
        elif (e_type in ids.EVGROUP_FORWARD_MODEL):
            status = _FM_TYPE_EVENT_TO_STATUS[e_type]
            start_time = None
            end_time = None
            if (e_type == ids.EVTYPE_FORWARD_MODEL_START):
                start_time = convert_iso8601_to_datetime(timestamp)
            elif (e_type in {ids.EVTYPE_FORWARD_MODEL_SUCCESS, ids.EVTYPE_FORWARD_MODEL_FAILURE}):
                end_time = convert_iso8601_to_datetime(timestamp)
            fm_dict = {'status': status, 'start_time': start_time, 'end_time': end_time, 'index': _get_forward_model_index(e_source)}
            # Event-type-specific extras (memory usage, std streams, errors).
            if (e_type == ids.EVTYPE_FORWARD_MODEL_RUNNING):
                fm_dict[ids.CURRENT_MEMORY_USAGE] = event.data.get(ids.CURRENT_MEMORY_USAGE)
                fm_dict[ids.MAX_MEMORY_USAGE] = event.data.get(ids.MAX_MEMORY_USAGE)
            if (e_type == ids.EVTYPE_FORWARD_MODEL_START):
                fm_dict['stdout'] = event.data.get(ids.STDOUT)
                fm_dict['stderr'] = event.data.get(ids.STDERR)
            if (e_type == ids.EVTYPE_FORWARD_MODEL_FAILURE):
                fm_dict['error'] = event.data.get(ids.ERROR_MSG)
            self.update_forward_model(_get_real_id(e_source), _get_forward_model_id(e_source), ForwardModel(**fm_dict))
        elif (e_type in ids.EVGROUP_ENSEMBLE):
            self._ensemble_state = _ENSEMBLE_TYPE_EVENT_TO_STATUS[e_type]
        elif (e_type == ids.EVTYPE_EE_SNAPSHOT_UPDATE):
            # A full nested-dict update from another partial: merge it in.
            other_partial = _from_nested_dict(event.data)
            self._merge(other_partial)
        else:
            raise ValueError(f'Unknown type: {e_type}')
        return self
def convert_save(save_stats: dict[(str, Any)]) -> dict[(str, Any)]:
    """Interactively convert a save to another game version (en/jp/kr/tw)."""
    game_versions = ['en', 'jp', 'kr', 'tw']
    helper.colored_text('WARNING: This may cause issues, and both apps must be the same version (e.g both 12.1.0)!', helper.RED)
    current_version = save_stats['version']
    if current_version in game_versions:
        # Don't offer converting to the version the save already has.
        game_versions.remove(current_version)
    choice = user_input_handler.select_single(game_versions, title='Select a version to convert the save into:')
    return convert(save_stats, game_versions[choice - 1])
# NOTE(review): the tuple below looks like the argument list of a mangled
# decorator, presumably ``@patch('sys.argv', ['flakehell'])`` — confirm
# against VCS before relying on this test in isolation.
('sys.argv', ['flakehell'])
def test_baseline(capsys, tmp_path: Path):
    """End-to-end: a baseline suppresses known violations, new ones still fail.

    Generates a baseline for a file with two undefined names, keeps only
    the first hash, and checks that linting reports only the second line.
    """
    code_path = (tmp_path / 'example.py')
    code_path.write_text('a\nb\n')
    with chdir(tmp_path):
        result = main(['baseline', str(code_path)])
    assert (result == (0, ''))
    captured = capsys.readouterr()
    assert (captured.err == '')
    # One hash per violation: two undefined names -> two hashes.
    hashes = captured.out.strip().split()
    assert (len(hashes) == 2)
    line_path = (tmp_path / 'baseline.txt')
    # Baseline only the first violation; the second must still be reported.
    line_path.write_text(hashes[0])
    with chdir(tmp_path):
        result = main(['lint', '--baseline', str(line_path), '--format', 'default', str(code_path)])
    assert (result == (1, ''))
    captured = capsys.readouterr()
    assert (captured.err == '')
    assert (captured.out.strip() == "{}:2:1: F821 undefined name 'b'".format(str(code_path)))
class ForeignKey(ModelField):
    """Model field referencing another model's primary key.

    ``to`` may be a model class or a string name resolved lazily through
    the registry; ``on_delete`` maps to the SQL ``ON DELETE`` action.
    """

    class ForeignKeyValidator(typesystem.Field):
        """Validator serializing a related model instance to its pk."""

        def validate(self, value):
            return value.pk

    def __init__(self, to, allow_null: bool=False, on_delete: typing.Optional[str]=None):
        super().__init__(allow_null=allow_null)
        self.to = to
        self.on_delete = on_delete

    # NOTE(review): @property restored — get_column/expand_relationship read
    # ``self.target`` as an attribute, so this must be a property; confirm.
    @property
    def target(self):
        """Resolve (and cache) the referenced model class."""
        if (not hasattr(self, '_target')):
            if isinstance(self.to, str):
                # Late binding: look the model up by name in the registry.
                self._target = self.registry.models[self.to]
            else:
                self._target = self.to
        return self._target

    def get_validator(self, **kwargs) -> typesystem.Field:
        return self.ForeignKeyValidator(**kwargs)

    def get_column(self, name: str) -> sqlalchemy.Column:
        """Build the SQLAlchemy column with an FK constraint to the target pk."""
        target = self.target
        to_field = target.fields[target.pkname]
        column_type = to_field.get_column_type()
        constraints = [sqlalchemy.schema.ForeignKey(f'{target.tablename}.{target.pkname}', ondelete=self.on_delete)]
        return sqlalchemy.Column(name, column_type, *constraints, nullable=self.allow_null)

    def expand_relationship(self, value):
        """Coerce a raw pk into a target instance (instances pass through)."""
        target = self.target
        if isinstance(value, target):
            return value
        return target(pk=value)
# NOTE(review): the decorator head was truncated in the source under review
# (it began with bare ``.parametrize``); restored as a pytest mark — confirm.
@pytest.mark.parametrize('screenshot_manager', [{}, {'show_image': True, 'show_text': False}, {'show_image': True, 'show_text': False, 'wifi_shape': 'rectangle', 'wifi_rectangle_width': 10}], indirect=True)
def ss_iwd(dbus_thread, screenshot_manager):
    """Capture screenshots of the iwd widget for each parametrized config."""
    # Wait until the widget has actually rendered before capturing.
    wait_for_text(screenshot_manager.c.widget['iwd'], 'qtile_extras (50%)')
    screenshot_manager.take_screenshot()
def v1_sort_packages(packages, fdroid_signing_key_fingerprints):
    """Sort the package list in place for the v1 index.

    Ordering: package name, then signature group (developer-signed first,
    then F-Droid-signed, then everything else), then signer, then
    descending versionCode.
    """
    DEV_SIGNED = 1
    FDROID_SIGNED = 2
    OTHER_SIGNED = 3

    def sort_key(package):
        name = package.get('packageName', None)
        signer = package.get('signer', None)
        group = OTHER_SIGNED
        dev_signer = common.metadata_find_developer_signature(name)
        if dev_signer and dev_signer == signer:
            group = DEV_SIGNED
        else:
            fdroid_signer = fdroid_signing_key_fingerprints.get(name, {}).get('signer')
            if fdroid_signer and fdroid_signer == signer:
                group = FDROID_SIGNED
        version_code = None
        if package.get('versionCode', None):
            # Negate so larger version codes sort first.
            version_code = -package['versionCode']
        return (name, group, signer, version_code)

    packages.sort(key=sort_key)
def get_factory(get_manifest, escrow_manifest, w3):
    """Return a callable resolving a contract factory for a named package."""
    def _get_factory(package, factory_name):
        # Always resolve via get_manifest first (preserving its side effects),
        # then override with the dedicated escrow manifest when applicable.
        manifest = get_manifest(package)
        if package == 'escrow':
            manifest = escrow_manifest
        pkg = Package(manifest, w3)
        return pkg.get_contract_factory(factory_name)
    return _get_factory
def ensure_data_dir_is_initialized(trinity_config: TrinityConfig) -> None:
    """Initialize the Trinity data directory if it is not already set up.

    Converts low-level filesystem failures into user-facing CLI errors
    via ``parser.error`` (which exits the process).
    """
    if (not is_data_dir_initialized(trinity_config)):
        try:
            initialize_data_dir(trinity_config)
        except AmbigiousFileSystem:
            parser.error(TRINITY_AMBIGIOUS_FILESYSTEM_INFO)
        except MissingPath as e:
            # Trinity refuses to create directories outside its root path.
            parser.error(f''' It appears that {e.path} does not exist. Trinity does not attempt to create directories outside of its root path. Either manually create the path or ensure you are using a data directory inside the XDG_TRINITY_ROOT path''')
class LiteScopeIODriver():
    """Pairs a register bank's ``<name>_in``/``<name>_out`` CSRs for easy access."""

    def __init__(self, regs, name):
        self.regs = regs
        self.name = name
        self.build()

    def build(self):
        # Resolve the input/output CSR objects once, up front.
        self.input = getattr(self.regs, '{}_in'.format(self.name))
        self.output = getattr(self.regs, '{}_out'.format(self.name))

    def write(self, value):
        """Drive the output register."""
        self.output.write(value)

    def read(self):
        """Sample the input register."""
        return self.input.read()
def tlscontext_for_tcpmapping(irgroup: IRTCPMappingGroup, config: 'V3Config') -> Optional['IRTLSContext']:
    """Find the TLS context used to terminate TLS for a TCPMapping group.

    Hosts (matched by hostname glob, in sorted hostname order) take
    precedence over bare TLSContexts listing the group host verbatim.
    Returns None when the group has no host or nothing matches.
    """
    group_host = irgroup.get('host')
    if not group_host:
        # Clear-text TCPMapping: nothing to terminate.
        return None
    # First preference: a Host whose hostname glob covers this group's host.
    for irhost in sorted(config.ir.get_hosts(), key=lambda h: h.hostname):
        if irhost.context and hostglob_matches(irhost.hostname, group_host):
            return irhost.context
    # Fallback: a TLSContext that lists the group host exactly.
    for context in config.ir.get_tls_contexts():
        if group_host in (context.get('hosts') or []):
            return context
    return None
def call_func_in_js(func, classes, extra_nodejs_args=None):
    """Compile ``func`` plus the JS classes it needs and execute under node.

    Walks each class's MRO (stopping at Component/Property or anything
    already collected), emits the JS class definitions base-first, appends
    the transpiled ``func`` as ``test`` and an invocation, then prefixes
    the minimal std-lib subset the generated code requires.
    """
    needed = []
    for cls in classes:
        for base in cls.mro():
            if base is Component or base is Property or base in needed:
                break
            needed.append(base)
    parts = [JS_EVENT]
    # Emit base classes before subclasses so prototypes resolve.
    for base in reversed(needed):
        proto = base.__bases__[0].__name__ + '.prototype'
        parts.append(create_js_component_class(base, base.__name__, proto))
    parts.append(py2js(func, 'test', inline_stdlib=False, docstrings=False))
    parts.append('test();loop.reset();')
    code = ''.join(parts)
    (nargs, function_deps, method_deps) = get_std_info(code)
    code = get_partial_std_lib(function_deps, method_deps, []) + code
    return evaljs(code, print_result=False, extra_nodejs_args=extra_nodejs_args)
class TypeDeclarationTestCase(unittest.TestCase):
    """Compile the MIB found in the subclass docstring and check the result.

    ``setUp`` parses the docstring MIB, generates pysnmp code, executes it
    into ``self.ctx``; the ``protoTest*`` helpers then assert on the
    generated symbols.
    """

    def setUp(self):
        # The MIB under test lives in the *subclass* docstring.
        ast = parserFactory(**smiV1Relaxed)().parse(self.__class__.__doc__)[0]
        (mibInfo, symtable) = SymtableCodeGen().genCode(ast, {}, genTexts=True)
        (self.mibInfo, pycode) = PySnmpCodeGen().genCode(ast, {mibInfo.name: symtable}, genTexts=True)
        codeobj = compile(pycode, 'test', 'exec')
        mibBuilder = MibBuilder()
        mibBuilder.loadTexts = True
        self.ctx = {'mibBuilder': mibBuilder}
        # Execute the generated module; its symbols land in self.ctx.
        exec(codeobj, self.ctx, self.ctx)

    def protoTestSymbol(self, symbol, klass):
        """Assert that ``symbol`` was generated at all."""
        self.assertTrue((symbol in self.ctx), ('symbol %s not present' % symbol))

    def protoTestClass(self, symbol, klass):
        """Assert that the generated symbol's base class is ``klass``."""
        self.assertEqual(self.ctx[symbol].__bases__[0].__name__, klass, ('expected class %s, got %s at %s' % (klass, self.ctx[symbol].__bases__[0].__name__, symbol)))
def main():
    """Ansible module entry point for fortios firewall_interface_policy.

    Builds the argument spec from the versioned schema, talks to FortiOS
    over the httpapi connection, applies the configuration and exits via
    module.exit_json / module.fail_json.
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # Primary key of the managed object.
    mkeyname = 'policyid'
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'firewall_interface_policy': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy the schema-derived options in; the primary key becomes required.
    for attribute_name in module_spec['options']:
        fields['firewall_interface_policy']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['firewall_interface_policy']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Compare the running FortiOS version against the schema's support matrix.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_interface_policy')
        (is_error, has_changed, result, diff) = fortios_firewall(module.params, fos, module.check_mode)
    else:
        # No httpapi connection available: cannot proceed.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            # Success, but surface the version mismatch in the result.
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
# NOTE(review): the decorator head was truncated in the source under review
# (it began with bare ``.parametrize``); restored as a pytest mark — confirm.
@pytest.mark.parametrize('data,key,expected', [(None, 'x', None), ({}, 'x', None), ({'x': 1}, 'x', 1), ({'x': {'y': 1}}, 'x.y', 1), ({'x': 1}, 'x.y', None), ({'x': {'y': {}}}, 'x.y.z', None)])
def test_nested_key(data, key, expected):
    """nested_key walks dotted paths and returns None on any miss."""
    r = nested_key(data, *key.split('.'))
    if (expected is None):
        # Distinguish "returns the None object" from merely falsy results.
        assert (r is expected)
    else:
        assert (r == expected)
class FragmentGenerator():
    """Splits a message's argument buffers across one or more wire frames.

    Subclasses provide the raw args and the frame factories; ``build_frames``
    performs the fragmentation, marking every frame but the last with a
    "more fragments follow" flag.
    """

    def __init__(self):
        pass

    def get_args(self) -> List[bytes]:
        # Subclass hook: the raw argument buffers to be framed, in order.
        raise NotImplementedError()

    def get_initial_frame(self) -> FrameWithArgs:
        # Subclass hook: the frame type used for the first fragment.
        raise NotImplementedError()

    def get_continue_frame(self) -> FrameWithArgs:
        # Subclass hook: the frame type used for every subsequent fragment.
        raise NotImplementedError()

    def build_frames(self, message_id) -> List[FrameWithArgs]:
        """Pack the args into frames, splitting buffers when a frame fills up."""
        args: List[bytes] = self.get_args()
        frames = []
        while args:
            # First fragment uses the initial frame; the rest use continue frames.
            frame: FrameWithArgs = (self.get_initial_frame() if (not frames) else self.get_continue_frame())
            frame.id = message_id
            while (args and (not frame.is_full())):
                buf: bytes = args[0]
                n = len(buf)
                # Reserve space beyond the payload — presumably a 2-byte
                # per-arg length prefix on the wire; TODO confirm framing.
                avail = (frame.space_available() - 2)
                if (avail <= 0):
                    break
                # Write as much of the buffer as fits in this frame.
                to_write = (n if (avail >= n) else avail)
                arg = Arg(buf[0:to_write])
                frame.args.append(arg)
                buf = buf[to_write:]
                n = len(buf)
                # Pop the arg only when fully written, unless a following arg
                # would start exactly at a frame boundary (that case keeps the
                # empty remainder so the boundary is encoded explicitly).
                if ((not n) and (not ((len(args) > 1) and frame.is_frame_boundary()))):
                    args.pop(0)
                else:
                    args[0] = buf
            if args:
                # Only flag-carrying frames can signal continuation.
                assert (isinstance(frame, CallFlags) and isinstance(frame, FrameWithArgs))
                frame.set_more_fragments_follow(True)
            frames.append(frame)
        return frames
def test_finish_same_task_twice():
    """Finishing the same prereq for the same task twice must raise."""
    def previous(task_id):
        return task_id - 1

    tracker = OrderedTaskPreparation(TwoPrereqs, identity, previous)
    tracker.set_finished_dependency(1)
    tracker.register_tasks((2,))
    tracker.finish_prereq(TwoPrereqs.PREREQ1, (2,))
    # Completing PREREQ1 for task 2 a second time is a validation error.
    with pytest.raises(ValidationError):
        tracker.finish_prereq(TwoPrereqs.PREREQ1, (2,))
class PasswordManager():
    """Encrypts/decrypts secrets with a Fernet key persisted in HA storage.

    The key is loaded lazily on first use; a legacy on-disk key file is
    migrated into the storage payload if found.
    """

    # Persisted storage payload (carries the Fernet key); None until loaded.
    data: Optional[StorageData]
    hass: HomeAssistant
    crypto: Fernet

    def __init__(self, hass: HomeAssistant):
        self.hass = hass
        self.data = None

    async def _load_key(self):
        """Load (or create) the encryption key and build the Fernet cipher."""
        if (self.data is None):
            storage_manager = StorageManager(self.hass)
            self.data = (await storage_manager.async_load_from_store())
            if (self.data.key is None):
                # Migrate from the legacy key file, deleting it afterwards...
                legacy_key_path = self.hass.config.path(DOMAIN_KEY_FILE)
                if path.exists(legacy_key_path):
                    with open(legacy_key_path, 'rb') as file:
                        self.data.key = file.read().decode('utf-8')
                    remove(legacy_key_path)
                else:
                    # ...or generate a brand-new key.
                    self.data.key = Fernet.generate_key().decode('utf-8')
                # Persist the newly obtained key.
                (await storage_manager.async_save_to_store(self.data))
        self.crypto = Fernet(self.data.key.encode())

    async def encrypt(self, data: str):
        """Return *data* encrypted with the stored key (as a str token)."""
        (await self._load_key())
        encrypted = self.crypto.encrypt(data.encode()).decode()
        return encrypted

    async def decrypt(self, data: str):
        """Return the plaintext for a token produced by :meth:`encrypt`."""
        (await self._load_key())
        decrypted = self.crypto.decrypt(data.encode()).decode()
        return decrypted
def tf_model(n_hidden, input_size):
    """Build a small Keras classifier: two LayerNorm'd dense layers + softmax head.

    ``n_hidden`` is the width of both hidden layers; ``input_size`` is the
    flat input dimension. Returns an (uncompiled) ``tf.keras.Sequential``.
    """
    import tensorflow as tf

    layers = [
        tf.keras.layers.Dense(n_hidden, input_shape=(input_size,)),
        tf.keras.layers.LayerNormalization(),
        tf.keras.layers.Dense(n_hidden, activation='relu'),
        tf.keras.layers.LayerNormalization(),
        # 10-way softmax output head.
        tf.keras.layers.Dense(10, activation='softmax'),
    ]
    return tf.keras.Sequential(layers)
class ColumnsDriftParameters(ConditionTestParameters):
    """Per-column drift results plus the condition they are tested against."""

    # Mapping of column name -> that column's drift parameters.
    features: Dict[(str, ColumnDriftParameter)]

    # FIX: this alternate constructor takes ``cls`` but had lost its
    # @classmethod decorator, so it would have received an instance as `cls`.
    @classmethod
    def from_data_drift_table(cls, table: DataDriftTableResults, condition: TestValueCondition):
        """Build parameters from a data-drift table, one entry per column."""
        return ColumnsDriftParameters(
            features={
                feature: ColumnDriftParameter.from_metric(data)
                for (feature, data) in table.drift_by_columns.items()
            },
            condition=condition,
        )

    def to_dataframe(self) -> pd.DataFrame:
        """Flatten the per-column drift info into a report-friendly DataFrame."""
        return pd.DataFrame([
            {
                'Feature name': feature,
                'Stattest': data.stattest,
                'Drift score': data.score,
                'Threshold': data.threshold,
                'Data Drift': ('Detected' if data.detected else 'Not detected'),
            }
            for (feature, data) in self.features.items()
        ])
class OptionSeriesDependencywheelLevelsStatesSelect(Options):
    """Options for ``series.dependencywheel.levels.states.select``.

    FIX: each getter/setter pair shared a name with no @property/@x.setter
    decorators, so every getter was silently shadowed by its setter;
    restored the property decorators.
    """

    @property
    def animation(self) -> 'OptionSeriesDependencywheelLevelsStatesSelectAnimation':
        """Nested animation sub-options for the select state."""
        return self._config_sub_data('animation', OptionSeriesDependencywheelLevelsStatesSelectAnimation)

    @property
    def borderColor(self):
        """Border colour of selected nodes (default '#000000')."""
        return self._config_get('#000000')

    @borderColor.setter
    def borderColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Fill colour of selected nodes (default '#cccccc')."""
        return self._config_get('#cccccc')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        """Whether the select state is enabled (default True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)
def run():
    """Emit a top-level Verilog module with one BUFMRCE+BUFR pair per site.

    Each tile's first site gets a BUFMRCE whose INIT_OUT is driven by a fuzz
    bit; the chosen (site, bit) pairs are recorded via ``write_params``.
    """
    print('\nmodule top();\n ')
    params = {}
    tiles = list(gen_sites())
    # FIX: the loop head previously rebound `sites` (the list being zipped
    # over) to each tile's site list — harmless only because zip had already
    # captured the iterator; distinct names remove the shadowing.
    for ((tile_name, tile_sites), isone) in zip(tiles, util.gen_fuzz_states(len(tiles))):
        site_name = tile_sites[0]
        params[tile_name] = (site_name, isone)
        print('\n wire clk_{site};\n (* KEEP, DONT_TOUCH, LOC = "{site}" *)\n BUFMRCE #(\n .INIT_OUT({isone})\n ) buf_{site} (\n .O(clk_{site})\n );\n\n BUFR bufr_{site} (\n .I(clk_{site})\n );\n'.format(site=site_name, isone=isone))
    print('endmodule')
    write_params(params)
def create_privacy_notices_util(db: Session, privacy_notice_schemas: List[PrivacyNoticeCreation], should_escape: bool=True) -> Tuple[(List[PrivacyNotice], Set[PrivacyNoticeRegion])]:
    """Validate and persist a batch of privacy notices.

    Runs data-use validation and conflict checks against all enabled
    existing notices, optionally HTML-escapes user-supplied fields, creates
    each notice, and re-syncs privacy experiences for every affected region.

    Returns the created notices and the set of regions they touch.
    """
    validate_notice_data_uses(privacy_notice_schemas, db)
    # Only enabled notices participate in conflict detection.
    existing_notices = PrivacyNotice.query(db).filter(PrivacyNotice.disabled.is_(False)).all()
    # Build unsaved instances so conflicts are caught before any DB write.
    new_notices = [PrivacyNotice(**privacy_notice.dict(exclude_unset=True)) for privacy_notice in privacy_notice_schemas]
    check_conflicting_data_uses(new_notices, existing_notices)
    check_conflicting_notice_keys(new_notices, existing_notices)
    created_privacy_notices: List[PrivacyNotice] = []
    affected_regions: Set = set()
    for privacy_notice in privacy_notice_schemas:
        if should_escape:
            # Escape user-supplied text fields before persisting.
            privacy_notice = transform_fields(transformation=escape, model=privacy_notice, fields=PRIVACY_NOTICE_ESCAPE_FIELDS)
        created_privacy_notice = PrivacyNotice.create(db=db, data=privacy_notice.dict(exclude_unset=True), check_name=False)
        created_privacy_notices.append(created_privacy_notice)
        affected_regions.update(created_privacy_notice.regions)
    # Keep privacy experiences consistent with the regions just touched.
    upsert_privacy_experiences_after_notice_update(db, affected_regions=list(affected_regions))
    return (created_privacy_notices, affected_regions)
def test_address():
    """Register.address accepts ints and hex strings; bad values raise."""
    register = Register(address=5)
    assert register.address == 5

    # A hex string is normalised to its integer value.
    register = Register(address='0x5')
    assert register.address == 5

    register.address = 3
    assert register.address == 3
    register.address = '0x6'
    assert register.address == 6

    # A non-numeric string is rejected at assignment time.
    with pytest.raises(ValueError):
        register.address = 'zz'

    # None is assignable, but validation then fails.
    register.address = None
    with pytest.raises(AssertionError):
        register.validate()
def verify_private_keys_ctx(ctx: Context, aea_project_path: Path = ROOT, password: Optional[str] = None) -> None:
    """Verify the project's private keys and refresh the context's agent config.

    Any ValueError raised during verification surfaces as a ClickException.
    """
    try:
        # First pass: verify without env-var substitution, then persist config.
        manager = AgentConfigManager.verify_private_keys(
            aea_project_path,
            private_key_helper=private_key_verify,
            substitude_env_vars=False,
            password=password,
        )
        manager.dump_config()
        # Second pass (env vars substituted) yields the config to attach.
        agent_config = AgentConfigManager.verify_private_keys(
            aea_project_path,
            private_key_helper=private_key_verify,
            password=password,
        ).agent_config
        if ctx is not None:
            ctx.agent_config = agent_config
    except ValueError as e:
        raise click.ClickException(str(e))
class TestComponentProperties():
    """Unit tests for the pass-through properties of ``Component``."""

    def setup_class(self):
        # One shared fixture: a minimal protocol config wrapped in a Component.
        self.configuration = ProtocolConfig('name', 'author', '0.1.0', protocol_specification_id='some/author:0.1.0')
        self.configuration.build_directory = 'test'
        self.component = Component(configuration=self.configuration)
        # Inject the directory directly; it is normally set during loading.
        self.directory = Path()
        self.component._directory = self.directory

    def test_component_type(self):
        # component_type mirrors the configuration's type.
        assert (self.component.component_type == self.configuration.component_type)

    def test_is_vendor(self):
        # A locally-built component is not a vendor package.
        assert (self.component.is_vendor is False)

    def test_prefix_import_path(self):
        # Import prefix is derived from author/type/name of the config.
        assert (self.component.prefix_import_path == 'packages.author.protocols.name')

    def test_component_id(self):
        assert (self.component.component_id == self.configuration.component_id)

    def test_public_id(self):
        assert (self.component.public_id == self.configuration.public_id)

    def test_directory(self):
        # The directory injected in setup_class is surfaced unchanged.
        assert (self.component.directory == self.directory)

    def test_build_directory(self):
        # build_directory was set to 'test' above, so it must be truthy.
        assert self.component.build_directory
class HelpCentreApi(HelpCentreApiBase):
    """Aggregate API for Zendesk Help Centre: one sub-API per resource type."""

    def __init__(self, config):
        super(HelpCentreApi, self).__init__(config, endpoint=EndpointFactory('help_centre'), object_type='help_centre')
        self.articles = ArticleApi(config, self.endpoint.articles, object_type='article')
        # NOTE(review): comments reuse the *articles* endpoint — presumably
        # because comments are nested under articles in the API; confirm.
        self.comments = CommentApi(config, self.endpoint.articles, object_type='comment')
        self.sections = SectionApi(config, self.endpoint.sections, object_type='section')
        self.categories = CategoryApi(config, self.endpoint.categories, object_type='category')
        self.attachments = ArticleAttachmentApi(config, self.endpoint.attachments, object_type='article_attachment')
        self.labels = LabelApi(config, self.endpoint.labels, object_type='label')
        self.topics = TopicApi(config, self.endpoint.topics, object_type='topic')
        self.posts = PostApi(config, self.endpoint.posts, object_type='post')
        self.user_segments = UserSegmentApi(config, self.endpoint.user_segments, object_type='user_segment')
        self.permission_groups = PermissionGroupApi(config, self.endpoint.permission_groups, object_type='permission_group')
        self.users = UserApi(config)

    def __call__(self, *args, **kwargs):
        # The aggregate itself is not callable; use one of its sub-APIs.
        raise NotImplementedError('Cannot directly call the HelpCentreApi!')
# FIX: the decorator was truncated to the bare token `_event`; restored as
# @attrs_event (the attrs-based event decorator used by sibling events) —
# TODO confirm against the module's other Event subclasses.
@attrs_event
class Presence(Event):
    """Presence (active-status) information for one or more users."""

    # Mapping of user id -> that user's active status.
    statuses = attr.ib(type=Mapping[(str, '_models.ActiveStatus')])
    # True when this carries the full presence list rather than a delta.
    full = attr.ib(type=bool)

    # FIX: restored @classmethod — the method takes `cls` and builds an
    # instance, but had lost its decorator.
    @classmethod
    def _parse(cls, session, data):
        statuses = {str(d['u']): _models.ActiveStatus._from_orca_presence(d) for d in data['list']}
        return cls(statuses=statuses, full=(data['list_type'] == 'full'))
# FIX: the decorator was truncated to the bare token `_shopify_session`;
# restored as @temp_shopify_session (the session wrapper this connector
# uses) — TODO confirm decorator name against the module's imports.
@temp_shopify_session
def _resync_product(product):
    """Re-sync every variant of a Shopify product inside a DB savepoint.

    Returns True on success; on any failure, rolls back to the savepoint
    and returns False (best-effort semantics preserved).
    """
    savepoint = 'shopify_resync_product'
    try:
        item = Product.find(product)
        # Savepoint allows undoing a partial variant sync without losing
        # the surrounding transaction.
        frappe.db.savepoint(savepoint)
        for variant in item.variants:
            shopify_product = ShopifyProduct(product, variant_id=variant.id)
            shopify_product.sync_product()
        return True
    except Exception:
        frappe.db.rollback(save_point=savepoint)
        return False
class FillMaskInferenceOptions(InferenceConfig):
    """Inference configuration for fill-mask NLP tasks."""

    def __init__(self, *, tokenization: NlpTokenizationConfig, results_field: t.Optional[str] = None, num_top_classes: t.Optional[int] = None):
        """Capture fill-mask settings; the configuration type is fixed to 'fill_mask'.

        :param tokenization: tokenization settings to apply at inference time
        :param results_field: optional field name to hold the results
        :param num_top_classes: optional number of top predictions to return
        """
        super().__init__(configuration_type='fill_mask')
        self.tokenization = tokenization
        self.results_field = results_field
        self.num_top_classes = num_top_classes
# FIX: both decorators had lost their `@pytest.mark` prefix (bare `.skipif`
# / `.parametrize`), a syntax error; restored them.
@pytest.mark.skipif((not has_hf_transformers), reason='requires huggingface transformers')
@pytest.mark.parametrize('torch_device', TORCH_DEVICES)
def test_torch_sdp_causal_with_mask(torch_device):
    """Torch SDP attention must match the default path under a random mask."""
    model = GPTNeoXDecoder.from_hf_hub(name='trl-internal-testing/tiny-random-GPTNeoXForCausalLM', device=torch_device)
    model.eval()
    torch.manual_seed(0)
    X = torch.randint(0, N_PIECES, (2, 10), device=torch_device)
    # Random boolean attention mask; masked positions are zeroed in the
    # outputs so only attended positions are compared.
    mask = (torch.rand((2, 10), dtype=torch.float, device=torch_device) < 0.5)
    with torch.no_grad():
        Y = (model(X, attention_mask=AttentionMask(mask)).last_hidden_layer_state * mask.unsqueeze((- 1)))
        with enable_torch_sdp():
            Y_sdp = (model(X, attention_mask=AttentionMask(mask)).last_hidden_layer_state * mask.unsqueeze((- 1)))
    torch_assertclose(Y, Y_sdp)
def similarities(obj, limit=5):
    """Return the cached similarity list for a model instance.

    Non-model inputs yield None (mirroring the template-tag contract).
    Results are cached per site/model/object/limit.
    """
    if not isinstance(obj, models.Model):
        # Guard clause: only model instances have similarities.
        return None

    meta = obj._meta
    cache_key = 'recommends:similarities:%s:%s.%s:%s:%s' % (
        settings.SITE_ID,
        meta.app_label,
        meta.object_name.lower(),
        obj.id,
        limit,
    )
    result = cache.get(cache_key)
    if result is None:
        # Cache miss: ask the registered provider and store the answer.
        provider = recommendation_registry.get_provider_for_content(obj)
        result = provider.storage.get_similarities_for_object(obj, int(limit))
        cache.set(cache_key, result, RECOMMENDS_CACHE_TEMPLATETAGS_TIMEOUT)
    return result