code
stringlengths
281
23.7M
class Test_reply_egress_tlv(unittest.TestCase):
    """Tests for cfm.reply_egress_tlv (CFM Reply Egress TLV)."""

    def setUp(self):
        self._type = cfm.CFM_REPLY_EGRESS_TLV
        self.length = 12
        self.action = 2
        self.mac_address = 'aa:bb:cc:56:34:12'
        self.port_id_length = 3
        self.port_id_subtype = 2
        self.port_id = b'\x01\x04\t'
        self.ins = cfm.reply_egress_tlv(
            self.length, self.action, self.mac_address,
            self.port_id_length, self.port_id_subtype, self.port_id)
        # type(1) + length(2) + action(1) + mac(6) + pid_len(1) + pid_sub(1) + pid(3)
        self.form = '!BHB6sBB3s'
        self.buf = struct.pack(
            self.form, self._type, self.length, self.action,
            addrconv.mac.text_to_bin(self.mac_address),
            self.port_id_length, self.port_id_subtype, self.port_id)

    def tearDown(self):
        pass

    def test_init(self):
        eq_(self.length, self.ins.length)
        eq_(self.action, self.ins.action)
        eq_(self.mac_address, self.ins.mac_address)
        eq_(self.port_id_length, self.ins.port_id_length)
        eq_(self.port_id_subtype, self.ins.port_id_subtype)
        eq_(self.port_id, self.ins.port_id)

    def test_parser(self):
        # BUG FIX: this previously parsed with cfm.reply_ingress_tlv (a
        # copy-paste from the ingress test class), so the egress parser was
        # never exercised here.
        _res = cfm.reply_egress_tlv.parser(self.buf)
        if type(_res) is tuple:
            res = _res[0]
        else:
            res = _res
        eq_(self.length, res.length)
        eq_(self.action, res.action)
        eq_(self.mac_address, res.mac_address)
        eq_(self.port_id_length, res.port_id_length)
        eq_(self.port_id_subtype, res.port_id_subtype)
        eq_(self.port_id, res.port_id)

    def test_serialize(self):
        buf = self.ins.serialize()
        res = struct.unpack_from(self.form, six.binary_type(buf))
        eq_(self._type, res[0])
        eq_(self.length, res[1])
        eq_(self.action, res[2])
        eq_(addrconv.mac.text_to_bin(self.mac_address), res[3])
        eq_(self.port_id_length, res[4])
        eq_(self.port_id_subtype, res[5])
        eq_(self.port_id, res[6])

    def test_serialize_with_zero(self):
        # length=0 and port_id_length=0 must be auto-computed on serialize.
        ins = cfm.reply_egress_tlv(0, self.action, self.mac_address,
                                   0, self.port_id_subtype, self.port_id)
        buf = ins.serialize()
        res = struct.unpack_from(self.form, six.binary_type(buf))
        eq_(self._type, res[0])
        eq_(self.length, res[1])
        eq_(self.action, res[2])
        eq_(addrconv.mac.text_to_bin(self.mac_address), res[3])
        eq_(self.port_id_length, res[4])
        eq_(self.port_id_subtype, res[5])
        eq_(self.port_id, res[6])

    def test_len(self):
        # 1 (type) + 2 (length field) + 12 (value)
        eq_(1 + 2 + 12, len(self.ins))

    def test_default_args(self):
        ins = cfm.reply_egress_tlv()
        buf = ins.serialize()
        res = struct.unpack_from(cfm.reply_egress_tlv._PACK_STR,
                                 six.binary_type(buf))
        eq_(res[0], cfm.CFM_REPLY_EGRESS_TLV)
        eq_(res[1], 7)
        eq_(res[2], 1)
        eq_(res[3], addrconv.mac.text_to_bin('00:00:00:00:00:00'))
class OptionSeriesItemPointEvents(Options):
    """Per-point event options for item series; each entry is a JavaScript
    callback slot exposed as a read/write property.

    BUG FIX: every accessor was defined twice as a plain method (getter then
    setter under the same name), so the second ``def`` silently replaced the
    first and the getter was lost. Restored as property getter/setter pairs
    — presumably the decorators were stripped upstream; confirm against the
    original source.
    """

    @property
    def click(self):
        return self._config_get(None)

    @click.setter
    def click(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drag(self):
        return self._config_get(None)

    @drag.setter
    def drag(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dragStart(self):
        return self._config_get(None)

    @dragStart.setter
    def dragStart(self, value: Any):
        self._config(value, js_type=False)

    @property
    def drop(self):
        return self._config_get(None)

    @drop.setter
    def drop(self, value: Any):
        self._config(value, js_type=False)

    @property
    def legendItemClick(self):
        return self._config_get(None)

    @legendItemClick.setter
    def legendItemClick(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOut(self):
        return self._config_get(None)

    @mouseOut.setter
    def mouseOut(self, value: Any):
        self._config(value, js_type=False)

    @property
    def mouseOver(self):
        return self._config_get(None)

    @mouseOver.setter
    def mouseOver(self, value: Any):
        self._config(value, js_type=False)

    @property
    def remove(self):
        return self._config_get(None)

    @remove.setter
    def remove(self, value: Any):
        self._config(value, js_type=False)

    @property
    def select(self):
        return self._config_get(None)

    @select.setter
    def select(self, value: Any):
        self._config(value, js_type=False)

    @property
    def unselect(self):
        return self._config_get(None)

    @unselect.setter
    def unselect(self, value: Any):
        self._config(value, js_type=False)

    @property
    def update(self):
        return self._config_get(None)

    @update.setter
    def update(self, value: Any):
        self._config(value, js_type=False)
class PostConversationOperator(BaseConversationOperator, MapOperator[ModelOutput, ModelOutput]):
    """Map operator that runs after the model: it records the model's reply
    into conversation storage and passes the output through unchanged."""

    def __init__(self, **kwargs):
        MapOperator.__init__(self, **kwargs)

    async def map(self, input_value: ModelOutput) -> ModelOutput:
        """Append the AI message text to the stored conversation, then return
        *input_value* untouched."""
        conversation: StorageConversation = await self.get_storage_conversation()
        conversation.add_ai_message(input_value.text)
        return input_value
class AsyncioThriftClient(ServiceObj):
    """Async wrapper around a Thrift client: resolves the endpoint, opens the
    connection, and exposes the underlying protocol client.

    Usable either as an awaitable (``client = await AsyncioThriftClient(...)``)
    or as an async context manager.
    """

    # Fallback RPC timeout (seconds) used when no explicit `timeout` is given.
    _TIMEOUT = 60

    def __init__(self, client_class, host, port, service=None, timeout=None, open_timeout=None):
        # `service` (optional) supplies the stats manager used for counters.
        super().__init__(service)
        self._client_class = client_class
        self._host = host
        self._port = port
        self._connected = False
        self._timeout = timeout
        self._open_timeout = open_timeout
        self._protocol = None
        self._transport = None
        self._client = None
        if self.service:
            # Pre-register the counters this client may bump later.
            self._register_counter('connected')
            self._register_counter('lookup.failed')

    def _format_counter(self, counter):
        # Counter keys are namespaced by host and port.
        return 'thrift_client.{}.{}.{}'.format(self._host, self._port, counter)

    def _inc_counter(self, counter):
        # No-op when no service (hence no stats manager) was supplied.
        if self.service:
            c = self._format_counter(counter)
            self.inc_counter(c)

    def _register_counter(self, counter):
        c = self._format_counter(counter)
        self.service.stats_mgr.register_counter(c)

    async def _lookup_service(self):
        # Hook for subclasses to resolve a service name; the base
        # implementation returns the constructor arguments unchanged.
        return (self._host, self._port)

    async def _get_timeouts(self):
        # '' is the catch-all key consumed by ThriftClientProtocolFactory.
        return {'': (self._timeout or self._TIMEOUT)}

    async def open(self):
        """Connect and return the ready-to-use Thrift client object."""
        (host, port) = (await self._lookup_service())
        timeouts = (await self._get_timeouts())
        conn_fut = self.loop.create_connection(ThriftClientProtocolFactory(self._client_class, timeouts=timeouts), host=host, port=port)
        # NOTE(review): the `loop=` kwarg of asyncio.wait_for was removed in
        # Python 3.10 — confirm the supported interpreter range.
        (transport, protocol) = (await asyncio.wait_for(conn_fut, self._open_timeout, loop=self.loop))
        self._inc_counter('connected')
        self._protocol = protocol
        self._transport = transport
        self._client = protocol.client
        # Redirect the client's close() so closing it tears down this wrapper.
        self._client.close = self.close
        self._connected = True
        return self._client

    def close(self):
        if self._protocol:
            self._protocol.close()
        if self._transport:
            self._transport.close()

    def __await__(self):
        # Enables `client = await AsyncioThriftClient(...)`.
        return self.open().__await__()

    async def __aenter__(self):
        (await self.open())
        return self._client

    async def __aexit__(self, exc_type, exc, tb):
        self.close()
class NodeFactory(factory.Factory):
    """factory_boy factory producing Node instances via
    Node.from_pubkey_and_addr with a generated pubkey and address."""

    class Meta():
        model = Node.from_pubkey_and_addr

    pubkey = factory.LazyFunction(PublicKeyFactory)
    address = factory.SubFactory(AddressFactory)

    # BUG FIX: `with_nodeid` takes `cls` but was missing @classmethod, so a
    # call on the class would have bound `nodeid` to `cls`.
    @classmethod
    def with_nodeid(cls, nodeid: int, *args: Any, **kwargs: Any) -> NodeAPI:
        """Build a node, then override its node id with *nodeid*."""
        node = cls(*args, **kwargs)
        node._id_int = nodeid
        node._id = int_to_big_endian(nodeid)
        return node
class Report(BaseReport):
    """A dated report stored in the public section of the data lake."""

    def __init__(self, date: datetime):
        self.date = date

    def full_name(self) -> str:
        """Report name combined with this report's date."""
        return self._get_name_with_date(self.date)

    def url(self) -> str:
        """Public URL where the stored report can be fetched."""
        lake = DataLake().public
        return lake.get_report_url(report_name=self.name, date=self.date)

    def path(self) -> str:
        """Storage path of the report inside the data lake."""
        lake = DataLake().public
        return lake.get_report_path(report_name=self.name, date=self.date)

    def save(self, df: pd.DataFrame):
        """Persist *df* as this report in the data lake."""
        lake = DataLake().public
        lake.save_report(report_df=df, report_name=self.name, date=self.date)

    def read(self) -> pd.DataFrame:
        """Load this report back from the data lake."""
        lake = DataLake().public
        return lake.get_report(report_name=self.name, date=self.date)
class IPView(ip.IPythonView):
    """IPython console text view that tracks the plugin's color/font settings
    via CSS and tears itself down on Ctrl-D."""

    # Cached style state; the *_str fields hold ready-to-use CSS fragments.
    __text_color = None
    __background_color = None
    __font = None
    __css_provider = None
    __text_color_str = None
    __background_color_str = None
    __font_str = None
    __iptheme = None

    def __init__(self, namespace):
        ip.IPythonView.__init__(self)
        # React to settings changes on the UI thread.
        event.add_ui_callback(self.__on_option_set, SETTINGS_STRING)
        self.set_wrap_mode(Gtk.WrapMode.CHAR)
        self.updateNamespace(namespace)
        # Mask exit/quit so the embedded shell cannot kill the host app.
        self.updateNamespace({'exit': None, 'quit': None})
        style_context = self.get_style_context()
        self.__css_provider = Gtk.CssProvider()
        style_context.add_provider(self.__css_provider, Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION)
        # Seed the three style options from current settings.
        for option in ('text_color', 'background_color', 'font'):
            self.__on_option_set(None, xl_settings, 'plugin/ipconsole/{option}'.format(option=option))

    def __on_option_set(self, _event, settings, option):
        # Settings callback: rebuild the relevant CSS fragment and schedule a
        # CSS refresh on the GLib main loop.
        if (option == 'plugin/ipconsole/font'):
            pango_font_str = settings.get_option(option, FONT)
            self.__font_str = guiutil.css_from_pango_font_description(pango_font_str)
            GLib.idle_add(self.__update_css)
        if (option == 'plugin/ipconsole/text_color'):
            rgba_str = settings.get_option(option, 'lavender')
            rgba = Gdk.RGBA()
            rgba.parse(rgba_str)
            self.__text_color_str = ('color: ' + guiutil.css_from_rgba_without_alpha(rgba))
            GLib.idle_add(self.__update_css)
        if (option == 'plugin/ipconsole/background_color'):
            rgba_str = settings.get_option(option, 'black')
            rgba = Gdk.RGBA()
            rgba.parse(rgba_str)
            self.__background_color_str = ('background-color: ' + guiutil.css_from_rgba_without_alpha(rgba))
            GLib.idle_add(self.__update_css)

    def __update_css(self):
        # Wait until all three fragments are known before loading any CSS.
        if ((self.__text_color_str is None) or (self.__background_color_str is None) or (self.__font_str is None)):
            return False
        data_str = ('text {%s; %s;} textview {%s;}' % (self.__background_color_str, self.__text_color_str, self.__font_str))
        self.__css_provider.load_from_data(data_str.encode('utf-8'))
        # Returning False removes this idle callback after one run.
        return False

    def onKeyPressExtend(self, key_event):
        if ip.IPythonView.onKeyPressExtend(self, key_event):
            return True
        # '\x04' is Ctrl-D (EOT): close the console view.
        if (key_event.string == '\x04'):
            self.destroy()
class TVTKClassChooser(HasTraits):
    """Traits UI dialog for browsing TVTK classes: pick a class by name,
    search class documentation, and view the selected class's docs."""

    # The instantiated TVTK object for the current class_name (or None).
    object = Property

    class_name = Str('', desc='class name of TVTK class (case sensitive)')

    search = Str('', desc='string to search in TVTK class documentation supports the "and" and "or" keywords. press <Enter> to start search. This is case insensitive.')

    clear_search = Button

    # Documentation text shown for the current selection (search help by default).
    doc = Str(_search_help_doc)

    # Completion candidates for the partially typed class name.
    completions = List(Str)

    # Class names currently on offer (narrowed by search results).
    available = List

    finder = Instance(DocSearch)

    # Maximum number of completions displayed.
    n_completion = Int(25)

    view = View(Group(Item(name='class_name', editor=EnumEditor(name='available')), Item(name='class_name', has_focus=True), Item(name='search', editor=TextEditor(enter_set=True, auto_set=False)), Item(name='clear_search', show_label=False), Item('_'), Item(name='completions', editor=ListEditor(columns=3), style='readonly'), Item(name='doc', resizable=True, label='Documentation', style='custom')), id='tvtk_doc', resizable=True, width=800, height=600, title='TVTK class chooser', buttons=['OK', 'Cancel'])

    def __init__(self, **traits):
        super(TVTKClassChooser, self).__init__(**traits)
        # Remember the full class list so a cleared search can restore it.
        self._orig_available = list(self.available)

    def _get_object(self):
        # Property getter: instantiate the named TVTK class; None when the
        # name is empty, unknown, or not instantiable.
        o = None
        if (len(self.class_name) > 0):
            try:
                o = getattr(tvtk, self.class_name)()
            except (AttributeError, TypeError):
                pass
        return o

    def _class_name_changed(self, value):
        av = self.available
        comp = [x for x in av if x.startswith(value)]
        self.completions = comp[:self.n_completion]
        # Auto-complete when exactly one candidate remains.
        if ((len(comp) == 1) and (value != comp[0])):
            self.class_name = comp[0]
        o = self.object
        if (o is not None):
            self.doc = get_tvtk_class_doc(o)
        else:
            self.doc = _search_help_doc

    def _finder_default(self):
        return DocSearch()

    def _clear_search_fired(self):
        self.search = ''

    def _search_changed(self, value):
        # Require at least 3 characters before running a search.
        if (len(value) < 3):
            self.available = self._orig_available
            return
        f = self.finder
        result = f.search(str(value))
        if (len(result) == 0):
            self.available = self._orig_available
        elif (len(result) == 1):
            # A unique hit selects that class directly.
            self.class_name = result[0]
        else:
            self.available = result
            self.completions = result[:self.n_completion]

    def _available_default(self):
        return get_tvtk_classes()
class OptionPlotoptionsSeriesStatesInactive(Options):
    """Options for the 'inactive' state of series plot options.

    BUG FIX: `enabled` and `opacity` were each defined twice as plain methods
    (getter then setter under the same name), so the second ``def`` silently
    replaced the first and the getter was lost. Restored as property
    getter/setter pairs — presumably the decorators were stripped upstream;
    confirm against the original source.
    """

    @property
    def animation(self) -> 'OptionPlotoptionsSeriesStatesInactiveAnimation':
        """Sub-options controlling the inactive-state animation."""
        return self._config_sub_data('animation', OptionPlotoptionsSeriesStatesInactiveAnimation)

    @property
    def enabled(self):
        """Whether the inactive state is enabled (defaults to True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def opacity(self):
        """Opacity applied to inactive series (defaults to 0.2)."""
        return self._config_get(0.2)

    @opacity.setter
    def opacity(self, num: float):
        self._config(num, js_type=False)
class TestCreateCustomToken():
    """Tests for auth.create_custom_token().

    BUG FIX: the parametrize decorators below had lost their '@pytest.mark'
    prefix (a bare '.parametrize(...)' statement is a syntax error); restored.
    """

    valid_args = {
        'Basic': (MOCK_UID, {'one': 2, 'three': 'four'}),
        'NoDevClaims': (MOCK_UID, None),
        'EmptyDevClaims': (MOCK_UID, {}),
    }

    invalid_args = {
        'NoUid': (None, None, ValueError),
        'EmptyUid': ('', None, ValueError),
        'LongUid': (('x' * 129), None, ValueError),
        'BoolUid': (True, None, ValueError),
        'IntUid': (1, None, ValueError),
        'ListUid': ([], None, ValueError),
        'EmptyDictUid': ({}, None, ValueError),
        'NonEmptyDictUid': ({'a': 1}, None, ValueError),
        'BoolClaims': (MOCK_UID, True, ValueError),
        'IntClaims': (MOCK_UID, 1, ValueError),
        'StrClaims': (MOCK_UID, 'foo', ValueError),
        'ListClaims': (MOCK_UID, [], ValueError),
        'TupleClaims': (MOCK_UID, (1, 2), ValueError),
        'SingleReservedClaim': (MOCK_UID, {'sub': '1234'}, ValueError),
        'MultipleReservedClaims': (MOCK_UID, {'sub': '1234', 'aud': 'foo'}, ValueError),
    }

    @pytest.mark.parametrize('values', valid_args.values(), ids=list(valid_args))
    def test_valid_params(self, auth_app, values):
        (user, claims) = values
        custom_token = auth.create_custom_token(user, claims, app=auth_app)
        verify_custom_token(custom_token, claims)

    @pytest.mark.parametrize('values', invalid_args.values(), ids=list(invalid_args))
    def test_invalid_params(self, auth_app, values):
        (user, claims, error) = values
        with pytest.raises(error):
            auth.create_custom_token(user, claims, app=auth_app)

    def test_noncert_credential(self, user_mgt_app):
        # Against the emulator, tokens are signed with the emulator e-mail;
        # otherwise a non-cert credential cannot sign and must raise.
        if _is_emulated():
            custom_token = auth.create_custom_token(MOCK_UID, app=user_mgt_app).decode()
            self._verify_signer(custom_token, _token_gen.AUTH_EMULATOR_EMAIL)
            return
        with pytest.raises(ValueError):
            auth.create_custom_token(MOCK_UID, app=user_mgt_app)

    def test_sign_with_iam(self):
        options = {'serviceAccountId': 'test-service-account', 'projectId': 'mock-project-id'}
        app = firebase_admin.initialize_app(
            testutils.MockCredential(), name='iam-signer-app', options=options)
        try:
            signature = base64.b64encode(b'test').decode()
            iam_resp = '{{"signedBlob": "{0}"}}'.format(signature)
            _overwrite_iam_request(app, testutils.MockRequest(200, iam_resp))
            custom_token = auth.create_custom_token(MOCK_UID, app=app).decode()
            # JWT signatures are base64 without padding, hence rstrip('=').
            assert custom_token.endswith(('.' + signature.rstrip('=')))
            self._verify_signer(custom_token, 'test-service-account')
        finally:
            firebase_admin.delete_app(app)

    def test_sign_with_iam_error(self):
        options = {'serviceAccountId': 'test-service-account', 'projectId': 'mock-project-id'}
        app = firebase_admin.initialize_app(
            testutils.MockCredential(), name='iam-signer-app', options=options)
        try:
            iam_resp = '{"error": {"code": 403, "message": "test error"}}'
            _overwrite_iam_request(app, testutils.MockRequest(403, iam_resp))
            with pytest.raises(auth.TokenSignError) as excinfo:
                auth.create_custom_token(MOCK_UID, app=app)
            error = excinfo.value
            assert (error.code == exceptions.UNKNOWN)
            assert (iam_resp in str(error))
            assert isinstance(error.cause, google.auth.exceptions.TransportError)
        finally:
            firebase_admin.delete_app(app)

    def test_sign_with_discovered_service_account(self):
        request = testutils.MockRequest(200, 'discovered-service-account')
        options = {'projectId': 'mock-project-id'}
        app = firebase_admin.initialize_app(
            testutils.MockCredential(), name='iam-signer-app', options=options)
        try:
            _overwrite_iam_request(app, request)
            # Force initialization of the signing provider (hits metadata).
            client = auth._get_client(app)
            assert (client._token_generator.signing_provider is not None)
            # Then invoke the IAM signer with a canned signature response.
            signature = base64.b64encode(b'test').decode()
            request.response = testutils.MockResponse(
                200, '{{"signedBlob": "{0}"}}'.format(signature))
            custom_token = auth.create_custom_token(MOCK_UID, app=app).decode()
            assert custom_token.endswith(('.' + signature.rstrip('=')))
            self._verify_signer(custom_token, 'discovered-service-account')
            # One call for discovery, one for signing.
            assert (len(request.log) == 2)
            assert (request.log[0][1]['headers'] == {'Metadata-Flavor': 'Google'})
        finally:
            firebase_admin.delete_app(app)

    def test_sign_with_discovery_failure(self):
        request = testutils.MockFailedRequest(Exception('test error'))
        options = {'projectId': 'mock-project-id'}
        app = firebase_admin.initialize_app(
            testutils.MockCredential(), name='iam-signer-app', options=options)
        try:
            _overwrite_iam_request(app, request)
            with pytest.raises(ValueError) as excinfo:
                auth.create_custom_token(MOCK_UID, app=app)
            assert str(excinfo.value).startswith('Failed to determine service account: test error')
            assert (len(request.log) == 1)
            assert (request.log[0][1]['headers'] == {'Metadata-Flavor': 'Google'})
        finally:
            firebase_admin.delete_app(app)

    def _verify_signer(self, token, signer):
        # The token must be a three-segment JWT issued by *signer*.
        segments = token.split('.')
        assert (len(segments) == 3)
        body = jwt.decode(token, verify=False)
        assert (body['iss'] == signer)
        assert (body['sub'] == signer)
# BUG FIX: restored the '@pytest.mark' prefix that was missing from the
# parametrize decorator (a bare '.parametrize(...)' line is a syntax error).
@pytest.mark.parametrize('SKs,messages', [(list(range(1, 6)), list(range(1, 6)))])
def test_core_aggregate_verify(SKs, messages):
    """Sign five distinct messages with five keys, aggregate the signatures,
    and check the aggregate verifies under _CoreAggregateVerify."""
    PKs = [G2Basic.SkToPk(sk) for sk in SKs]
    # bytes(n) yields an n-byte zero buffer, so messages differ by length.
    messages = [bytes(msg) for msg in messages]
    signatures = [G2Basic._CoreSign(sk, msg, G2Basic.DST)
                  for (sk, msg) in zip(SKs, messages)]
    aggregate_signature = G2Basic.Aggregate(signatures)
    assert G2Basic._CoreAggregateVerify(PKs, messages, aggregate_signature, G2Basic.DST)
class Solution(object):
    def decodeAtIndex(self, S, K):
        """Return the K-th (1-indexed) character of the string obtained by
        decoding S, where a digit d means "repeat everything so far d times".

        Works on prefix lengths only, so the (potentially huge) decoded
        string is never materialized.
        """
        def looks_like_digit(c):
            # Mirrors the original check exactly: ord(c) - ord('0') in [0, 10].
            return 0 <= ord(c) - ord('0') <= 10

        prefix_lengths = []   # decoded length after consuming S[:i+1]
        decoded_len = 0
        first_digit_at = -1   # index of the first digit seen (kept for parity)

        # Forward pass: grow decoded lengths until the prefix covers K.
        for pos, symbol in enumerate(S):
            if looks_like_digit(symbol):
                if first_digit_at < 0:
                    first_digit_at = pos
                decoded_len *= ord(symbol) - ord('0')
            else:
                decoded_len += 1
            prefix_lengths.append(decoded_len)
            if decoded_len >= K:
                if looks_like_digit(symbol):
                    break
                # A literal character landed exactly on/after K: answer found.
                return symbol

        # Backward pass: peel off repetitions, reducing K modulo the length
        # of the block being repeated.
        while pos >= 0:
            symbol = S[pos]
            decoded_len = prefix_lengths[pos]
            pos -= 1
            if looks_like_digit(symbol):
                K %= prefix_lengths[pos]
                if K == 0:
                    K = prefix_lengths[pos]
            elif K == decoded_len:
                return symbol
        return symbol
class EasyForm(Form):
    """Simple contact form: name, e-mail address and message, all required."""
    # Display name; pre-filled with u'test'.
    name = TextField('name', validators=[wtforms.validators.DataRequired()], default=u'test')
    # Must be present and a syntactically valid e-mail address.
    email = TextField('email', validators=[wtforms.validators.Email(), wtforms.validators.DataRequired()])
    # Free-form message body.
    message = TextAreaField('message', validators=[wtforms.validators.DataRequired()])
class Ban(BaseObject):
    """Ban record for an IP address or visitor; any extra keyword arguments
    become attributes on the instance."""

    def __init__(self, api=None, ip_address=None, visitor=None, **kwargs):
        self.api = api
        self.ip_address = ip_address
        self.visitor = visitor
        for name, value in kwargs.items():
            setattr(self, name, value)
        # Attributes that ended up None should not be flagged as dirty.
        for name in self.to_dict():
            if getattr(self, name) is not None:
                continue
            try:
                self._dirty_attributes.remove(name)
            except KeyError:
                pass
# NOTE(review): the source had a bare '_defaults()' call here, which is a
# mangled decorator; restored as '@use_defaults()' — confirm the decorator
# name against the original file.
@use_defaults()
class BadgeFieldFormSchema(Schema):
    """API schema for a badge field form (JSON:API, dasherized keys)."""

    class Meta():
        type_ = 'badge-field-form'
        self_view = 'v1.badge_field_form_detail'
        self_view_kwargs = {'id': '<id>'}
        inflect = dasherize

    badge_field_id = fields.Integer(dump_only=True)
    # Relationship back to the owning badge form.
    badge_form = Relationship(self_view='v1.badge_field_form_badge_form', self_view_kwargs={'id': '<id>'}, related_view='v1.badge_form_detail', related_view_kwargs={'badge_field_form_id': '<id>'}, schema='BadgeFormSchema', type_='badge_form')
    badge_id = fields.Str(required=True)
    field_identifier = fields.String(required=False)
    custom_field = fields.String(required=False)
    sample_text = fields.String(required=False)
    # Text layout/appearance settings for the rendered badge field.
    font_size = fields.Integer(required=False)
    text_alignment = fields.String(required=False)
    text_type = fields.String(required=False)
    is_deleted = fields.Boolean(required=False, default=False)
    font_name = fields.String(required=False)
    font_weight = fields.List(fields.Nested(FontWeight), allow_none=True, required=False)
    font_color = fields.String(required=False)
    margin_top = fields.Integer(required=False)
    margin_bottom = fields.Integer(required=False)
    margin_left = fields.Integer(required=False)
    margin_right = fields.Integer(required=False)
    text_rotation = fields.Integer(required=False)
    qr_custom_field = fields.List(fields.String(), required=False)
    is_field_expanded = fields.Boolean(required=False, default=False)
class ApiToken(Base):
    """SQLAlchemy model for the 'tokens' table: API tokens owned by users."""
    __tablename__ = 'tokens'
    # 40-char token string; primary key, default produced by _api_token_generator.
    token = sa.Column(sa.String(40), default=_api_token_generator, primary_key=True)
    # Creation timestamp, defaulted to utcnow on insert.
    created = sa.Column(sa.DateTime, default=datetime.datetime.utcnow, nullable=False)
    user_id = sa.Column(GUID, sa.ForeignKey('users.id'), nullable=False)
    # Owning user; tokens are removed with their user (delete-orphan cascade).
    user = sa.orm.relationship('User', lazy='joined', backref=sa.orm.backref('api_tokens', cascade='all, delete-orphan'))
    # Optional free-text description of the token's purpose.
    description = sa.Column(sa.Text, nullable=True)
class OptionSeriesArcdiagramAccessibility(Options):
    """Accessibility options for arc-diagram series.

    BUG FIX: each scalar accessor was defined twice as a plain method (getter
    then setter under the same name), so the second ``def`` silently replaced
    the first and the getter was lost. Restored as property getter/setter
    pairs — presumably the decorators were stripped upstream; confirm against
    the original source.
    """

    @property
    def description(self):
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def descriptionFormat(self):
        return self._config_get(None)

    @descriptionFormat.setter
    def descriptionFormat(self, text: str):
        self._config(text, js_type=False)

    @property
    def enabled(self):
        return self._config_get(None)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def exposeAsGroupOnly(self):
        return self._config_get(None)

    @exposeAsGroupOnly.setter
    def exposeAsGroupOnly(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def keyboardNavigation(self) -> 'OptionSeriesArcdiagramAccessibilityKeyboardnavigation':
        """Sub-options for keyboard navigation."""
        return self._config_sub_data('keyboardNavigation', OptionSeriesArcdiagramAccessibilityKeyboardnavigation)

    @property
    def point(self) -> 'OptionSeriesArcdiagramAccessibilityPoint':
        """Sub-options for per-point accessibility."""
        return self._config_sub_data('point', OptionSeriesArcdiagramAccessibilityPoint)
def vector_test_argsets():
    """Assemble the argument sets for vector replace tests: each entry pairs
    a test function, replacement kwargs, and the expected error (or None)."""
    return [
        ReplaceTestArgs(TestFunction(Vv), {}, None),
        ReplaceTestArgs(TestFunction(V0), {}, ValueError),
        ReplaceTestArgs(TestFunction(Vv), {'new_idx': 0}, ValueError),
        ReplaceTestArgs(TestFunction(Wv), {'new_idx': 0}, None),
        ReplaceTestArgs(TestFunction(Wv), {'new_idx': 1}, ValueError),
        ReplaceTestArgs(TestFunctions(Wv), {'new_idx': 0}, None),
        ReplaceTestArgs(TestFunction(W), {'new_idx': 7}, IndexError),
    ]
class InvitationResponseAllOf(ModelNormal):
    """Auto-generated OpenAPI model wrapping an InvitationResponseData payload.

    NOTE(review): the bare '_property' and '_js_args_to_python_args' tokens in
    the source are mangled decorators (the text is a syntax error as-is);
    restored as '@cached_property' / '@convert_js_args_to_python_args' per the
    OpenAPI generator's standard output — confirm against the original file.
    """

    allowed_values = {}
    validations = {}

    @cached_property
    def additional_properties_type():
        # Lazy import avoids circular imports between generated models.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        lazy_import()
        return {'data': (InvitationResponseData,)}

    @cached_property
    def discriminator():
        return None

    attribute_map = {'data': 'data'}
    read_only_vars = {}
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Alternate constructor used during deserialization of API responses."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard keys unknown to the schema.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes'])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Keyword-only constructor; positional arguments are rejected and
        read-only attributes may not be set directly."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,))
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = (_visited_composed_classes + (self.__class__,))
        for (var_name, var_value) in kwargs.items():
            # Optionally discard keys unknown to the schema.
            if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)):
                continue
            setattr(self, var_name, var_value)
            if (var_name in self.read_only_vars):
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class SnapshotClient(NamespacedClient):
    """Async client namespace for the Elasticsearch snapshot APIs.

    Every method builds the endpoint path, collects query parameters and an
    optional JSON body, then delegates to ``self.perform_request``.
    NOTE(review): ``_rewrite_parameters(...)`` appeared as bare expression
    statements in the reviewed source; in elasticsearch-py these are
    decorators, restored here as ``@_rewrite_parameters``.
    """

    @_rewrite_parameters()
    async def cleanup_repository(self, *, name: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None) -> ObjectApiResponse[t.Any]:
        """``POST /_snapshot/{name}/_cleanup`` — clean up a repository."""
        if name in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'name'")
        __path = f'/_snapshot/{_quote(name)}/_cleanup'
        __query: t.Dict[(str, t.Any)] = {}
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if master_timeout is not None:
            __query['master_timeout'] = master_timeout
        if pretty is not None:
            __query['pretty'] = pretty
        if timeout is not None:
            __query['timeout'] = timeout
        __headers = {'accept': 'application/json'}
        return await self.perform_request('POST', __path, params=__query, headers=__headers)

    @_rewrite_parameters(body_fields=('indices',))
    async def clone(self, *, repository: str, snapshot: str, target_snapshot: str, indices: t.Optional[str]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
        """``PUT .../_clone/{target_snapshot}`` — clone part of a snapshot."""
        if repository in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'repository'")
        if snapshot in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'snapshot'")
        if target_snapshot in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'target_snapshot'")
        # 'indices' is required, either directly or via a raw body.
        if (indices is None) and (body is None):
            raise ValueError("Empty value passed for parameter 'indices'")
        __path = f'/_snapshot/{_quote(repository)}/{_quote(snapshot)}/_clone/{_quote(target_snapshot)}'
        __query: t.Dict[(str, t.Any)] = {}
        __body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if master_timeout is not None:
            __query['master_timeout'] = master_timeout
        if pretty is not None:
            __query['pretty'] = pretty
        if timeout is not None:
            __query['timeout'] = timeout
        if not __body:
            # Only fill individual body fields when no raw body was given.
            if indices is not None:
                __body['indices'] = indices
        __headers = {'accept': 'application/json', 'content-type': 'application/json'}
        return await self.perform_request('PUT', __path, params=__query, headers=__headers, body=__body)

    @_rewrite_parameters(body_fields=('feature_states', 'ignore_unavailable', 'include_global_state', 'indices', 'metadata', 'partial'))
    async def create(self, *, repository: str, snapshot: str, error_trace: t.Optional[bool]=None, feature_states: t.Optional[t.Sequence[str]]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, ignore_unavailable: t.Optional[bool]=None, include_global_state: t.Optional[bool]=None, indices: t.Optional[t.Union[(str, t.Sequence[str])]]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, metadata: t.Optional[t.Mapping[(str, t.Any)]]=None, partial: t.Optional[bool]=None, pretty: t.Optional[bool]=None, wait_for_completion: t.Optional[bool]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
        """``PUT /_snapshot/{repository}/{snapshot}`` — create a snapshot."""
        if repository in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'repository'")
        if snapshot in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'snapshot'")
        __path = f'/_snapshot/{_quote(repository)}/{_quote(snapshot)}'
        __query: t.Dict[(str, t.Any)] = {}
        __body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if master_timeout is not None:
            __query['master_timeout'] = master_timeout
        if pretty is not None:
            __query['pretty'] = pretty
        if wait_for_completion is not None:
            __query['wait_for_completion'] = wait_for_completion
        if not __body:
            if feature_states is not None:
                __body['feature_states'] = feature_states
            if ignore_unavailable is not None:
                __body['ignore_unavailable'] = ignore_unavailable
            if include_global_state is not None:
                __body['include_global_state'] = include_global_state
            if indices is not None:
                __body['indices'] = indices
            if metadata is not None:
                __body['metadata'] = metadata
            if partial is not None:
                __body['partial'] = partial
        if not __body:
            # Body is entirely optional for this endpoint.
            __body = None
        __headers = {'accept': 'application/json'}
        if __body is not None:
            __headers['content-type'] = 'application/json'
        return await self.perform_request('PUT', __path, params=__query, headers=__headers, body=__body)

    @_rewrite_parameters(body_fields=('settings', 'type', 'repository'))
    async def create_repository(self, *, name: str, settings: t.Optional[t.Mapping[(str, t.Any)]]=None, type: t.Optional[str]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, repository: t.Optional[t.Mapping[(str, t.Any)]]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, verify: t.Optional[bool]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
        """``PUT /_snapshot/{name}`` — register a snapshot repository."""
        if name in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'name'")
        # 'settings' and 'type' are required, directly or via a raw body.
        if (settings is None) and (body is None):
            raise ValueError("Empty value passed for parameter 'settings'")
        if (type is None) and (body is None):
            raise ValueError("Empty value passed for parameter 'type'")
        __path = f'/_snapshot/{_quote(name)}'
        __query: t.Dict[(str, t.Any)] = {}
        __body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if master_timeout is not None:
            __query['master_timeout'] = master_timeout
        if pretty is not None:
            __query['pretty'] = pretty
        if timeout is not None:
            __query['timeout'] = timeout
        if verify is not None:
            __query['verify'] = verify
        if not __body:
            if settings is not None:
                __body['settings'] = settings
            if type is not None:
                __body['type'] = type
            if repository is not None:
                __body['repository'] = repository
        __headers = {'accept': 'application/json', 'content-type': 'application/json'}
        return await self.perform_request('PUT', __path, params=__query, headers=__headers, body=__body)

    @_rewrite_parameters()
    async def delete(self, *, repository: str, snapshot: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """``DELETE /_snapshot/{repository}/{snapshot}`` — delete snapshots."""
        if repository in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'repository'")
        if snapshot in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'snapshot'")
        __path = f'/_snapshot/{_quote(repository)}/{_quote(snapshot)}'
        __query: t.Dict[(str, t.Any)] = {}
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if master_timeout is not None:
            __query['master_timeout'] = master_timeout
        if pretty is not None:
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return await self.perform_request('DELETE', __path, params=__query, headers=__headers)

    @_rewrite_parameters()
    async def delete_repository(self, *, name: t.Union[(str, t.Sequence[str])], error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None) -> ObjectApiResponse[t.Any]:
        """``DELETE /_snapshot/{name}`` — unregister repositories."""
        if name in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'name'")
        __path = f'/_snapshot/{_quote(name)}'
        __query: t.Dict[(str, t.Any)] = {}
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if master_timeout is not None:
            __query['master_timeout'] = master_timeout
        if pretty is not None:
            __query['pretty'] = pretty
        if timeout is not None:
            __query['timeout'] = timeout
        __headers = {'accept': 'application/json'}
        return await self.perform_request('DELETE', __path, params=__query, headers=__headers)

    @_rewrite_parameters()
    async def get(self, *, repository: str, snapshot: t.Union[(str, t.Sequence[str])], after: t.Optional[str]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, from_sort_value: t.Optional[str]=None, human: t.Optional[bool]=None, ignore_unavailable: t.Optional[bool]=None, include_repository: t.Optional[bool]=None, index_details: t.Optional[bool]=None, index_names: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, offset: t.Optional[int]=None, order: t.Optional[t.Union[("t.Literal['asc', 'desc']", str)]]=None, pretty: t.Optional[bool]=None, size: t.Optional[int]=None, slm_policy_filter: t.Optional[str]=None, sort: t.Optional[t.Union[("t.Literal['duration', 'failed_shard_count', 'index_count', 'name', 'repository', 'shard_count', 'start_time']", str)]]=None, verbose: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """``GET /_snapshot/{repository}/{snapshot}`` — get snapshot info."""
        if repository in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'repository'")
        if snapshot in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'snapshot'")
        __path = f'/_snapshot/{_quote(repository)}/{_quote(snapshot)}'
        __query: t.Dict[(str, t.Any)] = {}
        if after is not None:
            __query['after'] = after
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if from_sort_value is not None:
            __query['from_sort_value'] = from_sort_value
        if human is not None:
            __query['human'] = human
        if ignore_unavailable is not None:
            __query['ignore_unavailable'] = ignore_unavailable
        if include_repository is not None:
            __query['include_repository'] = include_repository
        if index_details is not None:
            __query['index_details'] = index_details
        if index_names is not None:
            __query['index_names'] = index_names
        if master_timeout is not None:
            __query['master_timeout'] = master_timeout
        if offset is not None:
            __query['offset'] = offset
        if order is not None:
            __query['order'] = order
        if pretty is not None:
            __query['pretty'] = pretty
        if size is not None:
            __query['size'] = size
        if slm_policy_filter is not None:
            __query['slm_policy_filter'] = slm_policy_filter
        if sort is not None:
            __query['sort'] = sort
        if verbose is not None:
            __query['verbose'] = verbose
        __headers = {'accept': 'application/json'}
        return await self.perform_request('GET', __path, params=__query, headers=__headers)

    @_rewrite_parameters()
    async def get_repository(self, *, name: t.Optional[t.Union[(str, t.Sequence[str])]]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, local: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """``GET /_snapshot[/{name}]`` — get repository information."""
        if name not in SKIP_IN_PATH:
            __path = f'/_snapshot/{_quote(name)}'
        else:
            # No name: list all repositories.
            __path = '/_snapshot'
        __query: t.Dict[(str, t.Any)] = {}
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if local is not None:
            __query['local'] = local
        if master_timeout is not None:
            __query['master_timeout'] = master_timeout
        if pretty is not None:
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return await self.perform_request('GET', __path, params=__query, headers=__headers)

    @_rewrite_parameters(body_fields=('feature_states', 'ignore_index_settings', 'ignore_unavailable', 'include_aliases', 'include_global_state', 'index_settings', 'indices', 'partial', 'rename_pattern', 'rename_replacement'))
    async def restore(self, *, repository: str, snapshot: str, error_trace: t.Optional[bool]=None, feature_states: t.Optional[t.Sequence[str]]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, ignore_index_settings: t.Optional[t.Sequence[str]]=None, ignore_unavailable: t.Optional[bool]=None, include_aliases: t.Optional[bool]=None, include_global_state: t.Optional[bool]=None, index_settings: t.Optional[t.Mapping[(str, t.Any)]]=None, indices: t.Optional[t.Union[(str, t.Sequence[str])]]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, partial: t.Optional[bool]=None, pretty: t.Optional[bool]=None, rename_pattern: t.Optional[str]=None, rename_replacement: t.Optional[str]=None, wait_for_completion: t.Optional[bool]=None, body: t.Optional[t.Dict[(str, t.Any)]]=None) -> ObjectApiResponse[t.Any]:
        """``POST .../_restore`` — restore a snapshot."""
        if repository in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'repository'")
        if snapshot in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'snapshot'")
        __path = f'/_snapshot/{_quote(repository)}/{_quote(snapshot)}/_restore'
        __query: t.Dict[(str, t.Any)] = {}
        __body: t.Dict[(str, t.Any)] = (body if (body is not None) else {})
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if master_timeout is not None:
            __query['master_timeout'] = master_timeout
        if pretty is not None:
            __query['pretty'] = pretty
        if wait_for_completion is not None:
            __query['wait_for_completion'] = wait_for_completion
        if not __body:
            if feature_states is not None:
                __body['feature_states'] = feature_states
            if ignore_index_settings is not None:
                __body['ignore_index_settings'] = ignore_index_settings
            if ignore_unavailable is not None:
                __body['ignore_unavailable'] = ignore_unavailable
            if include_aliases is not None:
                __body['include_aliases'] = include_aliases
            if include_global_state is not None:
                __body['include_global_state'] = include_global_state
            if index_settings is not None:
                __body['index_settings'] = index_settings
            if indices is not None:
                __body['indices'] = indices
            if partial is not None:
                __body['partial'] = partial
            if rename_pattern is not None:
                __body['rename_pattern'] = rename_pattern
            if rename_replacement is not None:
                __body['rename_replacement'] = rename_replacement
        if not __body:
            # Body is entirely optional for this endpoint.
            __body = None
        __headers = {'accept': 'application/json'}
        if __body is not None:
            __headers['content-type'] = 'application/json'
        return await self.perform_request('POST', __path, params=__query, headers=__headers, body=__body)

    @_rewrite_parameters()
    async def status(self, *, repository: t.Optional[str]=None, snapshot: t.Optional[t.Union[(str, t.Sequence[str])]]=None, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, ignore_unavailable: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None) -> ObjectApiResponse[t.Any]:
        """``GET .../_status`` — status of running snapshots."""
        # Path depends on which of repository/snapshot were supplied.
        if (repository not in SKIP_IN_PATH) and (snapshot not in SKIP_IN_PATH):
            __path = f'/_snapshot/{_quote(repository)}/{_quote(snapshot)}/_status'
        elif repository not in SKIP_IN_PATH:
            __path = f'/_snapshot/{_quote(repository)}/_status'
        else:
            __path = '/_snapshot/_status'
        __query: t.Dict[(str, t.Any)] = {}
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if ignore_unavailable is not None:
            __query['ignore_unavailable'] = ignore_unavailable
        if master_timeout is not None:
            __query['master_timeout'] = master_timeout
        if pretty is not None:
            __query['pretty'] = pretty
        __headers = {'accept': 'application/json'}
        return await self.perform_request('GET', __path, params=__query, headers=__headers)

    @_rewrite_parameters()
    async def verify_repository(self, *, name: str, error_trace: t.Optional[bool]=None, filter_path: t.Optional[t.Union[(str, t.Sequence[str])]]=None, human: t.Optional[bool]=None, master_timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None, pretty: t.Optional[bool]=None, timeout: t.Optional[t.Union[('t.Literal[-1]', 't.Literal[0]', str)]]=None) -> ObjectApiResponse[t.Any]:
        """``POST /_snapshot/{name}/_verify`` — verify repository access."""
        if name in SKIP_IN_PATH:
            raise ValueError("Empty value passed for parameter 'name'")
        __path = f'/_snapshot/{_quote(name)}/_verify'
        __query: t.Dict[(str, t.Any)] = {}
        if error_trace is not None:
            __query['error_trace'] = error_trace
        if filter_path is not None:
            __query['filter_path'] = filter_path
        if human is not None:
            __query['human'] = human
        if master_timeout is not None:
            __query['master_timeout'] = master_timeout
        if pretty is not None:
            __query['pretty'] = pretty
        if timeout is not None:
            __query['timeout'] = timeout
        __headers = {'accept': 'application/json'}
        return await self.perform_request('POST', __path, params=__query, headers=__headers)
class SatisfactionRating(BaseObject):
    """Model for a Zendesk satisfaction rating.

    NOTE(review): the duplicate ``def assignee``/``def created``/... pairs in
    the reviewed source are getter/setter pairs that lost their
    ``@property`` / ``@x.setter`` decorators; they are restored here.
    """

    def __init__(self, api=None, assignee_id=None, created_at=None, group_id=None, id=None, requester_id=None, score=None, ticket_id=None, updated_at=None, url=None, **kwargs):
        self.api = api
        self.assignee_id = assignee_id
        self.created_at = created_at
        self.group_id = group_id
        self.id = id
        self.requester_id = requester_id
        self.score = score
        self.ticket_id = ticket_id
        self.updated_at = updated_at
        self.url = url
        for (key, value) in kwargs.items():
            setattr(self, key, value)
        # Attributes that are still None were never really set: clear their
        # dirty flag so they are not serialized back to the API.
        for key in self.to_dict():
            if getattr(self, key) is None:
                try:
                    self._dirty_attributes.remove(key)
                except KeyError:
                    continue

    @property
    def assignee(self):
        """Resolve ``assignee_id`` to a user via the API (or None)."""
        if self.api and self.assignee_id:
            return self.api._get_user(self.assignee_id)

    @assignee.setter
    def assignee(self, assignee):
        if assignee:
            self.assignee_id = assignee.id
            self._assignee = assignee

    @property
    def created(self):
        """``created_at`` parsed into a datetime (or None)."""
        if self.created_at:
            return dateutil.parser.parse(self.created_at)

    @created.setter
    def created(self, created):
        if created:
            self.created_at = created

    @property
    def group(self):
        """Resolve ``group_id`` to a group via the API (or None)."""
        if self.api and self.group_id:
            return self.api._get_group(self.group_id)

    @group.setter
    def group(self, group):
        if group:
            self.group_id = group.id
            self._group = group

    @property
    def requester(self):
        """Resolve ``requester_id`` to a user via the API (or None)."""
        if self.api and self.requester_id:
            return self.api._get_user(self.requester_id)

    @requester.setter
    def requester(self, requester):
        if requester:
            self.requester_id = requester.id
            self._requester = requester

    @property
    def ticket(self):
        """Resolve ``ticket_id`` to a ticket via the API (or None)."""
        if self.api and self.ticket_id:
            return self.api._get_ticket(self.ticket_id)

    @ticket.setter
    def ticket(self, ticket):
        if ticket:
            self.ticket_id = ticket.id
            self._ticket = ticket

    @property
    def updated(self):
        """``updated_at`` parsed into a datetime (or None)."""
        if self.updated_at:
            return dateutil.parser.parse(self.updated_at)

    @updated.setter
    def updated(self, updated):
        if updated:
            self.updated_at = updated
class ViewSetIntegrationTests(TestCase):
    """Drive BasicViewSet end-to-end through its as_view handlers."""

    def setUp(self):
        self.list = BasicViewSet.as_view({'get': 'list'})
        self.create = BasicViewSet.as_view({'post': 'create'})

    def test_get_succeeds(self):
        response = self.list(factory.get('/'))
        assert response.status_code == status.HTTP_200_OK
        assert response.data == {'method': 'GET'}

    def test_logged_in_get_succeeds(self):
        user = User.objects.create_user('user', '', 'password')
        request = factory.get('/')
        # Drop the concrete is_active attribute before attaching the user
        # (NOTE(review): preserved verbatim from the original test).
        del user.is_active
        request.user = user
        response = self.list(request)
        assert response.status_code == status.HTTP_200_OK
        assert response.data == {'method': 'GET'}

    def test_post_succeeds(self):
        response = self.create(factory.post('/', {'test': 'foo'}))
        assert response.status_code == status.HTTP_200_OK
        assert response.data == {'method': 'POST', 'data': {'test': ['foo']}}

    def test_options_succeeds(self):
        response = self.list(factory.options('/'))
        assert response.status_code == status.HTTP_200_OK

    def test_400_parse_error(self):
        # Malformed JSON payload must yield a 400 with a parse-error detail.
        request = factory.post('/', 'f00bar', content_type='application/json')
        response = self.create(request)
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert sanitise_json_error(response.data) == {'detail': JSON_ERROR}
def test_apply_transaction(chain, funded_address, funded_address_private_key, funded_address_initial_balance):
    """A plain value transfer debits sender, credits recipient and burns GAS_TX."""
    vm = chain.get_vm()
    # Ethereum addresses are 20 bytes (40 hex chars). The reviewed source had
    # a truncated 16-byte value; restored to the canonical test account
    # address — confirm against the project's other fixtures.
    recipient = decode_hex('0xa94f5374fce5edbc8e2a8697ce6ebf0c948b3a26')
    amount = 100
    from_ = funded_address
    tx = new_transaction(vm, from_, recipient, amount, funded_address_private_key)
    (receipt, computation) = vm.apply_transaction(vm.get_header(), tx)
    new_header = vm.add_receipt_to_header(vm.get_header(), receipt)
    assert not computation.is_error
    # Cost of the intrinsic gas for a simple transfer.
    tx_gas = tx.gas_price * constants.GAS_TX
    state = vm.state
    assert state.get_balance(from_) == ((funded_address_initial_balance - amount) - tx_gas)
    assert state.get_balance(recipient) == amount
    assert new_header.gas_used == constants.GAS_TX
@pytest.mark.xfail(raises=ImageComparisonFailure, reason='Matplotlib plots for reasons a different image size.')
def test_plot_single_point_two_matrices(): 
    """Plot a single viewpoint over two matrices and compare against the reference image."""
    outfile = NamedTemporaryFile(suffix='.png', prefix='viewpoint1', delete=False)
    # Two matrix files passed as one space-separated argument.
    matrix = (((ROOT + 'Li_et_al_2015.h5') + ' ') + ROOT) + 'Li_et_al_2015_twice.h5'
    args = '--matrix {} --region X:3000000-3500000 -rp X:3200000 --outFileName {} --dpi 300'.format(matrix, outfile.name).split()
    compute(hicPlotViewpoint.main, args, 5)
    res = compare_images((ROOT + '/hicPlotViewpoint/li_viewpoint_32Mb_twice.png'), outfile.name, tol=40)
    assert res is None, res
    os.remove(outfile.name)
class Translator(PyTranslator):
    """Parse-tree -> AST translator for the ABCD specification language.

    Each ``do_<symbol>`` method receives a parse subtree ``st`` (indexable,
    carrying ``.srow``/``.scol`` source positions and token ``.text``) and a
    ``ctx`` AST-context factory, and returns a node built from ``self.ST``.
    """
    ParseTree = ParseTree
    parser = parser
    ST = ast  # node factory used by every builder below

    def do_file_input(self, st, ctx):
        # A file holds a single ABCD spec: translate the first child.
        return self.do(st[0])

    def do_abcd_main(self, st, ctx):
        """Build an AbcdSpec from declarations, one expression, and asserts."""
        # Locate the main process expression; default to the last child.
        expr = (len(st) - 1)
        for (i, child) in enumerate(st):
            if (child.symbol == 'abcd_expr'):
                expr = i
                break
        return self.ST.AbcdSpec(lineno=st.srow, col_offset=st.scol,
                                # declarations before the expression (skip newline tokens)
                                context=[self.do(child) for child in st[:expr] if (child.kind != self.NEWLINE)],
                                body=self.do(st[expr]),
                                # assertions after the expression
                                asserts=[self.do(child) for child in st[(expr + 1):]])

    def do_abcd_prop(self, st, ctx):
        # Property wrapper: translate the wrapped child.
        return self.do(st[1])

    def do_abcd_global(self, st, ctx):
        return self.do(st[0])

    def do_abcd_spec(self, st, ctx):
        # Same as do_abcd_main, but keep a reference to the parse tree.
        tree = self.do_abcd_main(st, ctx)
        tree.st = st
        return tree

    def do_abcd_decl(self, st, ctx):
        # Same as do_abcd_global, but keep a reference to the parse tree.
        tree = self.do_abcd_global(st, ctx)
        tree.st = st
        return tree

    def do_abcd_const(self, st, ctx):
        """'const NAME = value' declaration."""
        return self.ST.AbcdConst(lineno=st.srow, col_offset=st.scol,
                                 name=st[1].text, value=self.do(st[3], ctx))

    def do_abcd_symbol(self, st, ctx):
        """'symbol a, b, ...' declaration."""
        return self.ST.AbcdSymbol(lineno=st.srow, col_offset=st.scol,
                                  symbols=self.do(st[1]))

    def do_abcd_namelist(self, st, ctx):
        # Comma-separated name list: every other child is a NAME token.
        return [child.text for child in st[::2]]

    def _do_flowop(self, st, op):
        """Left-fold the operands of ``st`` with flow-operator class ``op``."""
        nodes = [self.do(child) for child in st[::2]]
        while (len(nodes) > 1):
            left = nodes.pop(0)
            right = nodes.pop(0)
            theop = op()
            theop.st = st[1]
            flow = self.ST.AbcdFlowOp(lineno=left.lineno, col_offset=left.col_offset,
                                      left=left, op=theop, right=right)
            flow.st = st
            # Re-insert the combined node so folding continues left-to-right.
            nodes.insert(0, flow)
        return nodes[0]

    def do_abcd_expr(self, st, ctx):
        return self._do_flowop(st, self.ST.Parallel)

    def do_abcd_choice_expr(self, st, ctx):
        return self._do_flowop(st, self.ST.Choice)

    def do_abcd_iter_expr(self, st, ctx):
        return self._do_flowop(st, self.ST.Loop)

    def do_abcd_seq_expr(self, st, ctx):
        return self._do_flowop(st, self.ST.Sequence)

    def do_abcd_base_expr(self, st, ctx):
        # Parenthesized sub-expression or a bare atom.
        if (st[0].text == '('):
            return self.do(st[1])
        else:
            return self.do(st[0])

    def do_abcd_action(self, st, ctx):
        """Translate an atomic action: [accesses if guard] or True/False."""
        if (len(st) == 1):
            return self.do(st[0])
        elif (st[1].text == 'True'):
            # '[True]' — the always-enabled silent action.
            return self.ST.AbcdAction(lineno=st.srow, col_offset=st.scol,
                                      accesses=[], guard=True)
        elif (st[1].text == 'False'):
            # '[False]' — the never-enabled action.
            return self.ST.AbcdAction(lineno=st.srow, col_offset=st.scol,
                                      accesses=[], guard=False)
        elif (len(st) == 3):
            # Accesses with no explicit guard: guard defaults to name 'True'.
            return self.ST.AbcdAction(lineno=st.srow, col_offset=st.scol,
                                      accesses=self.do(st[1]),
                                      guard=self.ST.Name(lineno=st[(- 1)].srow,
                                                         col_offset=st[(- 1)].scol,
                                                         id='True', ctx=self.ST.Load()))
        else:
            return self.ST.AbcdAction(lineno=st.srow, col_offset=st.scol,
                                      accesses=self.do(st[1]), guard=self.do(st[3]))

    def do_abcd_access_list(self, st, ctx):
        return [self.do(child) for child in st[::2]]

    # Access operator token -> arc-annotation node class.
    _arc = {'+': ast.Produce, '-': ast.Consume, '?': ast.Test, '<<': ast.Fill}

    def do_abcd_access(self, st, ctx):
        """Translate one buffer/net access inside an action."""
        if (st[1].text in ('+', '?', '-')):
            # Simple produce / test / consume on a buffer.
            return self.ST.SimpleAccess(lineno=st.srow, col_offset=st.scol,
                                        buffer=st[0].text,
                                        arc=self._arc[st[1].text](),
                                        tokens=self.do(st[3]))
        elif (st[1].text == '<<'):
            # Fill: argument may be an atom, a tuple or a comprehension.
            (loop, elts, atom) = self.do(st[3], ctx)
            if (atom is not None):
                args = atom
            elif (loop is None):
                args = self.ST.Tuple(lineno=st.srow, col_offset=st.scol,
                                     elts=elts, ctx=ctx())
            else:
                args = self.ST.ListComp(lineno=st.srow, col_offset=st.scol,
                                        elt=loop, generators=elts)
            return self.ST.SimpleAccess(lineno=st.srow, col_offset=st.scol,
                                        buffer=st[0].text,
                                        arc=self._arc[st[1].text](),
                                        tokens=args)
        elif (st[1].text == '>>'):
            # Flush the whole buffer into a target name.
            return self.ST.FlushAccess(lineno=st.srow, col_offset=st.scol,
                                       buffer=st[0].text, target=st[3].text)
        elif (st[1].text == '<>'):
            # Swap buffer content.
            return self.ST.SwapAccess(lineno=st.srow, col_offset=st.scol,
                                      buffer=st[0].text, target=self.do(st[3]),
                                      tokens=self.do(st[5]))
        elif (st[2].text in ('suspend', 'resume')):
            # net.suspend(pid) / net.resume(pid) — exactly one argument.
            if (len(st) > 6):
                raise ParseError(st.text, reason=('too many arguments for %s' % st[2].text))
            if (st[2].text == 'suspend'):
                tree = self.ST.Suspend
            else:
                tree = self.ST.Resume
            return tree(lineno=st.srow, col_offset=st.scol,
                        net=st[0].text, pid=self.do(st[4]))
        elif (st[2].text in ('spawn', 'wait')):
            # net.spawn(pid, args...) / net.wait(pid, args...).
            if (len(st) > 6):
                args = [self.do(child) for child in st[6:(- 1):2]]
            else:
                args = []
            if (st[2].text == 'spawn'):
                tree = self.ST.Spawn
            else:
                tree = self.ST.Wait
            return tree(lineno=st.srow, col_offset=st.scol,
                        net=st[0].text, pid=self.do(st[4]), args=args)
        else:
            raise ParseError(st[2].text, reason=("expected 'spawn', 'wait', 'suspend' or 'resume', but found '%s'" % st[2].text))

    def do_abcd_instance(self, st, ctx):
        """Translate a net instantiation, optionally with 'asname: net(...)'."""
        if (len(st) in (3, 6)):
            # No argument list present.
            (args, keywords, starargs, kwargs) = ([], [], None, None)
        else:
            (args, keywords, starargs, kwargs) = self.do(st[(- 2)])
        if (st[1].text == ':'):
            # 'alias: Net(...)' form.
            net = st[3].text
            asname = st[0].text
        else:
            net = st[0].text
            asname = None
        return self.ST.AbcdInstance(lineno=st.srow, col_offset=st.scol,
                                    net=net, asname=asname, args=args,
                                    keywords=keywords, starargs=starargs,
                                    kwargs=kwargs)

    def do_abcd_net(self, st, ctx):
        """'net NAME(params): body' declaration."""
        params = self.do(st[2])
        return self.ST.AbcdNet(lineno=st.srow, col_offset=st.scol,
                               name=st[1].text, args=params,
                               body=self.do(st[4]))

    def do_abcd_task(self, st, ctx):
        """'task NAME(input)(output): body' declaration."""
        return self.ST.AbcdTask(lineno=st.srow, col_offset=st.scol,
                                name=st[1].text, body=self.do(st[(- 1)]),
                                input=self.do(st[2]), output=self.do(st[5]))

    def do_typelist(self, st, ctx):
        # Comma-separated types between delimiters.
        return [self.do(child) for child in st[1:(- 1):2]]

    def do_abcd_suite(self, st, ctx):
        if (len(st) == 1):
            # Single-expression suite: wrap into a context-less spec.
            return self.ST.AbcdSpec(lineno=st.srow, col_offset=st.scol,
                                    context=[], body=self.do(st[0]))
        else:
            return self.do(st[2])

    def do_abcd_buffer(self, st, ctx):
        """'buffer NAME : type = content' declaration, optionally decorated."""
        if (len(st) == 6):
            # Undecorated buffer: no capacity constraint.
            return self.ST.AbcdBuffer(lineno=st.srow, col_offset=st.scol,
                                      name=st[1].text, type=self.do(st[3]),
                                      capacity=None, content=self.do(st[(- 1)]))
        elif (len(st) == 7):
            # Decorated buffer: extract capacity from the decorators.
            deco = self.do_buffer_decorators(st[0], ctx)
            return self.ST.AbcdBuffer(lineno=st.srow, col_offset=st.scol,
                                      name=st[2].text, type=self.do(st[4]),
                                      capacity=deco['capacity'],
                                      content=self.do(st[(- 1)]))
        else:
            raise ParseError(st.text, reason='arrays not (yet) supported')

    def do_buffer_decorators(self, st, ctx):
        """Collect buffer decorators; only @capacity(min=..., max=...) is allowed."""
        deco = {}
        for child in st:
            tree = self.do(child)
            if (isinstance(tree, self.ST.Call) and (tree.func.id == 'capacity')):
                # capacity() takes keyword args only.
                if (tree.args or tree.starargs or tree.kwargs):
                    raise ParseError(child, reason='invalid parameters')
                # NOTE: 'min'/'max' deliberately shadow the builtins here.
                (min, max) = (None, None)
                for kw in tree.keywords:
                    if (kw.arg == 'min'):
                        min = kw.value
                    elif (kw.arg == 'max'):
                        max = kw.value
                    else:
                        raise ParseError(child, reason=('invalid parameter %r' % kw.arg))
                if (min or max):
                    deco['capacity'] = [min, max]
                else:
                    deco['capacity'] = None
                continue
            raise ParseError(child, reason='invalid buffer decorator')
        return deco

    def do_abcd_typedef(self, st, ctx):
        """'typedef NAME : type' declaration."""
        return self.ST.AbcdTypedef(lineno=st.srow, col_offset=st.scol,
                                   name=st[1].text, type=self.do(st[3]))

    def do_abcd_type(self, st, ctx):
        # Union of and-types, or a single child.
        if (len(st) == 1):
            return self.do(st[0])
        else:
            return self.ST.UnionType(lineno=st.srow, col_offset=st.scol,
                                     types=[self.do(child) for child in st[::2]])

    def do_abcd_and_type(self, st, ctx):
        # Intersection of cross-types, or a single child.
        if (len(st) == 1):
            return self.do(st[0])
        else:
            return self.ST.IntersectionType(lineno=st.srow, col_offset=st.scol,
                                            types=[self.do(child) for child in st[::2]])

    def do_abcd_cross_type(self, st, ctx):
        # Cross product of base types, or a single child.
        if (len(st) == 1):
            return self.do(st[0])
        else:
            return self.ST.CrossType(lineno=st.srow, col_offset=st.scol,
                                     types=[self.do(child) for child in st[::2]])

    def do_abcd_base_type(self, st, ctx):
        """Named / parenthesized / container (list, set, tuple, dict, enum) type."""
        if (len(st) == 1):
            return self.ST.NamedType(lineno=st.srow, col_offset=st.scol,
                                     name=st[0].text)
        elif (len(st) == 3):
            # Parenthesized type.
            return self.do(st[1])
        elif (st[0].text in ('list', 'set', 'tuple')):
            # Homogeneous container with exactly one item type.
            if (len(st) > 4):
                raise ParseError(st.text, reason=('too many arguments for %s type' % st[0].text))
            if (st[0].text == 'list'):
                tree = self.ST.ListType
            elif (st[0].text == 'tuple'):
                tree = self.ST.TupleType
            else:
                tree = self.ST.SetType
            return tree(lineno=st.srow, col_offset=st.scol, items=self.do(st[2]))
        elif (st[0].text == 'dict'):
            # dict takes exactly key and value types.
            if (len(st) > 6):
                raise ParseError(st.text, reason='too many arguments for dict type')
            return self.ST.DictType(lineno=st.srow, col_offset=st.scol,
                                    keys=self.do(st[2]), values=self.do(st[4]))
        elif (st[0].text == 'enum'):
            return self.ST.EnumType(lineno=st.srow, col_offset=st.scol,
                                    items=[self.do(child) for child in st[2:(- 1):2]])
        else:
            raise ParseError(st[0].text, reason=("expected 'enum', 'list', 'set' or 'dict' but found '%s'" % st[0].text))

    def do_tfpdef(self, st, ctx):
        """Typed parameter: returns (name, type-or-None)."""
        if (len(st) == 1):
            return (st[0].text, None)
        else:
            return (st[0].text, self.ST.Name(lineno=st[2].srow, col_offset=st[2].scol,
                                             id=st[2].text, ctx=ctx()))
# NOTE(review): the bare (Output(...), [Input(...)]) tuple in the reviewed
# source is a Dash callback registration that lost its decorator; restored
# here assuming the conventional app object name — confirm against the file.
@app.callback(Output('api-connections', 'children'), [Input('submit-settings-button', 'n_clicks')])
def update_api_connection_status(n_clicks):
    """Render the connection-status rows after the settings button is clicked.

    Returns None (clears the container) until the button has been pressed.
    """
    if n_clicks and (n_clicks > 0):
        return html.Div(children=[html.Div(className='row ', children=[check_oura_connection()]),
                                  html.Div(className='row', children=[check_strava_connection()]),
                                  html.Div(className='row', children=[check_withings_connection()]),
                                  html.Div(className='row', children=[check_spotify_connection()])])
# NOTE(review): the bare tuple in the reviewed source is a class-level
# mock.patch that lost its decorator; the test method signature
# (request_api_mock) confirms a patch is injected. Restored here.
@mock.patch('aea.cli.registry.login.request_api', return_value={'key': 'key'})
class RegistryLoginTestCase(TestCase):
    """Tests for the registry_login helper."""

    def test_registry_login_positive(self, request_api_mock):
        """registry_login returns the API key and calls the API exactly once."""
        result = registry_login('username', 'password')
        expected_result = 'key'
        self.assertEqual(result, expected_result)
        request_api_mock.assert_called_once()
def extractIncaroseJealousyMTL(item):
    """Map a release-feed title onto one of the known Incarose MTL series.

    Returns a release message for a matching title prefix, None for previews
    or titles without chapter/volume/fragment info, and False when no prefix
    matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    title = item['title'].lower()
    if not (chp or vol or frag) or 'preview' in title:
        return None
    # (title prefix, canonical series name, translation type)
    chp_prefixes = [
        ('konyaku haki? yoroshi. naraba, fukushuda! chapter ', 'Konyaku haki? Yoroshi. Naraba, fukushuda!', 'translated'),
        ('akuyaku reijo ttenani o sureba yoi nda kke? chapter', 'Akuyaku Reijo ttenani o Sureba Yoi nda kke?', 'translated'),
        ('mochiron, isharyoseikyu itashimasu! chapter ', 'Mochiron, Isharyoseikyu Itashimasu!', 'translated'),
        ('dare ga tame chapter ', 'Dare ga Tame', 'translated'),
        ('The Analects of Righteous Fathers Collapse', 'The Analects of Righteous Fathers Collapse', 'translated'),
    ]
    for prefix, series, tl_type in chp_prefixes:
        if title.startswith(prefix.lower()):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the leading ".slow/.skipif/.parametrize" chains are the tails
# of stripped "@pytest.mark...." decorators — confirm against the original.
.slow
.skipif((not has_hf_transformers), reason='requires huggingface transformers')
.skipif((not has_torch_compile), reason='requires torch.compile')
.parametrize('torch_device', TORCH_DEVICES)
.parametrize('with_torch_sdp', [False, True])
def test_encoder_with_torch_compile(torch_device, with_torch_sdp):
    """torch.compile'd CamemBERT encoder must match the reference HF
    transformers output for every device / SDP-attention combination."""
    assert_encoder_output_equals_hf(CamemBERTEncoder, 'explosion-testing/camembert-test', torch_device, jit_method=JITMethod.TorchCompile, with_torch_sdp=with_torch_sdp)
.unit_saas class TestAuthenticatedClient(): .object(Session, 'send') def test_client_returns_ok_response(self, send, test_authenticated_client, test_saas_request, test_config_dev_mode_disabled): test_response = Response() test_response.status_code = 200 send.return_value = test_response returned_response = test_authenticated_client.send(test_saas_request) assert (returned_response == test_response) .parametrize('ip_address', ['localhost', '127.0.0.1', '169.254.0.1', '169.254.169.254']) def test_client_denied_url(self, test_authenticated_client: AuthenticatedClient, test_saas_request, test_config_dev_mode_disabled, ip_address): test_authenticated_client.uri = f' with pytest.raises(ConnectionException): test_authenticated_client.send(test_saas_request) .object(Session, 'send') def test_client_retries_429_and_throws(self, send, test_authenticated_client, test_saas_request): test_response = Response() test_response.status_code = 429 send.return_value = test_response with pytest.raises(ClientUnsuccessfulException): test_authenticated_client.send(test_saas_request) assert (send.call_count == 4) .object(Session, 'send') def test_client_retries_429_with_success(self, send, test_authenticated_client, test_saas_request): test_response_1 = Response() test_response_1.status_code = 429 test_response_2 = Response() test_response_2.status_code = 200 send.side_effect = [test_response_1, test_response_2] returned_response = test_authenticated_client.send(test_saas_request) (returned_response == test_response_2) assert (send.call_count == 2) .object(Session, 'send') def test_client_does_not_retry_connection_error(self, send, test_authenticated_client, test_saas_request): test_side_effect_1 = ConnectionError() send.side_effect = [test_side_effect_1] with pytest.raises(ConnectionException): test_authenticated_client.send(test_saas_request) assert (send.call_count == 1) def test_client_ignores_errors(self, test_authenticated_client): assert 
test_authenticated_client._should_ignore_error(status_code=400, errors_to_ignore=True) assert (not test_authenticated_client._should_ignore_error(status_code=400, errors_to_ignore=False)) assert test_authenticated_client._should_ignore_error(status_code=400, errors_to_ignore=[400]) assert (not test_authenticated_client._should_ignore_error(status_code=400, errors_to_ignore=[401]))
def send_closure(sock, hdrlen, fmts):
    """Build a message sender bound to *sock*.

    The returned ``send_msg(msg, fmt=None, packed=False)`` forwards an
    already-packed payload untouched, packs *msg* with the struct format
    registered under *fmt* in *fmts* ('floats' splats a sequence into the
    format), or — with no format — encodes it as an ASCII header
    left-justified to *hdrlen* characters.  Every payload is echoed to
    stdout before being sent with sendall().
    """
    def _encode(payload, fmt):
        # Choose the wire encoding for a not-yet-packed payload.
        if fmt == 'floats':
            return struct.pack(fmts[fmt], *payload)
        if fmt is not None:
            return struct.pack(fmts[fmt], payload)
        return f'{payload: <{hdrlen}}'.encode('ascii')

    def send_msg(msg, fmt=None, packed=False):
        data = msg if packed else _encode(msg, fmt)
        print(f'SENDING: {data}')
        sock.sendall(data)

    return send_msg
class ValveAddPortTrafficTestCase(ValveTestBases.ValveTestNetwork):
    """Warm-reload a config that adds port p3 and verify that no flows are
    installed for p3 until the port is actually added, after which hosts on
    p2/p3 can be learned and exchange unicast traffic."""

    REQUIRE_TFM = False
    # Two tagged ports.
    CONFIG = '\ndps:\n s1:\n dp_id: 1\n hardware: Generic\n interfaces:\n p1:\n number: 1\n tagged_vlans: [0x100]\n p2:\n number: 2\n tagged_vlans: [0x100]\n'
    # Same as CONFIG plus a third tagged port p3.
    MORE_CONFIG = '\ndps:\n s1:\n dp_id: 1\n hardware: Generic\n interfaces:\n p1:\n number: 1\n tagged_vlans: [0x100]\n p2:\n number: 2\n tagged_vlans: [0x100]\n p3:\n number: 3\n tagged_vlans: [0x100]\n'

    # NOTE(review): defined without self — presumably a stripped
    # @staticmethod decorator; confirm against the original file.
    def _inport_flows(in_port, ofmsgs):
        # Flow-mods among ofmsgs that match on the given in_port.
        return [ofmsg for ofmsg in ValveTestBases.flowmods_from_flows(ofmsgs) if (ofmsg.match.get('in_port') == in_port)]

    def _learn(self, in_port):
        # Send a unicast packet in on in_port so its source is learned;
        # before learning, the packet must be punted to the controller.
        ucast_pkt = self.pkt_match(in_port, 1)
        ucast_pkt['in_port'] = in_port
        ucast_pkt['vlan_vid'] = self.V100
        table = self.network.tables[self.DP_ID]
        self.assertTrue(table.is_output(ucast_pkt, port=CONTROLLER_PORT))
        self.rcv_packet(in_port, self.V100, ucast_pkt)

    def _unicast_between(self, in_port, out_port, not_out=1):
        # Learned unicast goes only to out_port, not to not_out.
        ucast_match = self.pkt_match(in_port, out_port)
        ucast_match['in_port'] = in_port
        ucast_match['vlan_vid'] = self.V100
        table = self.network.tables[self.DP_ID]
        self.assertTrue(table.is_output(ucast_match, port=out_port))
        self.assertFalse(table.is_output(ucast_match, port=not_out))

    def setUp(self):
        # Base config has no p3, so no flows may match in_port 3.
        initial_ofmsgs = self.setup_valves(self.CONFIG)[self.DP_ID]
        self.assertFalse(self._inport_flows(3, initial_ofmsgs))

    def test_port_add_no_ofmsgs(self):
        """Warm reload alone must not install flows for the new port."""
        update_ofmsgs = self.update_config(self.MORE_CONFIG, reload_type='warm')[self.DP_ID]
        self.assertFalse(self._inport_flows(3, update_ofmsgs))

    def test_port_add_link_state(self):
        """New port reports link down until explicitly brought up."""
        self.update_config(self.MORE_CONFIG, reload_type='warm')
        self.add_port(3, link_up=False)
        self.port_expected_status(3, 0)
        self.set_port_link_up(3)
        self.port_expected_status(3, 1)

    def test_port_add_traffic(self):
        """After adding p3, hosts on p2/p3 learn and exchange unicast."""
        self.update_config(self.MORE_CONFIG, reload_type='warm')
        self.add_port(3)
        self._learn(2)
        self._learn(3)
        self._unicast_between(2, 3)
        self._unicast_between(3, 2)
class SOEFException(Exception):
    """SOEF error whose alternate constructors log the message before
    building the exception instance.

    NOTE(review): the constructors take ``cls`` with no visible decorator —
    presumably stripped @classmethod decorators; confirm against the
    original file.
    """

    def warning(cls, msg: str, logger: logging.Logger=_default_logger) -> 'SOEFException':
        # Log at WARNING level, then build the exception.
        logger.warning(msg)
        return cls(msg)

    def debug(cls, msg: str, logger: logging.Logger=_default_logger) -> 'SOEFException':
        # Log at DEBUG level, then build the exception.
        logger.debug(msg)
        return cls(msg)

    def error(cls, msg: str, logger: logging.Logger=_default_logger) -> 'SOEFException':
        # Log at ERROR level, then build the exception.
        logger.error(msg)
        return cls(msg)

    def exception(cls, msg: str, logger: logging.Logger=_default_logger) -> 'SOEFException':
        # Log with traceback, then build the exception.
        logger.exception(msg)
        return cls(msg)
def extractPopularwebnovelBlogspotCom(item):
    """Resolve a popularwebnovel.blogspot.com feed item via its tags.

    Returns None for previews or items without chapter/volume info, a
    release message for the first known tag, and False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    # tag -> (canonical name, translation type); insertion order matters.
    known_tags = {
        'PRC': ('PRC', 'translated'),
        'Loiterous': ('Loiterous', 'oel'),
    }
    for tagname, (name, tl_type) in known_tags.items():
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def extractNovelcvCom(item):
    """Parser for novelcv.com releases — currently disabled.

    Every item resolves to None (after the usual preview / missing-number
    guard).  The tag-mapping loop that used to follow the early return was
    unreachable dead code (and its tagmap was empty anyway), so it has been
    removed; to re-enable this parser, restore a tagmap loop and drop the
    trailing ``return None``.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if ((not (chp or vol)) or ('preview' in item['title'].lower())):
        return None
    # Parser deliberately disabled.
    return None
class OptionSeriesLineStates(Options):
    """Accessors for the series.line.states.* sub-option groups.

    NOTE(review): methods take only self and return sub-config objects —
    presumably stripped @property decorators; confirm against the original
    file.
    """

    def hover(self) -> 'OptionSeriesLineStatesHover':
        # Lazily-created "hover" state sub-options.
        return self._config_sub_data('hover', OptionSeriesLineStatesHover)

    def inactive(self) -> 'OptionSeriesLineStatesInactive':
        # Lazily-created "inactive" state sub-options.
        return self._config_sub_data('inactive', OptionSeriesLineStatesInactive)

    def normal(self) -> 'OptionSeriesLineStatesNormal':
        # Lazily-created "normal" state sub-options.
        return self._config_sub_data('normal', OptionSeriesLineStatesNormal)

    def select(self) -> 'OptionSeriesLineStatesSelect':
        # Lazily-created "select" state sub-options.
        return self._config_sub_data('select', OptionSeriesLineStatesSelect)
def group_elements(osm: OSMData):
    """Assign each renderable OSM node/way/area to a pattern group.

    For every element kind the label parsed from its tags is matched against
    pattern lists in a kind-specific fallback order: nodes fall back to way
    patterns, ways to node patterns, and areas to way then node patterns.
    Returns ``{'area'|'way'|'node': {element id: group}}``.
    """
    elem2group = {'area': {}, 'way': {}, 'node': {}}

    def first_group(label, pattern_sets):
        # Try each pattern list in order; None when nothing matches.
        for patterns in pattern_sets:
            group = match_to_group(label, patterns)
            if group is not None:
                return group
        return None

    # (kind, candidate elements, element filter, tag parser, fallback order)
    specs = [
        ('node', osm.nodes.values(), filter_node, parse_node, [Patterns.nodes, Patterns.ways]),
        ('way', osm.ways.values(), filter_way, parse_way, [Patterns.ways, Patterns.nodes]),
        ('area', osm.ways.values(), filter_area, parse_area, [Patterns.areas, Patterns.ways, Patterns.nodes]),
    ]
    for kind, elements, keep, parse, pattern_sets in specs:
        for elem in filter(keep, elements):
            label = parse(elem.tags)
            if label is None:
                continue
            group = first_group(label, pattern_sets)
            if group is not None:
                elem2group[kind][elem.id_] = group
    return elem2group
class CraftNet(nn.Module):
    """CRAFT text-detection network: VGG16-BN backbone plus a U-Net-style
    decoder producing a 2-channel score map and the last decoder feature."""

    def __init__(self, pretrained=False, freeze=False):
        super(CraftNet, self).__init__()
        # Backbone returns a list of feature maps used as skip connections.
        self.basenet = vgg16_bn(pretrained, freeze)
        # Decoder stages; each double_conv fuses a concatenated skip.
        self.upconv1 = double_conv(1024, 512, 256)
        self.upconv2 = double_conv(512, 256, 128)
        self.upconv3 = double_conv(256, 128, 64)
        self.upconv4 = double_conv(128, 64, 32)
        num_class = 2
        # Classification head: 32 -> 2 channels (kernel sizes 3,3,3,1,1).
        self.conv_cls = nn.Sequential(nn.Conv2d(32, 32, kernel_size=3, padding=1), nn.ReLU(inplace=True), nn.Conv2d(32, 32, kernel_size=3, padding=1), nn.ReLU(inplace=True), nn.Conv2d(32, 16, kernel_size=3, padding=1), nn.ReLU(inplace=True), nn.Conv2d(16, 16, kernel_size=1), nn.ReLU(inplace=True), nn.Conv2d(16, num_class, kernel_size=1))
        init_weights(self.upconv1.modules())
        init_weights(self.upconv2.modules())
        init_weights(self.upconv3.modules())
        init_weights(self.upconv4.modules())
        init_weights(self.conv_cls.modules())

    def forward(self, x):
        """Return (score map permuted to NHWC, 32-channel feature map)."""
        sources = self.basenet(x)
        # Fuse the first two backbone outputs, then progressively upsample
        # to each remaining source's spatial size and fuse it.
        y = torch.cat([sources[0], sources[1]], dim=1)
        y = self.upconv1(y)
        y = F.interpolate(y, size=sources[2].size()[2:], mode='bilinear', align_corners=False)
        y = torch.cat([y, sources[2]], dim=1)
        y = self.upconv2(y)
        y = F.interpolate(y, size=sources[3].size()[2:], mode='bilinear', align_corners=False)
        y = torch.cat([y, sources[3]], dim=1)
        y = self.upconv3(y)
        y = F.interpolate(y, size=sources[4].size()[2:], mode='bilinear', align_corners=False)
        y = torch.cat([y, sources[4]], dim=1)
        feature = self.upconv4(y)
        y = self.conv_cls(feature)
        return (y.permute(0, 2, 3, 1), feature)
class SpectralNoiseFFT(SpectralNoise):
    """Spectral noise realization backed by Fourier coefficients ``C``."""

    def __init__(self, factory, C):
        self.factory = factory
        # Fourier coefficients; modified in place by shift().
        self.C = C
        self.coords = C.grid.separated_coords

    def shift(self, shift):
        """Apply a spatial shift as a phase ramp on the coefficients.

        NOTE(review): combines per-axis phase ramps via
        np.add.reduce(np.ix_(*S)); this relies on broadcasting the open-grid
        arrays into a full N-D phase array — confirm shapes against C.grid.
        """
        S = [(shift[i] * self.coords[i]) for i in range(len(self.coords))]
        S = np.add.reduce(np.ix_(*S))
        # exp(-i * phase), flattened to match the coefficient layout.
        self.C *= np.exp(((- 1j) * S.ravel()))

    def __call__(self):
        # Realize the noise: inverse transform, keep the real part.
        return self.factory.fourier.backward(self.C).real
def create_annotation_table():
    """Build the Dash DataTable of athlete annotations (date + text),
    newest first, styled to blend into the dark layout.

    NOTE(review): the query selects from `annotations` but filters on
    athlete.athlete_id == 1 — a hard-coded athlete, and possibly the wrong
    table's column; confirm intent upstream.
    """
    df_annotations = pd.read_sql(sql=app.session.query(annotations.athlete_id, annotations.date, annotations.annotation).filter((athlete.athlete_id == 1)).statement, con=engine).sort_index(ascending=False)
    # Release the scoped session before building UI components.
    app.session.remove()
    return dash_table.DataTable(
        id='annotation-table',
        columns=[{'name': x, 'id': y} for (x, y) in zip(['Date', 'Annotation'], ['date', 'annotation'])],
        data=df_annotations[['date', 'annotation']].sort_index(ascending=False).to_dict('records'),
        style_as_list_view=True,
        fixed_rows={'headers': True, 'data': 0},
        style_table={'height': '100%'},
        style_header={'backgroundColor': 'rgba(0,0,0,0)', 'borderBottom': '1px solid rgb(220, 220, 220)', 'borderTop': '0px', 'textAlign': 'left', 'fontWeight': 'bold', 'fontFamily': '"Open Sans", "HelveticaNeue", "Helvetica Neue", Helvetica, Arial, sans-serif'},
        style_cell={'backgroundColor': 'rgba(0,0,0,0)', 'borderBottom': '1px solid rgb(73, 73, 73)', 'textAlign': 'center', 'fontFamily': '"Open Sans", "HelveticaNeue", "Helvetica Neue", Helvetica, Arial, sans-serif'},
        # Hidden bookkeeping column.
        style_cell_conditional=[{'if': {'column_id': 'activity_id'}, 'display': 'none'}],
        filter_action='none',
        editable=True,
        row_deletable=True,
        page_action='none')
# NOTE(review): these two leading calls are presumably stripped
# "@_set_stats_type(...)" / "@_set_msg_type(...)" class decorators — confirm
# against the original file.
_set_stats_type(ofproto.OFPMP_QUEUE_STATS, OFPQueueStats)
_set_msg_type(ofproto.OFPT_MULTIPART_REQUEST)
class OFPQueueStatsRequest(OFPMultipartRequest):
    """OpenFlow queue-statistics multipart request, filtered by port/queue."""

    def __init__(self, datapath, flags=0, port_no=ofproto.OFPP_ANY, queue_id=ofproto.OFPQ_ALL, type_=None):
        # type_ is accepted for API uniformity but not used here.
        super(OFPQueueStatsRequest, self).__init__(datapath, flags)
        self.port_no = port_no
        self.queue_id = queue_id

    def _serialize_stats_body(self):
        # Pack port_no/queue_id immediately after the multipart header.
        msg_pack_into(ofproto.OFP_QUEUE_STATS_REQUEST_PACK_STR, self.buf, ofproto.OFP_MULTIPART_REQUEST_SIZE, self.port_no, self.queue_id)
def extractCafeno20WordpressCom(item):
    """Tag-based release extractor for cafeno20.wordpress.com.

    None for previews or titles without chapter/volume info; a release
    message for the first matching tag; False otherwise.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    has_number = chp or vol
    if (not has_number) or ('preview' in item['title'].lower()):
        return None
    tagmap = [('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel')]
    # First (name, type) whose tag appears on the item, else None.
    match = next(((name, tl_type) for tagname, name, tl_type in tagmap if tagname in item['tags']), None)
    if match is None:
        return False
    name, tl_type = match
    return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
class OptionSeriesTreemapSonificationDefaultinstrumentoptionsMappingPlaydelay(Options):
    """playDelay mapping options (mapFunction/mapTo/max/min/within).

    NOTE(review): each name appears twice (getter then setter) — presumably
    stripped @property / @<name>.setter decorators; confirm against the
    original file.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class PackFile(object):
    """Wrapper around a directory expected to contain a ``pack.py`` script."""

    def __init__(self, path):
        # Normalise to an absolute path once, up front.
        self.path = os.path.abspath(path)

    def get_file(self):
        """Absolute path of the pack.py inside this directory."""
        return os.path.join(self.path, 'pack.py')

    def needs_model(self):
        """True when the last '.pack(' call in pack.py takes a real model.

        The decision is based on the final line containing '.pack(': False
        when there is no such line or it mentions None, True otherwise.
        """
        last_pack_line = None
        with open(self.get_file(), 'r') as handle:
            for raw in handle:
                if '.pack(' in raw:
                    last_pack_line = raw
        return (last_pack_line is not None) and ('None' not in last_pack_line)

    def check(self):
        """Validation hook; always passes."""
        return True
def gtest():
    """Manual smoke test: create a GCE instance through the VPS herder and
    configure it as client 'test-5'."""
    herder = VpsHerder(debug=True)
    clientname = 'test-5'
    (provider, kwargs) = herder.generate_gce_conf()
    herder.log.info('Creating instance...')
    herder.log.info("\tClient name: '%s'", clientname)
    herder.log.info("\tusing provider: '%s'", provider)
    herder.log.info("\tkwargs: '%s'", kwargs)
    ret = herder.cc.create(names=[clientname], provider=provider, **kwargs)
    print('Create response:', ret)
    # Post-create provisioning of the new client.
    herder.configure_client(clientname, 0, provider=provider, provider_kwargs=kwargs)
    herder.log.info('Instance created!')
def test_prism_layer_no_regular_grid(dummy_layer):
    """prism_layer must reject coordinate arrays that do not form a regular
    grid, for both the easting and the northing coordinate."""
    ((easting, northing), surface, reference, _) = dummy_layer
    # Corrupt one easting value on a copy; the original stays intact.
    easting_invalid = easting.copy()
    easting_invalid[3] = (- 22)
    with pytest.raises(ValueError):
        prism_layer((easting_invalid, northing), surface, reference)
    # Same for northing.
    northing_invalid = northing.copy()
    northing_invalid[3] = (- 22)
    # BUG FIX: a stray "northing[3] = 12.98" mutated the shared fixture
    # array in place, defeating the copy pattern used above; removed.
    with pytest.raises(ValueError):
        prism_layer((easting, northing_invalid), surface, reference)
class OptionSeriesBulletSonificationDefaultinstrumentoptionsMappingLowpassFrequency(Options):
    """Lowpass-frequency mapping options (mapFunction/mapTo/max/min/within).

    NOTE(review): each name appears twice (getter then setter) — presumably
    stripped @property / @<name>.setter decorators; confirm against the
    original file.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
class RuntimeProcessorType(Enum):
    """Supported pipeline runtime processors.

    NOTE(review): the lookup helpers take no self/cls — presumably stripped
    @staticmethod decorators; they are accessed via the class either way.
    """
    LOCAL = 'Local'
    KUBEFLOW_PIPELINES = 'Kubeflow Pipelines'
    APACHE_AIRFLOW = 'Apache Airflow'
    ARGO = 'Argo'

    def get_instance_by_name(name: str) -> 'RuntimeProcessorType':
        """Case-insensitive lookup by member name; raises KeyError."""
        return RuntimeProcessorType.__members__[name.upper()]

    def get_instance_by_value(value: str) -> 'RuntimeProcessorType':
        """Lookup by display value; raises KeyError on an unknown value."""
        found = next((member for member in RuntimeProcessorType.__members__.values() if member.value == value), None)
        if found is None:
            raise KeyError(f"'{value}'")
        return found
def read_segbits(segbits_file):
    """Read a segbits file and return each bit name with its leading
    dotted field stripped; lines with fewer than two columns are skipped."""
    segbits = []
    with OpenSafeFile(segbits_file, 'r') as fp:
        for raw in fp.readlines():
            columns = raw.split()
            if len(columns) <= 1:
                continue
            # Everything after the first '.' of the bit name ('' if none).
            _, _, stripped = columns[0].partition('.')
            segbits.append(stripped)
    return segbits
def is_meet_conditions(args, conditions, threshold=1e-08):
    """Return True when *args* satisfies at least one column of *conditions*.

    *conditions* maps names to equal-length lists of candidate values; the
    lists are read column-wise, and a column matches when every named entry
    equals args[name] (numeric values also match within *threshold*; a name
    missing from args compares against None).  ``conditions is None`` always
    matches; any comparison failure yields False.
    """
    if (conditions is None):
        return True
    condition_names = list(conditions.keys())
    condition_values = list(conditions.values())
    # Every value list must have the same length (one entry per column).
    assert (_is_same([len(values) for values in condition_values]) is True)
    num_condition = len(condition_values)
    num_condition_value = len(condition_values[0])
    # Transpose rows -> columns.
    condition_values = [[condition_values[ind_cond][ind_value] for ind_cond in range(num_condition)] for ind_value in range(num_condition_value)]
    g_flag = False
    try:
        for cond_values in condition_values:
            l_flag = True
            for (ind, cond_value) in enumerate(cond_values):
                _cond = (cond_value == (args[condition_names[ind]] if (condition_names[ind] in args) else None))
                if isinstance(cond_value, numbers.Number):
                    # Tolerance comparison; raises (caught below) when args
                    # lacks the key or holds a non-numeric value.
                    _cond = (_cond or (abs((cond_value - args[condition_names[ind]])) <= threshold))
                l_flag = (l_flag and _cond)
            g_flag = (g_flag or l_flag)
        return g_flag
    except Exception:
        # BUG FIX: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt.  Any comparison failure means "no match".
        return False
# NOTE(review): "(scope='function')" is the argument list of a stripped
# decorator (presumably @pytest.fixture) — confirm against the original file.
(scope='function')
def privacy_preference_history_for_vendor_legitimate_interests(db, provided_identity_and_consent_request, privacy_experience_france_overlay, fides_user_provided_identity, served_notice_history_for_vendor_legitimate_interests):
    """Yield a saved opt-out PrivacyPreferenceHistory for vendor legitimate
    interests (gvl.42) on the French TCF overlay; the record is deleted on
    teardown."""
    preference_history_record = PrivacyPreferenceHistory.create(db=db, data={'anonymized_ip_address': '92.158.1.0', 'email': '', 'method': 'button', 'vendor_legitimate_interests': 'gvl.42', 'privacy_experience_config_history_id': None, 'privacy_experience_id': privacy_experience_france_overlay.id, 'preference': 'opt_out', 'fides_user_device_provided_identity_id': fides_user_provided_identity.id, 'request_origin': 'tcf_overlay', 'user_agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/324.42 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/425.24', 'user_geography': 'fr_idg', 'url_recorded': 'example.com/', 'served_notice_history_id': served_notice_history_for_vendor_legitimate_interests.id}, check_name=False)
    (yield preference_history_record)
    # Teardown: remove the record so other tests see a clean table.
    preference_history_record.delete(db)
# NOTE(review): the two leading tuples are argument lists of stripped
# @pytest.mark.parametrize decorators — confirm against the original file.
('overrides, expected_files', [([], {'.hydra'}), (['hydra.output_subdir=foo'], {'foo'}), (['hydra.output_subdir=null'], set())])
('calling_file, calling_module', [('tests/test_apps/app_with_cfg/my_app.py', None), (None, 'tests.test_apps.app_with_cfg.my_app')])
def test_hydra_output_dir(hydra_restore_singletons: Any, hydra_task_runner: TTaskRunner, calling_file: str, calling_module: str, overrides: List[str], expected_files: Set[str]) -> None:
    """Hydra must create exactly the configured output subdirectory:
    the default .hydra, a custom name, or none when set to null."""
    with hydra_task_runner(calling_file=calling_file, calling_module=calling_module, config_path=None, config_name=None, overrides=overrides) as task:
        assert (task.temp_dir is not None)
        path = Path(task.temp_dir)
        # Strip the temp-dir prefix (plus separator) from each entry.
        files = {str(x)[(len(task.temp_dir) + 1):] for x in path.iterdir()}
        assert (files == expected_files)
def test_param_iter():
    """param_iter yields successive parameter lists, advancing exactly one
    embedded Stepper per iteration; more than one Stepper raises ValueError."""
    # Stepper in the first position of a flat definition.
    step = Stepper(8, 12, 0.1)
    osc = [step, 0.5, 0.5]
    iter_1 = param_iter(osc)
    for (ind, val) in enumerate(iter_1):
        assert (val == [(8 + (0.1 * ind)), 0.5, 0.5])
    # Stepper in a later position.
    step = Stepper(0.25, 3, 0.25)
    ap_params = [0, step]
    iter_1 = param_iter(ap_params)
    for (ind, val) in enumerate(iter_1):
        assert (val == [0, (0.25 + (0.25 * ind))])
    # Longer flat definition with fixed trailing values.
    step = Stepper(8, 12, 0.1)
    oscs = [step, 0.5, 0.5, 10, 0.25, 1]
    iter_1 = param_iter(oscs)
    for (ind, val) in enumerate(iter_1):
        assert (val == [(8 + (0.1 * ind)), 0.5, 0.5, 10, 0.25, 1])
    # Nested definitions are flattened in order.
    step = Stepper(8, 12, 0.1)
    osc_1 = [1, 2, 3]
    osc_2 = [4, 5, 6]
    osc_3 = [7, 8, step]
    oscs = [osc_1, osc_2, osc_3]
    iter_2 = param_iter(oscs)
    for (ind, val) in enumerate(iter_2):
        assert (val == [1, 2, 3, 4, 5, 6, 7, 8, (8 + (0.1 * ind))])
    # Multiple Steppers in a single definition are rejected.
    step = Stepper(8, 12, 0.1)
    with raises(ValueError):
        for params in param_iter([[step, step, step]]):
            continue
def test_inforec_encoding(tmpdir, merge_lis_prs):
    """Non-ASCII LIS information-record strings decode as koi8_r once that
    encoding is registered, and warn / return raw bytes without it."""
    fpath = os.path.join(str(tmpdir), 'encoded-inforec.dlis')
    content = ['data/lis/records/RHLR-1.lis.part', 'data/lis/records/THLR-1.lis.part', 'data/lis/records/FHLR-1.lis.part', 'data/lis/records/inforec-encoded.lis.part', 'data/lis/records/FTLR-1.lis.part', 'data/lis/records/TTLR-1.lis.part', 'data/lis/records/RTLR-1.lis.part']
    merge_lis_prs(fpath, content)
    # Save and clear the global encodings so this test controls decoding.
    prev_encodings = dlisio.common.get_encodings()
    dlisio.common.set_encodings([])
    try:
        (f,) = lis.load(fpath)
        wellsite = f.wellsite_data()[0]
        # Without a registered encoding, raw bytes come back with a warning.
        with pytest.warns(UnicodeWarning):
            assert (wellsite.table_name() == b'\xeb\xef\xe98')
        dlisio.common.set_encodings(['koi8_r'])
        wellsite = f.wellsite_data()[0]
        components = wellsite.components()
        assert (components[0].mnemonic == '')
        assert (components[0].units == ' ')
        assert (components[0].component == '8')
        assert (components[1].mnemonic == '')
        assert (components[1].units == '')
        assert (components[1].component == '')
        table = wellsite.table(simple=True)
        mnem = np.array([''], dtype='O')
        np.testing.assert_array_equal(table[''], mnem)
    finally:
        # Always restore the global encoding state.
        # NOTE(review): if lis.load raises, `f` is unbound and f.close()
        # below would raise NameError — confirm teardown ordering upstream.
        dlisio.common.set_encodings(prev_encodings)
        f.close()
def test_get_file_tree_data(frontend_db, backend_db):
    """get_file_tree_data returns one row per UID carrying name/size/VFP,
    the mime from the file_type analysis (None when missing or failed) and
    the directly included files."""
    (fw, parent_fo, child_fo) = create_fw_with_parent_and_child()
    # fw's file_type analysis failed, parent has a mime, child has none.
    fw.processed_analysis = {'file_type': generate_analysis_entry(analysis_result={'failed': 'some error'})}
    parent_fo.processed_analysis = {'file_type': generate_analysis_entry(analysis_result={'mime': 'foo_type'})}
    child_fo.processed_analysis = {}
    backend_db.insert_multiple_objects(fw, parent_fo, child_fo)
    result = frontend_db.get_file_tree_data([fw.uid, parent_fo.uid, child_fo.uid])
    assert (len(result) == 3)
    result_by_uid = {r.uid: r for r in result}
    assert (result_by_uid[parent_fo.uid].uid == parent_fo.uid)
    assert (result_by_uid[parent_fo.uid].file_name == parent_fo.file_name)
    assert (result_by_uid[parent_fo.uid].size == parent_fo.size)
    assert (result_by_uid[parent_fo.uid].virtual_file_path == parent_fo.virtual_file_path)
    # Mime resolution per object.
    assert (result_by_uid[fw.uid].mime is None)
    assert (result_by_uid[parent_fo.uid].mime == 'foo_type')
    assert (result_by_uid[child_fo.uid].mime is None)
    # Inclusion hierarchy: fw -> parent -> child.
    assert (result_by_uid[fw.uid].included_files == [parent_fo.uid])
    assert (result_by_uid[parent_fo.uid].included_files == [child_fo.uid])
class OptionPlotoptionsGaugeSonificationContexttracks(Options):
    """Context-track options for gauge-series sonification.

    NOTE(review): getter/setter pairs share a name, and sub-option methods
    take only self — presumably stripped @property / @<name>.setter
    decorators; confirm against the original file.
    """

    def activeWhen(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksActivewhen':
        return self._config_sub_data('activeWhen', OptionPlotoptionsGaugeSonificationContexttracksActivewhen)

    def instrument(self):
        # Defaults to 'piano'.
        return self._config_get('piano')

    def instrument(self, text: str):
        self._config(text, js_type=False)

    def mapping(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksMapping':
        return self._config_sub_data('mapping', OptionPlotoptionsGaugeSonificationContexttracksMapping)

    def midiName(self):
        return self._config_get(None)

    def midiName(self, text: str):
        self._config(text, js_type=False)

    def pointGrouping(self) -> 'OptionPlotoptionsGaugeSonificationContexttracksPointgrouping':
        return self._config_sub_data('pointGrouping', OptionPlotoptionsGaugeSonificationContexttracksPointgrouping)

    def roundToMusicalNotes(self):
        return self._config_get(True)

    def roundToMusicalNotes(self, flag: bool):
        self._config(flag, js_type=False)

    def showPlayMarker(self):
        return self._config_get(True)

    def showPlayMarker(self, flag: bool):
        self._config(flag, js_type=False)

    def timeInterval(self):
        return self._config_get(None)

    def timeInterval(self, num: float):
        self._config(num, js_type=False)

    def type(self):
        # Defaults to 'instrument'.
        return self._config_get('instrument')

    def type(self, text: str):
        self._config(text, js_type=False)

    def valueInterval(self):
        return self._config_get(None)

    def valueInterval(self, num: float):
        self._config(num, js_type=False)

    def valueMapFunction(self):
        return self._config_get('linear')

    def valueMapFunction(self, value: Any):
        self._config(value, js_type=False)

    def valueProp(self):
        return self._config_get('"x"')

    def valueProp(self, text: str):
        self._config(text, js_type=False)
def debatch_actor_ids(actor_ids: List[ActorID]) -> List[ActorID]:
    """Collapse batched (tensor-valued) ActorIDs into scalar ones, in place.

    Each tensor-valued id must be homogeneous (one repeated agent/step
    value); plain int/str ids are left untouched; anything else raises
    NotImplementedError.  Returns the same list object with entries
    rewritten.
    """
    for idx, actor_id_tmp in enumerate(actor_ids):
        agent = actor_id_tmp.agent_id
        step = actor_id_tmp.step_key
        if isinstance(agent, torch.Tensor) and isinstance(step, torch.Tensor):
            # A batched id must carry a single unique agent/step value.
            assert len(set(agent.tolist())) == 1, actor_id_tmp.agent_id
            assert len(set(step.tolist())) == 1, actor_id_tmp.step_key
            actor_ids[idx] = ActorID(step_key=step[0].item(), agent_id=agent[0].item())
        elif isinstance(agent, int) and isinstance(step, (int, str)):
            # Already scalar — nothing to do.
            pass
        else:
            raise NotImplementedError(f'Not implemented batched actor id type found: {type(actor_id_tmp.agent_id)}')
    return actor_ids
class TSweepRunner(Protocol):
    """Callable test-fixture protocol that runs a Hydra sweep and exposes
    the per-job returns of each batch."""

    # One list of JobReturn objects per sweep batch.
    returns: List[List[JobReturn]]

    def __call__(self, calling_file: Optional[str], calling_module: Optional[str], task_function: Optional[TaskFunction], config_path: Optional[str], config_name: Optional[str], overrides: Optional[List[str]], temp_dir: Optional[Path]=None) -> SweepTaskFunction:
        ...
class GitChangeEntry():
    """One parsed name-status line of git output.

    NOTE(review): annotated fields without an __init__, plus cls/self
    methods with no visible decorators — presumably a stripped @dataclass
    with @classmethod (from_line) and @property (path); confirm against the
    original file.
    """
    # e.g. 'A', 'D', 'M', or 'R...' for renames
    status: str
    original_path: Path
    new_path: Optional[Path] = None

    def from_line(cls, text: str) -> 'GitChangeEntry':
        # "<status>\t<path>[\t<new path>]"
        columns = text.split('\t')
        assert (2 <= len(columns) <= 3)
        columns[1:] = [Path(c) for c in columns[1:]]
        return cls(*columns)

    def path(self) -> Path:
        # Where the file lives now (post-rename when applicable).
        return (self.new_path or self.original_path)

    def revert(self, dry_run=False):
        """Undo this change in the working tree via `git restore`, echoing
        each command; commands are not executed when dry_run is set."""
        def git(*args):
            command_line = (['git'] + [str(arg) for arg in args])
            click.echo(subprocess.list2cmdline(command_line))
            if (not dry_run):
                subprocess.check_call(command_line)
        if self.status.startswith('R'):
            # A rename reverts as an add of the new path plus a delete of
            # the original path.
            GitChangeEntry('A', self.new_path).revert(dry_run=dry_run)
            GitChangeEntry('D', self.original_path).revert(dry_run=dry_run)
            return
        git('restore', '--staged', self.original_path)

    def read(self, git_tree='HEAD') -> bytes:
        """File contents; deletions are read back from the given tree."""
        if (self.status == 'D'):
            return subprocess.check_output(['git', 'show', f'{git_tree}:{self.path}'])
        return self.path.read_bytes()
# NOTE(review): the leading ".parametrize" chain is the tail of a stripped
# "@pytest.mark.parametrize" decorator — confirm against the original file.
.parametrize('lineno,context,expected', [(10, 5, (['5', '6', '7', '8', '9'], '10', ['11', '12', '13', '14', '15'])), (1, 5, ([], '1', ['2', '3', '4', '5', '6'])), (2, 5, (['1'], '2', ['3', '4', '5', '6', '7'])), (20, 5, (['15', '16', '17', '18', '19'], '20', [])), (19, 5, (['14', '15', '16', '17', '18'], '19', ['20'])), (1, 0, ([], '1', [])), (21, 0, (None, None, None))])
def test_get_lines_from_file(lineno, context, expected):
    """get_lines_from_file returns (before, line, after) context windows,
    truncated at file boundaries and (None, None, None) past the end."""
    # Clear the memoization cache so each case reads fresh.
    stacks.get_lines_from_file.cache_clear()
    fname = os.path.join(os.path.dirname(__file__), 'linenos.py')
    result = stacks.get_lines_from_file(fname, lineno, context)
    assert (result == expected)
class ChartTreeMap(Chart):
    """Chart.js treemap wrapper (requires the chartjs-chart-treemap plugin)."""

    requirements = ('chart.js', 'chartjs-chart-treemap')
    _chart__type = 'treemap'
    _option_cls = OptChartJs.OptionsTreeMap
    builder_name = 'ChartTreeMap'

    # NOTE(review): takes only self and returns super().options — presumably
    # a stripped @property decorator; confirm against the original file.
    def options(self) -> OptChartJs.OptionsTreeMap:
        return super().options

    def add_dataset(self, tree: List[dict], label: str, colors: List[str]=None, **kwargs) -> JsChartJs.DataSetTreeMap:
        """Create a treemap dataset from *tree* and register it on the chart."""
        if ('kind' not in kwargs):
            kwargs['kind'] = 'data'
        data = self.new_dataset(len(self._datasets), tree, label, colors, **kwargs)
        self._datasets.append(data)
        return data

    def new_dataset(self, index: int, data, label: str, colors: List[str]=None, kind: str=None, **kwargs) -> JsChartJs.DataSetTreeMap:
        """Build (without registering) a DataSetTreeMap.

        *kind* selects the payload attribute ('data' or 'tree'); extra
        kwargs are copied onto the dataset, with dict values merged into
        attribute sub-objects when those expose ``_attrs``.
        """
        data = JsChartJs.DataSetTreeMap(self.page, attrs={kind: data})
        if (kind == 'tree'):
            data.key = label
        else:
            data.label = label
        if (colors is None):
            # Default palette entry for this dataset index.
            data.backgroundColor = self.options.colors[index]
            data.borderColor = self.options.colors[index]
        for (k, v) in kwargs.items():
            if hasattr(data, k):
                data_attr = getattr(data, k)
                if (isinstance(v, dict) and hasattr(data_attr, '_attrs')):
                    for (w, x) in v.items():
                        setattr(data_attr, w, x)
                else:
                    setattr(data, k, v)
            else:
                data._attrs[k] = v
        return data

    def backgrounds(self, colors: Dict[(str, str)]):
        """Install a JS backgroundColor callback mapping group names to
        colors, with alpha shading derived from each node's value."""
        self.options.commons['backgroundColorMaps'] = colors
        self.dataset().custom('backgroundColor', JsUtils.jsWrap(('function(ctx){var item = ctx.dataset.data[ctx.dataIndex];\nif (item){\n var a = item.v / (item.gs || item.s) / 2 + 0.5;\n var colorsMaps = %s; if(colorsMaps[item.g]){return colorsMaps[item.g]}\n if(item.l === 0){return Chart.helpers.color("%s").alpha(a).rgbString()}\n if(item.l === 1){return Chart.helpers.color("white").alpha(0.3).rgbString()}\n else{return Chart.helpers.color("%s").alpha(a).rgbString()}}\n}' % (colors, self.options['commons']['colors']['light'], self.options['commons']['colors']['base']))))
class OptionPlotoptionsAreaSonificationTracksPointgrouping(Options):
    """Point-grouping options for area-series sonification tracks.

    NOTE(review): getter/setter pairs share a name — presumably stripped
    @property / @<name>.setter decorators; confirm against the original.
    """

    def algorithm(self):
        # Defaults to 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Defaults to 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Defaults to 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
def test_colors_whole_table_only_bg_colors(data, header, footer, bg_colors):
    """Rendering a table with per-column background colors emits the exact
    ANSI escape sequences when the terminal supports them, and plain text
    otherwise.  The expected strings must stay byte-exact."""
    result = table(data, header=header, footer=footer, divider=True, bg_colors=bg_colors)
    if SUPPORTS_ANSI:
        assert (result == '\n\x1b[48;5;2mCOL A \x1b[0m \x1b[48;5;23mCOL B\x1b[0m COL 3 \n\x1b[48;5;2m\x1b[0m \x1b[48;5;23m-----\x1b[0m \n\x1b[48;5;2mHello \x1b[0m \x1b[48;5;23mWorld\x1b[0m \n\x1b[48;5;2mThis is a test\x1b[0m \x1b[48;5;23mWorld\x1b[0m 1234 \n\x1b[48;5;2m\x1b[0m \x1b[48;5;23m-----\x1b[0m \n\x1b[48;5;2m \x1b[0m \x1b[48;5;23m \x1b[0m 2030203.00\n')
    else:
        assert (result == '\nCOL A COL B COL 3 \n ----- \nHello World \nThis is a test World 1234 \n ----- \n 2030203.00\n')
def setNoZeroLSweakDirichletBCs(RDLSvt):
    """Disable level-set freezing on a redistancing model.

    Verifies that the model exposes the attributes the weak-Dirichlet
    machinery relies on, then clears the freezeLevelSet flag.
    """
    for attr in ('freezeLevelSet', 'u_dof_last', 'weakDirichletConditionFlags'):
        assert hasattr(RDLSvt, attr)
    assert hasattr(RDLSvt.coefficients, 'epsFact')
    assert hasattr(RDLSvt, 'dofFlag_element')
    # 0 = do not freeze the level set.
    RDLSvt.freezeLevelSet = 0
class CompiledTask(_common.FlyteIdlEntity):
    """Wrapper around a compiled task protobuf.

    NOTE(review): template/from_flyte_idl look like stripped @property /
    @classmethod decorators; also from_flyte_idl discards ``p`` and returns
    cls(None) — presumably a deliberate stub, confirm upstream.
    """

    def __init__(self, template):
        self._template = template

    def template(self):
        # Underlying task template object.
        return self._template

    def to_flyte_idl(self):
        # Serialize to the compiler protobuf message.
        return _compiler_pb2.CompiledTask(template=self.template)

    def from_flyte_idl(cls, p):
        return cls(None)
def inv_hue_quadrature(H: float) -> float:
    """Invert the hue quadrature: map H (0-400 scale) back to a hue angle,
    interpolating between the bracketing unique hues from HUE_QUADRATURE."""
    # Wrap H into [0, 400), robust to negative inputs.
    Hp = ((H % 400) + 400) % 400
    quadrant = math.floor(0.01 * Hp)
    Hp = Hp % 100
    hi, hii = HUE_QUADRATURE['h'][quadrant:quadrant + 2]
    ei, eii = HUE_QUADRATURE['e'][quadrant:quadrant + 2]
    # Inverse interpolation formula between the two unique hues.
    numerator = (Hp * ((eii * hi) - (ei * hii))) - ((100 * hi) * eii)
    denominator = (Hp * (eii - ei)) - (100 * eii)
    return util.constrain_hue(numerator / denominator)
class HttpError(FaunaError):
    """FaunaDB error raised for HTTP-level failures; parses the response's
    "errors" array into ErrorData entries.

    NOTE(review): _get_errors takes no self and is called via the class —
    presumably a stripped @staticmethod; confirm against the original file.
    """

    def __init__(self, request_result):
        self.errors = HttpError._get_errors(request_result)
        super(HttpError, self).__init__(self._get_description(), request_result)

    def _get_errors(request_result):
        response = request_result.response_content
        # Raises when the payload carries no "errors" key.
        errors = _get_or_raise(request_result, response, 'errors')
        return [ErrorData.from_dict(error, request_result) for error in errors]

    def __str__(self):
        return repr(self.errors[0])

    def _get_description(self):
        # First error's description, or a placeholder for an empty list.
        return (self.errors[0].description if self.errors else '(empty `errors`)')
class Colvar(metaclass=abc.ABCMeta):
    """Abstract collective variable over (N, 3) coordinates.

    Subclasses provide value(); an analytic ``_gradient`` method may be
    defined, otherwise (or when force_agrad is set) one is generated with
    autograd at construction time.

    NOTE(review): value() is likely meant to be abstract (a stripped
    @abstractmethod?) — confirm against the original file.
    """

    def __init__(self, force_agrad=False):
        # Fall back to autograd when no analytic _gradient exists.
        try:
            getattr(self, '_gradient')
        except AttributeError:
            force_agrad = True
        if force_agrad:
            grad_func = autograd.grad(self.value)

            def wrapped(self, c3d):
                return grad_func(c3d)
            # Bind the generated gradient as an instance method.
            self._gradient = types.MethodType(wrapped, self)
        # True when the gradient is autograd-generated.
        self.agrad = force_agrad

    def value(self, c3d):
        """Scalar value of the CV for coordinates c3d of shape (N, 3)."""
        pass

    def gradient(self, c3d):
        """Gradient of the CV w.r.t. c3d (analytic or autograd)."""
        return self._gradient(c3d)

    def eval(self, coords):
        """Value and gradient for a flat coordinate array."""
        c3d = coords.reshape((- 1), 3)
        return (self.value(c3d), self.gradient(c3d))

    def _wilson_gradient(self, func, c3d):
        # Scatter the gradient of the involved atoms (self.indices) back
        # into a full-size zero array.
        grad = np.zeros_like(c3d)
        grad[self.indices] = func(*c3d[self.indices].flatten()).reshape((- 1), 3)
        return grad

    def __str__(self):
        if hasattr(self, 'indices'):
            str_ = f'{self.__class__.__name__}({self.indices})'
        else:
            str_ = super().__str__()
        return str_
class LossMetrics(Metrics):
    """Tracks a sliding-window loss and a lifetime (running-mean) loss, with
    optional all-reduce aggregation across distributed workers."""

    def __init__(self, window_size: int=100, device: torch.device=torch.device('cpu'), world_size: int=0):
        super().__init__()
        self._world_size = world_size
        self._window_size = window_size
        # BUG FIX: remember the device so reset() can rebuild its tensors;
        # it previously referenced an undefined global "device" (NameError).
        self._device = device
        self._epoch = 0
        self._iteration = 0
        self._window_losses = []
        self._window_loss = torch.tensor(0.0, device=device)
        self._accumulated_loss = torch.tensor(0.0, device=device)
        self._lifetime_loss = torch.tensor(0.0, device=device)
        if (self._world_size > 1):
            self._global_window_loss = torch.tensor(0.0, device=device)
            self._global_lifetime_loss = torch.tensor(0.0, device=device)

    def reset(self):
        """Clear counters and local losses (global aggregates untouched)."""
        self._epoch = 0
        self._iteration = 0
        self._window_losses = []
        self._window_loss = torch.tensor(0.0, device=self._device)
        self._accumulated_loss = torch.tensor(0.0, device=self._device)
        self._lifetime_loss = torch.tensor(0.0, device=self._device)

    def update(self, loss: torch.Tensor):
        """Fold one batch loss into the window and lifetime statistics."""
        self._iteration += 1
        self._window_losses.append(loss)
        # Keep at most window_size entries.
        if (len(self._window_losses) > self._window_size):
            self._window_losses.pop(0)
        self._window_loss = torch.mean(torch.stack(self._window_losses))
        self._accumulated_loss += loss
        self._lifetime_loss = (self._accumulated_loss / self._iteration)

    def log(self):
        """Log the local window and lifetime losses."""
        logger.info(f'Epoch: {self._epoch} | Iteration: {self._iteration} | Local Lifetime Loss: {self._lifetime_loss} | Local Window Loss: {self._window_loss}')

    def update_global_metrics(self):
        """All-reduce the mean of window/lifetime losses across workers;
        no-op unless torch.distributed is initialised and world_size > 1."""
        if (dist.is_initialized() and (self._world_size > 1)):
            # Pre-divide so the SUM reduction yields the mean.
            self._global_window_loss = (self._window_loss / self._world_size)
            self._global_lifetime_loss = (self._lifetime_loss / self._world_size)
            dist.all_reduce(self._global_window_loss, op=dist.reduce_op.SUM)
            dist.all_reduce(self._global_lifetime_loss, op=dist.reduce_op.SUM)
        else:
            pass

    def log_global_metrics(self):
        """Log the aggregated losses (only meaningful with world_size > 1)."""
        if (self._world_size > 1):
            logger.info(f'Epoch: {self._epoch} | Iteration: {self._iteration} | Global Lifetime Loss: {self._global_lifetime_loss} | Global Window Loss: {self._global_window_loss}')
        else:
            pass
def testdir(a): try: names = [n for n in os.listdir(a) if n.endswith('.py')] except OSError: sys.stderr.write(('Directory not readable: %s\n' % a)) else: for n in names: fullname = os.path.join(a, n) if os.path.isfile(fullname): output = io.StringIO() print(('Testing %s' % fullname)) try: roundtrip(fullname, output) except Exception: e = sys.exc_info()[1] print((' Failed to compile, exception is %s' % repr(e))) elif os.path.isdir(fullname): testdir(fullname)
class OptionPlotoptionsVennClusterLayoutalgorithm(Options):
    """Accessors for the venn series ``cluster.layoutAlgorithm`` options.

    NOTE(review): each option below is a getter/setter pair sharing one
    name; the ``@property`` / ``@<name>.setter`` decorators appear to have
    been stripped during extraction -- restore them from the original source.
    """

    def distance(self):
        # Getter; configured default is 40.
        return self._config_get(40)

    def distance(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def gridSize(self):
        # Getter; configured default is 50.
        return self._config_get(50)

    def gridSize(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def iterations(self):
        # Getter; no default.
        return self._config_get(None)

    def iterations(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def kmeansThreshold(self):
        # Getter; configured default is 100.
        return self._config_get(100)

    def kmeansThreshold(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def type(self):
        # Getter; no default.
        return self._config_get(None)

    def type(self, text: str):
        # Setter.
        self._config(text, js_type=False)
def get_ips(session, endpoint_id, limit=None):
    """Return ``(ip, request_count)`` pairs for an endpoint, busiest first.

    Args:
        session: active SQLAlchemy session.
        endpoint_id: id of the endpoint whose requests are counted.
        limit: optional cap on the number of rows returned.
    """
    selection = session.query(Request.ip, func.count(Request.ip))
    filtered = selection.filter(Request.endpoint_id == endpoint_id)
    ordered = filtered.group_by(Request.ip).order_by(desc(func.count(Request.ip)))
    if limit:
        ordered = ordered.limit(limit)
    rows = ordered.all()
    # Detach the results so they remain usable after the session closes.
    session.expunge_all()
    return rows
def test_adding_an_externalTrafficPolicy():
    """externalTrafficPolicy is absent by default and honoured when set."""
    # Default render: no externalTrafficPolicy on the Service spec.
    rendered = helm_template('')
    assert 'externalTrafficPolicy' not in rendered['service'][uname]['spec']
    # Explicitly requesting `Local` must surface verbatim in the spec.
    overrides = '\n service:\n externalTrafficPolicy: Local\n '
    rendered = helm_template(overrides)
    assert rendered['service'][uname]['spec']['externalTrafficPolicy'] == 'Local'
# NOTE(review): the two bare `_required` names below look like view
# decorators whose prefixes (e.g. `@login_required` / `@staff_required`)
# were truncated during extraction -- restore them from the original source.
_required
_required
def details(request, hostname):
    """Node detail page: collect node data and render gui/node/details.html."""
    dc1_settings = get_dc1_settings(request)
    context = collect_view_data(request, 'node_list')
    # Resolve the node (pre-fetching the owner relation) and expose stats.
    context['node'] = node = get_node(request, hostname, sr=('owner',))
    context['nodes'] = Node.all()
    context['node_dcs'] = node.dc.all().values_list('alias', flat=True)
    context['node_vms'] = node.vm_set.count()
    # "Real" VMs exclude slave (replica) VMs.
    context['node_real_vms'] = node.vm_set.filter(slavevm__isnull=True).count()
    context['form'] = NodeForm(request, node, initial=node.web_data)
    # SLA widget only when Zabbix node monitoring is enabled.
    context['mon_sla_enabled'] = (settings.MON_ZABBIX_ENABLED and dc1_settings.MON_ZABBIX_NODE_SLA)
    if node.is_backup:
        context['node_backups'] = node.backup_set.count()
    else:
        context['node_backups'] = 0
    # Let listeners extend the context before rendering.
    view_node_details.send(sender='gui.node.views.details', request=request, context=context)
    return render(request, 'gui/node/details.html', context)
def convert_preprocessors():
    """Download the informative-drawings model definition, convert its
    weights to safetensors, then remove the temporary module.

    NOTE(review): the download URL was stripped from the curl invocation and
    the argument quoting is garbled (`' '-o'`); the original was presumably
    ['curl', '-L', '<url>', '-o', 'src/model.py'] -- restore before running.
    """
    subprocess.run(['curl', '-L', ' '-o', 'src/model.py'], check=True)
    # Convert the checkpoint and verify it against the expected hash.
    run_conversion_script('convert_informative_drawings.py', 'tests/weights/carolineec/informativedrawings/model2.pth', 'tests/weights/informative-drawings.safetensors', expected_hash='93dca207')
    # Clean up the temporarily downloaded module.
    os.remove('src/model.py')
def jzazbz_to_xyz_d65(jzazbz: Vector) -> Vector:
    """Convert Jzazbz coordinates to absolute XYZ (D65 white point)."""
    jz, az, bz = jzazbz
    # Undo the lightness compression to recover Iz.
    shifted = jz + D0
    iz = shifted / (1 + D - D * shifted)
    # Izazbz -> PQ-encoded LMS -> linear LMS.
    pq_lms = alg.matmul(izazbz_to_lms_p_mi, [iz, az, bz], dims=alg.D2_D1)
    linear_lms = util.pq_st2084_eotf(pq_lms, m2=M2)
    # LMS -> adapted XYZ, then invert the B/G chromatic adaptation.
    xm, ym, za = alg.matmul(lms_to_xyz_mi, linear_lms, dims=alg.D2_D1)
    xa = (xm + (B - 1) * za) / B
    ya = (ym + (G - 1) * xa) / G
    return util.absxyz_to_xyz([xa, ya, za])
def gatherSplitTimeStepXDMFfilesOpt(size, filename, dataDir='.', addname='_all', nStepsOnly=None, stride=1):
    """Merge per-subdomain split XDMF metadata files into one file per
    time step.

    For every selected time step it writes
    ``<dataDir>/<filename>_t<tn><addname><size>.xmf`` containing a spatial
    grid collection gathered from the ``<filename><i>.xmf`` files of all
    *size* subdomains.

    NOTE(review): the XDMF header literal below is garbled -- the
    ``xmlns:xi`` namespace URL and its closing quote were stripped during
    extraction, fusing into ``<Domain>``; restore from the original source.
    """
    # The number of time steps is read from subdomain 0's file.
    xmlFile = open(((filename + str(0)) + '.xmf'), 'r')
    tree = ElementTree(file=xmlFile)
    xmlFile.close()
    nSteps = len(tree.getroot()[(- 1)][(- 1)])
    if (nStepsOnly != None):
        nSteps = nStepsOnly
    print('nSteps', nSteps)
    # Only every `stride`-th step is gathered.
    stepsToGather = [(i * stride) for i in range(old_div(nSteps, stride))]
    for tn in stepsToGather:
        fAll = open(os.path.join(dataDir, (((((filename + '_t') + str(tn)) + addname) + str(size)) + '.xmf')), 'w')
        fAll.write('<?xml version="1.0" ?>\n<!DOCTYPE Xdmf SYSTEM "Xdmf.dtd" []>\n<Xdmf Version="2.0" xmlns:xi=" <Domain>\n <Grid CollectionType="Temporal" GridType="Collection" Name="Mesh Spatial_Domain">\n')
        print('time step', tn)
        print('subdomain', 0)
        fAll.write(' <Grid CollectionType="Spatial" GridType="Collection">\n ')
        # Subdomain 0 contributes its first child (shared metadata) plus
        # its grid; the other subdomains contribute only their grids.
        xmlFile = open(os.path.join(dataDir, ((filename + str(0)) + '.xmf')), 'r')
        tree = ElementTree(file=xmlFile)
        xmlFile.close()
        Grid = tree.getroot()[(- 1)][(- 1)][tn]
        fAll.write(tostring(Grid[0]))
        del Grid[0]
        fAll.write(tostring(Grid))
        for i in range(1, size):
            print('subdomain', i)
            xmlFile = open(os.path.join(dataDir, ((filename + str(i)) + '.xmf')), 'r')
            tree = ElementTree(file=xmlFile)
            xmlFile.close()
            Grid = tree.getroot()[(- 1)][(- 1)][tn]
            del Grid[0]
            fAll.write(tostring(Grid))
        fAll.write(' </Grid>\n')
        fAll.write(' </Grid>\n </Domain>\n</Xdmf>\n')
        fAll.close()
# NOTE(review): the decorator prefix was truncated during extraction; the
# original is presumably `@registry.llm_models('spacy.Text-Babbage.v2')`.
_models('spacy.Text-Babbage.v2')
def openai_text_babbage_v2(config: Dict[(Any, Any)]=SimpleFrozenDict(max_tokens=500, temperature=_DEFAULT_TEMPERATURE), name: Literal['text-babbage-001']='text-babbage-001', strict: bool=OpenAI.DEFAULT_STRICT, max_tries: int=OpenAI.DEFAULT_MAX_TRIES, interval: float=OpenAI.DEFAULT_INTERVAL, max_request_time: float=OpenAI.DEFAULT_MAX_REQUEST_TIME, endpoint: Optional[str]=None) -> Callable[([Iterable[str]], Iterable[str])]:
    """Build an OpenAI ``text-babbage-001`` model callable mapping prompt
    strings to completion strings, using the non-chat completions endpoint
    unless an explicit endpoint is given."""
    return OpenAI(name=name, endpoint=(endpoint or Endpoints.NON_CHAT.value), config=config, strict=strict, max_tries=max_tries, interval=interval, max_request_time=max_request_time)
def report_results(results, hw_config, report_json_filename):
    """Print a YAML summary of test *results* and optionally dump a JSON
    report (including *hw_config*) to *report_json_filename*."""
    if not results:
        return
    title = 'test results'
    print('\n')
    print(title)
    print('=' * len(title))
    collected = {}
    for outcome in results:
        status_lists = [
            ('ERROR', outcome.errors),
            ('FAIL', outcome.failures),
            ('SKIPPED', outcome.skipped),
        ]
        # Some result objects also track successes; include them when present.
        if hasattr(outcome, 'successes'):
            status_lists.append(('OK', outcome.successes))
        for status, tests in status_lists:
            collected.update(report_tests(status, tests, outcome))
    print(yaml_dump(collected))
    if report_json_filename:
        payload = {'hw_config': hw_config, 'tests': collected}
        with open(report_json_filename, 'w', encoding='utf-8') as fh:
            fh.write(json.dumps(payload))
class ModelWorker(ABC):
    """Abstract base for workers that host a model and serve inference.

    NOTE(review): several methods below have no body and no decorator -- the
    `@abstractmethod` decorators and/or docstring bodies appear to have been
    stripped during extraction; confirm against the original source.
    """

    def worker_type(self) -> WorkerType:
        # Default worker kind: a large language model.
        return WorkerType.LLM

    def model_param_class(self) -> Type:
        # Parameter container used by parse_parameters(); subclasses may override.
        return ModelParameters

    def support_async(self) -> bool:
        # Whether the async_* API variants are usable on this worker.
        return False

    def parse_parameters(self, command_args: List[str]=None) -> ModelParameters:
        """Parse model parameters from command-line style arguments."""

    def load_worker(self, model_name: str, model_path: str, **kwargs) -> None:
        """Prepare the worker for the given model name/path."""

    def start(self, model_params: ModelParameters=None, command_args: List[str]=None) -> None:
        """Start serving the model."""

    def stop(self) -> None:
        """Stop serving and release resources."""

    def restart(self, model_params: ModelParameters=None, command_args: List[str]=None) -> None:
        """Stop, then start again with the given parameters."""
        self.stop()
        self.start(model_params, command_args)

    def parameter_descriptions(self) -> List[ParameterDescription]:
        """Describe the parameters accepted by model_param_class()."""
        param_cls = self.model_param_class()
        return _get_parameter_descriptions(param_cls)

    def generate_stream(self, params: Dict) -> Iterator[ModelOutput]:
        """Synchronously stream generation output chunks."""

    async def async_generate_stream(self, params: Dict) -> Iterator[ModelOutput]:
        """Asynchronously stream generation output chunks (opt-in)."""
        raise NotImplementedError

    def generate(self, params: Dict) -> ModelOutput:
        """Synchronously generate a complete output."""

    async def async_generate(self, params: Dict) -> ModelOutput:
        """Asynchronously generate a complete output (opt-in)."""
        raise NotImplementedError

    def count_token(self, prompt: str) -> int:
        """Count the tokens of *prompt* for the hosted model."""

    async def async_count_token(self, prompt: str) -> int:
        """Async token count (opt-in)."""
        raise NotImplementedError

    def get_model_metadata(self, params: Dict) -> ModelMetadata:
        """Return metadata describing the hosted model."""

    async def async_get_model_metadata(self, params: Dict) -> ModelMetadata:
        """Async model metadata (opt-in)."""
        raise NotImplementedError

    def embeddings(self, params: Dict) -> List[List[float]]:
        """Compute embeddings for the given input."""

    async def async_embeddings(self, params: Dict) -> List[List[float]]:
        """Async embeddings (opt-in)."""
        raise NotImplementedError
class OptionSeriesArcdiagramLabelStyle(Options):
    """Accessors for the arcdiagram series label ``style`` options.

    NOTE(review): each option below is a getter/setter pair sharing one
    name; the ``@property`` / ``@<name>.setter`` decorators appear to have
    been stripped during extraction -- restore them from the original source.
    """

    def fontSize(self):
        # Getter; configured default is '0.8em'.
        return self._config_get('0.8em')

    def fontSize(self, num: float):
        # Setter.
        self._config(num, js_type=False)

    def fontWeight(self):
        # Getter; configured default is 'bold'.
        return self._config_get('bold')

    def fontWeight(self, text: str):
        # Setter.
        self._config(text, js_type=False)
class Filter(object):
    """Decorator wrapping a stock-filter function.

    An instance is applied as ``@Filter(param=default, ...)``; ``__call__``
    captures the wrapped function and ``create()`` builds a callable that
    evaluates it over every enumerated parameter combination, returning a
    DataFrame of signals with a parameter MultiIndex on the columns.
    """

    def __init__(self, **default_parameters):
        # Wrapped filter function (set by __call__).
        self.func = None
        self.filters = {}
        self._default_parameters = default_parameters
        self.set_parameters(default_parameters)

    def __call__(self, func):
        """Decorator protocol: remember the wrapped function."""
        self.func = func
        return self

    def set_parameters(self, variables):
        """Bind defaults, then override with *variables*, as attributes."""
        if self._default_parameters:
            for (key, val) in self._default_parameters.items():
                setattr(self, key, val)
        if variables:
            for (key, val) in variables.items():
                setattr(self, key, val)

    def show_parameters(self):
        """Print the current value of every default parameter."""
        parameters = {}
        for (key, val) in self._default_parameters.items():
            parameters[key] = getattr(self, key)
        print(parameters)

    def create(self, variables=None):
        """Return a function evaluating the filter over *variables* combos.

        The returned function maps an OHLCV frame to ``(signals, fig_data)``.
        """
        def ret_f(ohlcv):
            variable_enumerate = enumerate_variables(variables)
            # With no variables, evaluate once using the defaults.
            if (len(variable_enumerate) == 0):
                variable_enumerate.append(self._default_parameters)
            signals = {}
            fig_data = {}
            for v in variable_enumerate:
                self.set_parameters(v)
                results = self.func(ohlcv)
                v = remove_pd_object(v)
                # The filter may return (signal, fig_data) or just a signal.
                if isinstance(results, Iterable):
                    (signals[str(v)], fig_data) = results
                else:
                    signals[str(v)] = results
            signals = pd.DataFrame(signals)
            signals.columns.name = 'filter'
            # Columns are str(dict) of the parameters; eval() round-trips
            # them back to dicts to build a parameter MultiIndex.
            # NOTE(review): relies on repr(dict) being eval()-able -- only
            # safe for plain literal parameter values.
            param_names = list(eval(signals.columns[0]).keys())
            arrays = [signals.columns.map((lambda s: eval(s)[p])) for p in param_names]
            tuples = list(zip(*arrays))
            columns = pd.MultiIndex.from_tuples(tuples, names=param_names)
            signals.columns = columns
            return (signals, fig_data)
        return ret_f
# NOTE(review): the `@pytest.mark.` prefix of this parametrize decorator was
# lost in extraction, as were the HTTP client constructor expression and the
# POST URL below -- restore them from the original source before running.
.parametrize('header', ['multipart/form-data; charset=utf-8', 'multipart/form-data; charset=utf-8; '])
def test_multipart_header_without_boundary(header: str) -> None:
    """A multipart content-type lacking a boundary is sent through verbatim."""
    client =
    files = {'file': io.BytesIO(b'<file content>')}
    headers = {'content-type': header}
    response = client.post(' files=files, headers=headers)
    assert (response.status_code == 200)
    # The client must not rewrite or append a boundary to the given header.
    assert (response.request.headers['Content-Type'] == header)
def test_node_comparison():
    """Equality compares name and type; a difference in either breaks it."""
    base = MockNode({'name': 'test', 'type': 'none'})
    twin = MockNode({'name': 'test', 'type': 'none'})
    renamed = MockNode({'name': 'test2', 'type': 'none'})
    retyped = MockNode({'name': 'test', 'type': 'some'})
    assert base == twin
    assert twin != renamed
    assert twin != retyped
class GainLossSet(AbstractEntrySet):
    """Sorted set of GainLoss entries for one asset.

    After sorting, it tracks how each taxable event and each acquired lot is
    split into fractions across GainLoss entries: for every entry, which
    fraction index it represents, and for every transaction, the total
    number of fractions it was split into.
    """

    # NOTE(review): this takes `cls` but carries no decorator -- the
    # `@classmethod` decorator appears to have been stripped in extraction.
    def type_check(cls, name: str, instance: 'GainLossSet') -> 'GainLossSet':
        """Validate that *instance* is a GainLossSet; return it unchanged."""
        Configuration.type_check_parameter_name(name)
        if (not isinstance(instance, cls)):
            raise RP2TypeError(f"Parameter '{name}' is not of type {cls.__name__}: {instance}")
        return instance

    def __init__(self, configuration: Configuration, asset: str, from_date: date=MIN_DATE, to_date: date=MAX_DATE) -> None:
        super().__init__(configuration, 'MIXED', asset, from_date, to_date)
        # entry -> fraction index of its taxable event / acquired lot.
        self.__taxable_events_to_fraction: Dict[(GainLoss, int)] = {}
        self.__acquired_lots_to_fraction: Dict[(GainLoss, int)] = {}
        # transaction -> total number of fractions it was split into.
        self.__taxable_events_to_number_of_fractions: Dict[(AbstractTransaction, int)] = {}
        self.__acquired_lots_to_number_of_fractions: Dict[(InTransaction, int)] = {}
        # Count of taxable events per transaction type.
        self.__transaction_type_2_count: Dict[(TransactionType, int)] = {transaction_type: 0 for transaction_type in TransactionType}

    def add_entry(self, entry: AbstractEntry) -> None:
        """Add a GainLoss entry (type-checked) to the set."""
        GainLoss.type_check('entry', entry)
        super().add_entry(entry)

    def get_transaction_type_count(self, transaction_type: TransactionType) -> int:
        """Number of taxable events of the given transaction type."""
        TransactionType.type_check('transaction_type', transaction_type)
        self._check_sort()
        return self.__transaction_type_2_count[transaction_type]

    def get_taxable_event_fraction(self, entry: GainLoss) -> int:
        """0-based fraction index of *entry* within its taxable event."""
        self._validate_entry(entry)
        self._check_sort()
        return self.__taxable_events_to_fraction[entry]

    def get_acquired_lot_fraction(self, entry: GainLoss) -> int:
        """0-based fraction index of *entry* within its acquired lot."""
        self._validate_entry(entry)
        self._check_sort()
        return self.__acquired_lots_to_fraction[entry]

    def get_taxable_event_number_of_fractions(self, transaction: AbstractTransaction) -> int:
        """Total number of fractions the given taxable event is split into."""
        AbstractTransaction.type_check('transaction', transaction)
        if (transaction not in self.__taxable_events_to_number_of_fractions):
            raise RP2ValueError(f'''Unknown transaction: {transaction}''')
        self._check_sort()
        return self.__taxable_events_to_number_of_fractions[transaction]

    def get_acquired_lot_number_of_fractions(self, transaction: InTransaction) -> int:
        """Total number of fractions the given acquired lot is split into."""
        InTransaction.type_check('transaction', transaction)
        if (transaction not in self.__acquired_lots_to_number_of_fractions):
            raise RP2ValueError(f'''Unknown transaction: {transaction}''')
        self._check_sort()
        return self.__acquired_lots_to_number_of_fractions[transaction]

    def _validate_entry(self, entry: AbstractEntry) -> None:
        # Entries must be GainLoss instances belonging to this set.
        GainLoss.type_check('entry', entry)
        super()._validate_entry(entry)

    def _sort_entries(self) -> None:
        """Sort entries and rebuild all fraction bookkeeping.

        Walks the sorted entries accumulating crypto amounts per taxable
        event and per acquired lot; when an accumulated amount reaches the
        transaction's balance change the fraction count is finalized, and
        exceeding it is an error.
        """
        LOGGER.debug('Sort Gain-Loss Set:')
        super()._sort_entries()
        entry: AbstractEntry
        gain_loss: Optional[GainLoss] = None
        current_taxable_event_amount: RP2Decimal = ZERO
        current_taxable_event_fraction: int = 0
        current_acquired_lot_amount: Dict[(InTransaction, RP2Decimal)] = {}
        current_acquired_lot_fraction: Dict[(InTransaction, int)] = {}
        last_gain_loss_with_acquired_lot: Optional[GainLoss] = None
        # Reset all derived state before rebuilding it.
        self.__taxable_events_to_fraction = {}
        self.__taxable_events_to_number_of_fractions = {}
        self.__acquired_lots_to_fraction = {}
        self.__acquired_lots_to_number_of_fractions = {}
        self.__transaction_type_2_count = {transaction_type: 0 for transaction_type in TransactionType}
        for entry in self._entry_list:
            gain_loss = cast(GainLoss, entry)
            # Entries are sorted by time: stop past the requested window.
            if (gain_loss.timestamp.date() > self.to_date):
                break
            count: int = self.__transaction_type_2_count[gain_loss.taxable_event.transaction_type]
            self.__transaction_type_2_count[gain_loss.taxable_event.transaction_type] = (count + 1)
            if gain_loss.acquired_lot:
                last_gain_loss_with_acquired_lot = gain_loss
            # --- taxable event fraction bookkeeping ---
            current_taxable_event_amount += gain_loss.crypto_amount
            self.__taxable_events_to_fraction[gain_loss] = current_taxable_event_fraction
            if (current_taxable_event_amount == gain_loss.taxable_event.crypto_balance_change):
                # Amount exhausted exactly: finalize the fraction count.
                if (gain_loss.taxable_event in self.__taxable_events_to_number_of_fractions):
                    raise RP2ValueError(f'Taxable event crypto amount already exhausted for {gain_loss.taxable_event}')
                self.__taxable_events_to_number_of_fractions[gain_loss.taxable_event] = (current_taxable_event_fraction + 1)
                LOGGER.debug('%s (%d - %d): current amount == taxable event (%.16f)', gain_loss.internal_id, (current_acquired_lot_fraction[gain_loss.acquired_lot] if (gain_loss.acquired_lot in current_acquired_lot_fraction) else 0), current_taxable_event_fraction, current_taxable_event_amount)
                current_taxable_event_fraction = 0
                current_taxable_event_amount = ZERO
            elif (current_taxable_event_amount < gain_loss.taxable_event.crypto_balance_change):
                LOGGER.debug('%s (%d - %d): current amount < taxable event (%.16f < %.16f)', gain_loss.internal_id, (current_acquired_lot_fraction[gain_loss.acquired_lot] if (gain_loss.acquired_lot in current_acquired_lot_fraction) else 0), current_taxable_event_fraction, current_taxable_event_amount, gain_loss.taxable_event.crypto_balance_change)
                current_taxable_event_fraction += 1
            else:
                raise RP2ValueError(f'Current taxable event amount ({current_taxable_event_amount}) exceeded crypto balance change of taxable event ({gain_loss.taxable_event.crypto_balance_change}). {gain_loss}')
            # --- acquired lot fraction bookkeeping ---
            if gain_loss.acquired_lot:
                current_acquired_lot_amount[gain_loss.acquired_lot] = (current_acquired_lot_amount.setdefault(gain_loss.acquired_lot, ZERO) + gain_loss.crypto_amount)
                self.__acquired_lots_to_fraction[gain_loss] = current_acquired_lot_fraction.setdefault(gain_loss.acquired_lot, 0)
                if (current_acquired_lot_amount[gain_loss.acquired_lot] == gain_loss.acquired_lot.crypto_balance_change):
                    # Lot exhausted exactly: finalize and drop tracking state.
                    if (gain_loss.acquired_lot in self.__acquired_lots_to_number_of_fractions):
                        raise RP2ValueError(f'Acquired lot crypto amount already exhausted for {gain_loss.acquired_lot}')
                    self.__acquired_lots_to_number_of_fractions[gain_loss.acquired_lot] = (current_acquired_lot_fraction[gain_loss.acquired_lot] + 1)
                    LOGGER.debug('%s (%d - %d): current amount == acquired lot amount (%.16f)', gain_loss.internal_id, current_acquired_lot_fraction[gain_loss.acquired_lot], current_taxable_event_fraction, current_acquired_lot_amount[gain_loss.acquired_lot])
                    del current_acquired_lot_amount[gain_loss.acquired_lot]
                    del current_acquired_lot_fraction[gain_loss.acquired_lot]
                elif (current_acquired_lot_amount[gain_loss.acquired_lot] < gain_loss.acquired_lot.crypto_balance_change):
                    LOGGER.debug('%s (%d - %d): current amount < acquired lot amount (%.16f < %.16f)', gain_loss.internal_id, current_acquired_lot_fraction[gain_loss.acquired_lot], current_taxable_event_fraction, current_acquired_lot_amount[gain_loss.acquired_lot], gain_loss.acquired_lot.crypto_balance_change)
                    current_acquired_lot_fraction[gain_loss.acquired_lot] = (current_acquired_lot_fraction[gain_loss.acquired_lot] + 1)
                else:
                    raise RP2ValueError(f'Current acquired lot amount ({current_acquired_lot_amount[gain_loss.acquired_lot]}) exceeded crypto balance change of acquired lot ({gain_loss.acquired_lot.crypto_balance_change}). {gain_loss}')
        # Housekeeping: finalize partially-consumed transactions at the end.
        if last_gain_loss_with_acquired_lot:
            if (current_taxable_event_amount > ZERO):
                if (last_gain_loss_with_acquired_lot.taxable_event in self.__taxable_events_to_number_of_fractions):
                    raise RP2ValueError(f'Taxable event crypto amount already exhausted for {last_gain_loss_with_acquired_lot.taxable_event}')
                self.__taxable_events_to_number_of_fractions[last_gain_loss_with_acquired_lot.taxable_event] = current_taxable_event_fraction
                LOGGER.debug('%s (%d - %d): taxable event housekeeping', last_gain_loss_with_acquired_lot.internal_id, (current_acquired_lot_fraction[last_gain_loss_with_acquired_lot.acquired_lot] if (last_gain_loss_with_acquired_lot.acquired_lot in current_acquired_lot_fraction) else 0), current_taxable_event_fraction)
            for (acquired_lot, fraction) in current_acquired_lot_fraction.items():
                if acquired_lot:
                    if (acquired_lot in self.__acquired_lots_to_number_of_fractions):
                        raise RP2ValueError(f'Acquired lot crypto amount already exhausted for {acquired_lot}')
                    self.__acquired_lots_to_number_of_fractions[acquired_lot] = fraction
                    LOGGER.debug('%s (%d): acquired_lot housekeeping', acquired_lot.internal_id, current_acquired_lot_fraction[acquired_lot])

    def __str__(self) -> str:
        """Multi-line human-readable dump of the set and its fractions."""
        output: List[str] = []
        output.append(f'{type(self).__name__}:')
        output.append(f' configuration={self.configuration.configuration_path}')
        output.append(f' asset={self.asset}')
        output.append(f" from_date={(str(self.from_date) if (self.from_date > MIN_DATE) else 'non-specified')}")
        output.append(f" to_date={(str(self.to_date) if (self.to_date < MAX_DATE) else 'non-specified')}")
        output.append(' entries=')
        for entry in self:
            parent: Optional[AbstractEntry]
            gain_loss: GainLoss = cast(GainLoss, entry)
            output.append(entry.to_string(indent=2, repr_format=False))
            parent = self.get_parent(entry)
            output.append(f' taxable_event_fraction={(self.get_taxable_event_fraction(gain_loss) + 1)} of {self.get_taxable_event_number_of_fractions(gain_loss.taxable_event)}')
            if gain_loss.acquired_lot:
                output.append(f' acquired_lot_fraction={(self.get_acquired_lot_fraction(gain_loss) + 1)} of {self.get_acquired_lot_number_of_fractions(gain_loss.acquired_lot)}')
            output.append(f' parent={(parent.internal_id if parent else None)}')
        return '\n'.join(output)

    def __repr__(self) -> str:
        """Single-line repr-style dump of the set and its fractions."""
        output: List[str] = []
        output.append(f'{type(self).__name__}(')
        output.append(f'configuration={repr(self.configuration.configuration_path)}')
        output.append(f', asset={repr(self.asset)}')
        output.append(f", from_date={(repr(self.from_date) if (self.from_date > MIN_DATE) else 'non-specified')}")
        output.append(f", to_date={(repr(self.to_date) if (self.to_date < MAX_DATE) else 'non-specified')}")
        output.append(', entries=[')
        count: int = 0
        for entry in self:
            parent: Optional[AbstractEntry]
            gain_loss: GainLoss = cast(GainLoss, entry)
            if (count > 0):
                output.append(', ')
            entry_string = repr(entry)
            # Fraction info is spliced in before the entry's closing paren.
            if (entry_string[(- 1)] != ')'):
                raise RP2RuntimeError("Internal error: repr() of transaction doesn't end with ')'")
            output.append(entry_string[:(- 1)])
            parent = self.get_parent(entry)
            output.append(f', taxable_event_fraction={(self.get_taxable_event_fraction(gain_loss) + 1)} of {self.get_taxable_event_number_of_fractions(gain_loss.taxable_event)}')
            if gain_loss.acquired_lot:
                output.append(f', acquired_lot_fraction={(self.get_acquired_lot_fraction(gain_loss) + 1)} of {self.get_acquired_lot_number_of_fractions(gain_loss.acquired_lot)}')
            output.append(f', parent={(parent.internal_id if parent else None)}')
            output.append(')')
            count += 1
        output.append(']')
        return ''.join(output)
class BinaryServiceDbInterface(ReadOnlyDbInterface):
    """Read-only database access used by the binary service."""

    def get_file_name(self, uid: str) -> (str | None):
        """Return the stored file name for *uid*, or None when unknown."""
        with self.get_read_only_session() as session:
            entry: FileObjectEntry = session.get(FileObjectEntry, uid)
            if entry is None:
                return None
            return entry.file_name
class FFI(object):
    """cffi's main entry point (ABI level): declare C constructs with
    ``cdef()``, load shared libraries with ``dlopen()``, and create or
    inspect C data objects through the backend."""

    def __init__(self, backend=None):
        """Create an FFI instance.

        Args:
            backend: a cffi backend module/object; when None the compiled
                ``_cffi_backend`` extension is used and its version is
                checked against this package's version.
        """
        from . import cparser, model
        if (backend is None):
            import _cffi_backend as backend
            from . import __version__
            assert (backend.__version__ == __version__), ('version mismatch, %s != %s' % (backend.__version__, __version__))
        self._backend = backend
        self._lock = allocate_lock()
        self._parser = cparser.Parser()
        self._cached_btypes = {}
        # Module __dict__s serve as plain dict caches for parsed/new types.
        self._parsed_types = types.ModuleType('parsed_types').__dict__
        self._new_types = types.ModuleType('new_types').__dict__
        self._function_caches = []
        self._libraries = []
        self._cdefsources = []
        self._windows_unicode = None
        if hasattr(backend, 'set_ffi'):
            backend.set_ffi(self)
        # Re-export the backend's RTLD_* dlopen flags on this instance.
        for name in backend.__dict__:
            if name.startswith('RTLD_'):
                setattr(self, name, getattr(backend, name))
        with self._lock:
            self.BVoidP = self._get_cached_btype(model.voidp_type)
            self.BCharA = self._get_cached_btype(model.char_array_type)
        if isinstance(backend, types.ModuleType):
            # _cffi_backend module: NULL/CData/CType are shared class-wide.
            if (not hasattr(FFI, 'NULL')):
                FFI.NULL = self.cast(self.BVoidP, 0)
                (FFI.CData, FFI.CType) = backend._get_types()
        else:
            self.NULL = self.cast(self.BVoidP, 0)
            (self.CData, self.CType) = backend._get_types()

    def cdef(self, csource, override=False, packed=False):
        """Parse the given C source (declarations only) and register them."""
        if (not isinstance(csource, str)):
            # Python 2 compatibility: accept unicode, encode to ascii.
            if (not isinstance(csource, basestring)):
                raise TypeError('cdef() argument must be a string')
            csource = csource.encode('ascii')
        with self._lock:
            self._parser.parse(csource, override=override, packed=packed)
            self._cdefsources.append(csource)
            if override:
                # Overriding declarations invalidates cached functions.
                for cache in self._function_caches:
                    cache.clear()

    def dlopen(self, name, flags=0):
        """Load a shared library and return an object exposing its symbols."""
        assert (isinstance(name, basestring) or (name is None))
        with self._lock:
            (lib, function_cache) = _make_ffi_library(self, name, flags)
            self._function_caches.append(function_cache)
            self._libraries.append(lib)
        return lib

    def _typeof_locked(self, cdecl):
        # Slow path of _typeof(); caller holds no lock, we take self._lock.
        key = cdecl
        if (key in self._parsed_types):
            return self._parsed_types[key]
        if (not isinstance(cdecl, str)):
            cdecl = cdecl.encode('ascii')
        type = self._parser.parse_type(cdecl)
        really_a_function_type = type.is_raw_function
        if really_a_function_type:
            type = type.as_function_pointer()
        btype = self._get_cached_btype(type)
        result = (btype, really_a_function_type)
        self._parsed_types[key] = result
        return result

    def _typeof(self, cdecl, consider_function_as_funcptr=False):
        # Fast path: look up the parsed-type cache without locking.
        try:
            result = self._parsed_types[cdecl]
        except KeyError:
            with self._lock:
                result = self._typeof_locked(cdecl)
        (btype, really_a_function_type) = result
        if (really_a_function_type and (not consider_function_as_funcptr)):
            raise CDefError(('the type %r is a function type, not a pointer-to-function type' % (cdecl,)))
        return btype

    def typeof(self, cdecl):
        """Return the ctype object corresponding to a C declaration string,
        a cdata object, or a previously wrapped callback/function."""
        if isinstance(cdecl, basestring):
            return self._typeof(cdecl)
        if isinstance(cdecl, self.CData):
            return self._backend.typeof(cdecl)
        if isinstance(cdecl, types.BuiltinFunctionType):
            res = _builtin_function_type(cdecl)
            if (res is not None):
                return res
        if (isinstance(cdecl, types.FunctionType) and hasattr(cdecl, '_cffi_base_type')):
            with self._lock:
                return self._get_cached_btype(cdecl._cffi_base_type)
        raise TypeError(type(cdecl))

    def sizeof(self, cdecl):
        """Size in bytes of the given C type or cdata object."""
        if isinstance(cdecl, basestring):
            BType = self._typeof(cdecl)
            return self._backend.sizeof(BType)
        else:
            return self._backend.sizeof(cdecl)

    def alignof(self, cdecl):
        """Natural alignment in bytes of the given C type."""
        if isinstance(cdecl, basestring):
            cdecl = self._typeof(cdecl)
        return self._backend.alignof(cdecl)

    def offsetof(self, cdecl, *fields_or_indexes):
        """Offset of a field (or nested fields/indexes) within a C type."""
        if isinstance(cdecl, basestring):
            cdecl = self._typeof(cdecl)
        return self._typeoffsetof(cdecl, *fields_or_indexes)[1]

    def new(self, cdecl, init=None):
        """Allocate a new owning cdata of the given type, optionally
        initialized from *init*."""
        if isinstance(cdecl, basestring):
            cdecl = self._typeof(cdecl)
        return self._backend.newp(cdecl, init)

    def cast(self, cdecl, source):
        """C-style cast of *source* to the given type."""
        if isinstance(cdecl, basestring):
            cdecl = self._typeof(cdecl)
        return self._backend.cast(cdecl, source)

    def string(self, cdata, maxlen=(- 1)):
        """Convert a char*/wchar_t* (or char/enum) cdata to a Python string."""
        return self._backend.string(cdata, maxlen)

    def buffer(self, cdata, size=(- 1)):
        """Return a buffer object sharing memory with the given cdata."""
        return self._backend.buffer(cdata, size)

    def from_buffer(self, python_buffer):
        """Return a char[] cdata sharing memory with a Python buffer."""
        return self._backend.from_buffer(self.BCharA, python_buffer)

    def callback(self, cdecl, python_callable=None, error=None):
        """Wrap a Python callable as a C function pointer; usable directly
        or as a decorator when *python_callable* is omitted."""
        def callback_decorator_wrap(python_callable):
            if (not callable(python_callable)):
                raise TypeError("the 'python_callable' argument is not callable")
            return self._backend.callback(cdecl, python_callable, error)
        if isinstance(cdecl, basestring):
            cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
        if (python_callable is None):
            return callback_decorator_wrap
        else:
            return callback_decorator_wrap(python_callable)

    def getctype(self, cdecl, replace_with=''):
        """Return the string form of a ctype, with *replace_with* inserted
        where a variable name would appear."""
        if isinstance(cdecl, basestring):
            cdecl = self._typeof(cdecl)
        replace_with = replace_with.strip()
        # Parenthesize pointers to arrays/functions to keep C syntax valid.
        if (replace_with.startswith('*') and ('&[' in self._backend.getcname(cdecl, '&'))):
            replace_with = ('(%s)' % replace_with)
        elif (replace_with and (not (replace_with[0] in '[('))):
            replace_with = (' ' + replace_with)
        return self._backend.getcname(cdecl, replace_with)

    def gc(self, cdata, destructor):
        """Return a new cdata pointing at the same data; *destructor* is
        invoked with the original cdata when the new one is collected."""
        with self._lock:
            try:
                gc_weakrefs = self.gc_weakrefs
            except AttributeError:
                # Lazily build the weakref registry on first use.
                from .gc_weakref import GcWeakrefs
                gc_weakrefs = self.gc_weakrefs = GcWeakrefs(self)
            return gc_weakrefs.build(cdata, destructor)

    def _get_cached_btype(self, type):
        # Must be called with self._lock held (checked below).
        assert (self._lock.acquire(False) is False)
        try:
            BType = self._cached_btypes[type]
        except KeyError:
            finishlist = []
            BType = type.get_cached_btype(self, finishlist)
            # Complete any types queued while building this one.
            for type in finishlist:
                type.finish_backend_type(self, finishlist)
        return BType

    def verify(self, source='', tmpdir=None, **kwargs):
        """Compile and load a verification module checking the cdef()'ed
        declarations against real C *source*; return the library object."""
        from .verifier import Verifier, _caller_dir_pycache
        if self._windows_unicode:
            self._apply_windows_unicode(kwargs)
        tmpdir = (tmpdir or _caller_dir_pycache())
        self.verifier = Verifier(self, source, tmpdir, **kwargs)
        lib = self.verifier.load_library()
        self._libraries.append(lib)
        return lib

    def _get_errno(self):
        return self._backend.get_errno()

    def _set_errno(self, errno):
        self._backend.set_errno(errno)
    errno = property(_get_errno, _set_errno, None, "the value of 'errno' from/to the C calls")

    def getwinerror(self, code=(- 1)):
        """Windows only: return (code, message) of the last (or given) error."""
        return self._backend.getwinerror(code)

    def _pointer_to(self, ctype):
        # Return the pointer-to-ctype btype, via the model-level cache.
        from . import model
        with self._lock:
            return model.pointer_cache(self, ctype)

    def addressof(self, cdata, *fields_or_indexes):
        """Return a pointer to a struct/union cdata or to one of its fields."""
        ctype = self._backend.typeof(cdata)
        if fields_or_indexes:
            (ctype, offset) = self._typeoffsetof(ctype, *fields_or_indexes)
        else:
            if (ctype.kind == 'pointer'):
                raise TypeError('addressof(pointer)')
            offset = 0
        ctypeptr = self._pointer_to(ctype)
        return self._backend.rawaddressof(ctypeptr, cdata, offset)

    def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
        # Resolve a chain of field names / array indexes to (ctype, offset).
        (ctype, offset) = self._backend.typeoffsetof(ctype, field_or_index)
        for field1 in fields_or_indexes:
            (ctype, offset1) = self._backend.typeoffsetof(ctype, field1, 1)
            offset += offset1
        return (ctype, offset)

    def include(self, ffi_to_include):
        """Make declarations from another FFI available here, similar to a
        C ``#include``."""
        with ffi_to_include._lock:
            with self._lock:
                self._parser.include(ffi_to_include._parser)
                self._cdefsources.append('[')
                self._cdefsources.extend(ffi_to_include._cdefsources)
                self._cdefsources.append(']')

    def new_handle(self, x):
        """Wrap a Python object in an opaque void* cdata handle."""
        return self._backend.newp_handle(self.BVoidP, x)

    def from_handle(self, x):
        """Recover the Python object from a handle made by new_handle()."""
        return self._backend.from_handle(x)

    def set_unicode(self, enabled_flag):
        """Declare the Windows TCHAR family as wide (True) or narrow (False)
        character types; may only be called once per FFI instance."""
        if (self._windows_unicode is not None):
            raise ValueError('set_unicode() can only be called once')
        enabled_flag = bool(enabled_flag)
        if enabled_flag:
            self.cdef('typedef wchar_t TBYTE;typedef wchar_t TCHAR;typedef const wchar_t *LPCTSTR;typedef const wchar_t *PCTSTR;typedef wchar_t *LPTSTR;typedef wchar_t *PTSTR;typedef TBYTE *PTBYTE;typedef TCHAR *PTCHAR;')
        else:
            self.cdef('typedef char TBYTE;typedef char TCHAR;typedef const char *LPCTSTR;typedef const char *PCTSTR;typedef char *LPTSTR;typedef char *PTSTR;typedef TBYTE *PTBYTE;typedef TCHAR *PTCHAR;')
        self._windows_unicode = enabled_flag

    def _apply_windows_unicode(self, kwds):
        # Add the UNICODE/_UNICODE macros to verify()'s define_macros.
        defmacros = kwds.get('define_macros', ())
        if (not isinstance(defmacros, (list, tuple))):
            raise TypeError("'define_macros' must be a list or tuple")
        defmacros = (list(defmacros) + [('UNICODE', '1'), ('_UNICODE', '1')])
        kwds['define_macros'] = defmacros
# NOTE(review): the `@pytest.mark.parametrize` prefix of this decorator was
# lost in extraction; only its argument tuple remains -- restore before use.
('calling_file, calling_module, config_path, expected', [
    ('foo.py', None, None, None),
    ('foo/bar.py', None, None, None),
    ('foo/bar.py', None, 'conf', realpath('foo/conf')),
    ('foo/bar.py', None, '../conf', realpath('conf')),
    ('foo/bar.py', None, realpath('conf'), realpath('conf')),
    ('c:/foo/bar.py', None, 'conf', realpath('c:/foo/conf')),
    ('c:/foo/bar.py', None, '../conf', realpath('c:/conf')),
    (None, 'module', None, 'pkg://'),
    (None, 'package.module', None, 'pkg://package'),
    (None, 'package.module', 'conf', 'pkg://package/conf'),
    (None, 'package.module', '../conf', 'pkg://conf'),
    (None, 'package1.rename_package_to.module', '../conf', 'pkg://package1/conf'),
    ('foo', 'package1.rename_package_to.module', '../conf', realpath(os.path.join(os.getcwd(), '../conf'))),
    (None, 'package.module', 'pkg://some/conf', 'pkg://some/conf')])
def test_compute_search_path_dir(calling_file: str, calling_module: str, config_path: str, expected: str) -> None:
    """compute_search_path_dir resolves file- and package-relative config
    paths (including pkg:// URIs) to the expected search-path entry."""
    res = compute_search_path_dir(calling_file, calling_module, config_path)
    assert (res == expected)
def test_pole_coeffs():
    """coeffs -> poles -> coeffs round-trips in both directions."""
    pole_count = 10
    original_coeffs = np.random.random(4 * pole_count)
    poles = DispersionFitter._coeffs_to_poles(original_coeffs)
    recovered_coeffs = DispersionFitter._poles_to_coeffs(poles)
    recovered_poles = DispersionFitter._coeffs_to_poles(recovered_coeffs)
    assert np.allclose(original_coeffs, recovered_coeffs)
    assert np.allclose(poles, recovered_poles)
def test_rectangular_dielectric_validations():
    """Invalid gap/sidewall/surface arguments must raise ValidationError."""
    # Arguments shared by every invalid construction attempt below.
    common = dict(
        wavelength=1.55,
        core_thickness=0.22,
        core_medium=td.Medium(permittivity=(3.48 ** 2)),
        clad_medium=td.Medium(permittivity=(1.45 ** 2)),
    )
    with pytest.raises(ValidationError, match='.* gaps .*'):
        waveguide.RectangularDielectric(core_width=(0.5, 0.5), gap=(0.1, 0.1), **common)
    with pytest.raises(ValidationError, match='.* sidewall thickness .*'):
        waveguide.RectangularDielectric(core_width=0.5, sidewall_thickness=0.01, **common)
    with pytest.raises(ValidationError, match='.* surface thickness .*'):
        waveguide.RectangularDielectric(core_width=0.5, surface_thickness=0.01, **common)
def getosfullname():
    """Return the OS "PRETTY_NAME" from /etc/os-release, or '' if unknown.

    Returns '' when the file is missing/unreadable, contains no PRETTY_NAME
    entry, or the entry is not double-quoted -- matching the previous
    behavior, where any failure was swallowed and '' returned.
    """
    try:
        with open('/etc/os-release') as f:
            for line in f:
                line = line.strip()
                if line.startswith('PRETTY_NAME'):
                    # Expected form: PRETTY_NAME="Distro x.y (codename)"
                    parts = line.split('"')
                    # An unquoted value has no parts[1]; report unknown
                    # instead of letting IndexError escape.
                    return parts[1] if len(parts) > 1 else ''
    except (OSError, UnicodeDecodeError):
        # Bug fix: was a bare `except:` that also hid programming errors
        # (and even KeyboardInterrupt); only I/O and decoding problems are
        # expected here.
        pass
    return ''
class TLSContextProtocolMaxVersion(AmbassadorTest):
    """KAT test: TLSContext limited to TLS v1.1-v1.2 via min/max_tls_version.

    NOTE(review): the extracted source fused ``scheme()``'s return literal
    with ``_go_close_connection_error`` below it; ``scheme()`` is
    reconstructed here to return 'https' (these TLS tests query over TLS) --
    confirm against upstream.
    """

    def init(self):
        self.target = HTTP()
        if EDGE_STACK:
            self.xfail = 'Not yet supported in Edge Stack'
        self.xfail = 'FIXME: IHA'

    def manifests(self) -> str:
        # Secret holding the tls-context-host-1 certificate/key pair.
        # NOTE(review): YAML indentation reconstructed (original whitespace
        # was collapsed); standard k8s Secret layout assumed.
        return (
            f'''
---
apiVersion: v1
data:
  tls.crt: {TLSCerts['tls-context-host-1'].k8s_crt}
  tls.key: {TLSCerts['tls-context-host-1'].k8s_key}
kind: Secret
metadata:
  name: secret.max-version
  labels:
    kat-ambassador-id: tlscontextprotocolmaxversion
type: kubernetes.io/tls
'''
            + super().manifests()
        )

    def config(self) -> Generator[Union[str, Tuple[Node, str]], None, None]:
        # Module defaults + a Mapping + a TLSContext pinned to v1.1..v1.2.
        yield (
            self,
            self.format(
                '\n---\napiVersion: getambassador.io/v3alpha1\nkind: Module\nname: ambassador\nconfig:\n defaults:\n tls_secret_namespacing: False\n---\napiVersion: getambassador.io/v3alpha1\nkind: Mapping\nname: {self.name}-same-prefix-1\nprefix: /tls-context-same/\nservice: tls-context-host-1\n---\napiVersion: getambassador.io/v3alpha1\nkind: TLSContext\nname: {self.name}-same-context-1\nhosts:\n- tls-context-host-1\nsecret: secret.max-version\nmin_tls_version: v1.1\nmax_tls_version: v1.2\n'
            ),
        )

    def scheme(self) -> str:
        # NOTE(review): reconstructed return value -- see class docstring.
        return 'https'

    @staticmethod
    def _go_close_connection_error(url):
        # Error text the Go kat-client reports when the server closes the
        # connection during a TLS handshake it refuses.
        return 'Get {}: EOF'.format(url)

    def queries(self):
        # Within the allowed window: v1.2 and v1.0-v1.1 must succeed.
        yield Query(
            self.url('tls-context-same/'),
            headers={'Host': 'tls-context-host-1'},
            expected=200,
            insecure=True,
            sni=True,
            minTLSv='v1.2',
            maxTLSv='v1.2',
        )
        yield Query(
            self.url('tls-context-same/'),
            headers={'Host': 'tls-context-host-1'},
            expected=200,
            insecure=True,
            sni=True,
            minTLSv='v1.0',
            maxTLSv='v1.1',
        )
        # v1.3 exceeds max_tls_version and must be rejected during handshake.
        yield Query(
            self.url('tls-context-same/'),
            headers={'Host': 'tls-context-host-1'},
            expected=200,
            insecure=True,
            sni=True,
            minTLSv='v1.3',
            maxTLSv='v1.3',
            error=[
                'tls: server selected unsupported protocol version 303',
                'tls: no supported versions satisfy MinVersion and MaxVersion',
                'tls: protocol version not supported',
                'read: connection reset by peer',
            ],
        )

    def check(self):
        assert self.results[0].backend
        assert self.results[0].backend.request
        tls_0_version = self.results[0].backend.request.tls.negotiated_protocol_version
        assert self.results[1].backend
        assert self.results[1].backend.request
        tls_1_version = self.results[1].backend.request.tls.negotiated_protocol_version
        # NOTE(review): comparing to None as in the original source; the
        # failure messages suggest a version string may have been intended --
        # confirm against upstream before changing.
        assert (tls_0_version == None), f'requesting TLS v1.2 got TLS {tls_0_version}'
        assert (tls_1_version == None), f'requesting TLS v1.0-v1.1 got TLS {tls_1_version}'

    def requirements(self):
        # Readiness/liveness probes must also work over TLS v1.2 with SNI.
        yield (
            'url',
            Query(
                self.url('ambassador/v0/check_ready'),
                headers={'Host': 'tls-context-host-1'},
                insecure=True,
                sni=True,
                minTLSv='v1.2',
            ),
        )
        yield (
            'url',
            Query(
                self.url('ambassador/v0/check_alive'),
                headers={'Host': 'tls-context-host-1'},
                insecure=True,
                sni=True,
                minTLSv='v1.2',
            ),
        )
def test_tnr_test() -> None:
    """TestTNR with gt=0.8 runs end-to-end on a tiny binary dataset."""
    data = pd.DataFrame(
        {
            'target': ['a', 'a', 'b', 'b'],
            'prediction': ['a', 'b', 'b', 'b'],
        }
    )
    mapping = ColumnMapping(pos_label='a')
    suite = TestSuite(tests=[TestTNR(gt=0.8)])
    suite.run(current_data=data, reference_data=None, column_mapping=mapping)

    # Surface any error captured during execution.
    suite._inner_suite.raise_for_error()

    # The suite object and both renderers must produce truthy output.
    assert suite
    assert suite.show()
    assert suite.json()
class OptionSeriesScatter3dDataMarkerStatesHover(Options):
    """Hover-state marker options for scatter3d series data points.

    NOTE(review): the original declared each getter/setter pair as two plain
    methods with the same name, so the second ``def`` silently shadowed the
    first and every getter was unreachable.  Restored the ``@property`` /
    ``@<name>.setter`` decorators this generated-options style implies --
    confirm against the options code generator.
    """

    def animation(self) -> 'OptionSeriesScatter3dDataMarkerStatesHoverAnimation':
        """Return the sub-configuration object for the hover animation."""
        return self._config_sub_data('animation', OptionSeriesScatter3dDataMarkerStatesHoverAnimation)

    @property
    def enabled(self):
        """Whether the hover state is enabled (configured default: True)."""
        return self._config_get(True)

    @enabled.setter
    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def fillColor(self):
        """Marker fill color in the hover state (default: None)."""
        return self._config_get(None)

    @fillColor.setter
    def fillColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        """Marker line color in the hover state (default: None)."""
        return self._config_get(None)

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Absolute marker line width in the hover state (default: None)."""
        return self._config_get(None)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def lineWidthPlus(self):
        """Pixels added to the base line width on hover (default: 1)."""
        return self._config_get(1)

    @lineWidthPlus.setter
    def lineWidthPlus(self, num: float):
        self._config(num, js_type=False)

    @property
    def radius(self):
        """Absolute marker radius in the hover state (default: None)."""
        return self._config_get(None)

    @radius.setter
    def radius(self, num: float):
        self._config(num, js_type=False)

    @property
    def radiusPlus(self):
        """Pixels added to the base marker radius on hover (default: 2)."""
        return self._config_get(2)

    @radiusPlus.setter
    def radiusPlus(self, num: float):
        self._config(num, js_type=False)
def test_estimate_semi_amplitude(seed=9502):
    """estimate_semi_amplitude recovers the amplitude of a noise-free sinusoid."""
    np.random.seed(seed)
    period, amplitude = 2.345, 4.5
    times = np.sort(np.random.uniform(0, 10, 500))
    signal = amplitude * np.sin(2 * np.pi * times / period)

    # Bare estimate.
    assert np.allclose(estimate_semi_amplitude(period, times, signal), amplitude)

    # Estimate with unit uncertainties and an explicit reference epoch.
    with_err = estimate_semi_amplitude(
        period, times, signal, yerr=np.ones_like(times), t0s=0.5 * period
    )
    assert np.allclose(with_err, amplitude)
def render_info(totals, header):
    """Yield a 'totals:' heading plus one aligned line per column in *totals*.

    Columns absent from *totals* are skipped; TinyTimeDelta values are
    rendered in microseconds.
    """
    columns, _ = header
    yield 'totals:'
    for _, colname, _ in columns:
        if colname not in totals:
            continue
        value = totals[colname]
        if isinstance(value, TinyTimeDelta):
            value = value.render('us')
        # Left-pad the "name:" label to 11 chars so values line up.
        yield f"{colname + ':':11} {value}"
@flow_installed  # NOTE(review): source read a bare '_installed'; assumed a truncated '@flow_installed' skip marker -- confirm
def test_flow(init_flow_config, source_root):
    """Run the flow simulator on a valid and a broken deck.

    Verifies that a good deck runs cleanly (directly and via ecl_run.run),
    that a broken deck fails unless --ignore-errors is given, and that an
    unknown simulator version raises KeyError.
    """
    shutil.copy(source_root / 'test-data/eclipse/SPE1.DATA', 'SPE1.DATA')
    shutil.copy(source_root / 'test-data/eclipse/SPE1_ERROR.DATA', 'SPE1_ERROR.DATA')
    flow_config = ecl_config.FlowConfig()
    sim = flow_config.sim()

    # Valid deck: both invocation styles succeed.
    flow_run = ecl_run.EclRun('SPE1.DATA', sim)
    flow_run.runEclipse()
    ecl_run.run(flow_config, ['SPE1.DATA'])

    # Broken deck: direct run fails, --ignore-errors suppresses the failure.
    flow_run = ecl_run.EclRun('SPE1_ERROR.DATA', sim)
    with pytest.raises(CalledProcessError, match='returned non-zero exit status 1'):
        flow_run.runEclipse()
    ecl_run.run(flow_config, ['SPE1_ERROR.DATA', '--ignore-errors'])

    # Unknown simulator version is rejected.
    with pytest.raises(KeyError):
        ecl_run.run(flow_config, ['SPE1.DATA', '--version=no/such/version'])
def get_input(caller, prompt, callback, session=None, *args, **kwargs):
    """Show *prompt* to *caller* and route their next input to *callback*.

    Stores the callback and its extra arguments on caller.ndb._getinput,
    attaches the InputCmdSet so the next command is intercepted, then sends
    the prompt (optionally to a specific *session*).
    """
    if not callable(callback):
        raise RuntimeError('get_input: input callback is not callable.')

    # Stash everything the eventual input handler needs on a prompt holder.
    prompt_state = _Prompt()
    prompt_state._callback = callback
    prompt_state._prompt = prompt
    prompt_state._session = session
    prompt_state._args = args
    prompt_state._kwargs = kwargs
    caller.ndb._getinput = prompt_state

    # Intercept the caller's next input, then display the prompt.
    caller.cmdset.add(InputCmdSet, persistent=False)
    caller.msg(prompt, session=session)
def read_type_def(line: str):
    """Parse a Fortran derived-type definition line.

    Returns ('typ', ClassInfo(name, parent, keywords)) on success, or None
    when the line is not a valid TYPE definition (e.g. a variable
    declaration of that type, or a ``type is``/``class is`` guard).
    """
    match = FRegex.TYPE_DEF.match(line)
    if match is None:
        return None

    # Drop any trailing '!' comment and surrounding blanks.
    rest = line[match.end(1):].split('!')[0].strip()

    # Consume leading attribute keywords; EXTENDS(...) records the parent.
    keywords: list[str] = []
    parent = None
    attr_match = FRegex.TATTR_LIST.match(rest)
    while attr_match:
        attr = attr_match.group(0).replace(',', ' ').strip().upper()
        extends = FRegex.EXTENDS.match(attr)
        if extends:
            parent = extends.group(1).lower()
        else:
            keywords.append(attr)
        rest = rest[attr_match.end(0):]
        attr_match = FRegex.TATTR_LIST.match(rest)

    parts = rest.split('::')
    if len(parts) > 1:
        # Attributes present: the name follows the '::'.
        rest = parts[1]
    else:
        # No '::' -- non-EXTENDS keywords would have required one.
        if keywords and parent is None:
            return None
        # Reject SELECT TYPE guards like 'type is (...)'.
        if rest.split('(')[0].strip().lower() == 'is':
            return None
        rest = parts[0]

    name_match = FRegex.WORD.match(rest.strip())
    if name_match is None:
        return None
    return ('typ', ClassInfo(name_match.group(0), parent, keywords))
def json_dumps(data: JSONInput, indent: Optional[int]=0, sort_keys: bool=False) -> str:
    """Serialize *data* to a JSON string.

    Uses the fast ujson path by default; falls back to the builtin json
    module when key sorting is requested (ujson cannot sort keys), treating
    indent == 0 as "no indentation" there.
    """
    if not sort_keys:
        # Fast path: ujson, forward slashes left unescaped.
        return ujson.dumps(data, indent=indent, escape_forward_slashes=False)

    effective_indent = None if indent == 0 else indent
    return _builtin_json.dumps(
        data,
        indent=effective_indent,
        separators=(',', ':'),
        sort_keys=sort_keys,
    )