code
stringlengths
281
23.7M
class QuerySelectField(SelectFieldBase):
    """Select field whose choices are produced by an SQLAlchemy query.

    Each option is keyed by the string form of the object's primary key; an
    optional blank choice (submitted as ``__None``) is enabled via
    ``allow_blank``.
    """

    widget = Select2Widget()

    def __init__(self, label=None, validators=None, query_factory=None,
                 get_pk=None, get_label=None, allow_blank=False,
                 blank_text=u'', **kwargs):
        super(QuerySelectField, self).__init__(label, validators, **kwargs)
        self.query_factory = query_factory
        # Fall back to the identity-based PK helper when none is supplied.
        self.get_pk = get_pk_from_identity if get_pk is None else get_pk
        if get_label is None:
            self.get_label = lambda obj: obj
        elif isinstance(get_label, string_types):
            # Attribute name given as a string.
            self.get_label = operator.attrgetter(get_label)
        else:
            self.get_label = get_label
        self.allow_blank = allow_blank
        self.blank_text = blank_text
        self.query = None
        self._object_list = None

    def _get_data(self):
        # Resolve deferred form data (a PK string) to the matching object.
        if self._formdata is not None:
            for pk, obj in self._get_object_list():
                if pk == self._formdata:
                    self._set_data(obj)
                    break
        return self._data

    def _set_data(self, data):
        self._data = data
        self._formdata = None

    data = property(_get_data, _set_data)

    def _get_object_list(self):
        # Cache (pk-string, object) pairs for the lifetime of the field.
        if self._object_list is None:
            query = self.query or self.query_factory()
            get_pk = self.get_pk
            self._object_list = [(text_type(get_pk(obj)), obj) for obj in query]
        return self._object_list

    def iter_choices(self):
        if self.allow_blank:
            yield (u'__None', self.blank_text, self.data is None)
        for pk, obj in self._get_object_list():
            yield (pk, self.get_label(obj), obj == self.data)

    def process_formdata(self, valuelist):
        if valuelist:
            if self.allow_blank and valuelist[0] == u'__None':
                self.data = None
            else:
                # Defer object lookup until `.data` is accessed.
                self._data = None
                self._formdata = valuelist[0]

    def pre_validate(self, form):
        # A blank value is only valid when allow_blank is set.
        if not self.allow_blank or self.data is not None:
            for _pk, obj in self._get_object_list():
                if self.data == obj:
                    break
            else:
                raise ValidationError(self.gettext(u'Not a valid choice'))
def test_sibling_traverse():
    """Walk sibling links inside an inline paragraph, both directions."""
    tree = SyntaxTreeNode(MarkdownIt().parse(EXAMPLE_MARKDOWN))
    inline = tree.children[1].children[0]

    first_text = inline.children[0]
    assert first_text.type == 'text'

    strong = first_text.next_sibling
    assert strong
    assert strong.type == 'strong'

    trailing_text = strong.next_sibling
    assert trailing_text
    assert trailing_text.type == 'text'
    assert trailing_text.next_sibling is None

    # Walking back two siblings lands on the first text node again.
    assert trailing_text.previous_sibling.previous_sibling == first_text
    assert first_text.previous_sibling is None
# Datetime label format options for scatter3d-series tooltips (Highcharts-style
# option wrapper): one getter/setter pair per time unit, where the getter
# returns the default format string via `_config_get`.
# NOTE(review): every period appears TWICE as a plain `def` (getter first,
# then a setter-style overload taking `text`); the original almost certainly
# carried `@property` / `@<name>.setter` decorators that were lost when this
# file was flattened — as written, the second definition silently replaces
# the first. Confirm against the upstream package before use.
class OptionSeriesScatter3dTooltipDatetimelabelformats(Options): def day(self): return self._config_get('%A, %e %b %Y') def day(self, text: str): self._config(text, js_type=False) def hour(self): return self._config_get('%A, %e %b, %H:%M') def hour(self, text: str): self._config(text, js_type=False) def millisecond(self): return self._config_get('%A, %e %b, %H:%M:%S.%L') def millisecond(self, text: str): self._config(text, js_type=False) def minute(self): return self._config_get('%A, %e %b, %H:%M') def minute(self, text: str): self._config(text, js_type=False) def month(self): return self._config_get('%B %Y') def month(self, text: str): self._config(text, js_type=False) def second(self): return self._config_get('%A, %e %b, %H:%M:%S') def second(self, text: str): self._config(text, js_type=False) def week(self): return self._config_get('Week from %A, %e %b %Y') def week(self, text: str): self._config(text, js_type=False) def year(self): return self._config_get('%Y') def year(self, text: str): self._config(text, js_type=False)
def cmd_gen(args):
    """Generate unsafe 128-bit curves and write their formatted description
    to ``args.outfile`` until interrupted (Ctrl-C) or an error occurs.

    Bug fix: when appending to an existing file, the file was opened with
    mode ``'r+'``, which positions the stream at the *start* of the file and
    overwrites existing content instead of appending; it was also never
    closed. The file is now opened in append mode inside a context manager.
    """
    filename = args.outfile

    def writeline(out, msg):
        # Echo to stdout unless --quiet, then persist the line.
        if not args.quiet:
            print(msg)
        out.write(msg + '\n')

    if os.path.exists(filename) and not args.no_append:
        mode = 'a'  # was 'r+', which overwrote from the beginning of the file
        if not args.quiet:
            print('appending to existing file')
    else:
        mode = 'w'

    with open(filename, mode) as outfile:
        while True:
            try:
                formatted = format_curve(*gen_unsafe_curve(128))
                writeline(outfile, formatted)
            except KeyboardInterrupt:
                break
            except Exception as e:
                print('error: {}'.format(e))
                break
class ProcessTaskManager(TaskManager):
    """Task manager backed by a process pool (``PROCESS_POOL_MODE``)."""

    def __init__(self,
                 nb_workers: int = DEFAULT_WORKERS_AMOUNT,
                 is_lazy_pool_start: bool = True,
                 logger: Optional[logging.Logger] = None) -> None:
        # Delegate to TaskManager, pinning the pool mode to processes.
        super().__init__(
            nb_workers=nb_workers,
            is_lazy_pool_start=is_lazy_pool_start,
            logger=logger,
            pool_mode=PROCESS_POOL_MODE,
        )
def user_choose_players(initiator: CharacterChooser,
                        actor: Character,
                        candidates: List[Character],
                        timeout: Optional[int] = None,
                        trans: Optional[InputTransaction] = None) -> Optional[List[Character]]:
    """Ask `actor` to pick characters from `candidates`.

    Returns the chosen characters, or None when nothing was chosen.
    """
    _, choice = ask_for_action(initiator, [actor], (), candidates,
                               timeout=timeout, trans=trans)
    return choice[1] if choice else None
def test_test_selector_with_one_variable():
    """SelectByTargetMeanPerformance restricted to one variable: a performant
    variable is kept, a non-performant one is dropped."""
    cases = [
        ('cat_var_A', 0.0, []),           # kept: passes the threshold
        ('cat_var_B', -0.0, ['cat_var_B']),  # dropped: performance too low
    ]
    for variable, performance, dropped in cases:
        X, y = df_regression()
        sel = SelectByTargetMeanPerformance(
            variables=[variable],
            bins=2,
            scoring='neg_root_mean_squared_error',
            regression=True,
            cv=2,
            strategy='equal_width',
            threshold=-0.2,
        )
        sel.fit(X, y)
        assert sel.features_to_drop_ == dropped
        assert sel.feature_performance_ == {variable: performance}
        pd.testing.assert_frame_equal(sel.transform(X), X.drop(columns=dropped))
def create_nodes(motion_idx, motions, base_length, stride_length, compare_length, fps):
    """Build (motion_idx, start_frame, end_frame) windows over one motion.

    Windows of ``base_length`` seconds are sampled every ``stride_length``
    seconds (the stride is shrunk until it divides the base window).  A
    motion shorter than ``base_length + compare_length`` yields an empty
    list; otherwise an (N, 3) integer array is returned.
    """
    frames_base = int(base_length * fps)
    frames_compare = int(compare_length * fps)
    frames_stride = int(stride_length * fps)
    # Shrink the stride until it evenly divides the base window (or hits 1).
    while frames_stride > 1 and frames_base % frames_stride != 0:
        frames_stride -= 1
    motion = motions[motion_idx]
    if motion.length() < base_length + compare_length:
        return []
    starts = np.arange(
        0,
        motion.num_frames() - frames_base - frames_compare - 1,
        frames_stride,
    )
    ends = starts + frames_base
    indices = np.full(shape=starts.shape, fill_value=motion_idx)
    return np.stack([indices, starts, ends]).transpose()
class Alpha_Rename(LoopIR_Rewrite):
    """Rewrite a proc (or list of stmts/exprs) with fresh copies of every
    locally bound name.

    Bindings are tracked in a ChainMap scope stack; reads of bound names are
    redirected to the renamed symbol, so the result is alpha-equivalent to
    the input without sharing Sym objects for bound variables.
    """

    def __init__(self, node):
        self.env = ChainMap()
        self.node = []
        if isinstance(node, LoopIR.proc):
            self.node = self.apply_proc(node)
        else:
            assert isinstance(node, list)
            for n in node:
                if isinstance(n, LoopIR.stmt):
                    self.node += self.apply_s(n)
                elif isinstance(n, LoopIR.expr):
                    self.node += [self.apply_e(n)]
                else:
                    assert False, 'expected stmt or expr'

    def result(self):
        return self.node

    def push(self):
        # Enter a new lexical scope.
        self.env = self.env.new_child()

    def pop(self):
        # Leave the current scope.
        self.env = self.env.parents

    def map_fnarg(self, fa):
        fresh = fa.name.copy()
        self.env[fa.name] = fresh
        return fa.update(name=fresh, type=self.map_t(fa.type) or fa.type)

    def map_s(self, s):
        if isinstance(s, (LoopIR.Assign, LoopIR.Reduce)):
            s2 = super().map_s(s)
            if new_name := self.env.get(s.name):
                # Target was renamed in an enclosing scope.
                return [((s2 and s2[0]) or s).update(name=new_name)]
            return s2
        elif isinstance(s, LoopIR.Alloc):
            s2 = super().map_s(s)
            assert s.name not in self.env
            new_name = s.name.copy()
            self.env[s.name] = new_name
            return [((s2 and s2[0]) or s).update(name=new_name)]
        elif isinstance(s, LoopIR.WindowStmt):
            rhs = self.map_e(s.rhs) or s.rhs
            lhs = s.lhs.copy()
            self.env[s.lhs] = lhs
            return [s.update(lhs=lhs, rhs=rhs)]
        elif isinstance(s, LoopIR.If):
            self.push()
            stmts = super().map_s(s)
            self.pop()
            return stmts
        elif isinstance(s, LoopIR.For):
            # Bounds are evaluated in the enclosing scope; the iterator only
            # binds inside the loop body.
            lo = self.map_e(s.lo) or s.lo
            hi = self.map_e(s.hi) or s.hi
            self.push()
            itr = s.iter.copy()
            self.env[s.iter] = itr
            body = self.map_stmts(s.body) or s.body
            self.pop()
            return [s.update(iter=itr, lo=lo, hi=hi, body=body)]
        return super().map_s(s)

    def map_e(self, e):
        if isinstance(e, (LoopIR.Read, LoopIR.WindowExpr, LoopIR.StrideExpr)):
            e2 = super().map_e(e)
            if new_name := self.env.get(e.name):
                return (e2 or e).update(name=new_name)
            return e2
        return super().map_e(e)

    def map_t(self, t):
        t2 = super().map_t(t)
        if isinstance(t, T.Window):
            if src_buf := self.env.get(t.src_buf):
                return (t2 or t).update(src_buf=src_buf)
        return t2
class DrQTest(absltest.TestCase):
    """Smoke test: DrQ-v2 runs a couple of episodes on a fake visual env."""

    def test_drq_v2(self):
        env = fakes.ContinuousVisualEnvironment(
            action_dim=2,
            observation_shape=(32, 32, 3),
            episode_length=10,
            bounded=True,
        )
        env_spec = specs.make_environment_spec(env)
        networks = drq_v2.make_networks(env_spec, hidden_size=10, latent_size=10)
        agent = drq_v2.DrQV2(
            environment_spec=env_spec,
            networks=networks,
            seed=0,
            config=drq_v2.DrQV2Config(batch_size=2, min_replay_size=10),
        )
        logger = loggers.make_default_logger(label='environment', save_data=False)
        acme.EnvironmentLoop(env, agent, logger=logger).run(num_episodes=2)
class WallHoleEdge(WallHoles):
    """Edge that renders a parallel slot of wall holes next to a plain edge."""

    description = 'Edge (parallel slot wall Holes)'

    def __init__(self, boxes, wallHoles, **kw) -> None:
        super().__init__(boxes, wallHoles.settings, **kw)
        self.wallHoles = wallHoles

    def __call__(self, length, bedBolts=None, bedBoltSettings=None, **kw):
        # Offset of the hole slot from the edge, to the material centre line.
        dist = self.wallHoles.settings.edge_width + self.settings.thickness / 2
        with self.saved_context():
            if self._reversed:
                px, angle = 0, 0
            else:
                px, angle = length, 180
            self.wallHoles(px, dist, length, angle)
        self.edge(length, tabs=2)

    def startwidth(self) -> float:
        return self.wallHoles.settings.edge_width + self.settings.thickness

    def margin(self) -> float:
        return 0.0
def extractWbnoveltranslationWordpressCom(item):
    """Map a wbnoveltranslation.wordpress.com feed item to a release message.

    Returns None for previews / items without chapter info, a release
    message for recognized tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ("Spirit's Paradise", 'Spirits Paradise And The Idle Another World Life', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(
                item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type
            )
    return False
def _get_next_payload(raw, start_pos, payload_header_regex=FILE_HEADER, payload_footer_regex=FILE_FOOTER):
    """Extract the next header/footer-delimited payload from `raw`.

    Returns ``(payload, next_position)``; when no complete payload remains,
    ``(None, len(raw))``.  The payload excludes the header but includes the
    footer, matching the original slicing.
    """
    header = payload_header_regex.search(raw, start_pos)
    if not header:
        return None, len(raw)
    footer = payload_footer_regex.search(raw, header.end())
    if not footer:
        # A header without a closing footer means a truncated stream.
        logging.error('End of Payload could not be found!')
        return None, len(raw)
    return raw[header.end():footer.end()], footer.end()
def _check_charname(caller, raw_string, **kwargs):
    """Menu node handler: validate a requested character name.

    Loops back to the name prompt with an error when the (normalized) name
    is already taken; otherwise stores it on the new character and proceeds
    to the confirmation node.
    """
    charname = caller.account.normalize_username(raw_string.strip())
    taken = Character.objects.filter_family(db_key__iexact=charname)
    if len(taken):
        return ('menunode_choose_name', {'error': f'''|w{charname}|n is unavailable. Enter a different name.'''})
    caller.new_char.key = charname
    return 'menunode_confirm_name'
class RedisTransportServerCore(RedisTransportCore):
    """Server-side Redis transport settings.

    Adds chunked-message support: when ``chunk_messages_larger_than_bytes``
    is non-negative, messages above that size are split, so the overall size
    limit must leave headroom for several chunks.
    """

    log_messages_larger_than_bytes = attr.ib(
        default=DEFAULT_MAXIMUM_MESSAGE_BYTES_SERVER, converter=int
    )
    maximum_message_size_in_bytes = attr.ib(
        default=DEFAULT_MAXIMUM_MESSAGE_BYTES_SERVER, converter=int
    )
    chunk_messages_larger_than_bytes = attr.ib(
        default=-1, converter=int, validator=_valid_chunk_threshold
    )

    def __attrs_post_init__(self):
        super(RedisTransportServerCore, self).__attrs_post_init__()
        # Chunking needs headroom: at least five chunks must fit per message.
        if self.maximum_message_size_in_bytes < self.chunk_messages_larger_than_bytes * 5:
            raise ValueError(
                'If chunk_messages_larger_than_bytes is enabled (non-negative), '
                'maximum_message_size_in_bytes must be at least 5 times larger '
                'to allow for multiple chunks to be sent.'
            )

    def is_server(self):
        return True

    def _get_metric_name(self, name):
        return 'server.transport.redis_gateway.{name}'.format(name=name)
class ETMSystemChat(ETMChatMixin, SystemChat):
    """EFB Telegram Master wrapper around a slave-channel system chat.

    Constructor is a passthrough to ``SystemChat`` that also attaches the
    database manager.  Fix: ``vendor_specific`` defaults to ``None`` but was
    annotated as a plain ``Dict[str, Any]``; the annotation is now Optional
    to match the default (behavior unchanged).
    """

    chat_type_name = 'System'
    chat_type_emoji = Emoji.SYSTEM
    other: ETMSystemChatMember

    def __init__(self, db: 'DatabaseManager', *,
                 channel: Optional[SlaveChannel] = None,
                 middleware: Optional[Middleware] = None,
                 module_name: str = '',
                 channel_emoji: str = '',
                 module_id: ModuleID = ModuleID(''),
                 name: str = '',
                 alias: Optional[str] = None,
                 uid: ChatID = ChatID(''),
                 vendor_specific: Optional[Dict[str, Any]] = None,
                 description: str = '',
                 notification: ChatNotificationState = ChatNotificationState.ALL,
                 with_self: bool = True):
        super().__init__(db, channel=channel, middleware=middleware,
                         module_name=module_name, channel_emoji=channel_emoji,
                         module_id=module_id, name=name, alias=alias, uid=uid,
                         vendor_specific=vendor_specific,
                         description=description, notification=notification,
                         with_self=with_self)
def merge_protein(pProteinList, binSize, minPeak):
    """Bin peaks per chromosome into fixed-size windows.

    Peaks (position at ``peak[1]``) that land in the same ``binSize`` window
    are counted; a window is emitted as ``[name, left, right, count]`` once
    at least ``minPeak`` peaks fell into it.  Assumes peaks are sorted by
    position within each chromosome.
    """
    merged = []
    for chromosome in pProteinList:
        merged.append([])
        left, right = 0, binSize
        count = 0
        for peak in chromosome:
            pos = int(peak[1])
            if pos <= right:
                count += 1
                continue
            # Window finished: flush it when populated enough.
            if count >= minPeak:
                merged[-1].append([peak[0], left, right, count])
            left, right = right, right + binSize
            count = 0
            if pos < right:
                count += 1
            else:
                # Skip forward over empty windows until the peak fits.
                while pos > left:
                    left += binSize
                    right = left + binSize
                count = 1
        # NOTE(review): the final (possibly qualifying) window is never
        # flushed after the loop — behavior preserved from the original.
    return merged
class OptionSeriesLineMarkerStates(Options):
    """Container for line-series marker state sub-options."""

    def hover(self) -> 'OptionSeriesLineMarkerStatesHover':
        # Sub-options applied while the marker is hovered.
        return self._config_sub_data('hover', OptionSeriesLineMarkerStatesHover)

    def normal(self) -> 'OptionSeriesLineMarkerStatesNormal':
        # Sub-options applied in the default (inactive) state.
        return self._config_sub_data('normal', OptionSeriesLineMarkerStatesNormal)

    def select(self) -> 'OptionSeriesLineMarkerStatesSelect':
        # Sub-options applied when the point is selected.
        return self._config_sub_data('select', OptionSeriesLineMarkerStatesSelect)
class TestRequired(util.TestCase):
    """`:required` pseudo-class selector tests over a small form fixture."""

    # Form with required/optional inputs, a textarea, and a select.
    MARKUP = '\n    <form>\n    <input id="1" type="name" required>\n    <input id="2" type="checkbox" required>\n    <input id="3" type="email">\n    <textarea id="4" name="name" cols="30" rows="10" required></textarea>\n    <select id="5" name="nm" required>\n      <!-- options -->\n    </select>\n    </form>\n    '

    def test_required(self):
        # All four required controls match, regardless of element type.
        self.assert_selector(self.MARKUP, ':required', ['1', '2', '4', '5'], flags=util.HTML)

    def test_specific_required(self):
        # Restricting to <input> drops the textarea and the select.
        self.assert_selector(self.MARKUP, 'input:required', ['1', '2'], flags=util.HTML)
# NOTE(review): the bare string expression below is the residue of a stripped
# decorator — given the injected `mock_remote` parameter and the
# `.return_value.fetch_task.side_effect` usage, almost certainly
# `@mock.patch('flytekit.remote.remote.FlyteRemote')`. Restore the decorator
# before running; as written the tuple-expression line is a no-op.
# The test itself checks that a workflow calling two remotely fetched tasks
# sees the mocked side-effect callables in order.
('flytekit.remote.remote.FlyteRemote') def test_mocking_remote(mock_remote) -> None: def t1() -> float: return 6.e-34 def t2() -> bool: return False mock_remote.return_value.fetch_task.side_effect = [t1, t2] from . import wf_with_remote x = wf_with_remote.hello_wf(a=3) assert (x == (6.e-34, False))
def lazy_import():
    """Import response/pagination models on first use and publish them in
    this module's globals, breaking circular imports at module load time."""
    from fastly.model.mutual_authentication_response_data import MutualAuthenticationResponseData
    from fastly.model.mutual_authentications_response_all_of import MutualAuthenticationsResponseAllOf
    from fastly.model.pagination import Pagination
    from fastly.model.pagination_links import PaginationLinks
    from fastly.model.pagination_meta import PaginationMeta
    # Expose the lazily imported classes at module scope.
    globals().update(
        MutualAuthenticationResponseData=MutualAuthenticationResponseData,
        MutualAuthenticationsResponseAllOf=MutualAuthenticationsResponseAllOf,
        Pagination=Pagination,
        PaginationLinks=PaginationLinks,
        PaginationMeta=PaginationMeta,
    )
def ooba_call(prompt, state):
    """Run the text generator on `prompt`, draining the stream, and return
    the final reply truncated at the first stop string."""
    stops = [AgentOobaVars['human-prefix'], '</s>']
    answer = ''
    # Drain the streaming generator; only the last yielded value matters.
    for chunk in generate_reply(prompt, state, stopping_strings=stops):
        answer = chunk if isinstance(chunk, str) else chunk[0]
    for stop in stops:
        if stop in answer:
            answer = answer[:answer.find(stop)]
    if VERBOSE:
        print(f'''INPUT {prompt} ''', file=sys.stderr)
        print(f'''OUTPUT {answer} ''', file=sys.stderr)
    return answer
def download_file(url: str, *, dirpath: (PathIn | None) = None,
                  filename: (str | None) = None, chunk_size: int = 8192,
                  **kwargs: Any) -> str:
    """Stream `url` to a local file and return the resulting file path.

    The file name is taken, in order of preference, from an explicit
    `filename`, the Content-Disposition header, the URL, or a generated
    ``download-<uuid>`` fallback; `dirpath` defaults to the temp directory.
    """
    _require_requests_installed()
    kwargs['stream'] = True
    with requests.get(url, **kwargs) as response:
        response.raise_for_status()
        if not filename:
            # Try the Content-Disposition header first.
            disposition = response.headers.get('content-disposition', '') or ''
            match = re.search('filename="(.*)"', disposition)
            if match:
                filename = match.group(1)
            if not filename:
                filename = get_filename(url)
            if not filename:
                filename = f'download-{str(uuid.uuid4())}'
        target_dir = _get_path(dirpath or tempfile.gettempdir())
        filepath = join_path(target_dir, filename)
        make_dirs_for_file(filepath)
        with open(filepath, 'wb') as file:
            for chunk in response.iter_content(chunk_size=chunk_size):
                if chunk:
                    file.write(chunk)
    return filepath
class TestNull(util.ColorAsserts, unittest.TestCase):
    """HSL hue should be NaN ("null") whenever it is powerless."""

    def test_null_input(self):
        # Hue supplied directly as NaN stays NaN.
        self.assertTrue(Color('hsl', [NaN, 0.5, 1], 1).is_nan('hue'))

    def test_none_input(self):
        # CSS `none` keyword parses to NaN.
        self.assertTrue(Color('hsl(none 0% 75% / 1)').is_nan('hue'))

    def test_null_normalization_min_sat(self):
        # Zero saturation makes hue powerless on normalize().
        self.assertTrue(Color('hsl(270 0% 75% / 1)').normalize().is_nan('hue'))

    def test_null_normalization_max_light(self):
        self.assertTrue(Color('hsl(270 20% 100% / 1)').normalize().is_nan('hue'))

    def test_null_normalization_min_light(self):
        self.assertTrue(Color('hsl(270 20% 0% / 1)').normalize().is_nan('hue'))

    def test_corner_case_null(self):
        # Out-of-gamut srgb converted to HSL can also yield a NaN hue.
        self.assertTrue(Color('color(srgb -2 0 2)').convert('hsl').is_nan('hue'))
def test_variable_in_phi():
    """Dead-loop elimination must keep a block whose phi destination feeds
    the loop condition, collapsing the CFG to n0 -> n2 -> n3."""
    phi = Phi(b, [a, c])
    cond1 = Condition(OperationType.equal, [a, Constant(0)])
    cond2 = Condition(OperationType.not_equal, [phi.destination, Constant(0)])
    branch = Branch(Condition(OperationType.bitwise_or, [cond1, cond2]))
    back_edge = Branch(Condition(OperationType.equal, [b, Constant(0)]))
    n0 = BasicBlock(0, instructions=[phi, branch])
    n1 = BasicBlock(1, instructions=[Assignment(c, Constant(1)), back_edge])
    n2 = BasicBlock(2, instructions=[])
    n3 = BasicBlock(3, instructions=[Return([Constant(1)])])
    cfg = ControlFlowGraph()
    cfg.add_nodes_from([n0, n1, n2, n3])
    phi.update_phi_function({n0: a, n1: c})
    cfg.add_edges_from([
        FalseCase(n0, n1),
        TrueCase(n0, n2),
        UnconditionalEdge(n2, n3),
        TrueCase(n1, n3),
        FalseCase(n1, n0),
    ])
    _run_dead_loop_elimination(cfg)
    assert set(cfg.nodes) == {n0, n2, n3}
    assert n0.instructions == [phi]
    assert isinstance(cfg.get_edge(n0, n2), UnconditionalEdge)
    assert isinstance(cfg.get_edge(n2, n3), UnconditionalEdge)
# OpenAPI-generated model for a `relationship-customer` payload holding a
# single `customer` relationship (see `openapi_types` / `attribute_map`).
# NOTE(review): `_property` and `_js_args_to_python_args` below are mangled
# decorator lines — in the upstream generated code these are
# `@cached_property` and `@convert_js_args_to_python_args` (and
# `_from_openapi_data` additionally carries `@classmethod`); the `@` prefixes
# and names were damaged when this file was flattened. Restore them from the
# generator template before using this class; as written the code is not
# syntactically valid.
class RelationshipCustomer(ModelNormal): allowed_values = {} validations = {} _property def additional_properties_type(): lazy_import() return (bool, date, datetime, dict, float, int, list, str, none_type) _nullable = False _property def openapi_types(): lazy_import() return {'customer': (RelationshipCustomerCustomer,)} _property def discriminator(): return None attribute_map = {'customer': 'customer'} read_only_vars = {} _composed_schemas = {} _js_args_to_python_args def _from_openapi_data(cls, *args, **kwargs): _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,)) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = (_visited_composed_classes + (self.__class__,)) for (var_name, var_value) in kwargs.items(): if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)): continue setattr(self, var_name, var_value) return self required_properties = set(['_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes']) _js_args_to_python_args def __init__(self, *args, **kwargs): _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = 
kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) if args: raise ApiTypeError(('Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__)), path_to_item=_path_to_item, valid_classes=(self.__class__,)) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = (_visited_composed_classes + (self.__class__,)) for (var_name, var_value) in kwargs.items(): if ((var_name not in self.attribute_map) and (self._configuration is not None) and self._configuration.discard_unknown_keys and (self.additional_properties_type is None)): continue setattr(self, var_name, var_value) if (var_name in self.read_only_vars): raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class group_desc_stats_request(stats_request):
    """OpenFlow (wire version 6) group-description stats request message.

    Generated-style message class with fixed ``stats_type`` 7: serialized as
    the standard stats-request header followed by ``group_id`` and padding.
    NOTE(review): ``pack`` joins ``str`` padding (``'\\x00' * 4``) with
    ``struct.pack`` output, which only works on Python 2 — this is legacy
    py2 wire-format code, preserved as-is.
    """

    version = 6
    type = 18
    stats_type = 7

    def __init__(self, xid=None, flags=None, group_id=None):
        self.xid = xid if xid != None else None
        self.flags = flags if flags != None else 0
        self.group_id = group_id if group_id != None else 0

    def pack(self):
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder, patched below
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append('\x00' * 4)
        packed.append(struct.pack('!L', self.group_id))
        packed.append('\x00' * 4)
        length = sum(len(part) for part in packed)
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        obj = group_desc_stats_request()
        _version = reader.read('!B')[0]
        assert _version == 6
        _type = reader.read('!B')[0]
        assert _type == 18
        _length = reader.read('!H')[0]
        # Bound further reads to this message's declared length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert _stats_type == 7
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        obj.group_id = reader.read('!L')[0]
        reader.skip(4)
        return obj

    def __eq__(self, other):
        if type(self) != type(other):
            return False
        return (self.xid == other.xid
                and self.flags == other.flags
                and self.group_id == other.group_id)

    def pretty_print(self, q):
        q.text('group_desc_stats_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if self.xid != None:
                    q.text('%#x' % self.xid)
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REQ_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('group_id = ')
                q.text('%#x' % self.group_id)
            q.breakable()
            q.text('}')
# Integration test for the result formatters applied to
# `eth_getTransactionByHash`: injects a result-generator middleware that
# returns a raw (all-hex-string) transaction dict, fetches it through `w3`,
# and asserts every field was formatted — quantities to int, hash/byte
# fields to HexBytes, and addresses checksummed (including inside the
# access list) — before removing the middleware again.
def test_get_transaction_formatters(w3): non_checksummed_addr = '0xB2930B35844A230F00E51431ACAE96FE543A0347' unformatted_transaction = {'blockHash': '0xa39aef90d62c3826bca8269c2716d7a38696b4f45e61d83', 'blockNumber': '0x1b4', 'transactionIndex': '0x0', 'nonce': '0x0', 'gas': '0x4c4b40', 'gasPrice': '0x1', 'maxFeePerGas': '0x1', 'maxPriorityFeePerGas': '0x1', 'value': '0x1', 'from': non_checksummed_addr, 'publicKey': '0x', 'r': '0xd148ae70c8cbef3a038e70e6d1639f0951e60a2965820f33bad19d0a6c2b8116', 'raw': '0x142ab034696c09dcfb2a8b086b494f3f4c419e67b6c04d95882f87156a3b6f35', 's': '0x6f5216fc207221a11efe2e4c3e3a881a0b5ca286ede538fc9dbc403b2009ea76', 'to': non_checksummed_addr, 'hash': '0x142ab034696c09dcfb2a8b086b494f3f4c419e67b6c04d95882f87156a3b6f35', 'v': '0x1', 'yParity': '0x1', 'standardV': '0x1', 'type': '0x2', 'chainId': '0x539', 'accessList': [{'address': non_checksummed_addr, 'storageKeys': ['0x', '0x']}, {'address': non_checksummed_addr, 'storageKeys': []}], 'input': '0x5b34b966', 'data': '0x5b34b966'} result_middleware = construct_result_generator_middleware({RPC.eth_getTransactionByHash: (lambda *_: unformatted_transaction)}) w3.middleware_onion.inject(result_middleware, 'result_middleware', layer=0) received_tx = w3.eth.get_transaction('') checksummed_addr = to_checksum_address(non_checksummed_addr) assert (non_checksummed_addr != checksummed_addr) expected = AttributeDict({'blockHash': HexBytes(unformatted_transaction['blockHash']), 'blockNumber': to_int(hexstr=unformatted_transaction['blockNumber']), 'transactionIndex': 0, 'nonce': 0, 'gas': to_int(hexstr=unformatted_transaction['gas']), 'gasPrice': 1, 'maxFeePerGas': 1, 'maxPriorityFeePerGas': 1, 'value': 1, 'from': checksummed_addr, 'publicKey': HexBytes(unformatted_transaction['publicKey']), 'r': HexBytes(unformatted_transaction['r']), 'raw': HexBytes(unformatted_transaction['raw']), 's': HexBytes(unformatted_transaction['s']), 'to': to_checksum_address(non_checksummed_addr), 'hash': 
HexBytes(unformatted_transaction['hash']), 'v': 1, 'yParity': 1, 'standardV': 1, 'type': 2, 'chainId': 1337, 'accessList': [AttributeDict({'address': checksummed_addr, 'storageKeys': [HexBytes('0x'), HexBytes('0x')]}), AttributeDict({'address': checksummed_addr, 'storageKeys': []})], 'input': HexBytes(unformatted_transaction['input']), 'data': HexBytes(unformatted_transaction['data'])}) assert (received_tx == expected) w3.middleware_onion.remove('result_middleware')
class SDPHY(LiteXModule):
    """SD-card PHY: clocking, init, command read/write and data read/write
    submodules multiplexed onto one shared pad record.

    Pad outputs are the OR-reduction of every submodule's requests; sampled
    pad inputs are broadcast to all submodules.  A card-detect edge raises
    ``card_detect_irq`` for one cycle.
    """

    def __init__(self, pads, device, sys_clk_freq, cmd_timeout=0.01, data_timeout=0.01):
        # Emulated pads expose tristate cmd/dat records instead of real IOs.
        use_emulator = hasattr(pads, 'cmd_t') and hasattr(pads, 'dat_t')

        self.card_detect = CSRStatus()
        # No `cd` pad -> constant 0.
        self.comb += self.card_detect.status.eq(getattr(pads, 'cd', 0))

        self.clocker = clocker = SDPHYClocker()
        self.init = init = SDPHYInit()
        self.cmdw = cmdw = SDPHYCMDW()
        self.cmdr = cmdr = SDPHYCMDR(sys_clk_freq, cmd_timeout, cmdw)
        self.dataw = dataw = SDPHYDATAW()
        self.datar = datar = SDPHYDATAR(sys_clk_freq, data_timeout)
        self.sdpads = sdpads = Record(_sdpads_layout)

        self.io = (SDPHYIOEmulator if use_emulator else SDPHYIOGen)(clocker, sdpads, pads)

        submodules = [init, cmdw, cmdr, dataw, datar]
        # Mux submodule pad outputs onto the shared pads (OR-reduction).
        self.comb += [
            sdpads.clk.eq(Reduce('OR', [m.pads_out.clk for m in submodules])),
            sdpads.cmd.oe.eq(Reduce('OR', [m.pads_out.cmd.oe for m in submodules])),
            sdpads.cmd.o.eq(Reduce('OR', [m.pads_out.cmd.o for m in submodules])),
            sdpads.data.oe.eq(Reduce('OR', [m.pads_out.data.oe for m in submodules])),
            sdpads.data.o.eq(Reduce('OR', [m.pads_out.data.o for m in submodules])),
        ]
        for m in submodules:
            self.comb += m.pads_out.ready.eq(self.clocker.ce)
        self.comb += self.clocker.clk_en.eq(sdpads.clk)

        # Broadcast sampled pad inputs to every submodule.
        for m in submodules:
            self.comb += m.pads_in.valid.eq(sdpads.data_i_ce)
            self.comb += m.pads_in.cmd.i.eq(sdpads.cmd.i)
            self.comb += m.pads_in.data.i.eq(sdpads.data.i)
        self.comb += clocker.stop.eq(dataw.stop | datar.stop)

        # One-cycle IRQ on any card-detect edge.
        self.card_detect_irq = Signal()
        card_detect_d = Signal()
        self.sync += card_detect_d.eq(self.card_detect.status)
        self.sync += self.card_detect_irq.eq(self.card_detect.status ^ card_detect_d)
# Facebook Graph API ad-object wrapper for a video copyright geo gate
# (lists of included / excluded country codes, see `_field_types`).
# NOTE(review): `_get_field_enum_info(cls)` takes `cls` but carries no
# decorator here — in the upstream SDK this method is declared with
# `@classmethod`; the decorator appears to have been lost when the file was
# flattened. Confirm before calling it directly.
class VideoCopyrightGeoGate(AbstractObject): def __init__(self, api=None): super(VideoCopyrightGeoGate, self).__init__() self._isVideoCopyrightGeoGate = True self._api = api class Field(AbstractObject.Field): excluded_countries = 'excluded_countries' included_countries = 'included_countries' _field_types = {'excluded_countries': 'list<string>', 'included_countries': 'list<string>'} def _get_field_enum_info(cls): field_enum_info = {} return field_enum_info
def get_histogram(df: SparkSeries, column_name: str, nbinsx: int, density: bool,
                  *, dmax: Optional[float] = None,
                  dmin: Optional[float] = None) -> Tuple[(np.ndarray, np.ndarray)]:
    """Compute a fixed-width histogram of a Spark column.

    Returns ``(counts, bin_edges)`` with ``nbinsx`` bins between the column
    min/max (or the supplied ``dmin``/``dmax``).  With ``density=True`` the
    counts are normalised so the histogram integrates to one.
    """
    if dmax is None or dmin is None:
        min_val, max_val = calculate_stats(df, column_name, sf.min, sf.max)
        if min_val == max_val:
            # Degenerate (constant) column: widen the range so a bin exists.
            min_val -= 0.5
            max_val += 0.5
    else:
        min_val, max_val = dmin, dmax
    step = (max_val - min_val) / nbinsx
    # Bucket index per row; values at the top edge fold into the last bin.
    hist = (
        df.select(column_name,
                  sf.floor((sf.col(column_name) - min_val) / step).alias('bucket'))
        .select(column_name,
                sf.when(sf.col('bucket') >= nbinsx, nbinsx - 1)
                .otherwise(sf.col('bucket')).alias('bucket'))
        .groupby('bucket')
        .count()
    )
    counts_by_bucket = {row.bucket: row['count'] for row in hist.collect()}
    n = np.array([counts_by_bucket.get(i, 0) for i in range(nbinsx)])
    bin_edges = np.array([min_val + step * i for i in range(nbinsx + 1)])
    if density:
        widths = np.array(np.diff(bin_edges), float)
        # NOTE(review): preserved from the original — the density branch
        # returns a Python list for the counts, the other branch an ndarray.
        return ((n / widths) / n.sum()).tolist(), bin_edges
    return n, bin_edges
def rangestring_to_mask(rangestring: str, length: int) -> List[bool]:
    """Expand a comma-separated range string (e.g. "0-3,5") into a boolean mask.

    Each comma-separated token is either a single index or an inclusive
    ``start-end`` range; the corresponding positions are set True.  Raises
    ValueError on malformed ranges, reversed ranges, or out-of-bounds
    indices.
    """
    mask = [False] * length
    if rangestring == '':
        return mask
    for token in rangestring.split(','):
        if '-' in token:
            parts = token.strip().split('-')
            if len(parts) != 2:
                raise ValueError(f'Wrong range syntax {token}')
            start, end = int(parts[0]), int(parts[1])
            if end < start:
                raise ValueError(f'Range {start}-{end} has invalid direction')
            if end + 1 > length:
                raise ValueError(f'Range endpoint {end} is beyond the mask length {length} ')
            mask[start:end + 1] = [True] * (end + 1 - start)
        elif token:
            index = int(token)
            if index + 1 > length:
                raise ValueError(f'Realization index {token} is beyond the mask length {length} ')
            mask[index] = True
    return mask
# NOTE(review): the bare `()` below is the residue of a stripped decorator —
# given the name and surrounding pytest-style tests, most likely
# `@pytest.fixture()`. Restore it before use; as written the line is a no-op
# expression. The function builds the expected single-row OSCI
# general-ranking report frame, columns sorted by name.
() def expected_final_report(): return pd.DataFrame([{OSCIGeneralRankingSchema.position: 4, OSCIGeneralRankingSchema.position_change_ytd: 20, OSCIGeneralRankingSchema.position_change_dtd: 2, OSCIGeneralRankingSchema.position_growth_speed: 0.05, OSCIGeneralRankingSchema.commits_ytd: 1000, OSCIGeneralRankingSchema.commits_mtd: 500, OSCIGeneralRankingSchema.total_ytd: 100, OSCIGeneralRankingSchema.total_mtd: 20, OSCIGeneralRankingSchema.total_dtd: 10, OSCIGeneralRankingSchema.total_change_ytd: 70, OSCIGeneralRankingSchema.total_change_dtd: 7, OSCIGeneralRankingSchema.total_growth_speed: 0.01, OSCIGeneralRankingSchema.active_ytd: 50, OSCIGeneralRankingSchema.active_mtd: 10, OSCIGeneralRankingSchema.active_dtd: 5, OSCIGeneralRankingSchema.active_change_ytd: 30, OSCIGeneralRankingSchema.active_change_dtd: 3, OSCIGeneralRankingSchema.active_growth_speed: 0.04, OSCIGeneralRankingSchema.company: 'Company'}]).sort_index(axis=1)
# NOTE(review): the two bare string expressions below are residues of
# stripped registration decorators (registering this codegen function for
# both the reshape and flatten ops on the ROCm backend, e.g.
# `@registry.reg('rocm.reshape.func_call')`). Restore them before use; as
# written they are no-op expressions. The function renders the C++ call for
# a reshape/flatten op from the input/output shape symbol names.
('rocm.reshape.func_call') ('rocm.flatten.func_call') def reshape_gen_function_call(func_attrs, indent='    '): func_name = func_attrs['name'] input_names = [shape._attrs['name'] for shape in func_attrs['inputs'][0]._attrs['shape']] output_names = [shape._attrs['name'] for shape in func_attrs['outputs'][0]._attrs['shape']] return FUNC_CALL_TEMPLATE.render(func_name=func_name, input_names=input_names, output_names_except_last=output_names[:(- 1)], last_output=output_names[(- 1)], indent=indent)
# Grammar helper: a MatchFirst subclass that tracks every live instance via
# weakrefs in the class-level `all_match_anys` list (so grammars can be
# inspected/rebuilt), and whose `|` operator folds other MatchAny instances
# into a single alternation (copy-on-write via `maybe_copy_elem`) instead of
# nesting MatchFirsts.
# NOTE(review): the trailing `if (not use_fast_pyparsing_reprs):` guard
# conditionally defines `__str__` at class-body level; the exact nesting is
# ambiguous in this flattened one-line form — confirm against the upstream
# source before reformatting.
class MatchAny(MatchFirst): all_match_anys = [] def __init__(self, *args, **kwargs): super(MatchAny, self).__init__(*args, **kwargs) self.all_match_anys.append(weakref.ref(self)) def __or__(self, other): if isinstance(other, MatchAny): self = maybe_copy_elem(self, 'any_or') return self.append(other) else: return MatchFirst([self, other]) def copy(self): self = super(MatchAny, self).copy() self.all_match_anys.append(weakref.ref(self)) return self if (not use_fast_pyparsing_reprs): def __str__(self): return ((self.__class__.__name__ + ':') + super(MatchAny, self).__str__())
def preprocess_file(ncfile, tbl, product):
    """Load one Sentinel-5 product file, quality-filter it, re-grid it onto
    the configured H3 resolution, and write the resulting points to `tbl`.

    Returns early (writing nothing) when the quality filter removes every
    point.
    """
    logger.info("Reading '%s' from file '%s'", product, ncfile)
    scan = s5a.load_ncfile(ncfile, data_variable_name=product)

    logger.info("Filtering %s points by quality of file '%s'", len(scan), ncfile)
    scan = s5a.filter_by_quality(scan)
    if len(scan) == 0:
        # Nothing usable in this granule; skip the database entirely.
        logger.warning("No points left after filtering of '%s'", ncfile)
        return

    logger.info("Apply H3 grid to '%s' points of file '%s'", len(scan), ncfile)
    scan = s5a.point_to_h3(scan, resolution=emissionsapi.db.resolution)
    scan = s5a.aggregate_h3(scan)
    scan = s5a.h3_to_point(scan)

    logger.info("Writing %s points from '%s' to database", len(scan), ncfile)
    write_to_database(scan, ncfile, tbl)
    logger.info("Finished writing points from '%s' to database", ncfile)
def test_child_dependencies_container_parent_name():
    """An undefined child of a DependenciesContainer must report its full
    dotted path (rooted at the container class name) in the error."""
    class Container(containers.DeclarativeContainer):
        dependencies_container = providers.DependenciesContainer()

    expected_message = 'Dependency "Container.dependencies_container.dependency" is not defined'
    with raises(errors.Error, match=expected_message):
        Container.dependencies_container.dependency()
def make_new_code_method_from_source(source, func_name, cls_name):
    """Rewrite the source of a method into a standalone function.

    Token-rewrites ``source`` so that every ``self.attr`` access becomes a
    plain ``self_attr`` parameter, renames the function to
    ``__for_method__<cls_name>__<func_name>``, and fixes up recursive calls.

    Returns a tuple ``(new_code, attributes, name_new_func)`` where
    ``attributes`` is the sorted list of instance attributes the method used.

    Raises NotImplementedError for uses of ``self`` other than plain
    attribute access or passing ``self`` as an argument.
    """
    tokens = []
    attributes = set()
    # State machine over the token stream: False -> saw nothing,
    # 'self' -> just consumed a `self` NAME, '.' -> consumed `self.`.
    using_self = False
    g = tokenize(BytesIO(source.encode('utf-8')).readline)
    for (toknum, tokval, _, _, _) in g:
        if (using_self == 'self'):
            if ((toknum == OP) and (tokval == '.')):
                # `self.` — expect an attribute name next.
                using_self = tokval
                continue
            elif ((toknum == OP) and (tokval in (',', ')'))):
                # bare `self` passed as an argument: keep it literally.
                tokens.append((NAME, 'self'))
                using_self = False
            else:
                raise NotImplementedError(f'self{tokval} not supported by Transonic')
        if (using_self == '.'):
            if (toknum == NAME):
                # `self.attr` -> `self_attr`, and remember the attribute.
                using_self = False
                tokens.append((NAME, ('self_' + tokval)))
                attributes.add(tokval)
                continue
            else:
                raise NotImplementedError
        if ((toknum == NAME) and (tokval == 'self')):
            # Defer emitting `self` until we see what follows it.
            using_self = 'self'
            continue
        tokens.append((toknum, tokval))
    attributes = sorted(attributes)
    attributes_self = [('self_' + attr) for attr in attributes]
    # The first remaining `self` token is the method's `self` parameter in the
    # signature; replace it with the flattened `self_<attr>` parameter list.
    index_self = tokens.index((NAME, 'self'))
    tokens_attr = []
    for (ind, attr) in enumerate(attributes_self):
        tokens_attr.append((NAME, attr))
        tokens_attr.append((OP, ','))
    if (tokens[(index_self + 1)] == (OP, ',')):
        # Avoid a doubled comma when the method had further parameters.
        del tokens[(index_self + 1)]
    tokens = ((tokens[:index_self] + tokens_attr) + tokens[(index_self + 1):])
    # Rename the function itself (first occurrence of its name is the `def`).
    index_func_name = tokens.index((NAME, func_name))
    name_new_func = f'__for_method__{cls_name}__{func_name}'
    tokens[index_func_name] = (NAME, name_new_func)
    # NOTE(review): block nesting below reconstructed from collapsed source —
    # the recursive-call fix-up is assumed to apply only when the method calls
    # itself via `self.<func_name>`; confirm against the original file.
    if (func_name in attributes):
        # The method calls itself through `self`; it is not a data attribute.
        attributes.remove(func_name)
        index_rec_calls = [index for (index, (name, value)) in enumerate(tokens) if (value == ('self_' + func_name))]
        # First occurrence is the parameter we wrongly synthesised above:
        # drop it together with its trailing comma.
        del tokens[(index_rec_calls[0] + 1)]
        del tokens[index_rec_calls[0]]
        offset = (- 2)
        for ind in index_rec_calls[1:]:
            # Each remaining occurrence is a recursive call site: point it at
            # the new free function and forward the flattened attributes.
            ind += offset
            tokens[ind] = (tokens[ind][0], name_new_func)
            for attr in reversed(attributes):
                tokens.insert((ind + 2), (1, ','))
                tokens.insert((ind + 2), (1, ('self_' + attr)))
            offset += (len(attributes) * 2)
    new_code = untokenize(tokens).decode('utf-8')
    return (new_code, attributes, name_new_func)
class SchurComplementBuilder(object):
    """Builds Slate expressions for the (optionally nested) Schur complement
    of the trace system arising in hybridisation.

    The local saddle-point operator ``Atilde`` together with the trace
    coupling blocks ``K``/``KT`` is eliminated locally; solver behaviour
    (preconditioning with diagonal approximations, pure `preonly`
    application, user-supplied Schur approximations) is steered through
    PETSc options under ``<prefix>localsolve_``.
    """

    def __init__(self, prefix, Atilde, K, KT, pc, vidx, pidx, non_zero_saddle_mat=None):
        # Slate tensors of the local system: mixed operator and trace blocks.
        self.Atilde = Atilde
        self.K = K
        self.KT = KT
        # Field indices of the "velocity" and "pressure" blocks.
        self.vidx = vidx
        self.pidx = pidx
        self.prefix = (prefix + 'localsolve_')
        self._retrieve_options(pc)
        self.non_zero_saddle_mat = non_zero_saddle_mat
        all_fields = list(range(len(Atilde.arg_function_spaces[0])))
        self.nfields = len(all_fields)
        self._split_mixed_operator()
        self.A00_inv_hat = self.build_A00_inv()
        if (self.nfields > 1):
            # Only a genuinely mixed system has an inner Schur complement.
            self.schur_approx = (self.retrieve_user_S_approx(pc, self.schur_approx) if self.schur_approx else None)
            self.inner_S = self.build_inner_S()
            self.inner_S_approx_inv_hat = self.build_Sapprox_inv()
            self.inner_S_inv_hat = self.build_inner_S_inv()

    def _split_mixed_operator(self):
        """Split the mixed operator and the trace blocks into per-field Slate tensors."""
        split_mixed_op = dict(split_form(self.Atilde.form))
        (id0, id1) = (self.vidx, self.pidx)
        A00 = Tensor(split_mixed_op[(id0, id0)])
        self.list_split_mixed_ops = [A00, None, None, None]
        if (self.nfields > 1):
            A01 = Tensor(split_mixed_op[(id0, id1)])
            A10 = Tensor(split_mixed_op[(id1, id0)])
            A11 = Tensor(split_mixed_op[(id1, id1)])
            self.list_split_mixed_ops = [A00, A01, A10, A11]
            split_trace_op = dict(split_form(self.K.form))
            K0 = Tensor(split_trace_op[(0, id0)])
            K1 = Tensor(split_trace_op[(0, id1)])
            self.list_split_trace_ops = [K0, K1]
            split_trace_op_transpose = dict(split_form(self.KT.form))
            K0 = Tensor(split_trace_op_transpose[(id0, 0)])
            K1 = Tensor(split_trace_op_transpose[(id1, 0)])
            self.list_split_trace_ops_transpose = [K0, K1]

    def _check_options(self, valid):
        """Raise ValueError if any of the given PETSc options has an unsupported value.

        ``valid`` is a list of ``(option_key, supported_values)`` pairs; keys
        that the user did not set at all are accepted.
        """
        default = object()  # sentinel distinguishing "unset" from any string
        opts = PETSc.Options(self.prefix)
        for (key, supported) in valid:
            value = opts.getString(key, default=default)
            if ((value is not default) and (value not in supported)):
                # NOTE(review): message reconstructed across a garbled line
                # break in the extracted source — confirm exact wording.
                raise ValueError(f"Unsupported value ({value}) for '{(self.prefix + key)}'. "
                                 f"Should be one of {supported}")

    def _retrieve_options(self, pc):
        """Read the local-solve PETSc options and cache them as booleans on self."""
        get_option = (lambda key: PETSc.Options(self.prefix).getString(key, default=''))
        self._check_options([('ksp_type', {'preonly'}), ('pc_type', {'fieldsplit'}), ('pc_fieldsplit_type', {'schur'})])
        # Nested Schur elimination is requested via preonly/fieldsplit/schur.
        self.nested = ((get_option('ksp_type') == 'preonly') and (get_option('pc_type') == 'fieldsplit') and (get_option('pc_fieldsplit_type') == 'schur'))
        (fs0, fs1) = (('fieldsplit_' + str(idx)) for idx in (self.vidx, self.pidx))
        # NOTE(review): _check_options keys lack the '_' separator that the
        # get_option lookups below include — possible latent mismatch; confirm.
        self._check_options([((fs0 + 'ksp_type'), {'preonly', 'default'}), ((fs0 + 'pc_type'), {'jacobi'})])
        self.preonly_A00 = (get_option((fs0 + '_ksp_type')) == 'preonly')
        self.jacobi_A00 = (get_option((fs0 + '_pc_type')) == 'jacobi')
        self._check_options([((fs1 + 'ksp_type'), {'preonly', 'default'}), ((fs1 + 'pc_type'), {'jacobi', 'python'})])
        self.preonly_S = (get_option((fs1 + '_ksp_type')) == 'preonly')
        self.jacobi_S = (get_option((fs1 + '_pc_type')) == 'jacobi')
        # A user-supplied Schur approximation class is given as a python pc.
        self.schur_approx = (get_option((fs1 + '_pc_python_type')) if (get_option((fs1 + '_pc_type')) == 'python') else False)
        self._check_options([((fs1 + 'aux_ksp_type'), {'preonly', 'default'}), ((fs1 + 'aux_pc_type'), {'jacobi'})])
        self.preonly_Shat = (get_option((fs1 + '_aux_ksp_type')) == 'preonly')
        self.jacobi_Shat = (get_option((fs1 + '_aux_pc_type')) == 'jacobi')
        if (self.jacobi_Shat or self.jacobi_A00):
            assert parameters['slate_compiler']['optimise'], 'Local systems should only get preconditioned with a preconditioning matrix if the Slate optimiser replaces inverses by solves.'

    def build_inner_S(self):
        """Return the Slate expression for the inner Schur complement A11 - A10 A00^{-1} A01."""
        (_, A01, A10, A11) = self.list_split_mixed_ops
        return (A11 - ((A10 * self.A00_inv_hat) * A01))

    def inv(self, A, P, prec, preonly=False):
        """Return an (optionally preconditioned) inverse expression for A.

        With ``prec`` and ``preonly`` the preconditioner P alone is applied;
        with ``prec`` only, the preconditioned inverse (P*A)^{-1}*P is built;
        otherwise the plain inverse A^{-1}.
        """
        return (P if (prec and preonly) else (((P * A).inv * P) if prec else A.inv))

    def build_inner_S_inv(self):
        """Inverse of the inner Schur complement, preconditioned if requested."""
        A = self.inner_S
        P = self.inner_S_approx_inv_hat
        prec = (bool(self.schur_approx) or self.jacobi_S)
        return self.inv(A, P, prec, self.preonly_S)

    def build_Sapprox_inv(self):
        """Inverse of the (possibly user-supplied) Schur-complement approximation."""
        prec = ((bool(self.schur_approx) and self.jacobi_Shat) or self.jacobi_S)
        A = (self.schur_approx if self.schur_approx else self.inner_S)
        P = DiagonalTensor(A).inv
        # Without a user approximation the diagonal is applied preonly.
        preonly = (self.preonly_Shat if self.schur_approx else True)
        return self.inv(A, P, prec, preonly)

    def build_A00_inv(self):
        """Inverse of the A00 block, diagonally preconditioned if requested."""
        (A, _, _, _) = self.list_split_mixed_ops
        P = DiagonalTensor(A).inv
        return self.inv(A, P, self.jacobi_A00, self.preonly_A00)

    def retrieve_user_S_approx(self, pc, usercode):
        """Import and instantiate the user-provided Schur approximation.

        ``usercode`` is a dotted ``module.Class`` path from the PETSc options;
        returns the corresponding Slate Tensor, or None when unset.
        """
        (_, _, _, A11) = self.list_split_mixed_ops
        (test, trial) = A11.arguments()
        if (usercode != ''):
            (modname, funname) = usercode.rsplit('.', 1)
            mod = __import__(modname)
            fun = getattr(mod, funname)
            if isinstance(fun, type):
                # A class was given: instantiate it before use.
                fun = fun()
            return Tensor(fun.form(pc, test, trial)[0])
        else:
            return None

    def build_schur(self, rhs, non_zero_saddle_mat=None, non_zero_saddle_rhs=None) if False else None  # placeholder removed

    def build_schur(self, rhs, non_zero_saddle_rhs=None):
        """Return ``(schur_rhs, schur_comp)`` for the trace system.

        In the nested case the elimination is performed field-by-field using
        the cached approximate inverses; otherwise the whole mixed operator
        is inverted at once. If the original problem was not a saddle-point
        system, the extra A11 block / rhs part is subtracted off.
        """
        if self.nested:
            (_, A01, A10, _) = self.list_split_mixed_ops
            (K0, K1) = self.list_split_trace_ops
            (KT0, KT1) = self.list_split_trace_ops_transpose
            R = [rhs.blocks[self.vidx], rhs.blocks[self.pidx]]
            # Block factorisation of K * Atilde^{-1} using A00^{-1} and S^{-1}.
            K_Ainv_block1 = [K0, ((((- K0) * self.A00_inv_hat) * A01) + K1)]
            K_Ainv_block2 = [(K_Ainv_block1[0] * self.A00_inv_hat), (K_Ainv_block1[1] * self.inner_S_inv_hat)]
            K_Ainv_block3 = [(K_Ainv_block2[0] - ((K_Ainv_block2[1] * A10) * self.A00_inv_hat)), K_Ainv_block2[1]]
            schur_rhs = ((K_Ainv_block3[0] * R[0]) + (K_Ainv_block3[1] * R[1]))
            schur_comp = ((K_Ainv_block3[0] * KT0) + (K_Ainv_block3[1] * KT1))
        else:
            P = DiagonalTensor(self.Atilde).inv
            Atildeinv = self.inv(self.Atilde, P, self.jacobi_A00, self.preonly_A00)
            schur_rhs = ((self.K * Atildeinv) * rhs)
            schur_comp = ((self.K * Atildeinv) * self.KT)
        if (self.non_zero_saddle_mat or non_zero_saddle_rhs):
            assert (self.non_zero_saddle_mat and non_zero_saddle_rhs), 'The problem is not a saddle point system and you missed to pass either A11 or the corresponding part in the rhs.'
            schur_rhs = (non_zero_saddle_rhs - schur_rhs)
            schur_comp = (self.non_zero_saddle_mat - schur_comp)
        return (schur_rhs, schur_comp)
class OptionPlotoptionsSplineDragdropGuideboxDefault(Options):
    """Options for the default state of the drag/drop guide box.

    BUG FIXED: each getter was defined and then immediately shadowed by a
    same-named setter ``def``, so attribute reads returned the setter as a
    bound method instead of the configured value. These accessors are
    property getter/setter pairs (the decorators were lost); the
    ``@property`` / ``@<name>.setter`` decorators are restored so callers
    can read and assign the options as plain attributes.
    """

    @property
    def className(self):
        """CSS class name of the guide box in this state."""
        return self._config_get('highcharts-drag-box-default')

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def color(self):
        """Fill colour of the guide box."""
        return self._config_get('rgba(0, 0, 0, 0.1)')

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def cursor(self):
        """Mouse cursor shown while dragging."""
        return self._config_get('move')

    @cursor.setter
    def cursor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineColor(self):
        """Border colour of the guide box."""
        return self._config_get('#888')

    @lineColor.setter
    def lineColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def lineWidth(self):
        """Border width of the guide box, in pixels."""
        return self._config_get(1)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        """Stacking z-index of the guide box."""
        return self._config_get(900)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)
def validate_models(provider: str, subfeature: str, constraints: dict, args: dict) -> Dict:
    """Validate/normalise the model (or voice) selection in ``args``.

    When the provider declares no models, any ``settings`` entry is dropped
    and ``args`` is returned unchanged. For voice features (models contain
    'MALE'/'FEMALE') a concrete ``voice_id`` is resolved; otherwise the model
    named in ``settings[provider]`` is validated against the declared models
    (falling back to the provider's default model) and stored as
    ``args['model']``. Mutates and returns ``args``.

    Raises ProviderException when a requested model is not available.
    """
    models = (constraints.get('models') or constraints.get('voice_ids'))
    if (not models):
        # No constraint to check against: strip settings and pass through.
        if ('settings' in args):
            del args['settings']
        return args
    settings = args.get('settings', {})
    if any(((option in models) for option in ['MALE', 'FEMALE'])):
        # Voice feature: resolve a concrete voice id from language/option.
        # NOTE(review): 'retreive_voice_id' is the (misspelled) project helper name.
        voice_id = retreive_voice_id(provider, subfeature, args['language'], args['option'], settings)
        args['voice_id'] = voice_id
    else:
        if (settings and (provider in settings)):
            if (constraints and (settings[provider] in models)):
                selected_model = settings[provider]
            else:
                # NOTE(review): 'availaible' misspelling is in the runtime
                # message; kept byte-identical here.
                all_availaible_models = ', '.join(models)
                raise ProviderException(f'Wrong model name, availaible models for {provider} are : {all_availaible_models}')
        else:
            selected_model = constraints.get('default_model')
        args['model'] = selected_model
    # Settings are consumed above; never forward them to the provider call.
    args.pop('settings', None)
    return args
class DFIInjector(Module, AutoCSR):
    """Multiplexes three DFI command sources onto the master DFI bus.

    Sources are: the hardware controller (``slave``), an external DFI port
    (``ext_dfi``, selected by ``ext_dfi_sel``), and software control through
    CSR-driven PhaseInjectors. In clam-shell topologies the master bus has
    twice the ranks and the slave's chip selects are replicated onto it.
    """

    def __init__(self, addressbits, bankbits, nranks, databits, nphases=1, is_clam_shell=False):
        # Hardware-controller facing DFI port.
        self.slave = dfi.Interface(addressbits, bankbits, nranks, databits, nphases)
        # PHY facing port; clam-shell doubles the number of chip selects.
        self.master = dfi.Interface(addressbits, bankbits, ((nranks * 2) if is_clam_shell else nranks), databits, nphases)
        # Software (CSR) driven DFI port mirroring the master geometry.
        csr_dfi = dfi.Interface(addressbits, bankbits, ((nranks * 2) if is_clam_shell else nranks), databits, nphases)
        # External DFI port, e.g. for direct DMA-style access.
        self.ext_dfi = dfi.Interface(addressbits, bankbits, nranks, databits, nphases)
        self.ext_dfi_sel = Signal()
        self._control = CSRStorage(fields=[CSRField('sel', size=1, values=[('``0b0``', 'Software (CPU) control.'), ('``0b1``', 'Hardware control (default).')], reset=1), CSRField('cke', size=1, description='DFI clock enable bus'), CSRField('odt', size=1, description='DFI on-die termination bus'), CSRField('reset_n', size=1, description='DFI clock reset bus')], description='Control DFI signals common to all phases')
        # One CSR-driven PhaseInjector per DFI phase (pi0, pi1, ...).
        for (n, phase) in enumerate(csr_dfi.phases):
            setattr(self.submodules, ('pi' + str(n)), PhaseInjector(phase))
        # Bus multiplexer: hardware control routes either ext_dfi or the
        # slave (with CS replication for clam-shell); software control
        # routes the CSR-driven port.
        self.comb += [If(self._control.fields.sel, If(self.ext_dfi_sel, self.ext_dfi.connect(self.master)).Else(self.slave.connect(self.master), If(is_clam_shell, [self.master.phases[i].cs_n.eq(Replicate(self.slave.phases[i].cs_n, 2)) for i in range(nphases)]))).Else(csr_dfi.connect(self.master))]
        # In software mode the common cke/odt bits drive every rank on every
        # phase of the CSR port (odt only where the phase exposes it).
        for i in range(nranks):
            self.comb += [phase.cke[i].eq(self._control.fields.cke) for phase in csr_dfi.phases]
            self.comb += [phase.odt[i].eq(self._control.fields.odt) for phase in csr_dfi.phases if hasattr(phase, 'odt')]
        # reset_n is a single per-phase signal (not per rank).
        self.comb += [phase.reset_n.eq(self._control.fields.reset_n) for phase in csr_dfi.phases if hasattr(phase, 'reset_n')]
def strip_PKCS7_padding(s):
    """Strip PKCS#7 padding from a 16-byte-block-aligned string.

    The last character's ordinal gives the pad length, which must be in
    1..16 for a 16-byte block size.

    Raises ValueError when ``s`` is empty, not a multiple of 16 in length,
    or ends with an invalid pad byte.
    """
    if ((len(s) % 16) or (not s)):
        raise ValueError(("String of len %d can't be PCKS7-padded" % len(s)))
    numpads = ord(s[(- 1)])
    # BUG FIX: a trailing NUL byte gave numpads == 0, so s[:-0] silently
    # returned '' instead of rejecting the invalid padding. Valid PKCS#7
    # pad lengths are 1..blocksize inclusive.
    if ((numpads > 16) or (numpads < 1)):
        raise ValueError(("String ending with %r can't be PCKS7-padded" % s[(- 1)]))
    return s[:(- numpads)]
class TacMessage(Message):
    """A protocol message class for the fetchai/tac protocol.

    BUG FIXED: the accessors below were plain methods, yet ``_is_consistent``
    reads them as attributes (``self.performative``, ``self.message_id``,
    ...), so every type check would have compared against bound methods.
    They are properties (the ``@property`` decorators had been lost) and are
    restored here; no other behavior is changed.
    """

    protocol_id = PublicId.from_str('fetchai/tac:1.1.7')
    protocol_specification_id = PublicId.from_str('fetchai/tac:1.0.0')
    ErrorCode = CustomErrorCode

    class Performative(Message.Performative):
        """Performatives for the tac protocol."""

        CANCELLED = 'cancelled'
        GAME_DATA = 'game_data'
        REGISTER = 'register'
        TAC_ERROR = 'tac_error'
        TRANSACTION = 'transaction'
        TRANSACTION_CONFIRMATION = 'transaction_confirmation'
        UNREGISTER = 'unregister'

        def __str__(self) -> str:
            """Get the string representation."""
            return str(self.value)

    _performatives = {'cancelled', 'game_data', 'register', 'tac_error', 'transaction', 'transaction_confirmation', 'unregister'}
    __slots__: Tuple[(str, ...)] = tuple()

    class _SlotsCls():
        __slots__ = ('agent_addr_to_name', 'agent_name', 'amount_by_currency_id', 'counterparty_address', 'counterparty_signature', 'currency_id_to_name', 'dialogue_reference', 'error_code', 'exchange_params_by_currency_id', 'fee_by_currency_id', 'good_id_to_name', 'info', 'ledger_id', 'message_id', 'nonce', 'performative', 'quantities_by_good_id', 'sender_address', 'sender_signature', 'target', 'transaction_id', 'utility_params_by_good_id', 'version_id')

    def __init__(self, performative: Performative, dialogue_reference: Tuple[(str, str)]=('', ''), message_id: int=1, target: int=0, **kwargs: Any):
        """Initialise an instance of TacMessage."""
        super().__init__(dialogue_reference=dialogue_reference, message_id=message_id, target=target, performative=TacMessage.Performative(performative), **kwargs)

    @property
    def valid_performatives(self) -> Set[str]:
        """Get valid performatives."""
        return self._performatives

    @property
    def dialogue_reference(self) -> Tuple[(str, str)]:
        """Get the dialogue_reference of the message."""
        enforce(self.is_set('dialogue_reference'), 'dialogue_reference is not set.')
        return cast(Tuple[(str, str)], self.get('dialogue_reference'))

    @property
    def message_id(self) -> int:
        """Get the message_id of the message."""
        enforce(self.is_set('message_id'), 'message_id is not set.')
        return cast(int, self.get('message_id'))

    @property
    def performative(self) -> Performative:
        """Get the performative of the message."""
        enforce(self.is_set('performative'), 'performative is not set.')
        return cast(TacMessage.Performative, self.get('performative'))

    @property
    def target(self) -> int:
        """Get the target of the message."""
        enforce(self.is_set('target'), 'target is not set.')
        return cast(int, self.get('target'))

    @property
    def agent_addr_to_name(self) -> Dict[(str, str)]:
        """Get the 'agent_addr_to_name' content."""
        enforce(self.is_set('agent_addr_to_name'), "'agent_addr_to_name' content is not set.")
        return cast(Dict[(str, str)], self.get('agent_addr_to_name'))

    @property
    def agent_name(self) -> str:
        """Get the 'agent_name' content."""
        enforce(self.is_set('agent_name'), "'agent_name' content is not set.")
        return cast(str, self.get('agent_name'))

    @property
    def amount_by_currency_id(self) -> Dict[(str, int)]:
        """Get the 'amount_by_currency_id' content."""
        enforce(self.is_set('amount_by_currency_id'), "'amount_by_currency_id' content is not set.")
        return cast(Dict[(str, int)], self.get('amount_by_currency_id'))

    @property
    def counterparty_address(self) -> str:
        """Get the 'counterparty_address' content."""
        enforce(self.is_set('counterparty_address'), "'counterparty_address' content is not set.")
        return cast(str, self.get('counterparty_address'))

    @property
    def counterparty_signature(self) -> str:
        """Get the 'counterparty_signature' content."""
        enforce(self.is_set('counterparty_signature'), "'counterparty_signature' content is not set.")
        return cast(str, self.get('counterparty_signature'))

    @property
    def currency_id_to_name(self) -> Dict[(str, str)]:
        """Get the 'currency_id_to_name' content."""
        enforce(self.is_set('currency_id_to_name'), "'currency_id_to_name' content is not set.")
        return cast(Dict[(str, str)], self.get('currency_id_to_name'))

    @property
    def error_code(self) -> CustomErrorCode:
        """Get the 'error_code' content."""
        enforce(self.is_set('error_code'), "'error_code' content is not set.")
        return cast(CustomErrorCode, self.get('error_code'))

    @property
    def exchange_params_by_currency_id(self) -> Dict[(str, float)]:
        """Get the 'exchange_params_by_currency_id' content."""
        enforce(self.is_set('exchange_params_by_currency_id'), "'exchange_params_by_currency_id' content is not set.")
        return cast(Dict[(str, float)], self.get('exchange_params_by_currency_id'))

    @property
    def fee_by_currency_id(self) -> Dict[(str, int)]:
        """Get the 'fee_by_currency_id' content."""
        enforce(self.is_set('fee_by_currency_id'), "'fee_by_currency_id' content is not set.")
        return cast(Dict[(str, int)], self.get('fee_by_currency_id'))

    @property
    def good_id_to_name(self) -> Dict[(str, str)]:
        """Get the 'good_id_to_name' content."""
        enforce(self.is_set('good_id_to_name'), "'good_id_to_name' content is not set.")
        return cast(Dict[(str, str)], self.get('good_id_to_name'))

    @property
    def info(self) -> Optional[Dict[(str, str)]]:
        """Get the 'info' content (optional)."""
        return cast(Optional[Dict[(str, str)]], self.get('info'))

    @property
    def ledger_id(self) -> str:
        """Get the 'ledger_id' content."""
        enforce(self.is_set('ledger_id'), "'ledger_id' content is not set.")
        return cast(str, self.get('ledger_id'))

    @property
    def nonce(self) -> str:
        """Get the 'nonce' content."""
        enforce(self.is_set('nonce'), "'nonce' content is not set.")
        return cast(str, self.get('nonce'))

    @property
    def quantities_by_good_id(self) -> Dict[(str, int)]:
        """Get the 'quantities_by_good_id' content."""
        enforce(self.is_set('quantities_by_good_id'), "'quantities_by_good_id' content is not set.")
        return cast(Dict[(str, int)], self.get('quantities_by_good_id'))

    @property
    def sender_address(self) -> str:
        """Get the 'sender_address' content."""
        enforce(self.is_set('sender_address'), "'sender_address' content is not set.")
        return cast(str, self.get('sender_address'))

    @property
    def sender_signature(self) -> str:
        """Get the 'sender_signature' content."""
        enforce(self.is_set('sender_signature'), "'sender_signature' content is not set.")
        return cast(str, self.get('sender_signature'))

    @property
    def transaction_id(self) -> str:
        """Get the 'transaction_id' content."""
        enforce(self.is_set('transaction_id'), "'transaction_id' content is not set.")
        return cast(str, self.get('transaction_id'))

    @property
    def utility_params_by_good_id(self) -> Dict[(str, float)]:
        """Get the 'utility_params_by_good_id' content."""
        enforce(self.is_set('utility_params_by_good_id'), "'utility_params_by_good_id' content is not set.")
        return cast(Dict[(str, float)], self.get('utility_params_by_good_id'))

    @property
    def version_id(self) -> str:
        """Get the 'version_id' content."""
        enforce(self.is_set('version_id'), "'version_id' content is not set.")
        return cast(str, self.get('version_id'))

    def _is_consistent(self) -> bool:
        """Check that the message follows the tac protocol; log and return False on violation."""
        try:
            enforce(isinstance(self.dialogue_reference, tuple), "Invalid type for 'dialogue_reference'. Expected 'tuple'. Found '{}'.".format(type(self.dialogue_reference)))
            enforce(isinstance(self.dialogue_reference[0], str), "Invalid type for 'dialogue_reference[0]'. Expected 'str'. Found '{}'.".format(type(self.dialogue_reference[0])))
            enforce(isinstance(self.dialogue_reference[1], str), "Invalid type for 'dialogue_reference[1]'. Expected 'str'. Found '{}'.".format(type(self.dialogue_reference[1])))
            enforce((type(self.message_id) is int), "Invalid type for 'message_id'. Expected 'int'. Found '{}'.".format(type(self.message_id)))
            enforce((type(self.target) is int), "Invalid type for 'target'. Expected 'int'. Found '{}'.".format(type(self.target)))
            # Light protocol rule 2: check the performative and contents.
            enforce(isinstance(self.performative, TacMessage.Performative), "Invalid 'performative'. Expected either of '{}'. Found '{}'.".format(self.valid_performatives, self.performative))
            actual_nb_of_contents = (len(self._body) - DEFAULT_BODY_SIZE)
            expected_nb_of_contents = 0
            if (self.performative == TacMessage.Performative.REGISTER):
                expected_nb_of_contents = 1
                enforce(isinstance(self.agent_name, str), "Invalid type for content 'agent_name'. Expected 'str'. Found '{}'.".format(type(self.agent_name)))
            elif (self.performative == TacMessage.Performative.UNREGISTER):
                expected_nb_of_contents = 0
            elif (self.performative == TacMessage.Performative.TRANSACTION):
                expected_nb_of_contents = 10
                enforce(isinstance(self.transaction_id, str), "Invalid type for content 'transaction_id'. Expected 'str'. Found '{}'.".format(type(self.transaction_id)))
                enforce(isinstance(self.ledger_id, str), "Invalid type for content 'ledger_id'. Expected 'str'. Found '{}'.".format(type(self.ledger_id)))
                enforce(isinstance(self.sender_address, str), "Invalid type for content 'sender_address'. Expected 'str'. Found '{}'.".format(type(self.sender_address)))
                enforce(isinstance(self.counterparty_address, str), "Invalid type for content 'counterparty_address'. Expected 'str'. Found '{}'.".format(type(self.counterparty_address)))
                enforce(isinstance(self.amount_by_currency_id, dict), "Invalid type for content 'amount_by_currency_id'. Expected 'dict'. Found '{}'.".format(type(self.amount_by_currency_id)))
                for (key_of_amount_by_currency_id, value_of_amount_by_currency_id) in self.amount_by_currency_id.items():
                    enforce(isinstance(key_of_amount_by_currency_id, str), "Invalid type for dictionary keys in content 'amount_by_currency_id'. Expected 'str'. Found '{}'.".format(type(key_of_amount_by_currency_id)))
                    enforce((type(value_of_amount_by_currency_id) is int), "Invalid type for dictionary values in content 'amount_by_currency_id'. Expected 'int'. Found '{}'.".format(type(value_of_amount_by_currency_id)))
                enforce(isinstance(self.fee_by_currency_id, dict), "Invalid type for content 'fee_by_currency_id'. Expected 'dict'. Found '{}'.".format(type(self.fee_by_currency_id)))
                for (key_of_fee_by_currency_id, value_of_fee_by_currency_id) in self.fee_by_currency_id.items():
                    enforce(isinstance(key_of_fee_by_currency_id, str), "Invalid type for dictionary keys in content 'fee_by_currency_id'. Expected 'str'. Found '{}'.".format(type(key_of_fee_by_currency_id)))
                    enforce((type(value_of_fee_by_currency_id) is int), "Invalid type for dictionary values in content 'fee_by_currency_id'. Expected 'int'. Found '{}'.".format(type(value_of_fee_by_currency_id)))
                enforce(isinstance(self.quantities_by_good_id, dict), "Invalid type for content 'quantities_by_good_id'. Expected 'dict'. Found '{}'.".format(type(self.quantities_by_good_id)))
                for (key_of_quantities_by_good_id, value_of_quantities_by_good_id) in self.quantities_by_good_id.items():
                    enforce(isinstance(key_of_quantities_by_good_id, str), "Invalid type for dictionary keys in content 'quantities_by_good_id'. Expected 'str'. Found '{}'.".format(type(key_of_quantities_by_good_id)))
                    enforce((type(value_of_quantities_by_good_id) is int), "Invalid type for dictionary values in content 'quantities_by_good_id'. Expected 'int'. Found '{}'.".format(type(value_of_quantities_by_good_id)))
                enforce(isinstance(self.nonce, str), "Invalid type for content 'nonce'. Expected 'str'. Found '{}'.".format(type(self.nonce)))
                enforce(isinstance(self.sender_signature, str), "Invalid type for content 'sender_signature'. Expected 'str'. Found '{}'.".format(type(self.sender_signature)))
                enforce(isinstance(self.counterparty_signature, str), "Invalid type for content 'counterparty_signature'. Expected 'str'. Found '{}'.".format(type(self.counterparty_signature)))
            elif (self.performative == TacMessage.Performative.CANCELLED):
                expected_nb_of_contents = 0
            elif (self.performative == TacMessage.Performative.GAME_DATA):
                expected_nb_of_contents = 9
                enforce(isinstance(self.amount_by_currency_id, dict), "Invalid type for content 'amount_by_currency_id'. Expected 'dict'. Found '{}'.".format(type(self.amount_by_currency_id)))
                for (key_of_amount_by_currency_id, value_of_amount_by_currency_id) in self.amount_by_currency_id.items():
                    enforce(isinstance(key_of_amount_by_currency_id, str), "Invalid type for dictionary keys in content 'amount_by_currency_id'. Expected 'str'. Found '{}'.".format(type(key_of_amount_by_currency_id)))
                    enforce((type(value_of_amount_by_currency_id) is int), "Invalid type for dictionary values in content 'amount_by_currency_id'. Expected 'int'. Found '{}'.".format(type(value_of_amount_by_currency_id)))
                enforce(isinstance(self.exchange_params_by_currency_id, dict), "Invalid type for content 'exchange_params_by_currency_id'. Expected 'dict'. Found '{}'.".format(type(self.exchange_params_by_currency_id)))
                for (key_of_exchange_params_by_currency_id, value_of_exchange_params_by_currency_id) in self.exchange_params_by_currency_id.items():
                    enforce(isinstance(key_of_exchange_params_by_currency_id, str), "Invalid type for dictionary keys in content 'exchange_params_by_currency_id'. Expected 'str'. Found '{}'.".format(type(key_of_exchange_params_by_currency_id)))
                    enforce(isinstance(value_of_exchange_params_by_currency_id, float), "Invalid type for dictionary values in content 'exchange_params_by_currency_id'. Expected 'float'. Found '{}'.".format(type(value_of_exchange_params_by_currency_id)))
                enforce(isinstance(self.quantities_by_good_id, dict), "Invalid type for content 'quantities_by_good_id'. Expected 'dict'. Found '{}'.".format(type(self.quantities_by_good_id)))
                for (key_of_quantities_by_good_id, value_of_quantities_by_good_id) in self.quantities_by_good_id.items():
                    enforce(isinstance(key_of_quantities_by_good_id, str), "Invalid type for dictionary keys in content 'quantities_by_good_id'. Expected 'str'. Found '{}'.".format(type(key_of_quantities_by_good_id)))
                    enforce((type(value_of_quantities_by_good_id) is int), "Invalid type for dictionary values in content 'quantities_by_good_id'. Expected 'int'. Found '{}'.".format(type(value_of_quantities_by_good_id)))
                enforce(isinstance(self.utility_params_by_good_id, dict), "Invalid type for content 'utility_params_by_good_id'. Expected 'dict'. Found '{}'.".format(type(self.utility_params_by_good_id)))
                for (key_of_utility_params_by_good_id, value_of_utility_params_by_good_id) in self.utility_params_by_good_id.items():
                    enforce(isinstance(key_of_utility_params_by_good_id, str), "Invalid type for dictionary keys in content 'utility_params_by_good_id'. Expected 'str'. Found '{}'.".format(type(key_of_utility_params_by_good_id)))
                    enforce(isinstance(value_of_utility_params_by_good_id, float), "Invalid type for dictionary values in content 'utility_params_by_good_id'. Expected 'float'. Found '{}'.".format(type(value_of_utility_params_by_good_id)))
                enforce(isinstance(self.fee_by_currency_id, dict), "Invalid type for content 'fee_by_currency_id'. Expected 'dict'. Found '{}'.".format(type(self.fee_by_currency_id)))
                for (key_of_fee_by_currency_id, value_of_fee_by_currency_id) in self.fee_by_currency_id.items():
                    enforce(isinstance(key_of_fee_by_currency_id, str), "Invalid type for dictionary keys in content 'fee_by_currency_id'. Expected 'str'. Found '{}'.".format(type(key_of_fee_by_currency_id)))
                    enforce((type(value_of_fee_by_currency_id) is int), "Invalid type for dictionary values in content 'fee_by_currency_id'. Expected 'int'. Found '{}'.".format(type(value_of_fee_by_currency_id)))
                enforce(isinstance(self.agent_addr_to_name, dict), "Invalid type for content 'agent_addr_to_name'. Expected 'dict'. Found '{}'.".format(type(self.agent_addr_to_name)))
                for (key_of_agent_addr_to_name, value_of_agent_addr_to_name) in self.agent_addr_to_name.items():
                    enforce(isinstance(key_of_agent_addr_to_name, str), "Invalid type for dictionary keys in content 'agent_addr_to_name'. Expected 'str'. Found '{}'.".format(type(key_of_agent_addr_to_name)))
                    enforce(isinstance(value_of_agent_addr_to_name, str), "Invalid type for dictionary values in content 'agent_addr_to_name'. Expected 'str'. Found '{}'.".format(type(value_of_agent_addr_to_name)))
                enforce(isinstance(self.currency_id_to_name, dict), "Invalid type for content 'currency_id_to_name'. Expected 'dict'. Found '{}'.".format(type(self.currency_id_to_name)))
                for (key_of_currency_id_to_name, value_of_currency_id_to_name) in self.currency_id_to_name.items():
                    enforce(isinstance(key_of_currency_id_to_name, str), "Invalid type for dictionary keys in content 'currency_id_to_name'. Expected 'str'. Found '{}'.".format(type(key_of_currency_id_to_name)))
                    enforce(isinstance(value_of_currency_id_to_name, str), "Invalid type for dictionary values in content 'currency_id_to_name'. Expected 'str'. Found '{}'.".format(type(value_of_currency_id_to_name)))
                enforce(isinstance(self.good_id_to_name, dict), "Invalid type for content 'good_id_to_name'. Expected 'dict'. Found '{}'.".format(type(self.good_id_to_name)))
                for (key_of_good_id_to_name, value_of_good_id_to_name) in self.good_id_to_name.items():
                    enforce(isinstance(key_of_good_id_to_name, str), "Invalid type for dictionary keys in content 'good_id_to_name'. Expected 'str'. Found '{}'.".format(type(key_of_good_id_to_name)))
                    enforce(isinstance(value_of_good_id_to_name, str), "Invalid type for dictionary values in content 'good_id_to_name'. Expected 'str'. Found '{}'.".format(type(value_of_good_id_to_name)))
                enforce(isinstance(self.version_id, str), "Invalid type for content 'version_id'. Expected 'str'. Found '{}'.".format(type(self.version_id)))
                if self.is_set('info'):
                    expected_nb_of_contents += 1
                    info = cast(Dict[(str, str)], self.info)
                    enforce(isinstance(info, dict), "Invalid type for content 'info'. Expected 'dict'. Found '{}'.".format(type(info)))
                    for (key_of_info, value_of_info) in info.items():
                        enforce(isinstance(key_of_info, str), "Invalid type for dictionary keys in content 'info'. Expected 'str'. Found '{}'.".format(type(key_of_info)))
                        enforce(isinstance(value_of_info, str), "Invalid type for dictionary values in content 'info'. Expected 'str'. Found '{}'.".format(type(value_of_info)))
            elif (self.performative == TacMessage.Performative.TRANSACTION_CONFIRMATION):
                expected_nb_of_contents = 3
                enforce(isinstance(self.transaction_id, str), "Invalid type for content 'transaction_id'. Expected 'str'. Found '{}'.".format(type(self.transaction_id)))
                enforce(isinstance(self.amount_by_currency_id, dict), "Invalid type for content 'amount_by_currency_id'. Expected 'dict'. Found '{}'.".format(type(self.amount_by_currency_id)))
                for (key_of_amount_by_currency_id, value_of_amount_by_currency_id) in self.amount_by_currency_id.items():
                    enforce(isinstance(key_of_amount_by_currency_id, str), "Invalid type for dictionary keys in content 'amount_by_currency_id'. Expected 'str'. Found '{}'.".format(type(key_of_amount_by_currency_id)))
                    enforce((type(value_of_amount_by_currency_id) is int), "Invalid type for dictionary values in content 'amount_by_currency_id'. Expected 'int'. Found '{}'.".format(type(value_of_amount_by_currency_id)))
                enforce(isinstance(self.quantities_by_good_id, dict), "Invalid type for content 'quantities_by_good_id'. Expected 'dict'. Found '{}'.".format(type(self.quantities_by_good_id)))
                for (key_of_quantities_by_good_id, value_of_quantities_by_good_id) in self.quantities_by_good_id.items():
                    enforce(isinstance(key_of_quantities_by_good_id, str), "Invalid type for dictionary keys in content 'quantities_by_good_id'. Expected 'str'. Found '{}'.".format(type(key_of_quantities_by_good_id)))
                    enforce((type(value_of_quantities_by_good_id) is int), "Invalid type for dictionary values in content 'quantities_by_good_id'. Expected 'int'. Found '{}'.".format(type(value_of_quantities_by_good_id)))
            elif (self.performative == TacMessage.Performative.TAC_ERROR):
                expected_nb_of_contents = 1
                enforce(isinstance(self.error_code, CustomErrorCode), "Invalid type for content 'error_code'. Expected 'ErrorCode'. Found '{}'.".format(type(self.error_code)))
                if self.is_set('info'):
                    expected_nb_of_contents += 1
                    info = cast(Dict[(str, str)], self.info)
                    enforce(isinstance(info, dict), "Invalid type for content 'info'. Expected 'dict'. Found '{}'.".format(type(info)))
                    for (key_of_info, value_of_info) in info.items():
                        enforce(isinstance(key_of_info, str), "Invalid type for dictionary keys in content 'info'. Expected 'str'. Found '{}'.".format(type(key_of_info)))
                        enforce(isinstance(value_of_info, str), "Invalid type for dictionary values in content 'info'. Expected 'str'. Found '{}'.".format(type(value_of_info)))
            # Check correct content count.
            enforce((expected_nb_of_contents == actual_nb_of_contents), 'Incorrect number of contents. Expected {}. Found {}'.format(expected_nb_of_contents, actual_nb_of_contents))
            # Light protocol rule 3: the first message targets nothing.
            if (self.message_id == 1):
                enforce((self.target == 0), "Invalid 'target'. Expected 0 (because 'message_id' is 1). Found {}.".format(self.target))
        except (AEAEnforceError, ValueError, KeyError) as e:
            _default_logger.error(str(e))
            return False
        return True
class Achromatic(metaclass=ABCMeta):
    """Models the achromatic (near-neutral) response of a color space.

    A spline is fit through (lightness, colorfulness, hue) samples of the
    space's gray axis; `test()` then classifies whether a given color sits
    close enough to that response to be treated as achromatic.
    """

    # Coordinate indexes of lightness, colorfulness and hue in converted colors.
    L_IDX = 0
    C_IDX = 1
    H_IDX = 2

    def __init__(self, data: Optional[List[Vector]]=None, threshold_upper: float=alg.inf, threshold_lower: float=alg.inf, threshold_cutoff: float=alg.inf, spline: str='linear', mirror: bool=False, **kwargs: Any) -> None:
        """Initialize tolerance thresholds and optionally fit the response from `data`."""
        self.mirror = mirror
        self.threshold_upper = threshold_upper
        self.threshold_lower = threshold_lower
        self.threshold_cutoff = threshold_cutoff
        self.domain = []
        # Running minimums, updated while sampling the gray axis.
        self.min_colorfulness = .0
        self.min_lightness = .0
        self.spline_type = spline
        if (data is not None):
            self.setup_achromatic_response(data, **kwargs)

    def dump(self) -> Optional[List[Vector]]:
        """Return the fitted spline points as [l, c, h] columns.

        Non-linear splines drop the first/last point — presumably padding
        added by the interpolator; confirm against `alg.interpolate`.
        """
        if (self.spline_type == 'linear'):
            return list(zip(*self.spline.points))
        else:
            return list(zip(*self.spline.points))[1:(- 1)]

    def convert(self, coords: Vector, **kwargs: Any) -> Vector:
        """Convert gray-axis coordinates into this space's (l, c, h) form.

        Abstract hook: subclasses must provide the actual conversion.
        """

    def calc_achromatic_response(self, parameters: List[Tuple[(int, int, int, float)]], **kwargs: Any) -> None:
        """Sample the gray axis over (start, end, step, scale) segments and fit the spline."""
        points = []
        for segment in parameters:
            (start, end, step, scale) = segment
            for p in range(start, end, step):
                # Equal channels p/scale define a gray sample.
                color = self.convert(([(p / scale)] * 3), **kwargs)
                (l, c, h) = (color[self.L_IDX], color[self.C_IDX], color[self.H_IDX])
                if (l < self.min_lightness):
                    self.min_lightness = l
                if (c < self.min_colorfulness):
                    self.min_colorfulness = c
                self.domain.append(l)
                points.append([l, c, (h % 360)])
        self.spline = alg.interpolate(points, method=self.spline_type)
        # Reference achromatic hue and its 180-degree inverse.
        self.hue = (self.convert(([1] * 3), **kwargs)[self.H_IDX] % 360)
        self.ihue = ((self.hue - 180) % 360)

    def setup_achromatic_response(self, tuning: List[Vector], **kwargs: Any) -> None:
        """Fit the spline directly from pre-computed [l, c, h] tuning data."""
        points = []
        for entry in tuning:
            (l, c, h) = entry
            if (l < self.min_lightness):
                self.min_lightness = l
            if (c < self.min_colorfulness):
                self.min_colorfulness = c
            points.append([l, c, h])
            self.domain.append(l)
        self.spline = alg.interpolate(points, method=self.spline_type)
        self.hue = (self.convert(([1] * 3), **kwargs)[self.H_IDX] % 360)
        self.ihue = ((self.hue - 180) % 360)

    def scale(self, point: float) -> float:
        """Map a lightness value onto the spline's normalized [0, 1] domain.

        Values outside the sampled range are extrapolated linearly relative
        to the full domain span.
        """
        if (point <= self.domain[0]):
            point = ((point - self.domain[0]) / (self.domain[(- 1)] - self.domain[0]))
        elif (point >= self.domain[(- 1)]):
            point = (1.0 + ((point - self.domain[(- 1)]) / (self.domain[(- 1)] - self.domain[0])))
        else:
            regions = (len(self.domain) - 1)
            size = (1 / regions)
            index = 0
            adjusted = 0.0
            # Locate the sampled interval containing `point` and compute its
            # fractional position within that interval.
            index = (bisect.bisect(self.domain, point) - 1)
            (a, b) = self.domain[index:(index + 2)]
            l = (b - a)
            adjusted = (((point - a) / l) if l else 0.0)
            point = ((size * index) + (adjusted * size))
        return point

    def get_ideal_chroma(self, l: float) -> float:
        """Ideal achromatic chroma at lightness `l` (mirrored for negative l)."""
        if math.isnan(l):
            return 0.0
        elif (self.mirror and (l < 0.0)):
            return self.spline(self.scale(abs(l)))[1]
        return self.spline(self.scale(l))[1]

    def get_ideal_hue(self, l: float) -> float:
        """Ideal achromatic hue at lightness `l` (flipped 180 degrees when mirrored)."""
        if math.isnan(l):
            return 0.0
        elif (self.mirror and (l < 0.0)):
            return ((self.spline(self.scale(abs(l)))[2] - 180) % 360)
        return self.spline(self.scale(l))[2]

    def get_ideal_ab(self, l: float) -> Tuple[(float, float)]:
        """Ideal achromatic response expressed as rectangular (a, b)."""
        if math.isnan(l):
            return (0.0, 0.0)
        return alg.polar_to_rect(self.get_ideal_chroma(l), self.get_ideal_hue(l))

    def test(self, l: float, c: float, h: float) -> bool:
        """Return True when (l, c, h) is close enough to the response to count as gray."""
        # Chroma beyond the hard cutoff, or negative lightness when not
        # mirroring, can never be achromatic.
        if ((c > self.threshold_cutoff) or ((not self.mirror) and (l < 0.0))):
            return False
        flip = (self.mirror and (l < 0.0))
        la = abs(l)
        point = self.scale((la if flip else l))
        # Below the smallest sampled lightness and colorfulness: trivially gray.
        if ((la < self.min_lightness) and (c < self.min_colorfulness)):
            return True
        else:
            (c2, h2) = self.spline(point)[1:]
            if flip:
                h2 = ((h2 - 180) % 360)
            diff = (c2 - c)
            hdiff = abs(((h % 360) - h2))
            if (hdiff > 180):
                # Shortest angular distance.
                hdiff = (360 - hdiff)
            # Chroma within the asymmetric tolerance band, and hue effectively
            # matching (or ideal chroma ~0, making hue meaningless).
            return ((((diff >= 0) and (diff < self.threshold_upper)) or ((diff < 0) and (abs(diff) < self.threshold_lower))) and ((c2 < 1e-05) or (hdiff < 0.01)))
def MoveBack(kwargs: dict) -> OutgoingMessage:
    """Build a 'MoveBack' command message from the given keyword arguments.

    Requires 'id', 'distance' and 'speed' in `kwargs`; no optional keys.
    """
    required = ['id', 'distance', 'speed']
    optional = []
    utility.CheckKwargs(kwargs, required)
    message = OutgoingMessage()
    message.write_int32(kwargs['id'])
    message.write_string('MoveBack')
    message.write_float32(kwargs['distance'])
    message.write_float32(kwargs['speed'])
    return message
def make_data_section(rom_start: int, rom_end: int, vram_start: int, name: str, rom_bytes: bytes, segment_rom_start: int, exclusive_ram_id) -> DisassemblerSection:
    """Create a disassembler section and populate it as a data section."""
    disasm_section = make_disassembler_section()
    assert (disasm_section is not None)
    disasm_section.make_data_section(rom_start, rom_end, vram_start, name, rom_bytes, segment_rom_start, exclusive_ram_id)
    return disasm_section
def scan(tokens):
    """Scan a token list, grouping quoted spans into Literal objects.

    Yields (index, item) pairs where item is either a plain token or a
    Literal for a quoted span: '...', "...", `...`, or dollar-quoted
    $tag$...$tag$.  Single-quoted literals honor '' escaping and backslash
    escapes.  Whitespace tokens outside literals are skipped.
    """
    literal_start_idx = None
    literal_started = None
    prev_was_escape = False
    lexeme = []
    i = 0
    while i < len(tokens):
        token = tokens[i]
        # BUG FIX: was `if literal_start_idx:` — a literal opened at token
        # index 0 made this falsy, so its contents were emitted as plain
        # tokens instead of being collected into the literal.
        if literal_start_idx is not None:
            if prev_was_escape:
                prev_was_escape = False
                lexeme.append(token)
            elif token == literal_started:
                # Doubled single quote inside a '...' literal is an escape.
                if (literal_started == "'") and (len(tokens) > (i + 1)) and (tokens[(i + 1)] == "'"):
                    i += 1
                    lexeme.append("'")
                else:
                    yield (i, Literal(literal_started, ''.join(lexeme)))
                    literal_start_idx = None
                    literal_started = None
                    lexeme = []
            elif token == '\\':
                prev_was_escape = token
            else:
                prev_was_escape = False
                lexeme.append(token)
        else:
            if token in ["'", '"', '`']:
                literal_start_idx = i
                literal_started = token
            elif token == '$':
                # Dollar quoting: find the matching $tag$ terminator.
                try:
                    closing_dollar_idx = tokens.index('$', (i + 1))
                except ValueError:
                    pass
                else:
                    quote = tokens[i:(closing_dollar_idx + 1)]
                    length = len(quote)
                    closing_quote_idx = (closing_dollar_idx + 1)
                    while True:
                        try:
                            closing_quote_idx = tokens.index('$', closing_quote_idx)
                        except ValueError:
                            break
                        if tokens[closing_quote_idx:(closing_quote_idx + length)] == quote:
                            yield (i, Literal(''.join(quote), ''.join(tokens[(closing_dollar_idx + 1):closing_quote_idx])))
                            i = (closing_quote_idx + length)
                            break
                        closing_quote_idx += 1
            elif token != ' ':
                yield (i, token)
        i += 1
    if lexeme:
        # Unterminated literal: yields the raw token list.
        # NOTE(review): inconsistent with the Literal yielded above —
        # presumably intentional "best effort"; confirm against callers.
        yield (i, lexeme)
class ContainerExample2(HasTraits):
    """Demo: a scatter plot stacked above a line plot of sin(x) * x**3."""

    plot = Instance(VPlotContainer)

    traits_view = View(Item('plot', editor=ComponentEditor(), show_label=False), width=600, height=800, resizable=True)

    def __init__(self):
        xs = linspace((- 14), 14, 100)
        ys = (sin(xs) * (xs ** 3))
        data = ArrayPlotData(x=xs, y=ys)
        scatter_plot = Plot(data)
        scatter_plot.plot(('x', 'y'), type='scatter', color='blue')
        line_plot = Plot(data)
        line_plot.plot(('x', 'y'), type='line', color='blue')
        self.plot = VPlotContainer(scatter_plot, line_plot)
class TestGroupedOOFModel():
    """Tests for GroupedOOFModel: out-of-fold predictions grouped by ticker."""

    def test_fit_predict(self):
        # Synthetic frame with a 'ticker' group column and target y.
        (X_, y) = gen_grouped_data(1000)
        model = GroupedOOFModel(GroupTestModel(), group_column='ticker', fold_cnt=5)
        # Exercise both a flat frame and one with ('ticker', 'date') in the index.
        for X in [X_, X_.set_index(['ticker', 'date'])]:
            model.fit(X, y['y'])
            pred = model.predict(X)
            assert (len(X) == len(pred))
            # 20 groups distributed over 5 folds.
            assert (len(model.group_df) == 20)
            assert (len(model.group_df['fold_id'].unique()) == 5)
            info = X.copy()
            info['y'] = y['y']
            info['pred'] = pred
            info = info.reset_index()
            info = pd.merge(info.rename({'ticker': 'group'}, axis=1), model.group_df, on='group', how='left')
            # Out-of-fold property: a row's prediction never equals its own target.
            assert (info['y'] != info['pred']).min()
            folds_df = pd.merge(info.groupby('fold_id')['y'].unique(), info.groupby('fold_id')['pred'].unique(), on='fold_id', how='left')
            # Within each fold, predicted values come only from other folds' targets.
            assert folds_df.apply((lambda x: (len(set(x['y']).intersection(set(x['pred']))) == 0)), axis=1).min()
            # Unknown group value: prediction falls back to some fold's model;
            # it must not leak fold 0's own targets.
            if ('ticker' in X.columns):
                X['ticker'] = 100500
                pred = model.predict(X)
                assert (len(set(pred).intersection(set(folds_df.loc[0]['y']))) == 0)
        # Same flow with a real classifier (LightGBM) on a binarized target;
        # predicted labels must lie in [0, 1].
        (X_, y) = gen_grouped_data(1000)
        model = GroupedOOFModel(lgbm.sklearn.LGBMClassifier(), group_column='ticker', fold_cnt=5)
        model.fit(X, (y['y'] > 5))
        pred = model.predict(X)
        assert (pred >= 0).min()
        assert (pred <= 1).min()
def alignMinimalBounds_multi():
    """Rotate the selection about Z in halving steps, keeping whichever
    orientation minimizes the selection bounding box's `minLength`.
    """
    steps = 8
    angle = 45
    all_ob_bounds = multi_object_loop(getSelectionBBox, need_results=True)
    if (not any(all_ob_bounds)):
        # Nothing selected / no bounds to work with.
        return {'CANCELLED'}
    bboxPrevious = get_BBOX_multi(all_ob_bounds)
    for _ in range(0, steps):
        # Try rotating by +angle.
        bpy.ops.transform.rotate(value=((angle * math.pi) / 180), orient_axis='Z', constraint_axis=(False, False, False), use_proportional_edit=False)
        all_ob_bounds = multi_object_loop(getSelectionBBox, need_results=True)
        bbox = get_BBOX_multi(all_ob_bounds)
        if (bbox['minLength'] < bboxPrevious['minLength']):
            bboxPrevious = bbox
        else:
            # Worse: swing to -angle (rotate back through twice the angle).
            bpy.ops.transform.rotate(value=((((- angle) * 2) * math.pi) / 180), orient_axis='Z', constraint_axis=(False, False, False), use_proportional_edit=False)
            all_ob_bounds = multi_object_loop(getSelectionBBox, need_results=True)
            bbox = get_BBOX_multi(all_ob_bounds)
            if (bbox['minLength'] < bboxPrevious['minLength']):
                bboxPrevious = bbox
            else:
                # Neither direction helped: rotate back to the starting orientation.
                bpy.ops.transform.rotate(value=((angle * math.pi) / 180), orient_axis='Z', constraint_axis=(False, False, False), use_proportional_edit=False)
        # Narrow the search window each pass.
        angle = (angle / 2)
def create_revision(manage_manually=False, using=None, atomic=True, request_creates_revision=None):
    """View decorator: wrap request handling in a revision block when the
    request qualifies.

    Args:
        manage_manually: passed through to the revision context.
        using: database alias the revision is stored on.
        atomic: whether the revision block runs inside a transaction.
        request_creates_revision: predicate deciding whether this request
            should create a revision; defaults to the module-level policy.
    """
    request_creates_revision = (request_creates_revision or _request_creates_revision)

    def decorator(func):
        (func)  # NOTE(review): looks like a mangled decorator application on the wrapper below (presumably @wraps(func)) — confirm against upstream.
        def do_revision_view(request, *args, **kwargs):
            if request_creates_revision(request):
                with create_revision_base(manage_manually=manage_manually, using=using, atomic=atomic):
                    response = func(request, *args, **kwargs)
                    # Attribute the revision to the request's user.
                    _set_user_from_request(request)
                    return response
            # Request does not create a revision: call through unchanged.
            return func(request, *args, **kwargs)
        return do_revision_view
    return decorator
class TestLaunch(BaseLaunchTestCase):
    """End-to-end test: launching two agents and stopping them with Ctrl-C
    must exit with code 0."""

    .flaky(reruns=MAX_FLAKY_RERUNS_ETH)  # NOTE(review): mangled decorator — presumably @pytest.mark.flaky; confirm upstream.
    def test_exit_code_equal_to_zero(self):
        # Both agents must report they started processing before interrupting.
        with self._cli_launch([self.agent_name_1, self.agent_name_2]) as process_launch:
            process_launch.expect_all([f'[{self.agent_name_1}] Start processing messages...', f'[{self.agent_name_2}] Start processing messages...'], timeout=DEFAULT_EXPECT_TIMEOUT)
            process_launch.control_c()
            process_launch.expect_all(['Exit cli. code: 0'], timeout=DEFAULT_EXPECT_TIMEOUT)
class OptionPlotoptionsFunnel3dSonificationContexttracksMapping(Options):
    """Generated Highcharts option wrapper: funnel3d sonification
    context-track mapping.

    NOTE(review): the accessors below presumably carried @property (and
    matching setter) decorators upstream that were stripped here — `text`
    appears twice as a getter/setter pair; confirm against the generator.
    """

    def frequency(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingFrequency':
        # Sub-configuration accessor.
        return self._config_sub_data('frequency', OptionPlotoptionsFunnel3dSonificationContexttracksMappingFrequency)

    def gapBetweenNotes(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingGapbetweennotes':
        return self._config_sub_data('gapBetweenNotes', OptionPlotoptionsFunnel3dSonificationContexttracksMappingGapbetweennotes)

    def highpass(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingHighpass':
        return self._config_sub_data('highpass', OptionPlotoptionsFunnel3dSonificationContexttracksMappingHighpass)

    def lowpass(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingLowpass':
        return self._config_sub_data('lowpass', OptionPlotoptionsFunnel3dSonificationContexttracksMappingLowpass)

    def noteDuration(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingNoteduration':
        return self._config_sub_data('noteDuration', OptionPlotoptionsFunnel3dSonificationContexttracksMappingNoteduration)

    def pan(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingPan':
        return self._config_sub_data('pan', OptionPlotoptionsFunnel3dSonificationContexttracksMappingPan)

    def pitch(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingPitch':
        return self._config_sub_data('pitch', OptionPlotoptionsFunnel3dSonificationContexttracksMappingPitch)

    def playDelay(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingPlaydelay':
        return self._config_sub_data('playDelay', OptionPlotoptionsFunnel3dSonificationContexttracksMappingPlaydelay)

    def rate(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingRate':
        return self._config_sub_data('rate', OptionPlotoptionsFunnel3dSonificationContexttracksMappingRate)

    def text(self):
        # Getter: no default recorded.
        return self._config_get(None)

    def text(self, text: str):
        # Setter counterpart.
        self._config(text, js_type=False)

    def time(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingTime':
        return self._config_sub_data('time', OptionPlotoptionsFunnel3dSonificationContexttracksMappingTime)

    def tremolo(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingTremolo':
        return self._config_sub_data('tremolo', OptionPlotoptionsFunnel3dSonificationContexttracksMappingTremolo)

    def volume(self) -> 'OptionPlotoptionsFunnel3dSonificationContexttracksMappingVolume':
        return self._config_sub_data('volume', OptionPlotoptionsFunnel3dSonificationContexttracksMappingVolume)
def lock_file(file_descriptor: IO[bytes], logger: Logger=_default_logger) -> Generator:
    """Generator context manager: hold an exclusive lock on the file for the
    duration of the `with` body.

    Acquisition errors are logged and re-raised; the lock is always released
    on exit.  NOTE(review): presumably decorated with @contextmanager
    upstream (decorator not visible here) — confirm.
    """
    with exception_log_and_reraise(logger.error, f"Couldn't acquire lock for file {file_descriptor.name}: {{}}"):
        file_lock.lock(file_descriptor, file_lock.LOCK_EX)
    try:
        (yield)
    finally:
        file_lock.unlock(file_descriptor)
def test():
    """Check the expected GPE entities on each example doc.

    NOTE(review): the expected entity texts are empty strings — they look
    stripped during extraction; restore from the original exercise.
    """
    assert ([(ent.text, ent.label_) for ent in doc1.ents] == [('', 'GPE')]), 'doc1!'
    assert ([(ent.text, ent.label_) for ent in doc2.ents] == [('', 'GPE')]), 'doc2!'
    assert ([(ent.text, ent.label_) for ent in doc3.ents] == [('', 'GPE'), ('', 'GPE')]), 'doc3!'
    # BUG FIX: the failure message for doc4 previously said 'doc3!' (copy-paste typo).
    assert ([(ent.text, ent.label_) for ent in doc4.ents] == [('', 'GPE')]), 'doc4!'
    __msg__.good('Great work! GPEwiki')
(scope='function')  # NOTE(review): mangled decorator — presumably @pytest.fixture(scope='function'); confirm upstream.
def executable_consent_request(db, provided_identity_and_consent_request, consent_policy):
    """Fixture: a privacy request wired to a consent request and provided
    identity, deleted again after the test."""
    provided_identity = provided_identity_and_consent_request[0]
    consent_request = provided_identity_and_consent_request[1]
    privacy_request = _create_privacy_request_for_policy(db, consent_policy)
    # Link the consent request and identity to the new privacy request.
    consent_request.privacy_request_id = privacy_request.id
    consent_request.save(db)
    provided_identity.privacy_request_id = privacy_request.id
    provided_identity.save(db)
    (yield privacy_request)
    # Teardown: remove the privacy request created above.
    privacy_request.delete(db)
class OptionPlotoptionsArcdiagramSonificationTracksPointgrouping(Options):
    """Generated Highcharts option wrapper: arcdiagram sonification
    point-grouping.

    NOTE(review): each getter/setter pair shares a name — upstream these are
    presumably @property / @<name>.setter pairs with decorators stripped;
    confirm against the generator.
    """

    def algorithm(self):
        # Default grouping algorithm.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Grouping is on by default.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Default grouping window; unit not visible here — confirm upstream.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Point property used for grouping; defaults to 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)
def get_evolve_data(is_jp: bool) -> list[int]:
    """Load the cat picture-book CSV and return the (0-offset) evolve forms.

    Returns an empty list (after reporting an error) when the file is
    unavailable.
    """
    raw = game_data_getter.get_file_latest('DataLocal', 'nyankoPictureBookData.csv', is_jp)
    if raw is None:
        helper.error_text('Failed to get evolve data')
        return []
    table = helper.parse_int_list_list(csv_handler.parse_csv(raw.decode('utf-8')))
    evolve_forms = helper.copy_first_n(table, 2)
    return helper.offset_list(evolve_forms, (- 1))
def convert_to_color(object, name, value):
    """Cast `value` to an (r, g, b) tuple of floats in [0, 1].

    Accepts either a 3-element sequence (each component range-checked) or a
    packed 0xRRGGBB integer.  Raises TraitError for anything else.
    """
    if (isinstance(value, SequenceTypes) and (len(value) == 3)):
        return (range_check(value[0]), range_check(value[1]), range_check(value[2]))
    if isinstance(value, int):
        # BUG FIX: the red channel used true division without masking
        # ((value / 65536) / 255.0), which disagrees with the green/blue
        # channels below and yields fractional, unclamped results.
        return (
            (((value // 65536) & 255) / 255.0),
            (((value // 256) & 255) / 255.0),
            ((value & 255) / 255.0),
        )
    raise TraitError
def ripemd160(evm: Evm) -> None:
    """RIPEMD-160 precompile: hash the call data, left-pad to 32 bytes.

    Gas is charged (per 32-byte word of input) before the hash is computed.
    """
    payload = evm.message.data
    words = (ceil32(Uint(len(payload))) // 32)
    charge_gas(evm, (GAS_RIPEMD160 + (GAS_RIPEMD160_WORD * words)))
    digest = hashlib.new('ripemd160', payload).digest()
    evm.output = left_pad_zero_bytes(digest, 32)
class SitemapCrawler(scrapy.spiders.SitemapSpider):
    """Scrapy spider that discovers article pages through a site's sitemap."""

    name = 'SitemapCrawler'
    allowed_domains = None  # derived from the target url in __init__
    sitemap_urls = None     # derived from the target url in __init__
    original_url = None
    log = None
    config = None
    helper = None

    def __init__(self, helper, url, config, ignore_regex, *args, **kwargs):
        """Derive allowed domain and sitemap URL from the target `url`."""
        self.log = logging.getLogger(__name__)
        self.config = config
        self.helper = helper
        self.original_url = url
        self.allowed_domains = [self.helper.url_extractor.get_allowed_domain(url, config.section('Crawler')['sitemap_allow_subdomains'])]
        self.sitemap_urls = [self.helper.url_extractor.get_sitemap_url(url, config.section('Crawler')['sitemap_allow_subdomains'])]
        self.log.debug(self.sitemap_urls)
        super(SitemapCrawler, self).__init__(*args, **kwargs)

    def parse(self, response):
        # Skip responses whose content type is not crawlable.
        if (not self.helper.parse_crawler.content_type(response)):
            return
        (yield self.helper.parse_crawler.pass_to_pipeline_if_article(response, self.allowed_domains[0], self.original_url))

    # NOTE(review): the two methods below take no `self` — presumably
    # @staticmethod upstream with the decorators stripped; confirm.
    def only_extracts_articles():
        # This spider only yields article items.
        return True

    def supports_site(url):
        # A site is supported when a sitemap can be located for it.
        return UrlExtractor.sitemap_check(url)
def extractXixistranslationsCom(item):
    """Map a release-feed item to a release message by tag.

    Returns None for items without chapter/volume info (or previews),
    a release message on a tag match, otherwise False.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tag_rules = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series, tl_type in tag_rules:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class Session(BaseObject):
    """API session record: browser, geo, platform, timespan and user agent."""

    def __init__(self, api=None, browser=None, city=None, country_code=None, country_name=None, end_date=None, id=None, ip=None, platform=None, region=None, start_date=None, user_agent=None, **kwargs):
        self.api = api
        self.browser = browser
        self.city = city
        self.country_code = country_code
        self.country_name = country_name
        self.end_date = end_date
        self.id = id
        self.ip = ip
        self.platform = platform
        self.region = region
        self.start_date = start_date
        self.user_agent = user_agent
        # Any extra fields become plain attributes.
        for (key, value) in kwargs.items():
            setattr(self, key, value)
        # Attributes left at None were not supplied by the caller, so drop
        # them from the dirty set — presumably populated by BaseObject's
        # attribute tracking; confirm against the base class.
        for key in self.to_dict():
            if (getattr(self, key) is None):
                try:
                    self._dirty_attributes.remove(key)
                except KeyError:
                    continue
class RiverBehindAwake(GenericAction):
    """Awakening: swap the RiverBehind skill for Taichi and drop max life by one."""

    def apply_action(self):
        target = self.target
        assert target.has_skill(RiverBehind)
        target.skills.remove(RiverBehind)
        target.skills.append(Taichi)
        self.game.process_action(MaxLifeChange(target, target, (- 1)))
        return True
class OptionPlotoptionsNetworkgraphMarkerStatesInactive(Options):
    """Generated Highcharts option wrapper: networkgraph inactive marker state.

    NOTE(review): `opacity` appears as a getter/setter pair — presumably
    @property decorators stripped upstream; confirm against the generator.
    """

    def animation(self) -> 'OptionPlotoptionsNetworkgraphMarkerStatesInactiveAnimation':
        # Sub-configuration: inactive-state animation.
        return self._config_sub_data('animation', OptionPlotoptionsNetworkgraphMarkerStatesInactiveAnimation)

    def opacity(self):
        # Default opacity for inactive markers.
        return self._config_get(0.3)

    def opacity(self, num: float):
        self._config(num, js_type=False)
def split_into_formatters(compound):
    """Split an underscore-joined formatter name into individual formatters.

    Known modifier prefixes ('no', 'on', 'bright', 'on_bright') are re-joined
    with the segment that follows them, e.g. 'bold_on_red' ->
    ['bold', 'on_red'] and 'on_bright_red' -> ['on_bright_red'].
    """
    prefixes = ['no', 'on', 'bright', 'on_bright']
    segments = []
    for part in compound.split('_'):
        if segments and segments[-1] in prefixes:
            segments[-1] = segments[-1] + '_' + part
        else:
            segments.append(part)
    return segments
class Transaction():
    """Snapshots its targets via mementos so they can be rolled back."""

    deep = False
    states: List[Callable[([], None)]] = []

    def __init__(self, deep, *targets):
        self.deep = deep
        self.targets = targets
        self.commit()

    def commit(self):
        """Capture the current state of every target."""
        self.states = [memento(target, self.deep) for target in self.targets]

    def rollback(self):
        """Restore every target to its last committed snapshot."""
        for restore in self.states:
            restore()
def main():
    """fdroid signindex entry point: sign repo/archive index files.

    Reads config, requires `jarsigner`, then for each repo dir signs
    index_unsigned.jar (renamed to index.jar), index-v1.json and entry.json
    when present.  Finally records the signed files in the status JSON.
    """
    global config, options
    parser = ArgumentParser()
    common.setup_global_opts(parser)
    options = parser.parse_args()
    config = common.read_config(options)
    if ('jarsigner' not in config):
        raise FDroidException(_('Java jarsigner not found! Install in standard location or set java_paths!'))
    repodirs = ['repo']
    if (config['archive_older'] != 0):
        repodirs.append('archive')
    signed = []
    for output_dir in repodirs:
        if (not os.path.isdir(output_dir)):
            raise FDroidException((("Missing output directory '" + output_dir) + "'"))
        # v0 index: sign the unsigned jar, then move it into place.
        unsigned = os.path.join(output_dir, 'index_unsigned.jar')
        if os.path.exists(unsigned):
            sign_jar(unsigned)
            index_jar = os.path.join(output_dir, 'index.jar')
            os.rename(unsigned, index_jar)
            logging.info(('Signed index in ' + output_dir))
            signed.append(index_jar)
        # v1 JSON index.
        json_name = 'index-v1.json'
        index_file = os.path.join(output_dir, json_name)
        if os.path.exists(index_file):
            sign_index(output_dir, json_name)
            logging.info(('Signed ' + index_file))
            signed.append(index_file)
        # v2 entry file.
        json_name = 'entry.json'
        index_file = os.path.join(output_dir, json_name)
        if os.path.exists(index_file):
            sign_index(output_dir, json_name)
            logging.info(('Signed ' + index_file))
            signed.append(index_file)
    if (not signed):
        logging.info(_('Nothing to do'))
    status_update_json(signed)
def extractSheeptranslator(item):
    """Map a Sheep Translator feed item to a release message.

    Returns None for items lacking chapter/volume info (or previews), a
    release message on a tag or title-prefix match, otherwise False.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tag_rules = [
        ('Commushou no Ore ga, Koushou Skill ni Zenfurishite Tenseishita Kekka', 'Commushou no Ore ga, Koushou Skill ni Zenfurishite Tenseishita Kekka', 'translated'),
        ('Pretty Girl Wants to Become a Good Girl', 'Pretty Girl Wants to Become a Good Girl', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series, tl_type in tag_rules:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    title_rules = [
        ('How to Live in a Different World Chapter', 'How to Live in a Different World', 'translated'),
        ('That Day The World Changed Chapter', 'That Day The World Changed', 'translated'),
        ('That Day The World Changed Chapter', 'That Day The World Changed', 'translated'),
        ('Aim the Deepest Part of the Different World Labyrinth (WN)', 'Aim the Deepest Part of the Different World Labyrinth (WN)', 'translated'),
        ('Arcadias Labyrinth Chapter', "Arcadia's Labyrinth", 'translated'),
    ]
    for prefix, series, tl_type in title_rules:
        if item['title'].lower().startswith(prefix.lower()):
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class PublicIdParameter(click.ParamType):
    """Click parameter type for PublicId values.

    Parses the string form via ``PublicId.from_str`` and reports a usage
    error through ``self.fail`` when the value is malformed.
    """

    # NOTE: the previous no-op __init__ (which only called super().__init__)
    # was removed; click.ParamType's constructor is inherited unchanged.

    def get_metavar(self, param: Any) -> str:
        """Metavar shown in generated --help output."""
        return 'PUBLIC_ID'

    def convert(self, value: str, param: Any, ctx: Optional[click.Context]) -> PublicId:
        """Convert the raw string into a PublicId, failing on invalid input."""
        try:
            return PublicId.from_str(value)
        except ValueError:
            self.fail(value, param, ctx)
def _download_from_s3_bucket(bucket_name, bucket_path, local_path, expected_size_in_bytes=None, progress_indicator=None):
    """Download one object from an S3 bucket to `local_path`.

    Args:
        bucket_name: Name of the S3 bucket.
        bucket_path: Key of the object within the bucket.
        local_path: Destination path on the local filesystem.
        expected_size_in_bytes: Known object size; looked up from S3 when None.
        progress_indicator: Optional callable(bytes_read, total_bytes).

    Raises:
        ImportError: when the optional boto3 dependency is missing.
    """
    try:
        _fake_import_boto3()
        import boto3.s3.transfer
    except ImportError:
        console.error('S3 support is optional. Install it with `python -m pip install esrally[s3]`')
        raise

    class S3ProgressAdapter():
        # Adapts boto3's incremental bytes-amount callback to the
        # (bytes_read_so_far, expected_total) progress signature.

        def __init__(self, size, progress):
            self._expected_size_in_bytes = size
            self._progress = progress
            self._bytes_read = 0

        def __call__(self, bytes_amount):
            self._bytes_read += bytes_amount
            self._progress(self._bytes_read, self._expected_size_in_bytes)

    s3 = boto3.resource('s3')
    bucket = s3.Bucket(bucket_name)
    if (expected_size_in_bytes is None):
        expected_size_in_bytes = bucket.Object(bucket_path).content_length
    progress_callback = (S3ProgressAdapter(expected_size_in_bytes, progress_indicator) if progress_indicator else None)
    # use_threads=False keeps the transfer single-threaded — presumably so
    # progress callbacks arrive in order; confirm intent.
    bucket.download_file(bucket_path, local_path, Callback=progress_callback, Config=boto3.s3.transfer.TransferConfig(use_threads=False))
def main(page: Page):
    """Demo page: four amber containers showing different padding settings."""
    page.title = 'Containers with different padding'
    boxes = [
        Container(content=ElevatedButton('container_1'), bgcolor=colors.AMBER, padding=padding.all(10), width=150, height=150),
        Container(content=ElevatedButton('container_2'), bgcolor=colors.AMBER, padding=padding.all(20), width=150, height=150),
        Container(content=ElevatedButton('container_3'), bgcolor=colors.AMBER, padding=padding.symmetric(horizontal=10), width=150, height=150),
        Container(content=ElevatedButton('container_4'), bgcolor=colors.AMBER, padding=padding.only(left=10), width=150, height=150),
    ]
    page.add(Row(boxes))
class Solution():
    """Valid Word Abbreviation (LeetCode 408)."""

    def validWordAbbreviation(self, word: str, abbr: str) -> bool:
        """Return True when `abbr` is a valid abbreviation of `word`.

        Digit runs consume that many characters of `word`; letters must
        match exactly.  A digit run may not start with '0'.
        """
        pos = 0  # index into word
        num = 0  # numeric run currently being accumulated
        for ch in abbr:
            if ch.isalpha():
                # Flush any pending skip count, then match the letter.
                pos += num
                num = 0
                if pos >= len(word) or ch != word[pos]:
                    return False
                pos += 1
            else:
                # Leading zero in a number is invalid.
                if num == 0 and ch == '0':
                    return False
                num = (num * 10) + (ord(ch) - ord('0'))
        pos += num
        return pos == len(word)
def create_archive(directory_path: str, filter_func: Optional[Callable[[str], bool]]=None, output_file: Optional[str]=None) -> Optional[bytes]:
    """Create a gzip-compressed tar archive of a directory tree.

    Args:
        directory_path: Root directory whose files are archived; member
            names are stored relative to this root.
        filter_func: Optional predicate applied to each file name; files for
            which it returns False are skipped.  (Annotation fixed to
            Optional — the default has always been None.)
        output_file: When given, the archive is written to this path and
            None is returned; otherwise the compressed bytes are returned.

    Returns:
        The gzipped tar bytes, or None when `output_file` was supplied.
    """
    if output_file is None:
        buffer = BytesIO()
    else:
        buffer = FileIO(output_file, mode='w+')
    # BUG FIX (minor): removed the dead `compression_mode = None` assignment
    # that was immediately overwritten.
    with tarfile.open(fileobj=buffer, mode='w:gz') as archive:
        for root, _, files in os.walk(directory_path):
            for file_name in files:
                if filter_func is not None and not filter_func(file_name):
                    continue
                full_path = os.path.join(root, file_name)
                archive.add(full_path, arcname=os.path.relpath(full_path, directory_path))
    if output_file is not None:
        buffer.close()
        return None
    buffer.seek(0)
    return buffer.read()
class EmailSchema(BaseModel):
    """Config schema for a third-party email connector.

    NOTE(review): `_validator` below is a mangled decorator name —
    presumably pydantic's @root_validator; confirm upstream.
    """

    third_party_vendor_name: str
    recipient_email_address: EmailStr
    test_email_address: Optional[EmailStr]
    # Defaults to email-only identities.
    advanced_settings: AdvancedSettings = AdvancedSettings(identity_types=IdentityTypes(email=True, phone_number=False))

    class Config():
        extra = Extra.forbid
        orm_mode = True

    _validator
    def validate_fields(cls, values: Dict[(str, Any)]) -> Dict[(str, Any)]:
        """Require advanced settings with at least one identity type enabled."""
        advanced_settings = values.get('advanced_settings')
        if (not advanced_settings):
            raise ValueError('Must supply advanced settings.')
        identities = advanced_settings.identity_types
        if ((not identities.email) and (not identities.phone_number)):
            raise ValueError('Must supply at least one identity_type.')
        return values
def extractSaikyotranslationBlogspotCom(item):
    """Map a release-feed item to a release message by tag.

    Returns None for items without chapter/volume info (or previews),
    a release message on a tag match, otherwise False.
    """
    (vol, chp, frag, postfix) = extractVolChapterFragmentPostfix(item['title'])
    if (not (chp or vol)) or ('preview' in item['title'].lower()):
        return None
    tag_rules = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series, tl_type in tag_rules:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def _copy_mime_icons():
    """Copy selected icon-theme assets into the MIME icon directory.

    `cp -rL` dereferences symlinks so the copied icons are real files.
    """
    mappings = [
        ('mimetypes', 'mimetypes'),
        ('devices/audio-card.svg', 'firmware.svg'),
        ('devices/media-floppy.svg', 'filesystem.svg'),
        ('places/folder-brown.svg', 'folder.svg'),
        ('status/dialog-error.svg', 'not_analyzed.svg'),
        ('emblems/emblem-symbolic-link.svg', 'mimetypes/inode-symlink.svg'),
    ]
    for source, target in mappings:
        run_cmd_with_logging(f'cp -rL {(ICON_THEME_INSTALL_PATH / source)} {(MIME_ICON_DIR / target)}')
class getCounter_result():
    """Thrift-generated result struct for getCounter: an i64 `success` at field 0."""

    # Populated by the generated module after class creation — presumably via
    # the all_structs fixup pass; confirm against the generated module tail.
    thrift_spec = None
    thrift_field_annotations = None
    thrift_struct_annotations = None
    __init__ = None

    def isUnion():
        # NOTE(review): takes no `self` — presumably @staticmethod upstream.
        return False

    def read(self, iprot):
        """Deserialize this struct from `iprot`, preferring C-accelerated paths."""
        # Fast path: accelerated binary protocol decode.
        if ((isinstance(iprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocol) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0)
            self.checkRequired()
            return
        # Fast path: accelerated compact protocol decode.
        if ((isinstance(iprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(iprot, THeaderProtocol.THeaderProtocol) and (iprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and isinstance(iprot.trans, TTransport.CReadableTransport) and (self.thrift_spec is not None) and (fastproto is not None)):
            fastproto.decode(self, iprot.trans, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2)
            self.checkRequired()
            return
        # Slow path: generic field-by-field decode.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if (ftype == TType.STOP):
                break
            if (fid == 0):
                if (ftype == TType.I64):
                    self.success = iprot.readI64()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.checkRequired()

    def checkRequired(self):
        # Result structs have no required fields.
        return

    def write(self, oprot):
        """Serialize this struct to `oprot`, preferring C-accelerated paths."""
        if ((isinstance(oprot, TBinaryProtocol.TBinaryProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocol) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_BINARY_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=0))
            return
        if ((isinstance(oprot, TCompactProtocol.TCompactProtocolAccelerated) or (isinstance(oprot, THeaderProtocol.THeaderProtocol) and (oprot.get_protocol_id() == THeaderProtocol.THeaderProtocol.T_COMPACT_PROTOCOL))) and (self.thrift_spec is not None) and (fastproto is not None)):
            oprot.trans.write(fastproto.encode(self, [self.__class__, self.thrift_spec, False], utf8strings=UTF8STRINGS, protoid=2))
            return
        # Slow path: generic encode; `success` is written only when set.
        oprot.writeStructBegin('getCounter_result')
        if (self.success != None):
            oprot.writeFieldBegin('success', TType.I64, 0)
            oprot.writeI64(self.success)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def __repr__(self):
        L = []
        padding = (' ' * 4)
        value = pprint.pformat(self.success, indent=0)
        value = padding.join(value.splitlines(True))
        L.append((' success=%s' % value))
        return ('%s(\n%s)' % (self.__class__.__name__, ',\n'.join(L)))

    def __eq__(self, other):
        if (not isinstance(other, self.__class__)):
            return False
        return (self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return (not (self == other))

    if (not six.PY2):
        # Python 3: defining __eq__ clears inherited hashing; restore identity hash.
        __hash__ = object.__hash__
class TestModeTrackProcessor(TrackProcessor):
    """Shrinks a track for test mode: 1k-document corpora, minimal iterations,
    collapsed time periods and unthrottled throughput."""

    def __init__(self, cfg):
        self.test_mode_enabled = cfg.opts('track', 'test.mode.enabled', mandatory=False, default_value=False)
        self.logger = logging.getLogger(__name__)

    def on_after_load_track(self, track):
        """Return the track unchanged unless test mode is enabled, in which
        case mutate corpora and task schedules in place."""
        if (not self.test_mode_enabled):
            return track
        self.logger.info('Preparing track [%s] for test mode.', str(track))
        for corpus in track.corpora:
            if self.logger.isEnabledFor(logging.DEBUG):
                self.logger.debug('Reducing corpus size to 1000 documents for [%s]', corpus.name)
            for document_set in corpus.documents:
                if document_set.is_bulk:
                    document_set.number_of_documents = 1000
                    # Point at the pre-built "-1k" variants of the corpus files.
                    if document_set.has_compressed_corpus():
                        (path, ext) = io.splitext(document_set.document_archive)
                        (path_2, ext_2) = io.splitext(path)
                        document_set.document_archive = f'{path_2}-1k{ext_2}{ext}'
                        document_set.document_file = f'{path_2}-1k{ext_2}'
                    elif document_set.has_uncompressed_corpus():
                        (path, ext) = io.splitext(document_set.document_file)
                        document_set.document_file = f'{path}-1k{ext}'
                    else:
                        raise exceptions.RallyAssertionError(f'Document corpus [{corpus.name}] has neither compressed nor uncompressed corpus.')
                    # Recorded sizes no longer match the shrunken files.
                    document_set.compressed_size_in_bytes = None
                    document_set.uncompressed_size_in_bytes = None
        for challenge in track.challenges:
            for task in challenge.schedule:
                for leaf_task in task:
                    # Cap warmup/measurement iterations at one per client.
                    if ((leaf_task.warmup_iterations is not None) and (leaf_task.warmup_iterations > leaf_task.clients)):
                        count = leaf_task.clients
                        if self.logger.isEnabledFor(logging.DEBUG):
                            self.logger.debug('Resetting warmup iterations to %d for [%s]', count, str(leaf_task))
                        leaf_task.warmup_iterations = count
                    if ((leaf_task.iterations is not None) and (leaf_task.iterations > leaf_task.clients)):
                        count = leaf_task.clients
                        if self.logger.isEnabledFor(logging.DEBUG):
                            self.logger.debug('Resetting measurement iterations to %d for [%s]', count, str(leaf_task))
                        leaf_task.iterations = count
                    # Collapse time-based periods to (near) zero.
                    if ((leaf_task.warmup_time_period is not None) and (leaf_task.warmup_time_period > 0)):
                        leaf_task.warmup_time_period = 0
                        if self.logger.isEnabledFor(logging.DEBUG):
                            self.logger.debug('Resetting warmup time period for [%s] to [%d] seconds.', str(leaf_task), leaf_task.warmup_time_period)
                    if ((leaf_task.time_period is not None) and (leaf_task.time_period > 10)):
                        leaf_task.time_period = 10
                        if self.logger.isEnabledFor(logging.DEBUG):
                            self.logger.debug('Resetting measurement time period for [%s] to [%d] seconds.', str(leaf_task), leaf_task.time_period)
                    if leaf_task.target_throughput:
                        # Remove throttling in test mode but keep the unit string.
                        original_throughput = leaf_task.target_throughput
                        leaf_task.params.pop('target-throughput', None)
                        leaf_task.params.pop('target-interval', None)
                        leaf_task.params['target-throughput'] = f'{sys.maxsize} {original_throughput.unit}'
        return track
def test_depth_mode_1_dir_down_nospace(tmpdir, merge_lis_prs):
    """curves() must reject a depth-mode DFSR that records no frame spacing."""
    path = os.path.join(str(tmpdir), 'depth-dir-down.lis')
    parts = (headers + ['data/lis/records/curves/dfsr-depth-spacing-no.lis.part']) + trailers
    merge_lis_prs(path, parts)
    with lis.load(path) as (f,):
        fmtspec = f.data_format_specs()[0]
        with pytest.raises(ValueError) as exc:
            _ = lis.curves(f, fmtspec)
        assert ('No spacing recorded' in str(exc.value))
.parametrize('data', (decode_hex('0x60eff3'), decode_hex('0x60eff3'), decode_hex('0x60eff3'), decode_hex('0x60eff3')))  # NOTE(review): mangled decorator — presumably @pytest.mark.parametrize; confirm upstream.
def test_state_revert_on_reserved_0xEF_byte_for_create_transaction_post_london(london_plus_miner, funded_address, funded_address_private_key, data):
    """EIP-3541: post-London, a create transaction whose deployed code starts
    with the reserved 0xEF byte must error and consume all gas."""
    chain = london_plus_miner
    vm = chain.get_vm()
    initial_block_header = chain.get_block().header
    initial_balance = vm.state.get_balance(funded_address)
    assert (initial_balance > 1000000)
    # Control case: identical create whose code starts with 0xFE succeeds.
    # NOTE(review): `max_fee_per_gas=` lost its value in extraction (syntax hole) — restore from upstream.
    create_successful_contract_transaction = new_dynamic_fee_transaction(vm=vm, from_=funded_address, to=Address(b''), amount=0, private_key=funded_address_private_key, gas=53354, max_priority_fee_per_gas=100, max_fee_per_gas=, nonce=0, data=decode_hex('0x60fef3'))
    (block_import, _, computations) = chain.mine_all([create_successful_contract_transaction], gas_limit=84081)
    successful_create_computation = computations[0]
    successful_create_computation_state = successful_create_computation.state
    mined_header = block_import.imported_block.header
    gas_used = mined_header.gas_used
    mined_txn = block_import.imported_block.transactions[0]
    new_balance = successful_create_computation_state.get_balance(funded_address)
    assert successful_create_computation.is_success
    assert (successful_create_computation_state.get_nonce(funded_address) == 1)
    assert (gas_used == 53354)
    # Sender pays priority fee plus base fee for the gas actually used.
    fees_consumed = ((mined_txn.max_priority_fee_per_gas * gas_used) + (initial_block_header.base_fee_per_gas * gas_used))
    assert (new_balance == (initial_balance - fees_consumed))
    # Reserved-byte case: must error, mention 0xEF, and burn the full gas allowance.
    # NOTE(review): `max_fee_per_gas=` lost its value in extraction (syntax hole) — restore from upstream.
    create_contract_txn_reserved_byte = new_dynamic_fee_transaction(vm=vm, from_=funded_address, to=Address(b''), amount=0, private_key=funded_address_private_key, gas=60000, max_priority_fee_per_gas=100, max_fee_per_gas=, nonce=1, data=data)
    (block_import, _, computations) = chain.mine_all([create_contract_txn_reserved_byte], gas_limit=84082)
    reverted_computation = computations[0]
    mined_header = block_import.imported_block.header
    assert reverted_computation.is_error
    assert ('0xef' in repr(reverted_computation.error).lower())
    assert (mined_header.gas_used == 60000)
class DocClassificationModule(pl.LightningModule):
    """LightningModule for document classification.

    Bundles the text transform, the classifier network, its optimizer and the
    evaluation metrics (accuracy, macro F-beta) behind the standard
    PyTorch Lightning train/validation/test hooks.
    """

    def __init__(self, transform: nn.Module, model: nn.Module, optim: Optimizer, num_classes: int) -> None:
        super().__init__()
        self.transform = transform
        self.model = model
        self.optim = optim
        self.loss = CrossEntropyLoss()
        self.accuracy = metrics.Accuracy()
        # Macro average so every class contributes equally regardless of support.
        self.fbeta = metrics.FBetaScore(num_classes=num_classes, average='macro')

    @staticmethod
    def from_config(transform: DictConfig, model: DictConfig, optim: DictConfig, num_classes: int) -> 'DocClassificationModule':
        """Build the module from Hydra configs; the optimizer is bound to the
        instantiated model's parameters.

        NOTE(review): the original source had a bare, undefined ``_entry``
        token where a decorator belongs; ``@staticmethod`` matches the
        self-less signature — confirm against the project's config helpers.
        """
        transform = hydra.utils.instantiate(transform)
        model = hydra.utils.instantiate(model)
        optim = hydra.utils.instantiate(optim, model.parameters())
        return DocClassificationModule(transform, model, optim, num_classes)

    def setup(self, stage: Optional[str]) -> None:
        # No stage-specific preparation required.
        pass

    def forward(self, batch: Dict[str, Any]) -> torch.Tensor:
        """Transform a raw batch and return the model's logits."""
        token_ids = self.transform(batch)['token_ids']
        # Keeps TorchScript happy: the transform output dict is untyped.
        assert torch.jit.isinstance(token_ids, torch.Tensor)
        return self.model(token_ids)

    def configure_optimizers(self) -> Optimizer:
        return self.optim

    def training_step(self, batch: Mapping[str, torch.Tensor], batch_idx: int, *args: Any, **kwargs: Any) -> torch.Tensor:
        logits = self.model(batch['token_ids'])
        loss = self.loss(logits, batch['label_ids'])
        self.log('train_loss', loss)
        return loss

    def _eval_step(self, batch: Mapping[str, torch.Tensor], prefix: str) -> None:
        """Shared logic for validation_step/test_step; ``prefix`` selects the
        log-key namespace ('val' or 'test')."""
        logits = self.model(batch['token_ids'])
        loss = self.loss(logits, batch['label_ids'])
        # Explicit dim: implicit-dim softmax is deprecated; dim=-1 matches the
        # previous behavior for (batch, num_classes) logits.
        scores = F.softmax(logits, dim=-1)
        self.accuracy(scores, batch['label_ids'])
        self.fbeta(scores, batch['label_ids'])
        self.log(f'{prefix}_loss', loss)
        self.log(f'{prefix}_acc', self.accuracy)
        self.log(f'{prefix}_f1', self.fbeta)

    def validation_step(self, batch: Mapping[str, torch.Tensor], batch_idx: int, *args: Any, **kwargs: Any) -> None:
        self._eval_step(batch, 'val')

    def test_step(self, batch: Mapping[str, torch.Tensor], batch_idx: int, *args: Any, **kwargs: Any) -> None:
        self._eval_step(batch, 'test')
def list_common(queryset: InvitationQuerySet, iterator: t.Optional[str], limit: int) -> InvitationListResponse:
    """Return one page of invitations, cursor-paginated by ascending id.

    ``iterator`` is the uid of the last item of the previous page (None for
    the first page).  The response carries the uid to resume from and a
    ``done`` flag signalling whether more pages remain.
    """
    queryset = queryset.order_by('id')
    if iterator is not None:
        # Resolve the cursor uid to a row and continue strictly after it.
        anchor = get_object_or_404(queryset, uid=iterator)
        queryset = queryset.filter(id__gt=anchor.id)
    # Fetch one extra row so we can tell whether another page exists.
    page = list(queryset[:limit + 1])
    done = len(page) < limit + 1
    if not done:
        page = page[:-1]
    next_iterator = page[-1].uid if len(page) > 0 else None
    return InvitationListResponse(data=page, iterator=next_iterator, done=done)
class CssSelectOptionSelected(CssStyle.Style):
    """Style hook for the active option of a Bootstrap select dropdown menu."""

    classname = 'dropdown-menu > .active > a, .dropdown-menu > .active > a:hover, .dropdown-menu > .active > a:focus'

    def customize(self):
        # Highlight the active entry with the theme's notch color and the
        # lightest grey for the text; !important overrides the framework CSS.
        active_colors = {
            'background-color': self.page.theme.notch(-1),
            'color': self.page.theme.greys[0],
        }
        self.css(active_colors, important=True)
def get_apk_id_androguard(apkfile):
    """Extract (appid, versionCode, versionName) from an APK's AndroidManifest.xml.

    Parses the binary AXML stream directly with androguard's AXMLParser and
    stops as soon as the <manifest> start tag has been fully read, since all
    three values live on that element.

    Raises:
        FDroidException: if the APK file does not exist.
        RuntimeError: if <manifest> is not the first element in the manifest.
    """
    if not os.path.exists(apkfile):
        raise FDroidException(_("Reading packageName/versionCode/versionName failed, APK invalid: '{apkfilename}'").format(apkfilename=apkfile))
    from androguard.core.bytecodes.axml import AXMLParser, format_value, START_TAG, END_TAG, TEXT, END_DOCUMENT

    appid = None
    versionCode = None
    versionName = None
    with zipfile.ZipFile(apkfile) as apk:
        with apk.open('AndroidManifest.xml') as manifest:
            axml = AXMLParser(manifest.read())
            count = 0
            while axml.is_valid():
                _type = next(axml)
                count += 1
                if _type == START_TAG:
                    for i in range(0, axml.getAttributeCount()):
                        name = axml.getAttributeName(i)
                        _type = axml.getAttributeValueType(i)
                        _data = axml.getAttributeValueData(i)
                        value = format_value(_type, _data, lambda _: axml.getAttributeValue(i))
                        if appid is None and name == 'package':
                            appid = value
                        elif versionCode is None and name == 'versionCode':
                            # versionCode may be serialized as hex or decimal.
                            if value.startswith('0x'):
                                versionCode = int(value, 16)
                            else:
                                versionCode = int(value)
                        elif versionName is None and name == 'versionName':
                            versionName = value
                    # All three attributes live on <manifest>; stop once read.
                    if axml.getName() == 'manifest':
                        break
                elif _type in (END_TAG, TEXT, END_DOCUMENT):
                    raise RuntimeError('{path}: <manifest> must be the first element in AndroidManifest.xml'.format(path=apkfile))

    # BUGFIX: the original compared versionName[0] == '', which can never be
    # true for a non-empty string.  A leading '@' marks an unresolved resource
    # reference, which is exactly what ensure_final_value() resolves below.
    if not versionName or versionName[0] == '@':
        a = _get_androguard_APK(apkfile)
        versionName = ensure_final_value(a.package, a.get_android_resources(), a.get_androidversion_name())
    if not versionName:
        versionName = ''
    return (appid, versionCode, versionName.strip('\x00'))
def test_reject_incorrect_temperature():
    """A zero softmax temperature must be rejected both at construction time
    and at prediction time when smuggled in via attrs."""
    # Constructor-level validation.
    with pytest.raises(ValueError, match='softmax temperature.*zero'):
        Softmax_v2(normalize_outputs=False, temperature=0.0)
    # Sneak an invalid temperature past the constructor, then predict.
    bad_model = Softmax_v2(normalize_outputs=False)
    bad_model.attrs['softmax_temperature'] = 0.0
    bad_model.initialize(inputs, outputs)
    with pytest.raises(ValueError, match='softmax temperature.*zero'):
        bad_model(inputs, is_train=False)
def test_chain(parser):
    """A straight-line chain of three blocks parses into a path graph with
    two unconditional edges and no instructions."""
    blocks = [
        MockBlock(0, [MockEdge(0, 1, BranchType.UnconditionalBranch)]),
        MockBlock(1, [MockEdge(1, 2, BranchType.UnconditionalBranch)]),
        MockBlock(2, []),
    ]
    cfg = parser.parse(MockFunction(blocks))
    assert [node.name for node in cfg.nodes] == [0, 1, 2]
    assert [(e.source.name, e.sink.name) for e in cfg.edges] == [(0, 1), (1, 2)]
    assert len(list(cfg.instructions)) == 0
    assert {e.condition_type for e in cfg.edges} == {BasicBlockEdgeCondition.unconditional}
def cleanup_slice(key: Union[(int, slice)], upper_bound) -> Union[(slice, int)]:
    """Normalize an indexing key into an int or a fully-specified slice.

    Ints pass through unchanged; ndarray keys are delegated to
    ``cleanup_array_slice``; a one-slice tuple is unwrapped.  Slice defaults
    are filled in (start=0, stop=upper_bound, step=1).

    Raises:
        TypeError: for unsupported key types or a tuple not led by a slice.
        IndexError: for negative components or stop beyond ``upper_bound``.
    """
    if isinstance(key, int):
        return key
    if isinstance(key, np.ndarray):
        # Array-based fancy indexing is handled separately.
        return cleanup_array_slice(key, upper_bound)
    if isinstance(key, tuple):
        if not isinstance(key[0], slice):
            raise TypeError('Tuple slicing must have slice object in first position')
        key = key[0]
    if not isinstance(key, slice):
        raise TypeError('Must pass slice or int object')

    start, stop, step = key.start, key.stop, key.step
    # Reject negative components up front; None means "use the default".
    for attr in (start, stop, step):
        if attr is not None and attr < 0:
            raise IndexError('Negative indexing not supported.')
    if start is None:
        start = 0
    if stop is None:
        stop = upper_bound
    elif stop > upper_bound:
        raise IndexError(f'Index: `{stop}` out of bounds for feature array of size: `{upper_bound}`')
    # BUGFIX(cleanup): the original re-read key.step here a second time,
    # discarding nothing but obscuring the flow; one read suffices.
    if step is None:
        step = 1
    return slice(start, stop, step)
def test_drift_allan_with_drift():
    """Allan-deviation drift estimation on noisy channels with a linear drift
    added on top of a fixed per-channel baseline."""
    baseline = np.arange(1, 11)
    rng = default_rng(0)
    noise = rng.normal(0, 0.5, (100, 10))
    # Linear drift 0.01..1.00 applied identically to every channel.
    drift = (np.arange(1, 101) / 100).reshape((1, -1))
    samples = baseline + noise + drift.T
    t2, adevs = calc_drift(samples, sampling_rate=1, method='Allan')
    expected_t2 = np.arange(1, 50)
    expected_adevs_0 = np.array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.1549156, 0., 0., 0.1684516, 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.249568, 0.2585531, 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.])
    np.testing.assert_array_equal(t2, expected_t2)
    np.testing.assert_array_almost_equal(adevs[0], expected_adevs_0)
def handle_dynamodb(operation_name, service, instance, args, kwargs, context):
    """Build APM span handler info for a botocore DynamoDB call."""
    span_type = 'db'
    span_subtype = 'dynamodb'
    span_action = 'query'

    # The second positional arg, when present, is the request payload dict.
    params = args[1] if len(args) > 1 else None
    table = params['TableName'] if params is not None and 'TableName' in params else ''
    signature = f'DynamoDB {operation_name} {table}'.rstrip()

    context['db'] = {'type': 'dynamodb', 'instance': instance.meta.region_name}
    # Only Query carries a key condition worth recording as the statement.
    if operation_name == 'Query' and params is not None and 'KeyConditionExpression' in params:
        context['db']['statement'] = params['KeyConditionExpression']

    context['destination']['service'] = {'name': span_subtype, 'resource': table, 'type': span_type}
    return HandlerInfo(signature, span_type, span_subtype, span_action, context)
class VelokStation(BikeShareStation):
    """A single Velok bike-share station built from one GeoJSON feature."""

    def __init__(self, data):
        props = data['properties']
        # GeoJSON coordinates are ordered (longitude, latitude).
        lon, lat = map(float, data['geometry']['coordinates'])
        super(VelokStation, self).__init__(
            name=props['name'],
            latitude=lat,
            longitude=lon,
            # Regular and electric bikes both count as available bikes.
            bikes=props['available_bikes'] + props['available_ebikes'],
            free=props['available_docks'],
            extra={
                'uid': props['id'].split(':')[-1],
                'address': props['address'],
                'photo': props['photo'],
                'slots': props['docks'],
                'online': props['open'],
                'ebikes': props['available_ebikes'],
            },
        )
def file_upload(data):
    """Upload tests/smoke/train.csv through the session in ``data`` and store
    the returned id under ``data['file_id']``.

    Returns the (mutated) ``data`` dict so calls can be chained.
    """
    file_name = 'train.csv'
    u = url('items/file_upload/' + file_name)
    # BUGFIX: open the fixture with a context manager so the handle is closed
    # even if the request below fails (the original leaked the file object).
    with open('tests/smoke/' + file_name) as fh:
        files = {file_name: fh.read()}
    r = data['session'].post(u, files=files)
    assert_ret_code(r, 200)
    print('File uploaded:' + r.content.decode())
    data['file_id'] = r.json()['id']
    return data
def test_instructions_only_with_tags():
    """Compiler-idiom substitution keeps tags on untouched instructions and
    replaces the tagged division idiom with an untagged BinaryOperation."""
    cfg = ControlFlowGraph()
    eax = Variable('eax', Integer.int32_t())
    ecx = Variable('ecx', Integer.int32_t())
    one = Constant(1, Integer.int32_t())
    ten = Constant(10, Integer.int32_t())
    zero = Constant(0, Integer.int32_t())
    tags = [Tag('compiler_idiom: division', 'ecx,10')]
    block = BasicBlock(
        0,
        instructions=[
            Assignment(eax, ecx, tags=tags),
            Assignment(eax, BinaryOperation(OperationType.plus, [eax, one]), tags=tags),
            Assignment(eax, BinaryOperation(OperationType.plus, [eax, zero]), tags=tags),
        ],
    )
    cfg.add_node(block)
    CompilerIdiomHandling().run(MockTask(cfg))
    assert block.instructions == [
        Assignment(eax, ecx, tags=tags),
        Assignment(eax, BinaryOperation(OperationType.plus, [eax, one]), tags=tags),
        Assignment(eax, BinaryOperation(OperationType.divide, [ecx, ten])),
    ]
@pytest.fixture(scope='function')
def twilio_conversations_secrets(saas_config):
    """Collect Twilio Conversations connector secrets, preferring values from
    ``saas_config`` and falling back to the shared secrets store.

    NOTE(review): the original line began with a bare ``(scope='function')``
    — a mangled decorator that is a syntax error as written;
    ``@pytest.fixture(scope='function')`` is restored here — confirm against
    sibling connector fixtures.
    """
    return {
        'domain': pydash.get(saas_config, 'twilio_conversations.domain') or secrets['domain'],
        'account_id': pydash.get(saas_config, 'twilio_conversations.account_id') or secrets['account_id'],
        'password': pydash.get(saas_config, 'twilio_conversations.password') or secrets['password'],
        # Reference into the twilio_postgres dataset used to resolve users.
        'twilio_user_id': {
            'dataset': 'twilio_postgres',
            'field': 'twilio_users.twilio_user_id',
            'direction': 'from',
        },
    }
def process_args():
    """Parse the command-line arguments for the tools validator.

    Returns an argparse.Namespace with ``username``, ``password`` and the
    boolean ``reg`` registry-lookup switch.
    """
    arg_parser = argparse.ArgumentParser(
        prog='Tools Validator',
        description='This script will convert the tool and resources table to a yaml file while injecting bio.tools and FAIRsharing IDs where needed.',
    )
    arg_parser.add_argument('--username', help='Specify the FAIRsharing username')
    arg_parser.add_argument('--password', help='Specify the FAIRsharing password')
    # Off by default; opt in to the external registry lookups.
    arg_parser.add_argument('--reg', default=False, action='store_true', help='Enable TeSS, bio.tools and FAIRsharing lookup')
    return arg_parser.parse_args()
class TicketField(BaseObject):
    """A Zendesk ticket field record.

    All constructor arguments map 1:1 onto API attributes; unknown keyword
    arguments are attached verbatim.  ``created``/``updated`` expose the raw
    ``*_at`` timestamp strings as parsed datetimes.

    NOTE(review): the original text carried the created/updated getter/setter
    pairs with no decorators, so each setter silently shadowed its getter;
    the ``@property`` / ``.setter`` decorators are restored here.
    """

    def __init__(self, api=None, active=None, collapsed_for_agents=None, created_at=None, description=None, editable_in_portal=None, id=None, position=None, raw_description=None, raw_title=None, raw_title_in_portal=None, regexp_for_validation=None, required=None, required_in_portal=None, tag=None, title=None, title_in_portal=None, type=None, updated_at=None, url=None, visible_in_portal=None, **kwargs):
        self.api = api
        self.active = active
        self.collapsed_for_agents = collapsed_for_agents
        self.created_at = created_at
        self.description = description
        self.editable_in_portal = editable_in_portal
        self.id = id
        self.position = position
        self.raw_description = raw_description
        self.raw_title = raw_title
        self.raw_title_in_portal = raw_title_in_portal
        self.regexp_for_validation = regexp_for_validation
        self.required = required
        self.required_in_portal = required_in_portal
        self.tag = tag
        self.title = title
        self.title_in_portal = title_in_portal
        self.type = type
        self.updated_at = updated_at
        self.url = url
        self.visible_in_portal = visible_in_portal

        # Attach any extra attributes the API returned that we don't model.
        for (key, value) in kwargs.items():
            setattr(self, key, value)

        # Attributes left at None were not set by the caller; drop them from
        # the dirty set so they are not sent back to the API.
        for key in self.to_dict():
            if getattr(self, key) is None:
                try:
                    self._dirty_attributes.remove(key)
                except KeyError:
                    continue

    @property
    def created(self):
        """``created_at`` parsed into a datetime (None when unset)."""
        if self.created_at:
            return dateutil.parser.parse(self.created_at)

    @created.setter
    def created(self, created):
        if created:
            self.created_at = created

    @property
    def updated(self):
        """``updated_at`` parsed into a datetime (None when unset)."""
        if self.updated_at:
            return dateutil.parser.parse(self.updated_at)

    @updated.setter
    def updated(self, updated):
        if updated:
            self.updated_at = updated
class GCRARCNode():
    """A directory node inside a GameCube RARC archive.

    Parses one 16-byte node record from ``file_bytes`` at ``offset`` and
    resolves its name through the archive's string table.  Nodes form a tree
    via ``parent``/``children`` and own a list of file entries.
    """

    def __init__(self, offset, file_bytes, string_table_bytes):
        # First 4 bytes: ASCII resource-type tag of this node.
        self.resource_type = file_bytes[(offset + 0):(offset + 4)].decode('utf-8')
        # All numeric fields are big-endian ('>' formats).
        self.name_offset = struct.unpack_from('>I', file_bytes, (offset + 4))[0]
        self.name_hash = struct.unpack_from('>H', file_bytes, (offset + 8))[0]
        self.file_entry_count = struct.unpack_from('>H', file_bytes, (offset + 10))[0]
        self.first_file_entry_index = struct.unpack_from('>I', file_bytes, (offset + 12))[0]
        self.name = read_string_from_bytes(self.name_offset, string_table_bytes)
        self.entries = []  # GCRARCFileEntry objects owned by this node
        self.parent: Optional[GCRARCNode] = None
        self.children = []  # child GCRARCNode objects

    def get_entries(self, file_entry_offset, file_bytes, string_table_bytes):
        """Parse this node's file entries (20 bytes each) from the entry table."""
        for i in range(self.file_entry_count):
            # Entries are contiguous, starting at first_file_entry_index.
            entry_offset = (file_entry_offset + ((self.first_file_entry_index + i) * 20))
            new_entry = GCRARCFileEntry(entry_offset, file_bytes, string_table_bytes)
            new_entry.parent_node = self
            self.entries.append(new_entry)

    def emit_to_filesystem_recursive(self, root_path: Path, file_data_offset, file_bytes):
        """Write this directory, its subdirectories and its files under root_path."""
        dir_path = (root_path / self.get_full_directory_path())
        dir_path.mkdir(parents=True, exist_ok=True)
        for n in self.children:
            n.emit_to_filesystem_recursive(root_path, file_data_offset, file_bytes)
        for e in self.entries:
            e.emit_to_filesystem(root_path, file_data_offset, file_bytes)

    def emit_config(self, level):
        """Render this node and its descendants as indented YAML-style lines.

        NOTE(review): the triple-quoted literals appear to embed a trailing
        newline (the source was whitespace-mangled); confirm the exact
        literal contents against the original file.
        """
        lines = []
        lines.append(((' ' * level) + f'''- res_type: "{self.resource_type}"
'''))
        lines.append(((' ' * level) + f''' name: "{self.name}"
'''))
        if (len(self.entries) != 0):
            lines.append(((' ' * level) + ' entries:\n'))
            for e in self.entries:
                entry_config = e.emit_config((level + 1))
                if (entry_config != None):
                    lines.extend(entry_config)
        if (len(self.children) != 0):
            lines.append(((' ' * level) + ' subdirs:\n'))
            for n in self.children:
                node_config = n.emit_config((level + 1))
                if (node_config != None):
                    lines.extend(node_config)
        return lines

    def print_recursive(self, level):
        """Debug helper: print the directory tree, indented by depth."""
        print(((' ' * level) + self.name))
        for n in self.children:
            n.print_recursive((level + 1))

    def get_full_directory_path(self):
        """Path of this node relative to the archive root (root name included)."""
        path_components: List[str] = []
        node: Optional[GCRARCNode] = self
        while (node is not None):
            # Walk up to the root, prepending each ancestor's name.
            path_components.insert(0, node.name)
            node = node.parent
        return Path(*path_components)
class OptionSeriesScatter(Options):
    """Highcharts ``series.scatter`` configuration options.

    Each option is exposed as a read/write property: the getter returns the
    configured value (falling back to the Highcharts default passed to
    ``_config_get``) and the setter writes the value into the underlying
    configuration via ``_config``.  Nested option groups are returned as
    sub-data objects via ``_config_sub_data``.

    NOTE(review): the original text carried every getter/setter pair with no
    decorators, so each setter silently shadowed its getter; the
    ``@property`` / ``@<name>.setter`` decorators are restored here.
    """

    @property
    def accessibility(self) -> 'OptionSeriesScatterAccessibility':
        return self._config_sub_data('accessibility', OptionSeriesScatterAccessibility)

    @property
    def allowPointSelect(self):
        return self._config_get(False)

    @allowPointSelect.setter
    def allowPointSelect(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animation(self):
        return self._config_get(True)

    @animation.setter
    def animation(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def animationLimit(self):
        return self._config_get(None)

    @animationLimit.setter
    def animationLimit(self, num: float):
        self._config(num, js_type=False)

    @property
    def boostBlending(self):
        return self._config_get('undefined')

    @boostBlending.setter
    def boostBlending(self, value: Any):
        self._config(value, js_type=False)

    @property
    def boostThreshold(self):
        return self._config_get(5000)

    @boostThreshold.setter
    def boostThreshold(self, num: float):
        self._config(num, js_type=False)

    @property
    def className(self):
        return self._config_get(None)

    @className.setter
    def className(self, text: str):
        self._config(text, js_type=False)

    @property
    def clip(self):
        return self._config_get(True)

    @clip.setter
    def clip(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def cluster(self) -> 'OptionSeriesScatterCluster':
        return self._config_sub_data('cluster', OptionSeriesScatterCluster)

    @property
    def color(self):
        return self._config_get(None)

    @color.setter
    def color(self, text: str):
        self._config(text, js_type=False)

    @property
    def colorAxis(self):
        return self._config_get(0)

    @colorAxis.setter
    def colorAxis(self, num: float):
        self._config(num, js_type=False)

    @property
    def colorIndex(self):
        return self._config_get(None)

    @colorIndex.setter
    def colorIndex(self, num: float):
        self._config(num, js_type=False)

    @property
    def colorKey(self):
        return self._config_get('y')

    @colorKey.setter
    def colorKey(self, text: str):
        self._config(text, js_type=False)

    @property
    def connectEnds(self):
        return self._config_get(None)

    @connectEnds.setter
    def connectEnds(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def connectNulls(self):
        return self._config_get(False)

    @connectNulls.setter
    def connectNulls(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def crisp(self):
        return self._config_get(True)

    @crisp.setter
    def crisp(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def cursor(self):
        return self._config_get(None)

    @cursor.setter
    def cursor(self, text: str):
        self._config(text, js_type=False)

    @property
    def custom(self):
        return self._config_get(None)

    @custom.setter
    def custom(self, value: Any):
        self._config(value, js_type=False)

    @property
    def dashStyle(self):
        return self._config_get('Solid')

    @dashStyle.setter
    def dashStyle(self, text: str):
        self._config(text, js_type=False)

    @property
    def data(self) -> 'OptionSeriesScatterData':
        return self._config_sub_data('data', OptionSeriesScatterData)

    @property
    def dataLabels(self) -> 'OptionSeriesScatterDatalabels':
        return self._config_sub_data('dataLabels', OptionSeriesScatterDatalabels)

    @property
    def dataSorting(self) -> 'OptionSeriesScatterDatasorting':
        return self._config_sub_data('dataSorting', OptionSeriesScatterDatasorting)

    @property
    def description(self):
        return self._config_get(None)

    @description.setter
    def description(self, text: str):
        self._config(text, js_type=False)

    @property
    def dragDrop(self) -> 'OptionSeriesScatterDragdrop':
        return self._config_sub_data('dragDrop', OptionSeriesScatterDragdrop)

    @property
    def enableMouseTracking(self):
        return self._config_get(True)

    @enableMouseTracking.setter
    def enableMouseTracking(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def events(self) -> 'OptionSeriesScatterEvents':
        return self._config_sub_data('events', OptionSeriesScatterEvents)

    @property
    def findNearestPointBy(self):
        return self._config_get('xy')

    @findNearestPointBy.setter
    def findNearestPointBy(self, text: str):
        self._config(text, js_type=False)

    @property
    def getExtremesFromAll(self):
        return self._config_get(False)

    @getExtremesFromAll.setter
    def getExtremesFromAll(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def id(self):
        return self._config_get(None)

    @id.setter
    def id(self, text: str):
        self._config(text, js_type=False)

    @property
    def inactiveOtherPoints(self):
        return self._config_get(False)

    @inactiveOtherPoints.setter
    def inactiveOtherPoints(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def includeInDataExport(self):
        return self._config_get(None)

    @includeInDataExport.setter
    def includeInDataExport(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def index(self):
        return self._config_get(None)

    @index.setter
    def index(self, num: float):
        self._config(num, js_type=False)

    @property
    def jitter(self) -> 'OptionSeriesScatterJitter':
        return self._config_sub_data('jitter', OptionSeriesScatterJitter)

    @property
    def keys(self):
        return self._config_get(None)

    @keys.setter
    def keys(self, value: Any):
        self._config(value, js_type=False)

    @property
    def label(self) -> 'OptionSeriesScatterLabel':
        return self._config_sub_data('label', OptionSeriesScatterLabel)

    @property
    def legendIndex(self):
        return self._config_get(None)

    @legendIndex.setter
    def legendIndex(self, num: float):
        self._config(num, js_type=False)

    @property
    def legendSymbol(self):
        return self._config_get('rectangle')

    @legendSymbol.setter
    def legendSymbol(self, text: str):
        self._config(text, js_type=False)

    @property
    def linecap(self):
        # BUGFIX: the original passed the builtin ``round`` as the default;
        # the Highcharts lineCap default is the string 'round'.
        return self._config_get('round')

    @linecap.setter
    def linecap(self, value: Any):
        self._config(value, js_type=False)

    @property
    def lineWidth(self):
        return self._config_get(0)

    @lineWidth.setter
    def lineWidth(self, num: float):
        self._config(num, js_type=False)

    @property
    def linkedTo(self):
        return self._config_get(None)

    @linkedTo.setter
    def linkedTo(self, text: str):
        self._config(text, js_type=False)

    @property
    def marker(self) -> 'OptionSeriesScatterMarker':
        return self._config_sub_data('marker', OptionSeriesScatterMarker)

    @property
    def name(self):
        return self._config_get(None)

    @name.setter
    def name(self, text: str):
        self._config(text, js_type=False)

    @property
    def negativeColor(self):
        return self._config_get(None)

    @negativeColor.setter
    def negativeColor(self, text: str):
        self._config(text, js_type=False)

    @property
    def onPoint(self) -> 'OptionSeriesScatterOnpoint':
        return self._config_sub_data('onPoint', OptionSeriesScatterOnpoint)

    @property
    def opacity(self):
        return self._config_get(1)

    @opacity.setter
    def opacity(self, num: float):
        self._config(num, js_type=False)

    @property
    def point(self) -> 'OptionSeriesScatterPoint':
        return self._config_sub_data('point', OptionSeriesScatterPoint)

    @property
    def pointDescriptionFormat(self):
        return self._config_get(None)

    @pointDescriptionFormat.setter
    def pointDescriptionFormat(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointDescriptionFormatter(self):
        return self._config_get(None)

    @pointDescriptionFormatter.setter
    def pointDescriptionFormatter(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointInterval(self):
        return self._config_get(1)

    @pointInterval.setter
    def pointInterval(self, num: float):
        self._config(num, js_type=False)

    @property
    def pointIntervalUnit(self):
        return self._config_get(None)

    @pointIntervalUnit.setter
    def pointIntervalUnit(self, value: Any):
        self._config(value, js_type=False)

    @property
    def pointPlacement(self):
        return self._config_get(None)

    @pointPlacement.setter
    def pointPlacement(self, text: str):
        self._config(text, js_type=False)

    @property
    def pointStart(self):
        return self._config_get(0)

    @pointStart.setter
    def pointStart(self, num: float):
        self._config(num, js_type=False)

    @property
    def relativeXValue(self):
        return self._config_get(False)

    @relativeXValue.setter
    def relativeXValue(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def selected(self):
        return self._config_get(False)

    @selected.setter
    def selected(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def shadow(self):
        return self._config_get(False)

    @shadow.setter
    def shadow(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showCheckbox(self):
        return self._config_get(False)

    @showCheckbox.setter
    def showCheckbox(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def showInLegend(self):
        return self._config_get(None)

    @showInLegend.setter
    def showInLegend(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def skipKeyboardNavigation(self):
        return self._config_get(None)

    @skipKeyboardNavigation.setter
    def skipKeyboardNavigation(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def softThreshold(self):
        return self._config_get(True)

    @softThreshold.setter
    def softThreshold(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def sonification(self) -> 'OptionSeriesScatterSonification':
        return self._config_sub_data('sonification', OptionSeriesScatterSonification)

    @property
    def stack(self):
        return self._config_get(None)

    @stack.setter
    def stack(self, num: float):
        self._config(num, js_type=False)

    @property
    def stacking(self):
        return self._config_get(None)

    @stacking.setter
    def stacking(self, text: str):
        self._config(text, js_type=False)

    @property
    def states(self) -> 'OptionSeriesScatterStates':
        return self._config_sub_data('states', OptionSeriesScatterStates)

    @property
    def step(self):
        return self._config_get(None)

    @step.setter
    def step(self, value: Any):
        self._config(value, js_type=False)

    @property
    def stickyTracking(self):
        return self._config_get(False)

    @stickyTracking.setter
    def stickyTracking(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def threshold(self):
        return self._config_get(0)

    @threshold.setter
    def threshold(self, num: float):
        self._config(num, js_type=False)

    @property
    def tooltip(self) -> 'OptionSeriesScatterTooltip':
        return self._config_sub_data('tooltip', OptionSeriesScatterTooltip)

    @property
    def turboThreshold(self):
        return self._config_get(1000)

    @turboThreshold.setter
    def turboThreshold(self, num: float):
        self._config(num, js_type=False)

    @property
    def type(self):
        return self._config_get(None)

    @type.setter
    def type(self, text: str):
        self._config(text, js_type=False)

    @property
    def visible(self):
        return self._config_get(True)

    @visible.setter
    def visible(self, flag: bool):
        self._config(flag, js_type=False)

    @property
    def xAxis(self):
        return self._config_get(0)

    @xAxis.setter
    def xAxis(self, num: float):
        self._config(num, js_type=False)

    @property
    def yAxis(self):
        return self._config_get(0)

    @yAxis.setter
    def yAxis(self, num: float):
        self._config(num, js_type=False)

    @property
    def zIndex(self):
        return self._config_get(None)

    @zIndex.setter
    def zIndex(self, num: float):
        self._config(num, js_type=False)

    @property
    def zoneAxis(self):
        return self._config_get('y')

    @zoneAxis.setter
    def zoneAxis(self, text: str):
        self._config(text, js_type=False)

    @property
    def zones(self) -> 'OptionSeriesScatterZones':
        return self._config_sub_data('zones', OptionSeriesScatterZones)
# BUGFIX: register_type() immediately preceding a class statement is a
# decorator that lost its '@' (a syntax error as written); restored here so
# the community type is actually registered for parsing dispatch.
@_ExtendedCommunity.register_type(_ExtendedCommunity.TWO_OCTET_AS_SPECIFIC)
class BGPTwoOctetAsSpecificExtendedCommunity(_ExtendedCommunity):
    """Two-Octet AS Specific BGP Extended Community.

    Value layout (network byte order): 1-byte subtype, 2-byte AS number,
    4-byte local administrator.
    """

    _VALUE_PACK_STR = '!BHI'
    _VALUE_FIELDS = ['subtype', 'as_number', 'local_administrator']

    def __init__(self, **kwargs):
        super(BGPTwoOctetAsSpecificExtendedCommunity, self).__init__()
        self.do_init(BGPTwoOctetAsSpecificExtendedCommunity, self, kwargs)