code
stringlengths
281
23.7M
class NotEmptyVulnArrayAnalyzer(Analyzer):
    """Analyzer test double whose mocked DB driver reports a fixed, non-empty vulnerability list."""

    def __init__(self):
        # Local (non-remote) analysis; both drivers are fully mocked.
        self.is_remote = False
        self.mongoDbDriver = Mock()
        self.dockerDriver = Mock()
        # Canned findings: two CVEs, two BIDs, two Exploit-DB entries.
        self.mongoDbDriver.get_vulnerabilities.return_value = [
            'CVE-2002-2001', 'CVE-2002-2002',
            'BID-1', 'BID-2',
            'EXPLOIT_DB_ID-3', 'EXPLOIT_DB_ID-4',
        ]
        # Nothing is treated as a false positive.
        self.mongoDbDriver.is_fp.return_value = False
def run_with_teleport(code: str, teleport_info: TeleportInfo, locations: DataLocation, config: RuntimeConfig, local_packages: Optional[bytes]=None) -> str:
    """Extract optional local packages, then run the `main` symbol from *code*
    with teleport-aware read_df/write_df helpers.

    Bug fix: the scripts path was converted with ``str(...)`` but then used as a
    Path (``.exists()``, ``.parent.mkdir``), which raises AttributeError; keep it
    as a Path and stringify only where a string is needed.
    (assumes get_fal_scripts_path returns a pathlib.Path — TODO confirm)
    """
    fal_scripts_path = get_fal_scripts_path(config)
    if local_packages is not None:
        # Replace any previous extraction wholesale so stale files don't linger.
        if fal_scripts_path.exists():
            import shutil
            shutil.rmtree(fal_scripts_path)
        fal_scripts_path.parent.mkdir(parents=True, exist_ok=True)
        zip_file = zipfile.ZipFile(io.BytesIO(local_packages))
        zip_file.extractall(fal_scripts_path)
    # Make the extracted packages importable while main() runs.
    with extra_path(str(fal_scripts_path)):
        main = retrieve_symbol(code, 'main')
        return main(
            read_df=_prepare_for_teleport(_teleport_df_from_external_storage, teleport_info, locations),
            write_df=_prepare_for_teleport(_teleport_df_to_external_storage, teleport_info, locations),
        )
def pesq(reference, estimation, sample_rate, mode=None):
    """Compute PESQ scores via the third-party `pesq` package.

    reference/estimation are broadcast against each other; for ndim >= 2 the
    score is computed per leading index and the leading shape is preserved.

    Bug fix: the install-hint message was garbled ("from\\n istall it with").
    """
    try:
        import pesq
    except ImportError as e:
        raise AssertionError(
            'To use this pesq implementation, install it with `pip install pesq`'
        ) from e
    estimation, reference = np.broadcast_arrays(estimation, reference)
    if mode is None:
        # Narrow-band for 8 kHz, wide-band for 16 kHz.
        mode = {8000: 'nb', 16000: 'wb'}[sample_rate]
    elif sample_rate == 16000:
        assert mode in ['nb', 'wb'], (mode, sample_rate)
    elif sample_rate == 8000:
        assert mode == 'nb', (mode, sample_rate)
    else:
        raise ValueError(sample_rate)
    assert reference.shape == estimation.shape, (reference.shape, estimation.shape)
    if reference.ndim >= 2:
        # Sanity-guard against absurdly large batch dims (likely transposed input).
        for i in range(reference.ndim - 1):
            assert reference.shape[i] < 30, (i, reference.shape, estimation.shape)
        return np.array([
            pesq.pesq(ref=reference[i], deg=estimation[i], fs=sample_rate, mode=mode)
            for i in np.ndindex(*reference.shape[:-1])
        ]).reshape(reference.shape[:-1])
    elif reference.ndim == 1:
        return pesq.pesq(ref=reference, deg=estimation, fs=sample_rate, mode=mode)
    else:
        raise NotImplementedError(reference.ndim)
# NOTE(review): the decorator was mangled to `('xtb')` in the source; restored
# here as `@using('xtb')` (the calculator-availability marker used by this test
# suite) — confirm against the original module.
@using('xtb')
def test_get_optimal_bias():
    """End-to-end check of get_optimal_bias on the uracil dimer with GFN-FF."""
    geom = geom_loader('lib:uracil_dimer.xyz', coord_type='redund')

    def calc_getter():
        return XTB(gfn='ff')

    opt_key = 'lbfgs'
    opt_kwargs = {'mu_reg': 0.1, 'dump': True, 'max_cycles': 750}
    _, k_opt, valid_k = get_optimal_bias(
        geom, calc_getter, opt_key, opt_kwargs, k_max=0.1, rmsd_target=0.05
    )
    assert k_opt == pytest.approx(-0.0375)
    assert valid_k
class StarData(ModelNormal):
    """Auto-generated OpenAPI model for a starred resource (type + relationships).

    NOTE(review): the decorators were mangled in the source to `_property` and
    `_js_args_to_python_args`; they are restored below as `@cached_property`,
    `@classmethod` and `@convert_js_args_to_python_args`, the standard
    decorators of this generated-client style — confirm against the generator
    output.
    """

    allowed_values = {}
    validations = {}

    @cached_property
    def additional_properties_type():
        # Lazily import referenced models to avoid circular imports.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type)

    _nullable = False

    @cached_property
    def openapi_types():
        lazy_import()
        return {
            'type': (TypeStar,),
            'relationships': (RelationshipsForStar,),
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {'type': 'type', 'relationships': 'relationships'}
    read_only_vars = {}
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Instantiate StarData from raw server data (keyword arguments only)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                'Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if (var_name not in self.attribute_map and
                    self._configuration is not None and
                    self._configuration.discard_unknown_keys and
                    self.additional_properties_type is None):
                # Unknown keys are silently discarded when so configured.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set([
        '_data_store', '_check_type', '_spec_property_naming',
        '_path_to_item', '_configuration', '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct StarData (keyword arguments only; read-only attrs rejected)."""
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                'Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments.' % (args, self.__class__.__name__),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if (var_name not in self.attribute_map and
                    self._configuration is not None and
                    self._configuration.discard_unknown_keys and
                    self.additional_properties_type is None):
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f'`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate class with read only attributes.')
class InjectContentTransform(SphinxTransform):
    """Replace ItemsWithRubricMarkerNode placeholders with bullet lists of section links."""

    default_priority = 500

    def apply(self):
        # Process every marker node remaining in the document.
        for marker in self.document.findall(ItemsWithRubricMarkerNode):
            self.replace_node(marker)

    def replace_node(self, node):
        items = get_storage(self.env)[node['rubric']]
        if not items:
            # Nothing collected under this rubric: warn and drop the marker.
            warn(f"no items with rubric {node['rubric']}", node)
            node.parent.remove(node)
            return
        items.sort(key=lambda item: (item.section_title, item.section_id))
        bullet_list = nodes.bullet_list()
        for item in items:
            # Informational documents/sections are excluded from the listing.
            if is_document_informational(self.env, item.document):
                continue
            if is_section_informational(self.env, item.document, f'#{item.section_id}'):
                continue
            # "<section title> (in <document title>)" with both parts linked.
            paragraph = nodes.paragraph()
            paragraph += sphinx.util.nodes.make_refnode(
                self.app.builder, self.env.docname, item.document,
                item.section_id, nodes.Text(item.section_title))
            paragraph += nodes.Text(' (in ')
            paragraph += sphinx.util.nodes.make_refnode(
                self.app.builder, self.env.docname, item.document,
                '', nodes.Text(self.env.titles[item.document].astext()))
            paragraph += nodes.Text(')')
            entry = nodes.list_item()
            entry += paragraph
            bullet_list += entry
        node.replace_self(bullet_list)
# NOTE(review): the decorator was mangled to `('ecs_deploy.cli.get_client')` in
# the source; restored as `@patch(...)` (unittest.mock), which matches the
# injected `get_client` argument — confirm against the original test module.
@patch('ecs_deploy.cli.get_client')
def test_deploy_with_errors(get_client, runner):
    """`deploy` must exit with code 1 and report the deployment error output."""
    get_client.return_value = EcsTestClient('acces_key', 'secret_key', deployment_errors=True)
    result = runner.invoke(cli.deploy, (CLUSTER_NAME, SERVICE_NAME))
    assert result.exit_code == 1
    assert u'Deployment failed' in result.output
    assert u'ERROR: Service was unable to Lorem Ipsum' in result.output
def create_extended_message_config_set_mask(first_ssid, last_ssid, *masks):
    """Build a DIAG extended-message-config 'set mask' packet.

    first_ssid/last_ssid: inclusive subsystem-id range covered by the packet.
    masks: (subsys_id, log_level) pairs; ids outside the range are ignored.
    Returns header + one little-endian uint32 level per subsystem in range.
    """
    header = struct.pack('<BBHHH', DIAG_EXT_MSG_CONFIG_F, 4, first_ssid, last_ssid, 0)
    num_ssids = (last_ssid - first_ssid) + 1
    levels = [0] * num_ssids
    for mask in masks:
        subsys_id = mask[0]
        # Ignore masks outside the requested subsystem range.
        if not (first_ssid <= subsys_id <= last_ssid):
            continue
        levels[subsys_id - first_ssid] = mask[1]
    # Pack all levels in a single call instead of per-element packing with
    # quadratic bytes concatenation.
    payload = struct.pack('<%dL' % num_ssids, *levels)
    return header + payload
def parse_maintainers(repo=None):
    """Parse ../MAINTAINERS.md and return (repos, paragraph).

    When *repo* is given, the repo list is filtered to entries whose first
    field matches it.
    """
    maint_file = '../MAINTAINERS.md'
    with open(maint_file) as handle:
        raw_lines = handle.read().splitlines()
    para, repos = parse(raw_lines)
    paragraph = '\n'.join(para).strip()
    if repo:
        # Keep only entries for the requested repository.
        repos = [entry for entry in repos if entry[0] == repo]
    return repos, paragraph
# NOTE(review): the click decorators were mangled in the source (`()`,
# `('--duration', ...)`, `_of_runs_deco`, `_format_deco`). They are
# reconstructed below as a click command plus the shared number-of-runs /
# output-format decorators — confirm names against the original benchmark CLI.
@click.command()
@click.option('--duration', default=1, help='Run time in seconds.')
@click.option('--runtime_mode', default='async', help='Runtime mode: async or threaded.')
@click.option('--runner_mode', default='async', help='Runtime mode: async or threaded.')
@click.option('--start_messages', default=100, help='Amount of messages to prepopulate.')
@click.option('--num_of_agents', default=2, help='Amount of agents to run.')
@number_of_runs_deco
@output_format_deco
def main(duration: int, runtime_mode: str, runner_mode: str, start_messages: int,
         num_of_agents: int, number_of_runs: int, output_format: str) -> Any:
    """Run the agents benchmark `number_of_runs` times and print aggregated results."""
    parameters = {
        'Duration(seconds)': duration,
        'Runtime mode': runtime_mode,
        'Runner mode': runner_mode,
        'Start messages': start_messages,
        'Number of agents': num_of_agents,
        'Number of runs': number_of_runs,
    }

    def result_fn() -> List[Tuple[str, Any, Any, Any]]:
        # Each run re-executes `run` with identical parameters.
        return multi_run(
            int(number_of_runs),
            run,
            (duration, runtime_mode, runner_mode, start_messages, num_of_agents),
        )

    return print_results(output_format, parameters, result_fn)
# Machine-generated Cartesian overlap integral kernel (d|f block, 10x6) for a
# Gaussian basis pair: ax/bx are exponents, da/db contraction coefficients,
# A/B the 3D centers. The x0..x116 names are auto-generated common
# subexpressions; do not edit by hand — regenerate from the symbolic source.
def ovlp3d_32(ax, da, A, bx, db, B): result = numpy.zeros((10, 6), dtype=float) x0 = ((ax + bx) ** (- 1.0)) x1 = (x0 * ((ax * A[0]) + (bx * B[0]))) x2 = (- x1) x3 = (x2 + B[0]) x4 = (x3 ** 2) x5 = (3.0 * x0) x6 = (x2 + A[0]) x7 = (x3 * x6) x8 = (x5 + (4.0 * x7)) x9 = (x0 * ((((- 2.0) * x1) + A[0]) + B[0])) x10 = (x0 + (2.0 * x7)) x11 = (x10 * x3) x12 = (x11 + x9) x13 = (2.0 * x6) x14 = ((x0 * ((2.0 * x4) + x8)) + (x12 * x13)) x15 = (x10 * x6) x16 = (2.0 * x0) x17 = 2. x18 = ((ax * bx) * x0) x19 = (((5. * da) * db) * numpy.exp(((- x18) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2))))) x20 = ((x0 ** 1.5) * x19) x21 = (0. * x20) x22 = (x17 * x21) x23 = (x0 * ((ax * A[1]) + (bx * B[1]))) x24 = (- x23) x25 = (x24 + B[1]) x26 = (x6 ** 2) x27 = (x15 + x9) x28 = 3. x29 = (x21 * x28) x30 = (x29 * ((x0 * ((2.0 * x26) + x8)) + (x13 * x27))) x31 = (x0 * ((ax * A[2]) + (bx * B[2]))) x32 = (- x31) x33 = (x32 + B[2]) x34 = (x25 ** 2) x35 = (0.5 * x0) x36 = (x34 + x35) x37 = (0. * x17) x38 = (x36 * x37) x39 = (1.5 * x0) x40 = (x26 + x39) x41 = ((x0 ** 1.5) * x19) x42 = (x41 * x6) x43 = (x40 * x42) x44 = (x20 * x33) x45 = (0. * x28) x46 = (x33 ** 2) x47 = (x35 + x46) x48 = (x37 * x47) x49 = (x24 + A[1]) x50 = (0. * x20) x51 = (x14 * x50) x52 = (x25 * x49) x53 = (x35 + x52) x54 = 1. x55 = (0. * x41) x56 = (x54 * x55) x57 = (x27 * x56) x58 = (x44 * x49) x59 = (0. * x54) x60 = (x27 * x59) x61 = (x0 * ((((- 2.0) * x23) + A[1]) + B[1])) x62 = (x0 + (2.0 * x52)) x63 = (x25 * x62) x64 = (x61 + x63) x65 = (x26 + x35) x66 = (x55 * x65) x67 = (0. * x54) x68 = (x33 * x67) x69 = (x41 * x65) x70 = (0. * x47) x71 = (x41 * x49) x72 = (x32 + A[2]) x73 = (x20 * x72) x74 = (x25 * x73) x75 = (x33 * x72) x76 = (x35 + x75) x77 = (0. 
* x36) x78 = (x41 * x72) x79 = (x25 * x67) x80 = (x0 * ((((- 2.0) * x31) + A[2]) + B[2])) x81 = (x0 + (2.0 * x75)) x82 = (x33 * x81) x83 = (x80 + x82) x84 = (x49 ** 2) x85 = (x35 + x84) x86 = (x12 * x55) x87 = (x49 * x62) x88 = (x61 + x87) x89 = (x35 + x7) x90 = (x56 * x89) x91 = (x41 * x89) x92 = (x5 + (4.0 * x52)) x93 = (2.0 * x49) x94 = ((x0 * ((2.0 * x34) + x92)) + (x64 * x93)) x95 = (x50 * x6) x96 = (x59 * x6) x97 = (x59 * x73) x98 = (x20 * x96) x99 = (x72 ** 2) x100 = (x35 + x99) x101 = (x72 * x81) x102 = (x101 + x80) x103 = (x5 + (4.0 * x75)) x104 = (2.0 * x72) x105 = ((x0 * (x103 + (2.0 * x46))) + (x104 * x83)) x106 = (x39 + x84) x107 = (x106 * x71) x108 = (x35 + x4) x109 = (x108 * x37) x110 = (x29 * ((x0 * ((2.0 * x84) + x92)) + (x88 * x93))) x111 = (x3 * x45) x112 = (0. * x108) x113 = ((x3 * x41) * x67) x114 = (x39 + x99) x115 = (x114 * x78) x116 = (x29 * ((x0 * (x103 + (2.0 * x99))) + (x102 * x104))) result[(0, 0)] = numpy.sum(((- x22) * ((x14 * x6) + (x16 * ((x11 + x15) + (2.0 * x9)))))) result[(0, 1)] = numpy.sum(((- x25) * x30)) result[(0, 2)] = numpy.sum(((- x30) * x33)) result[(0, 3)] = numpy.sum(((- x38) * x43)) result[(0, 4)] = numpy.sum((((((- x25) * x40) * x44) * x45) * x6)) result[(0, 5)] = numpy.sum(((- x43) * x48)) result[(1, 0)] = numpy.sum(((- x49) * x51)) result[(1, 1)] = numpy.sum(((- x53) * x57)) result[(1, 2)] = numpy.sum(((- x58) * x60)) result[(1, 3)] = numpy.sum(((- x64) * x66)) result[(1, 4)] = numpy.sum((((- x53) * x68) * x69)) result[(1, 5)] = numpy.sum((((- x65) * x70) * x71)) result[(2, 0)] = numpy.sum(((- x51) * x72)) result[(2, 1)] = numpy.sum(((- x60) * x74)) result[(2, 2)] = numpy.sum(((- x57) * x76)) result[(2, 3)] = numpy.sum((((- x65) * x77) * x78)) result[(2, 4)] = numpy.sum((((- x69) * x76) * x79)) result[(2, 5)] = numpy.sum(((- x66) * x83)) result[(3, 0)] = numpy.sum(((- x85) * x86)) result[(3, 1)] = numpy.sum(((- x88) * x90)) result[(3, 2)] = numpy.sum((((- x68) * x85) * x91)) result[(3, 3)] = numpy.sum(((- x94) * 
x95)) result[(3, 4)] = numpy.sum((((- x44) * x88) * x96)) result[(3, 5)] = numpy.sum((((- x42) * x70) * x85)) result[(4, 0)] = numpy.sum((((- x12) * x49) * x97)) result[(4, 1)] = numpy.sum((((- x53) * x72) * x91)) result[(4, 2)] = numpy.sum((((- x49) * x76) * x91)) result[(4, 3)] = numpy.sum((((- x64) * x73) * x96)) result[(4, 4)] = numpy.sum((((- x42) * x53) * x76)) result[(4, 5)] = numpy.sum((((- x49) * x83) * x98)) result[(5, 0)] = numpy.sum(((- x100) * x86)) result[(5, 1)] = numpy.sum((((- x100) * x79) * x91)) result[(5, 2)] = numpy.sum(((- x102) * x90)) result[(5, 3)] = numpy.sum((((- x100) * x42) * x77)) result[(5, 4)] = numpy.sum((((- x102) * x25) * x98)) result[(5, 5)] = numpy.sum(((- x105) * x95)) result[(6, 0)] = numpy.sum(((- x107) * x109)) result[(6, 1)] = numpy.sum(((- x110) * x3)) result[(6, 2)] = numpy.sum((((- x106) * x111) * x58)) result[(6, 3)] = numpy.sum(((- x22) * ((x16 * (((2.0 * x61) + x63) + x87)) + (x49 * x94)))) result[(6, 4)] = numpy.sum(((- x110) * x33)) result[(6, 5)] = numpy.sum(((- x107) * x48)) result[(7, 0)] = numpy.sum((((- x112) * x78) * x85)) result[(7, 1)] = numpy.sum((((- x3) * x88) * x97)) result[(7, 2)] = numpy.sum((((- x113) * x76) * x85)) result[(7, 3)] = numpy.sum((((- x50) * x72) * x94)) result[(7, 4)] = numpy.sum((((- x56) * x76) * x88)) result[(7, 5)] = numpy.sum((((- x55) * x83) * x85)) result[(8, 0)] = numpy.sum((((- x100) * x112) * x71)) result[(8, 1)] = numpy.sum((((- x100) * x113) * x53)) result[(8, 2)] = numpy.sum((((((- x102) * x20) * x3) * x49) * x59)) result[(8, 3)] = numpy.sum((((- x100) * x55) * x64)) result[(8, 4)] = numpy.sum((((- x102) * x53) * x56)) result[(8, 5)] = numpy.sum((((- x105) * x49) * x50)) result[(9, 0)] = numpy.sum(((- x109) * x115)) result[(9, 1)] = numpy.sum((((- x111) * x114) * x74)) result[(9, 2)] = numpy.sum(((- x116) * x3)) result[(9, 3)] = numpy.sum(((- x115) * x38)) result[(9, 4)] = numpy.sum(((- x116) * x25)) result[(9, 5)] = numpy.sum(((- x22) * ((x105 * x72) + (x16 * ((x101 + (2.0 
* x80)) + x82))))) return result
def group_by_parent(iterable):
    """Yield (parent, children) pairs from a flat element stream.

    An element opens a new group whenever no group is open or its
    ``tree_depth`` equals the current parent's; any deeper element is
    collected as a child of the open parent. The final open group is
    flushed at the end.
    """
    current = None
    kids = []
    for element in iterable:
        if current is None or element.tree_depth == current.tree_depth:
            # Same depth as the open parent: close that group first.
            if current:
                yield current, kids
                current = None
                kids = []
            current = element
        else:
            kids.append(element)
    if current:
        yield current, kids
def extractNotransWordpressCom(item):
    """Map a release post from this feed to a release message via its tags.

    Returns None for previews/unnumbered posts, False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip preview posts and items without a chapter/volume number.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ("Knight's & Magic (LN)", "Knight's & Magic (LN)", 'translated'),
        ('Roku de Nashi Majutsu Koushi to Kinki Kyouten (LN)', 'Roku de Nashi Majutsu Koushi to Kinki Kyouten (LN)', 'translated'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
class ConfigYamlValidationError(ValueError, ConfigYamlBaseException):
    """Raised when an entry in the user's config.yml fails validation."""

    def __init__(self, class_name: str, cause: str, remediation: str) -> None:
        # Compose a single actionable message: what failed, why, and how to fix it.
        message = (
            f'{class_name} (specified in your config.yml) failed validation. '
            f'Cause: {cause}. Suggested remediation: {remediation}'
        )
        super().__init__(message)
class ToolkitEditorFactory(BaseColorToolkitEditorFactory):
    """Qt color editor factory converting between RGB trait tuples and QColor."""

    def to_qt_color(self, editor):
        # Prefer the shadow trait ('<name>_') when present; fall back to the trait itself.
        try:
            rgb = getattr(editor.object, editor.name + '_')
        except AttributeError:
            rgb = getattr(editor.object, editor.name)
        qcolor = QtGui.QColor()
        qcolor.setRgbF(rgb[0], rgb[1], rgb[2])
        return qcolor

    def from_qt_color(self, color):
        # QColor -> (r, g, b) floats in [0, 1].
        return (color.redF(), color.greenF(), color.blueF())

    def str_color(self, color):
        # Render sequences as '(r,g,b)' with 0-255 ints; anything else passes through.
        if type(color) in SequenceTypes:
            return '(%d,%d,%d)' % (
                int(color[0] * 255.0),
                int(color[1] * 255.0),
                int(color[2] * 255.0),
            )
        return color
def register_events_publisher(hass: HomeAssistant, homeconnect: HomeConnect):
    """Forward Home Connect appliance events onto the HA event bus, deduplicated."""
    device_reg = dr.async_get(hass)
    # Remember the most recent (key, value) so immediate duplicates are dropped.
    last_event = {'key': None, 'value': None}

    async def async_handle_event(appliance: Appliance, key: str, value: str):
        # Guard clause: identical consecutive events are not republished.
        if key == last_event['key'] and value == last_event['value']:
            _LOGGER.debug('Skipped publishing of duplicate event to Home Assistant event bus: %s = %s', key, str(value))
            return
        last_event['key'] = key
        last_event['value'] = value
        device = device_reg.async_get_device({(DOMAIN, appliance.haId.lower().replace('-', '_'))})
        event_data = {'device_id': device.id, 'key': key, 'value': value}
        hass.bus.async_fire(f'{DOMAIN}_event', event_data)
        _LOGGER.debug('Published event to Home Assistant event bus: %s = %s', key, str(value))

    def register_appliance(appliance: Appliance):
        # Subscribe the handler to every event type we publish.
        for event in PUBLISHED_EVENTS:
            appliance.register_callback(async_handle_event, event)

    # Future appliances (paired/connected) plus everything already known.
    homeconnect.register_callback(register_appliance, [Events.PAIRED, Events.CONNECTED])
    for appliance in homeconnect.appliances.values():
        register_appliance(appliance)
def _fetch_reference_data():
    """Populate SUBTIER_AGENCY_LIST_CACHE, keyed by subtier_code, from the DB."""
    global SUBTIER_AGENCY_LIST_CACHE
    query = 'SELECT * FROM subtier_agency JOIN agency ON subtier_agency.subtier_agency_id = agency.subtier_agency_id'
    with psycopg2.connect(dsn=get_database_dsn_string()) as connection:
        with connection.cursor(cursor_factory=psycopg2.extras.DictCursor) as cursor:
            cursor.execute(query)
            # DictCursor rows support string-key access.
            SUBTIER_AGENCY_LIST_CACHE = {row['subtier_code']: row for row in cursor.fetchall()}
class Solution(object):
    def largeGroupPositions(self, S):
        """Return [start, end] index pairs for every run of length >= 3 in S."""
        if not S:
            return []
        groups = []
        run_start = 0
        # Walk the string, closing a run whenever the character changes.
        for idx in range(1, len(S)):
            if S[idx] != S[run_start]:
                if idx - run_start >= 3:
                    groups.append([run_start, idx - 1])
                run_start = idx
        # Close the final run.
        if len(S) - run_start >= 3:
            groups.append([run_start, len(S) - 1])
        return groups
class ListField(models.TextField):
    """TextField that persists a Python list as its string representation."""

    def __init__(self, *args, **kwargs):
        super(ListField, self).__init__(*args, **kwargs)

    def to_python(self, value):
        # Empty/None -> empty list; stored strings are parsed back safely.
        if not value:
            value = []
        if isinstance(value, list):
            return value
        return ast.literal_eval(value)

    def get_prep_value(self, value):
        # None stays None (NULL); lists are stored as their repr string.
        if value is None:
            return value
        return str(value)

    def value_to_string(self, obj):
        data = self.value_from_object(obj)
        return self.get_db_prep_value(data)
class ArtNet():
    """Minimal Art-Net (ArtDMX) broadcaster over UDP."""

    def __init__(self, ip='192.168.1.255', port=6454):
        self.s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
        self.ip = ip
        self.port = port

    def broadcastDMX(self, dmxdata, address):
        """Send one ArtDMX frame.

        dmxdata: sequence of channel values (ints 0-255).
        address: (net, subnet, universe) tuple.

        Improvement: channel data is converted in one bytes() call instead of
        packing each byte individually and concatenating.
        """
        net, subnet, universe = address
        packet = b''.join([
            b'Art-Net\x00',
            struct.pack('<H', 20480),   # OpDmx opcode 0x5000, little-endian
            struct.pack('>H', 14),      # protocol version 14, big-endian
            b'\x00',                    # sequence (disabled)
            b'\x01',                    # physical input port
            # SubUni low byte (subnet<<4 | universe), Net high byte.
            struct.pack('<H', (net << 8) | (subnet << 4) | universe),
            struct.pack('>H', len(dmxdata)),
            bytes(bytearray(dmxdata)),  # raises ValueError for out-of-range values, like pack('B') did
        ])
        self.s.sendto(packet, (self.ip, self.port))

    def close(self):
        self.s.close()
def are_files_equal(file1, file2):
    """Compare two text files line by line, ignoring file1 lines that start with 'File'.

    NOTE(review): zip() stops at the shorter file, so a file that is a prefix of
    the other compares equal — preserved as-is; confirm this is intended.
    """
    with open(file1) as lhs, open(file2) as rhs:
        for line_a, line_b in zip(lhs, rhs):
            # Header-ish lines in file1 (e.g. paths/timestamps) are skipped.
            if line_a.startswith('File'):
                continue
            if line_a != line_b:
                return False
    return True
def test_config_in_dir() -> None:
    """Configs and groups inside a namespace-package dir must be discoverable."""
    with initialize(version_base=None, config_path='../some_namespace/namespace_test/dir'):
        loader = GlobalHydra.instance().config_loader()
        # Options within the 'dataset' group.
        dataset_options = loader.get_group_options('dataset')
        assert 'cifar10' in dataset_options
        assert 'imagenet' in dataset_options
        # Nested group discovery.
        assert 'level1' in loader.list_groups('')
        assert 'level2' in loader.list_groups('level1')
        nested_options = loader.get_group_options('level1/level2')
        assert 'nested1' in nested_options
        assert 'nested2' in nested_options
class OneOfParameter(parser.ParameterWithValue):
    """Parameter restricted to a fixed set of values; the literal 'list' prints them."""

    def __init__(self, values, **kwargs):
        super().__init__(**kwargs)
        self.values = values

    def coerce_value(self, arg, ba):
        # 'list' is a reserved word that triggers option listing.
        if arg == 'list':
            raise _ShowList
        if arg in self.values:
            return arg
        raise errors.BadArgumentFormat(arg)

    def read_argument(self, ba, i):
        try:
            super(OneOfParameter, self).read_argument(ba, i)
        except _ShowList:
            # Rewire the bound call so it only prints the candidate values.
            ba.func = self.show_list
            ba.args[:] = []
            ba.kwargs.clear()
            ba.sticky = parser.IgnoreAllArguments()
            ba.posarg_only = True

    def show_list(self):
        for val in self.values:
            print(val)

    def help_parens(self):
        yield from super(OneOfParameter, self).help_parens()
        yield 'use "list" for options'
def test_recurse_check_structure_missingitem():
    """A template key absent from the checked dict must raise ValidationException."""
    sample = {
        'string': 'Foobar',
        'list': ['Foo', 'Bar'],
        'dict': {'foo': 'Bar'},
        'none': None,
        'true': True,
        'false': False,
    }
    # Same structure but with the 'false' key missing.
    to_check = {
        'string': 'Foobar',
        'list': ['Foo', 'Bar'],
        'dict': {'foo': 'Bar'},
        'none': None,
        'true': True,
    }
    with pytest.raises(ValidationException):
        recurse_check_structure(sample, to_check)
def insert_library_functions():
    """Register the built-in I/O library routines with the function table."""
    # (name, return type, argument types, code generator) — generators are
    # invoked one at a time so generation/insertion order matches registration.
    specs = [
        ('readint', Token.INT, list(), get_readint_code),
        ('printint', Token.VOID, [Token.INT], get_printint_code),
        ('readchar', Token.INT, list(), get_readchar_code),
        ('printchar', Token.VOID, [Token.INT], get_printchar_code),
    ]
    for name, return_type, arg_types, code_getter in specs:
        insert_function_object(LibraryFunctionCompiler(name, return_type, arg_types, code_getter()))
def get_predictions(single_stream, class_mapping_dict, ip, port, model_name):
    """Run an Object Detection API model on one image stream via TF Serving.

    Returns (class_ids, class_names, scores, boxes).

    Bug fix: the extension was taken as `filename.split('.')[1]`, which picks
    the wrong token for multi-dot names (e.g. 'a.b.png' -> 'b') and raises
    IndexError when there is no dot; use the final suffix via rsplit instead
    (a dotless name now falls through to the jpg fallback branch).
    """
    image_byte_stream = b64_uri_to_bytes(single_stream['image'])
    encoded_image_io = io.BytesIO(image_byte_stream)
    image = Image.open(encoded_image_io)
    (width, height) = image.size
    filename = str(single_stream['meta']['file'])
    file_extension = filename.rsplit('.', 1)[-1].lower()
    if file_extension == 'png':
        image_format = b'png'
    elif file_extension in ('jpg', 'jpeg'):
        image_format = b'jpg'
    else:
        # Unknown extension: warn and fall back to jpg, as before.
        log("Only 'png', 'jpeg' or 'jpg' files are supported by ODAPI. Got {}. Thus treating it as `jpg` file. Might cause errors".format(file_extension))
        image_format = b'jpg'
    filename = filename.encode('utf-8')
    tf_example = tf.train.Example(features=tf.train.Features(feature={
        'image/height': dataset_util.int64_feature(height),
        'image/width': dataset_util.int64_feature(width),
        'image/filename': dataset_util.bytes_feature(filename),
        'image/source_id': dataset_util.bytes_feature(filename),
        'image/encoded': dataset_util.bytes_feature(image_byte_stream),
        'image/format': dataset_util.bytes_feature(image_format),
    }))
    (boxes, class_ids, scores) = tf_odapi_client(
        tf_example.SerializeToString(), ip, port, model_name,
        'serving_default', input_name='serialized_example', timeout=300)
    class_names = np.array([class_mapping_dict[class_id] for class_id in class_ids])
    return (class_ids, class_names, scores, boxes)
def test_join():
    """file_access.join must delegate to the target filesystem's separator."""
    ctx = FlyteContextManager.current_context()
    # Local filesystem: default fs is used when none is passed.
    local_fs = ctx.file_access.get_filesystem('file')
    joined = ctx.file_access.join('a', 'b', 'c', unstrip=False)
    assert joined == local_fs.sep.join(['a', 'b', 'c'])
    # Explicit S3 filesystem.
    s3_fs = ctx.file_access.get_filesystem('s3')
    joined = ctx.file_access.join('s3://a', 'b', 'c', fs=s3_fs)
    assert joined == s3_fs.sep.join(['s3://a', 'b', 'c'])
def extractLightnoveltranslationsWordpressCom(item):
    """Map a release post from this feed to a release message via its tags.

    Returns None for previews/unnumbered posts, False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip preview posts and items without a chapter/volume number.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    tagmap = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
# NOTE(review): the decorator was mangled to `()` in the source; this is a
# fixture factory in the original test module, restored as
# `@pytest.fixture()` — confirm.
@pytest.fixture()
def system_with_arn2() -> Generator:
    """Yield a System resource whose RDS-cluster ARN has an empty account id."""
    system = System(
        fides_key='database-3',
        organization_fides_key='default_organization',
        name='database-3',
        fidesctl_meta=SystemMetadata(resource_id='arn:aws:rds:us-east-1::cluster:database-3'),
        system_type='rds_cluster',
        privacy_declarations=[],
    )
    yield system
def extractHikokitranslationsWordpressCom(item):
    """Map a release post from this feed to a release message via title matching.

    Returns None for previews/unnumbered posts, False when no title matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip preview posts and items without a chapter/volume number.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    titlemap = [
        ('Isekai Mahou wa Okureteru! (LN)', 'Isekai Mahou wa Okureteru! (LN)', 'translated'),
        ('Isekai Mahou wa Okureteru!', 'Isekai Mahou wa Okureteru!', 'translated'),
    ]
    for titlecomponent, name, tl_type in titlemap:
        # Case-insensitive substring match against the post title.
        if titlecomponent.lower() in item['title'].lower():
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
def starmatch_to_regex(pattern: str) -> Pattern:
    """Compile a '*'-wildcard pattern into an anchored regex.

    Matching is case-insensitive and DOTALL unless the pattern begins with
    the '(?-i)' prefix, which forces case-sensitive matching. '*' maps to
    '.*'; every other character is escaped literally.
    """
    flags = re.DOTALL
    if pattern.startswith('(?-i)'):
        pattern = pattern[5:]
    else:
        flags |= re.IGNORECASE
    pieces = ['.*' if ch == '*' else re.escape(ch) for ch in pattern]
    # \Z anchors the end so the whole input must be consumed.
    return re.compile('(?:%s)\\Z' % ''.join(pieces), flags)
def _debug_commandline_args(parser: argparse.ArgumentParser) -> None: HIDE_DEBUG = True if any((('--debug_' in arg) for arg in sys.argv)): HIDE_DEBUG = False def hide_opt(help: str) -> str: if (not HIDE_DEBUG): return help else: return argparse.SUPPRESS group = parser.add_argument_group(hide_opt('DEBUG'), hide_opt('Options for debugging language server')) group.add_argument('--debug_filepath', type=str, help=hide_opt('File path for language server tests')) group.add_argument('--debug_rootpath', type=str, help=hide_opt('Root path for language server tests')) group.add_argument('--debug_parser', action='store_true', help=hide_opt('Test source code parser on specified file')) group.add_argument('--debug_hover', action='store_true', help=hide_opt('Test `textDocument/hover` request for specified file and position')) group.add_argument('--debug_rename', type=str, metavar='RENAME_STRING', help=hide_opt('Test `textDocument/rename` request for specified file and position')) group.add_argument('--debug_actions', action='store_true', help=hide_opt('Test `textDocument/codeAction` request for specified file and position')) group.add_argument('--debug_symbols', action='store_true', help=hide_opt('Test `textDocument/documentSymbol` request for specified file')) group.add_argument('--debug_completion', action='store_true', help=hide_opt('Test `textDocument/completion` request for specified file and position')) group.add_argument('--debug_signature', action='store_true', help=hide_opt('Test `textDocument/signatureHelp` request for specified file and position')) group.add_argument('--debug_definition', action='store_true', help=hide_opt('Test `textDocument/definition` request for specified file and position')) group.add_argument('--debug_references', action='store_true', help=hide_opt('Test `textDocument/references` request for specified file and position')) group.add_argument('--debug_diagnostics', action='store_true', help=hide_opt('Test diagnostic notifications for specified 
file')) group.add_argument('--debug_implementation', action='store_true', help=hide_opt('Test `textDocument/implementation` request for specified file and position')) group.add_argument('--debug_workspace_symbols', type=str, metavar='QUERY_STRING', help=hide_opt('Test `workspace/symbol` request')) group.add_argument('--debug_line', type=int, metavar='INTEGER', help=hide_opt('Line position for language server tests (1-indexed)')) group.add_argument('--debug_char', type=int, metavar='INTEGER', help=hide_opt('Character position for language server tests (1-indexed)')) group.add_argument('--debug_full_result', action='store_true', help=hide_opt('Print full result object instead of condensed version'))
# Mark/unmark healthcare billing references as invoiced when a Sales Invoice is
# submitted or cancelled. `method == 'on_submit'` sets invoiced=True (after
# validation); otherwise flags are cleared. Clinical Procedure consumables use
# the dedicated 'consumption_invoiced' flag; appointments, prescriptions and
# Service/Medication Requests each get their own downstream bookkeeping.
# NOTE(review): the trailing `template_map` assignment is unused here — the
# original function likely continues beyond this snippet; confirm before use.
def set_invoiced(item, method, ref_invoice=None): invoiced = False if (method == 'on_submit'): validate_invoiced_on_submit(item) invoiced = True if (item.reference_dt == 'Clinical Procedure'): service_item = frappe.db.get_single_value('Healthcare Settings', 'clinical_procedure_consumable_item') if (service_item == item.item_code): frappe.db.set_value(item.reference_dt, item.reference_dn, 'consumption_invoiced', invoiced) else: frappe.db.set_value(item.reference_dt, item.reference_dn, 'invoiced', invoiced) elif (item.reference_dt not in ['Service Request', 'Medication Request']): frappe.db.set_value(item.reference_dt, item.reference_dn, 'invoiced', invoiced) if (item.reference_dt == 'Patient Appointment'): if frappe.db.get_value('Patient Appointment', item.reference_dn, 'procedure_template'): dt_from_appointment = 'Clinical Procedure' else: dt_from_appointment = 'Patient Encounter' manage_doc_for_appointment(dt_from_appointment, item.reference_dn, invoiced) elif (item.reference_dt == 'Lab Prescription'): manage_prescriptions(invoiced, item.reference_dt, item.reference_dn, 'Lab Test', 'lab_test_created') elif (item.reference_dt == 'Procedure Prescription'): manage_prescriptions(invoiced, item.reference_dt, item.reference_dn, 'Clinical Procedure', 'procedure_created') elif (item.reference_dt in ['Service Request', 'Medication Request']): hso = frappe.get_doc(item.reference_dt, item.reference_dn) if invoiced: hso.update_invoice_details(item.qty) else: hso.update_invoice_details((item.qty * (- 1))) if (item.reference_dt == 'Service Request'): template_map = {'Clinical Procedure Template': 'Clinical Procedure', 'Therapy Type': 'Therapy Session', 'Lab Test Template': 'Lab Test'}
class QuickbooksException(Exception):
    """Error raised for QuickBooks API failures.

    Carries a numeric error code and a detail string, and is iterable as
    (key, value) pairs so dict(exc) yields the structured error fields.
    """

    def __init__(self, message, error_code=0, detail=''):
        super(QuickbooksException, self).__init__(message)
        self.error_code = error_code
        self.detail = detail
        self.message = message

    def __str__(self) -> str:
        return f'''QB Exception {self.error_code}: {self.message} {self.detail}'''

    def __iter__(self):
        # Enables dict(exc) for structured logging/serialization.
        yield 'error_code', self.error_code
        yield 'detail', self.detail
        yield 'message', self.message
class Link(AbstractCrudObject):
    """Graph API ``Link`` node (auto-generated SDK style: one method per endpoint).

    Each endpoint method builds a FacebookRequest; when batched or pending the
    request object is returned, otherwise it is executed immediately.
    """

    def __init__(self, fbid=None, parent_id=None, api=None):
        self._isLink = True
        super(Link, self).__init__(fbid, parent_id, api)

    class Field(AbstractObject.Field):
        # Graph API field names; `from` is a Python keyword, hence `field_from`.
        caption = 'caption'
        created_time = 'created_time'
        description = 'description'
        field_from = 'from'
        icon = 'icon'
        id = 'id'
        link = 'link'
        message = 'message'
        multi_share_optimized = 'multi_share_optimized'
        name = 'name'
        privacy = 'privacy'
        via = 'via'

    def api_get(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """GET this node; returns the request when batched/pending, else the result."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Link, api_type='NODE', response_parser=ObjectParser(reuse_object=self))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def create_comment(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """POST a Comment on this link's /comments edge."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.comment import Comment
        param_types = {'attachment_id': 'string', 'attachment_share_url': 'string', 'attachment_url': 'string', 'comment_privacy_value': 'comment_privacy_value_enum', 'facepile_mentioned_ids': 'list<string>', 'feedback_source': 'string', 'is_offline': 'bool', 'message': 'string', 'nectar_module': 'string', 'object_id': 'string', 'parent_comment_id': 'Object', 'text': 'string', 'tracking': 'string'}
        enums = {'comment_privacy_value_enum': Comment.CommentPrivacyValue.__dict__.values()}
        request = FacebookRequest(node_id=self['id'], method='POST', endpoint='/comments', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Comment, api_type='EDGE', response_parser=ObjectParser(target_class=Comment, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    def get_likes(self, fields=None, params=None, batch=None, success=None, failure=None, pending=False):
        """GET the Profiles that liked this link (/likes edge)."""
        from facebook_business.utils import api_utils
        if ((batch is None) and ((success is not None) or (failure is not None))):
            api_utils.warning('`success` and `failure` callback only work for batch call.')
        from facebook_business.adobjects.profile import Profile
        param_types = {}
        enums = {}
        request = FacebookRequest(node_id=self['id'], method='GET', endpoint='/likes', api=self._api, param_checker=TypeChecker(param_types, enums), target_class=Profile, api_type='EDGE', response_parser=ObjectParser(target_class=Profile, api=self._api))
        request.add_params(params)
        request.add_fields(fields)
        if (batch is not None):
            request.add_to_batch(batch, success=success, failure=failure)
            return request
        elif pending:
            return request
        else:
            self.assure_call()
            return request.execute()

    # Field name -> Graph API type, used by the SDK's (de)serialisation layer.
    _field_types = {'caption': 'string', 'created_time': 'datetime', 'description': 'string', 'from': 'Object', 'icon': 'string', 'id': 'string', 'link': 'string', 'message': 'string', 'multi_share_optimized': 'bool', 'name': 'string', 'privacy': 'Privacy', 'via': 'Object'}

    # NOTE(review): in the generated SDK this carries @classmethod; the
    # decorator appears stripped in extraction — confirm against upstream.
    def _get_field_enum_info(cls):
        field_enum_info = {}
        return field_enum_info
def lazy_import():
    """Import the generated model dependencies on first use and publish them in
    this module's globals, breaking import cycles between fastly model modules."""
    from fastly.model.mutual_authentication_data import MutualAuthenticationData
    from fastly.model.mutual_authentication_response_attributes import MutualAuthenticationResponseAttributes
    from fastly.model.mutual_authentication_response_data_all_of import MutualAuthenticationResponseDataAllOf
    from fastly.model.relationships_for_mutual_authentication import RelationshipsForMutualAuthentication
    from fastly.model.type_mutual_authentication import TypeMutualAuthentication
    # Expose every imported class at module scope in one shot.
    exported = {
        'MutualAuthenticationData': MutualAuthenticationData,
        'MutualAuthenticationResponseAttributes': MutualAuthenticationResponseAttributes,
        'MutualAuthenticationResponseDataAllOf': MutualAuthenticationResponseDataAllOf,
        'RelationshipsForMutualAuthentication': RelationshipsForMutualAuthentication,
        'TypeMutualAuthentication': TypeMutualAuthentication,
    }
    globals().update(exported)
# NOTE(review): decorator names were mangled in extraction — these look like
# `@pytest.mark.integrationtest` and `@pytest.mark.skipif(...)`; confirm.
.integrationtest
.skipif((not has_postgres_configured), reason='PostgresSQL not configured')
def test_psycopg2_call_stored_procedure(instrument, postgres_connection, elasticapm_client):
    """callproc() on a stored procedure should emit a span named '<proc>()' with action 'exec'."""
    cursor = postgres_connection.cursor()
    cursor.execute('\n        CREATE OR REPLACE FUNCTION squareme(me INT)\n        RETURNS INTEGER\n        LANGUAGE SQL\n        AS $$\n        SELECT me*me;\n        $$;\n        ')
    elasticapm_client.begin_transaction('test')
    cursor.callproc('squareme', [2])
    result = cursor.fetchall()
    # Sanity-check the procedure itself before inspecting APM output.
    assert (result[0][0] == 4)
    elasticapm_client.end_transaction('test', 'OK')
    transactions = elasticapm_client.events[TRANSACTION]
    span = elasticapm_client.spans_for_transaction(transactions[0])[0]
    assert (span['name'] == 'squareme()')
    assert (span['action'] == 'exec')
class TestLoadClass():
    """Tests for provider class loading via load_class()."""

    # NOTE(review): decorator mangled in extraction — likely
    # `@pytest.mark.parametrize`; confirm.
    .parametrize('provider', sorted(global_providers()))
    def test_load_class_with_all_provider(self, provider: Optional[str]):
        # Every registered provider must resolve to a class.
        klass = load_class(provider)
        assert (klass is not None)

    def test_load_class_with_bad_provider(self):
        # Unknown provider names raise a descriptive ValueError.
        with pytest.raises(ValueError, match='No ProviderInterface class implemented for provider:'):
            load_class('NotAProvider')

    def test_load_class_with_none_provider(self):
        # With no provider given, load_class returns one entry per registered provider.
        klass = load_class()
        len_klass = len(klass)
        nb_providers = len(list_providers())
        assert (len_klass == nb_providers)
class BaseURL(_URLTuple):
    """Parsed-URL tuple (werkzeug-style) with convenience accessors.

    NOTE(review): the accessors below are properties — sibling code reads
    ``self.host`` / ``self.port`` as attributes — the ``@property`` decorators
    appear to have been stripped during extraction and are restored here.
    """

    __slots__ = ()

    def replace(self, **kwargs):
        """Return a copy of this URL with the given components replaced."""
        return self._replace(**kwargs)

    @property
    def host(self):
        """The host part of the netloc, without auth or port."""
        return self._split_host()[0]

    @property
    def ascii_host(self):
        """The host IDNA-encoded to ASCII (non-encodable chars are dropped)."""
        rv = self.host
        if (rv is not None) and isinstance(rv, str):
            try:
                rv = _encode_idna(rv)
            except UnicodeError:
                rv = rv.encode('ascii', 'ignore')
        return to_unicode(rv, 'ascii', 'ignore')

    @property
    def port(self):
        """The port as an int in [0, 65535], or None when absent/invalid."""
        try:
            rv = int(to_unicode(self._split_host()[1]))
            if 0 <= rv <= 65535:
                return rv
        except (ValueError, TypeError):
            pass

    @property
    def auth(self):
        """The raw ``user:pass`` auth portion of the netloc, or None."""
        return self._split_netloc()[0]

    @property
    def username(self):
        """The URL-decoded username, or None."""
        rv = self._split_auth()[0]
        if rv is not None:
            return url_unquote(rv)

    @property
    def raw_username(self):
        """The username without URL decoding."""
        return self._split_auth()[0]

    @property
    def password(self):
        """The URL-decoded password, or None."""
        rv = self._split_auth()[1]
        if rv is not None:
            return url_unquote(rv)

    @property
    def raw_password(self):
        """The password without URL decoding."""
        return self._split_auth()[1]

    def to_url(self):
        """Reassemble this tuple into a URL string."""
        return url_unparse(self)

    def decode_netloc(self):
        """Decode the netloc (IDNA host, unquoted auth) for display."""
        rv = _decode_idna(self.host or '')
        if ':' in rv:
            # IPv6 literals must be bracketed.
            rv = '[%s]' % rv
        port = self.port
        if port is not None:
            rv = '%s:%d' % (rv, port)
        auth = ':'.join(filter(None, [
            url_unquote((self.raw_username or ''), errors='strict', unsafe='/:%'),
            url_unquote((self.raw_password or ''), errors='strict', unsafe='/:%'),
        ]))
        if auth:
            # BUG FIX: original read `rv = ('%%s' % (auth, rv))`, which raises
            # TypeError (two args, zero placeholders). The correct form joins
            # auth and host with '@'.
            rv = '%s@%s' % (auth, rv)
        return rv

    def to_uri_tuple(self):
        """Return a URL tuple with all non-ASCII parts percent-encoded."""
        return url_parse(iri_to_uri(self).encode('ascii'))

    def to_iri_tuple(self):
        """Return a URL tuple with percent-encodings decoded to unicode."""
        return url_parse(uri_to_iri(self))

    def get_file_location(self, pathformat=None):
        """For ``file://`` URLs, return ``(host, path)``; otherwise ``(None, None)``.

        ``pathformat`` is 'windows' or 'posix'; when None it is taken from the
        running OS.
        """
        if self.scheme != 'file':
            return (None, None)
        path = url_unquote(self.path)
        host = self.netloc or None
        if pathformat is None:
            if os.name == 'nt':
                pathformat = 'windows'
            else:
                pathformat = 'posix'
        if pathformat == 'windows':
            # Convert '/C|/x' or '/C:/x' to 'C:/x'.
            if (path[:1] == '/') and path[1:2].isalpha() and (path[2:3] in '|:'):
                path = (path[1:2] + ':') + path[3:]
            windows_share = path[:3] in (('\\' * 3), ('/' * 3))
            import ntpath
            path = ntpath.normpath(path)
            # UNC shares carry the host inside the path component.
            if windows_share and (host is None):
                parts = path.lstrip('\\').split('\\', 1)
                if len(parts) == 2:
                    (host, path) = parts
                else:
                    host = parts[0]
                    path = ''
        elif pathformat == 'posix':
            import posixpath
            path = posixpath.normpath(path)
        else:
            raise TypeError('Invalid path format %s' % repr(pathformat))
        if host in ('127.0.0.1', '::1', 'localhost'):
            host = None
        return (host, path)

    def _split_netloc(self):
        # -> (auth-or-None, host[:port])
        if self._at in self.netloc:
            return self.netloc.split(self._at, 1)
        return (None, self.netloc)

    def _split_auth(self):
        # -> (user-or-None, password-or-None)
        auth = self._split_netloc()[0]
        if not auth:
            return (None, None)
        if self._colon not in auth:
            return (auth, None)
        return auth.split(self._colon, 1)

    def _split_host(self):
        # -> (host-or-None, port-string-or-None); handles [v6] brackets.
        rv = self._split_netloc()[1]
        if not rv:
            return (None, None)
        if not rv.startswith(self._lbracket):
            if self._colon in rv:
                return rv.split(self._colon, 1)
            return (rv, None)
        idx = rv.find(self._rbracket)
        if idx < 0:
            return (rv, None)
        host = rv[1:idx]
        rest = rv[(idx + 1):]
        if rest.startswith(self._colon):
            return (host, rest[1:])
        return (host, None)
def _create_plot_component():
    """Build a scatter Plot of 100 random (x, y) points wired up with pan, zoom,
    and a regression-lasso tool plus its overlay."""
    data = ArrayPlotData(x=random(100), y=random(100))
    plot = Plot(data)
    scatter = plot.plot(('x', 'y'), color='blue', type='scatter')[0]
    plot.padding = 50
    # Right-drag pans; default zoom overlay.
    plot.tools.append(PanTool(plot, drag_button='right'))
    plot.overlays.append(ZoomTool(plot))
    # Lasso selection drives the regression overlay.
    lasso = RegressionLasso(scatter, selection_datasource=scatter.index)
    scatter.tools.append(lasso)
    scatter.overlays.append(RegressionOverlay(scatter, lasso_selection=lasso))
    return plot
def editUpdate(tdb, cmdenv, stationID):
    """Dump a station's prices to a temp file, open the user's editor on it, and
    re-import the file if (and only if) it was modified.

    On any failure the temp file is preserved (saved as prices.last) so the
    user's edits are not lost.
    """
    cmdenv.DEBUG0("'update' mode with editor. editor:{} station:{}", cmdenv.editor, cmdenv.origin)
    (editor, editorArgs) = (cmdenv.editor, [])
    if (cmdenv.editing == 'sublime'):
        cmdenv.DEBUG0('Sublime mode')
        editor = (editor or getEditorPaths(cmdenv, 'sublime', 'SUBLIME_EDITOR', ['Sublime Text 3', 'Sublime Text 2'], 'sublime_text.exe', 'subl'))
        # --wait keeps sublime in the foreground until the file is closed.
        editorArgs += ['--wait']
    elif (cmdenv.editing == 'npp'):
        cmdenv.DEBUG0('Notepad++ mode')
        editor = (editor or getEditorPaths(cmdenv, 'notepad++', 'NOTEPADPP_EDITOR', ['Notepad++'], 'notepad++.exe', 'notepad++'))
        if (not cmdenv.quiet):
            print("NOTE: You'll need to exit Notepad++ when you are done.")
    elif (cmdenv.editing == 'vim'):
        cmdenv.DEBUG0('VI iMproved mode')
        if (not editor):
            # Probe common Git-for-Windows vim install dirs (vim 7.0 - 7.4).
            vimDirs = ['Git\\share\\vim\\vim{}'.format(vimVer) for vimVer in range(70, 75)]
            editor = getEditorPaths(cmdenv, 'vim', 'EDITOR', vimDirs, 'vim.exe', 'vim')
    elif (cmdenv.editing == 'notepad'):
        cmdenv.DEBUG0('Notepad mode')
        editor = (editor or 'notepad.exe')
    try:
        envArgs = os.environ['EDITOR_ARGS']
        if envArgs:
            editorArgs += envArgs.split(' ')
    except KeyError:
        pass
    tmpPath = getTemporaryPath(cmdenv)
    absoluteFilename = None
    dbFilename = tdb.dbFilename
    try:
        # Select which price elements get dumped for editing.
        elementMask = (prices.Element.basic | prices.Element.supply)
        if cmdenv.timestamps:
            elementMask |= prices.Element.timestamp
        if cmdenv.all:
            elementMask |= prices.Element.blanks
        with tmpPath.open('w', encoding='utf-8') as tmpFile:
            absoluteFilename = str(tmpPath.resolve())
            prices.dumpPrices(dbFilename, elementMask, file=tmpFile, stationID=stationID, defaultZero=cmdenv.forceNa, debug=cmdenv.debug)
        # Snapshot mtime/ctime so we can detect whether the editor saved changes.
        preStat = tmpPath.stat()
        preStamp = max(preStat.st_mtime, preStat.st_ctime)
        editorCommandLine = (([editor] + editorArgs) + [absoluteFilename])
        cmdenv.DEBUG0('Invoking [{}]', ' '.join(editorCommandLine))
        try:
            result = subprocess.call(editorCommandLine)
        except FileNotFoundError:
            raise CommandLineError('Unable to launch requested editor: {}'.format(editorCommandLine))
        if (result != 0):
            raise CommandLineError("NO DATA IMPORTED: Your editor exited with a 'failed' exit code ({})".format(result))
        postStat = tmpPath.stat()
        postStamp = max(postStat.st_mtime, postStat.st_ctime)
        if (postStamp == preStamp):
            import random
            print('- No changes detected - doing nothing. {}'.format(random.choice(['Brilliant!', "I'll get my coat.", "I ain't seen you.", "You ain't seen me", '... which was nice', 'Bingo!', 'Scorchio!', 'Boutros, boutros, ghali!', "I'm Ed Winchester!", 'Suit you, sir! Oh!'])))
        else:
            cache.importDataFromFile(tdb, cmdenv, tmpPath)
            saveCopyOfChanges(cmdenv, dbFilename, stationID)
        tmpPath.unlink()
        tmpPath = None
    except Exception as e:
        print('ERROR:', e)
        print()
        print('*** YOUR UPDATES WILL BE SAVED AS {} ***'.format('prices.last'))
        # Preserve the user's edits before propagating/bailing.
        if (absoluteFilename and tmpPath):
            saveTemporaryFile(tmpPath)
        if ('EXCEPTIONS' in os.environ):
            raise e
def query_consumption(domain: str, session: Session, target_datetime: (datetime | None)=None) -> (str | None):
    """Fetch total load (consumption) for a bidding zone from the ENTSO-E API.

    documentType A65 / processType A16 select the realised system total load
    document — per the ENTSO-E transparency API code lists; confirm if reused.
    """
    request_params = {
        'documentType': 'A65',
        'processType': 'A16',
        'outBiddingZone_Domain': domain,
    }
    return query_ENTSOE(session, request_params, target_datetime=target_datetime, function_name=query_consumption.__name__)
def TalibFilter(talib_function_name, condition=None, **additional_parameters):
    """Build a signal-filter closure around a TA-Lib indicator.

    The returned callable ``ret(ohlcv)`` computes the named indicator, applies
    ``ret.condition`` to it, and returns ``(signals, figures)``. Indicator
    parameters are exposed as attributes on the closure (``ret.<param>``) so
    callers can tune them after construction.

    NOTE(review): the original contained a syntactically invalid stray line
    ``(condition=condition, **f.parameters, additional_parameters=...)`` —
    apparently a mangled attribute-setup step, since the body reads these back
    via ``getattr(ret, pn)``. Reconstructed below as explicit setattr calls;
    confirm against the original source.
    """
    from talib import abstract
    import talib
    abstract_fn = getattr(abstract, talib_function_name)
    raw_fn = getattr(talib, talib_function_name)

    def ret(ohlcv):
        # Read current (possibly user-tuned) parameter values off the closure.
        parameters = {pn: getattr(ret, pn) for pn in abstract_fn.parameters}
        try:
            output = abstract_fn(ohlcv, **parameters)
        except Exception:
            # Abstract API can reject some inputs; fall back to the raw
            # function on the close series.
            output = raw_fn(ohlcv.close, **parameters)
        if isinstance(output, (list, tuple)):
            # Multi-output indicator -> DataFrame with one column per output.
            output = pd.DataFrame(np.array(output).T, index=ohlcv.index, columns=abstract_fn.output_names)
        if isinstance(output, np.ndarray):
            output = pd.Series(output, index=ohlcv.index)
        # Two-arg conditions get (ohlcv, output); three-arg ones also receive
        # the extra-parameter dict (current attribute values when available).
        # getfullargspec replaces inspect.getargspec, removed in Python 3.11.
        if len(inspect.getfullargspec(ret.condition)[0]) == 2:
            signals = ret.condition(ohlcv, output)
        else:
            try:
                parameters = {pn: getattr(ret, pn) for pn in additional_parameters}
            except AttributeError:
                parameters = additional_parameters
            signals = ret.condition(ohlcv, output, parameters)
        figures = {}
        # Overlap studies are drawn on the price chart; everything else below it.
        group = 'overlaps' if abstract_fn.info['group'] == 'Overlap Studies' else 'figures'
        figures[group] = {abstract_fn.info['name']: output}
        return (signals, figures)

    # Expose tunables as closure attributes (see NOTE above).
    ret.condition = condition
    for pname, default in abstract_fn.parameters.items():
        setattr(ret, pname, default)
    for pname, value in additional_parameters.items():
        setattr(ret, pname, value)
    return ret
def genOneModelMeta(args, model_name, model):
    """Generate and write the benchmark meta JSON for one caffe2 model,
    downloading its files into the model cache and recording their md5s."""
    meta = copy.deepcopy(json_template)
    model_dir = (model['model_dir'] if ('model_dir' in model) else model_name)
    desc = model['desc']
    # NOTE(review): replacing the empty string '' inserts `desc` between every
    # character — the placeholder literal (e.g. '{desc}') looks stripped by
    # extraction; confirm against the original source.
    meta['model']['description'] = meta['model']['description'].replace('', desc)
    meta['model']['name'] = model_name
    inputs = copy.deepcopy(model['inputs'])
    for iname in inputs:
        inp = inputs[iname]
        if ('type' not in inp):
            # Default input dtype when the model spec omits it.
            inp['type'] = 'float'
    meta['tests'][0]['inputs'] = inputs
    for fname in meta['model']['files']:
        f = meta['model']['files'][fname]
        # Same stripped-placeholder concern as the description above.
        f['location'] = f['location'].replace('', model_dir)
        target = os.path.join(*[args.model_cache, 'caffe2', model_name, f['filename']])
        md5 = downloadFile(f['location'], target)
        if (md5 is None):
            # Download failed: abort without writing a meta file.
            return
        f['md5'] = md5
    path = [args.specifications_dir, 'models/caffe2', model_name, (model_name + '.json')]
    filename = os.path.join(*path)
    if ((not os.path.isfile(filename)) or args.overwrite_meta):
        # NOTE(review): `path` is a list here, so os.path.isdir(path) is always
        # False and os.makedirs(path) would raise TypeError — this branch looks
        # like it intended os.path.dirname(filename); confirm.
        if (not os.path.isdir(path)):
            os.makedirs(path)
        s = json.dumps(meta, indent=2, sort_keys=True)
        with open(filename, 'w') as f:
            f.write(s)
        logger.info('Writing {}'.format(filename))
class NotionPageLoader():
    """Minimal Notion page reader.

    Walks a page's block tree via the Notion REST API and returns the
    concatenated rich-text content as NotionDocument objects.
    """

    # FIX: the URL literal was stripped to `'` in the damaged source; restored
    # from the documented Notion "Retrieve block children" endpoint.
    BLOCK_CHILD_URL_TMPL = 'https://api.notion.com/v1/blocks/{block_id}/children'

    def __init__(self, integration_token: Optional[str]=None) -> None:
        """Use the given integration token, falling back to the
        NOTION_INTEGRATION_TOKEN environment variable.

        Raises:
            ValueError: if no token is supplied and the env var is unset.
        """
        if (integration_token is None):
            integration_token = os.getenv('NOTION_INTEGRATION_TOKEN')
            if (integration_token is None):
                raise ValueError('Must specify `integration_token` or set environment variable `NOTION_INTEGRATION_TOKEN`.')
        self.token = integration_token
        self.headers = {'Authorization': ('Bearer ' + self.token), 'Content-Type': 'application/json', 'Notion-Version': '2022-06-28'}

    def _read_block(self, block_id: str, num_tabs: int=0) -> str:
        """Recursively read a block's children; nested blocks are indented with tabs."""
        done = False
        result_lines_arr = []
        cur_block_id = block_id
        while (not done):
            block_url = self.BLOCK_CHILD_URL_TMPL.format(block_id=cur_block_id)
            res = requests.get(block_url, headers=self.headers)
            data = res.json()
            for result in data['results']:
                result_type = result['type']
                result_obj = result[result_type]
                cur_result_text_arr = []
                if ('rich_text' in result_obj):
                    for rich_text in result_obj['rich_text']:
                        # Skip mentions/equations that have no 'text' payload.
                        if ('text' in rich_text):
                            text = rich_text['text']['content']
                            prefix = ('\t' * num_tabs)
                            cur_result_text_arr.append((prefix + text))
                result_block_id = result['id']
                has_children = result['has_children']
                if has_children:
                    children_text = self._read_block(result_block_id, num_tabs=(num_tabs + 1))
                    cur_result_text_arr.append(children_text)
                cur_result_text = '\n'.join(cur_result_text_arr)
                result_lines_arr.append(cur_result_text)
            if (data['next_cursor'] is None):
                done = True
            else:
                # NOTE(review): pagination reuses next_cursor as the block id in
                # the URL path rather than passing a start_cursor query param —
                # preserved as-is; confirm against the Notion API docs.
                cur_block_id = data['next_cursor']
        result_lines = '\n'.join(result_lines_arr)
        return result_lines

    def load_data(self, page_ids: List[str]) -> List[NotionDocument]:
        """Read each page id into a NotionDocument (page text + page_id extra info)."""
        docs = []
        for page_id in page_ids:
            page_text = self._read_block(page_id)
            docs.append(NotionDocument(text=page_text, extra_info={'page_id': page_id}))
        return docs
def test_request_calculate_order_amount(client, db):
    """POST /v1/orders/calculate-amount applies a 10% discount code with
    tax-excluded tax and returns the expected totals."""
    discount_code = DiscountCodeTicketSubFactory(type='percent', value=10.0, tickets=[])
    tickets_dict = _create_taxed_tickets(db, tax_included=False, discount_code=discount_code)
    db.session.commit()
    response = client.post('/v1/orders/calculate-amount', content_type='application/json', data=json.dumps({'tickets': tickets_dict, 'discount-code': str(discount_code.id), 'discount_verify': True}))
    assert (response.status_code == 200)
    amount_data = json.loads(response.data)
    # Expected values derive from the fixture's ticket prices with the 10%
    # discount applied and tax added on top (tax not included in price).
    assert (amount_data['sub_total'] == 4021.87)
    assert (amount_data['total'] == 4745.81)
    assert (amount_data['tax']['included'] is False)
    _assert_tax_data_discount(amount_data)
    assert (amount_data['tax']['amount'] == 723.94)
def _paint_names(paints) -> List[str]:
    """Summarise a sequence of COLR Paint tables as display names.

    PaintGlyph entries contribute their glyph name; PaintColrLayers entries
    contribute a 'Layers[first:end]' slice label. Other paint formats are
    skipped.
    """
    names: List[str] = []
    for paint in paints:
        fmt = paint.Format
        if fmt == int(ot.PaintFormat.PaintGlyph):
            names.append(paint.Glyph)
        elif fmt == int(ot.PaintFormat.PaintColrLayers):
            start = paint.FirstLayerIndex
            end = start + paint.NumLayers
            names.append(f'Layers[{start}:{end}]')
    return names
class UpdateStatusMutation(relay.ClientIDMutation):
    """Relay mutation that updates the status of the object identified by `id`."""

    class Input():
        # Required mutation inputs.
        id = graphene.ID(required=True)
        status = graphene.String(required=True)

    # NOTE(review): graphene defines mutate_and_get_payload as a classmethod;
    # the decorator may have been stripped in extraction — confirm.
    def mutate_and_get_payload(self, info: ResolveInfo, **kwargs: Any) -> 'UpdateStatusMutation':
        # The DB session is carried on the GraphQL execution context.
        session = info.context.get('session')
        update_status(session, kwargs['id'], kwargs['status'])
        return UpdateStatusMutation()
class ColorfulcloudsSensor(Entity):
    """Home Assistant sensor entity backed by a Colorfulclouds (Caiyun) coordinator.

    NOTE(review): HA entity members such as name/unique_id/state/device_info are
    normally @property-decorated; the decorators look stripped by extraction —
    confirm against the original component source.
    """

    def __init__(self, name, kind, coordinator, forecast_day=None):
        self._name = name
        self.kind = kind
        self.coordinator = coordinator
        self._device_class = None
        self._attrs = {ATTR_ATTRIBUTION: ATTRIBUTION}
        # API reports 'metric:v2' for metric unit system.
        self._unit_system = ('Metric' if (self.coordinator.data['is_metric'] == 'metric:v2') else 'Imperial')
        self.forecast_day = forecast_day

    def name(self):
        # Forecast sensors append the day offset (e.g. '... 2d').
        if (self.forecast_day is not None):
            return f'{self._name} {FORECAST_SENSOR_TYPES[self.kind][ATTR_LABEL]} {self.forecast_day}d'
        return f'{self._name} {SENSOR_TYPES[self.kind][ATTR_LABEL]}'

    def unique_id(self):
        _LOGGER.info('sensor_unique_id: %s', self.coordinator.data['location_key'])
        return f"{self.coordinator.data['location_key']}-{self.kind}".lower()

    def device_info(self):
        return {'identifiers': {(DOMAIN, self.coordinator.data['location_key'])}, 'name': self._name, 'manufacturer': MANUFACTURER, 'entry_type': DeviceEntryType.SERVICE}

    def should_poll(self):
        # Coordinator pushes updates; HA polling is unnecessary.
        return False

    def available(self):
        return self.coordinator.last_update_success

    def state(self):
        # Map sensor kind to the matching field of the realtime payload.
        if (self.kind == 'apparent_temperature'):
            return self.coordinator.data['result']['realtime'][self.kind]
        if (self.kind == 'pressure'):
            return self.coordinator.data['result']['realtime'][self.kind]
        if (self.kind == 'temperature'):
            return self.coordinator.data['result']['realtime'][self.kind]
        if (self.kind == 'humidity'):
            # API reports humidity as a 0..1 fraction; convert to percent.
            return round((float(self.coordinator.data['result']['realtime'][self.kind]) * 100))
        if (self.kind == 'cloudrate'):
            return self.coordinator.data['result']['realtime'][self.kind]
        if (self.kind == 'visibility'):
            return self.coordinator.data['result']['realtime'][self.kind]
        if (self.kind == 'WindSpeed'):
            return self.coordinator.data['result']['realtime']['wind']['speed']
        if (self.kind == 'WindDirection'):
            return self.coordinator.data['result']['realtime']['wind']['direction']
        if (self.kind == 'comfort'):
            return self.coordinator.data['result']['realtime']['life_index']['comfort']['index']
        if (self.kind == 'ultraviolet'):
            return self.coordinator.data['result']['realtime']['life_index']['ultraviolet']['index']
        if (self.kind == 'precipitation'):
            return self.coordinator.data['result']['realtime']['precipitation']['local']['intensity']
        if (self.kind == 'pm25'):
            return self.coordinator.data['result']['realtime']['air_quality']['pm25']

    def icon(self):
        return SENSOR_TYPES[self.kind][ATTR_ICON]

    def device_class(self):
        return SENSOR_TYPES[self.kind][ATTR_DEVICE_CLASS]

    def unit_of_measurement(self):
        return SENSOR_TYPES[self.kind][self._unit_system]

    def extra_state_attributes(self):
        if (self.kind == 'ultraviolet'):
            self._attrs['desc'] = self.coordinator.data['result']['realtime']['life_index']['ultraviolet']['desc']
        elif (self.kind == 'comfort'):
            self._attrs['desc'] = self.coordinator.data['result']['realtime']['life_index']['comfort']['desc']
        elif (self.kind == 'precipitation'):
            self._attrs['datasource'] = self.coordinator.data['result']['realtime']['precipitation']['local']['datasource']
            # 'nearest' block is only present when the API reports nearby rain.
            if ('nearest' in str(self.coordinator.data['result']['realtime']['precipitation'])):
                self._attrs['nearest_intensity'] = self.coordinator.data['result']['realtime']['precipitation']['nearest']['intensity']
                self._attrs['nearest_distance'] = self.coordinator.data['result']['realtime']['precipitation']['nearest']['distance']
            else:
                self._attrs['nearest_intensity'] = self.coordinator.data['result']['realtime']['precipitation']['local']['intensity']
                # NOTE(review): 'nearest_distance' is filled from
                # ['local']['datasource'] here — looks like a copy-paste slip;
                # confirm the intended field.
                self._attrs['nearest_distance'] = self.coordinator.data['result']['realtime']['precipitation']['local']['datasource']
        return self._attrs

    def entity_registry_enabled_default(self):
        # Optional sensors start disabled in the entity registry.
        return bool((self.kind not in OPTIONAL_SENSORS))

    async def async_added_to_hass(self):
        # Re-render whenever the coordinator publishes new data.
        self.async_on_remove(self.coordinator.async_add_listener(self.async_write_ha_state))

    async def async_update(self):
        (await self.coordinator.async_request_refresh())
def _get_sub_groups(group, parent_path):
    """Recursively build Hdf5GroupNode entries for every HDF5 sub-group of `group`.

    Paths are accumulated as '<parent_path><name>/' so each node knows its full
    location in the file.
    """
    nodes = []
    for name, child in group.items():
        # Datasets are handled by _get_sub_arrays; only descend into groups.
        if not isinstance(child, h5py.Group):
            continue
        path = f'{parent_path}{name}/'
        child_arrays = _get_sub_arrays(child, path)
        child_groups = _get_sub_groups(child, path)
        nodes.append(Hdf5GroupNode(
            name=name,
            path=path,
            parent_path=parent_path,
            arrays=child_arrays,
            subgroups=child_groups,
            groups_and_arrays=(child_groups + child_arrays),
        ))
    return nodes
def dipole3d_01(ax, da, A, bx, db, B, R):
    """Auto-generated (symbolic-algebra) dipole integrals between an s-type
    Gaussian on center A (exponent ax, contraction da) and a p-type Gaussian on
    center B (exponent bx, contraction db), with multipole origin R.

    Returns a (3, 1, 3) array indexed as (dipole component, s function,
    p component) — presumably in that order; confirm against the generator.
    x0..x15 are common subexpressions hoisted by the code generator; do not
    reorder, later terms reuse earlier ones.
    """
    result = numpy.zeros((3, 1, 3), dtype=float)
    x0 = ((ax + bx) ** (- 1.0))
    x1 = (0.5 * x0)
    x2 = ((- x0) * ((ax * A[0]) + (bx * B[0])))
    x3 = (x2 + B[0])
    x4 = (x2 + R[0])
    x5 = ((ax * bx) * x0)
    # NOTE(review): the leading constant '5.' may have lost digits in extraction
    # (generated normalisation constants are usually full-precision) — confirm.
    x6 = ((((5. * da) * db) * (x0 ** 1.5)) * numpy.exp(((- x5) * ((((A[0] - B[0]) ** 2) + ((A[1] - B[1]) ** 2)) + ((A[2] - B[2]) ** 2)))))
    x7 = ((- x0) * ((ax * A[1]) + (bx * B[1])))
    x8 = (x7 + B[1])
    x9 = (x4 * x6)
    x10 = ((- x0) * ((ax * A[2]) + (bx * B[2])))
    x11 = (x10 + B[2])
    x12 = (x7 + R[1])
    x13 = (x12 * x6)
    x14 = (x10 + R[2])
    x15 = (x14 * x6)
    result[(0, 0, 0)] = numpy.sum((x6 * (x1 + (x3 * x4))))
    result[(0, 0, 1)] = numpy.sum((x8 * x9))
    result[(0, 0, 2)] = numpy.sum((x11 * x9))
    result[(1, 0, 0)] = numpy.sum((x13 * x3))
    result[(1, 0, 1)] = numpy.sum((x6 * (x1 + (x12 * x8))))
    result[(1, 0, 2)] = numpy.sum((x11 * x13))
    result[(2, 0, 0)] = numpy.sum((x15 * x3))
    result[(2, 0, 1)] = numpy.sum((x15 * x8))
    result[(2, 0, 2)] = numpy.sum((x6 * (x1 + (x11 * x14))))
    return result
def disable_defender():
    """Disable Windows firewall profiles and invoke a PowerShell script that
    disables Windows Defender.

    SECURITY WARNING(review): this function deliberately weakens host security
    (firewall off, Defender off, unrestricted execution policy). Do not run
    outside an isolated analysis environment; treat the containing code as
    potentially malicious.
    """
    # NOTE(review): subprocess.call() does not accept capture_output (that is a
    # subprocess.run() parameter) — the first two calls raise TypeError as
    # written; confirm intended behavior before relying on this code.
    subprocess.call(['netsh', 'advfirewall', 'set', 'publicprofile', 'state', 'off'], shell=True, capture_output=True)
    subprocess.call(['netsh', 'advfirewall', 'set', 'privateprofile', 'state', 'off'], shell=True, capture_output=True)
    subprocess.call(['powershell.exe', '-ExecutionPolicy', 'Unrestricted', '-File', 'Disable-WindowsDefender.ps1'])
class OptionPlotoptionsGaugeSonificationContexttracksMappingVolume(Options):
    """Auto-generated Highcharts option wrapper for
    plotOptions.gauge.sonification.contextTracks.mapping.volume.

    NOTE(review): each name below appears twice (getter then setter). In the
    generator's normal output these carry @property / @<name>.setter
    decorators, which look stripped by extraction; as written the later def
    shadows the earlier one — confirm against the generated source.
    """

    def mapFunction(self):
        return self._config_get(None)

    def mapFunction(self, value: Any):
        self._config(value, js_type=False)

    def mapTo(self):
        return self._config_get(None)

    def mapTo(self, text: str):
        self._config(text, js_type=False)

    def max(self):
        return self._config_get(None)

    def max(self, num: float):
        self._config(num, js_type=False)

    def min(self):
        return self._config_get(None)

    def min(self, num: float):
        self._config(num, js_type=False)

    def within(self):
        return self._config_get(None)

    def within(self, value: Any):
        self._config(value, js_type=False)
# NOTE(review): decorator names were stripped in extraction — these look like
# `@click.command()` followed by `@click.option(...)` lines; confirm against
# the original source.
()
('--lut_path', required=True, type=str)
('--pickle_dir', required=False, default=False, type=bool)
('--agent', required=False, default='offline', type=str)
('--strategy_path', required=False, default='', type=str)
('--debug_quick_start/--no_debug_quick_start', default=False)
def run_terminal_app(lut_path: str, pickle_dir: bool, agent: str='offline', strategy_path: str='', debug_quick_start: bool=False):
    """Interactive terminal poker app: a human (right seat) plays short-deck
    poker against two bots driven by a random or offline (precomputed) strategy.

    The main loop renders the ASCII table each tick, routes keyboard input when
    it is the human's turn, and samples the bot strategy otherwise.
    """
    term = Terminal()
    log = AsciiLogger(term)
    n_players: int = 3
    if debug_quick_start:
        # Skip loading the card lookup table for a fast dev start.
        state: ShortDeckPokerState = new_game(n_players, {}, load_card_lut=False)
    else:
        state: ShortDeckPokerState = new_game(n_players, lut_path=lut_path, pickle_dir=pickle_dir)
    n_table_rotations: int = 0
    selected_action_i: int = 0
    positions = ['left', 'middle', 'right']
    names = {'left': 'BOT 1', 'middle': 'BOT 2', 'right': 'HUMAN'}
    if ((not debug_quick_start) and (agent in {'offline', 'online'})):
        offline_strategy_dict = joblib.load(strategy_path)
        offline_strategy = offline_strategy_dict['strategy']
        # Drop unused (and large) entries from the loaded dict.
        del offline_strategy_dict['pre_flop_strategy']
        del offline_strategy_dict['regret']
    else:
        offline_strategy = {}
    user_results: UserResults = UserResults()
    with term.cbreak(), term.hidden_cursor():
        while True:
            # --- build per-seat ASCII renderings for the current state ---
            ascii_players: Dict[(str, AsciiPlayer)] = {}
            state_players = rotate_list(state.players[::(- 1)], n_table_rotations)
            og_name_to_position = {}
            og_name_to_name = {}
            for (player_i, player) in enumerate(state_players):
                position = positions[player_i]
                is_human = (names[position].lower() == 'human')
                ascii_players[position] = AsciiPlayer(*player.cards, term=term, name=names[position], og_name=player.name, hide_cards=((not is_human) and (not state.is_terminal)), folded=(not player.is_active), is_turn=player.is_turn, chips_in_pot=player.n_bet_chips, chips_in_bank=player.n_chips, is_small_blind=player.is_small_blind, is_big_blind=player.is_big_blind, is_dealer=player.is_dealer)
                og_name_to_position[player.name] = position
                og_name_to_name[player.name] = names[position]
                if player.is_turn:
                    current_player_name = names[position]
            public_cards = AsciiCardCollection(*state.community_cards)
            if state.is_terminal:
                # Hand over: only meta-actions remain, and the human picks one.
                legal_actions = ['quit', 'new game']
                human_should_interact = True
            else:
                og_current_name = state.current_player.name
                human_should_interact = (og_name_to_position[og_current_name] == 'right')
                if human_should_interact:
                    legal_actions = state.legal_actions
                else:
                    legal_actions = []
            # --- render ---
            print(((term.home + term.white) + term.clear))
            print_header(term, state, og_name_to_name)
            print_table(term, ascii_players, public_cards, n_table_rotations, n_chips_in_pot=state._table.pot.total)
            print_footer(term, selected_action_i, legal_actions)
            print_log(term, log)
            if human_should_interact:
                # --- human input: arrows cycle actions, enter confirms ---
                selected_action_i %= len(legal_actions)
                key = term.inkey(timeout=None)
                if (key.name == 'q'):
                    log.info(term.pink('quit'))
                    break
                elif (key.name == 'KEY_LEFT'):
                    selected_action_i -= 1
                    if (selected_action_i < 0):
                        selected_action_i = (len(legal_actions) - 1)
                elif (key.name == 'KEY_RIGHT'):
                    selected_action_i = ((selected_action_i + 1) % len(legal_actions))
                elif (key.name == 'KEY_ENTER'):
                    action = legal_actions[selected_action_i]
                    if (action == 'quit'):
                        user_results.add_result(strategy_path, agent, state, og_name_to_name)
                        log.info(term.pink('quit'))
                        break
                    elif (action == 'new game'):
                        user_results.add_result(strategy_path, agent, state, og_name_to_name)
                        log.clear()
                        log.info(term.green('new game'))
                        if debug_quick_start:
                            state: ShortDeckPokerState = new_game(n_players, state.card_info_lut, load_card_lut=False)
                        else:
                            state: ShortDeckPokerState = new_game(n_players, state.card_info_lut)
                        # Rotate seats so the dealer button moves each hand.
                        n_table_rotations -= 1
                        if (n_table_rotations < 0):
                            n_table_rotations = (n_players - 1)
                    else:
                        log.info(term.green(f'{current_player_name} chose {action}'))
                        state: ShortDeckPokerState = state.apply_action(action)
            else:
                # --- bot turn: random or offline-strategy action selection ---
                if (agent == 'random'):
                    action = random.choice(state.legal_actions)
                    time.sleep(0.8)
                elif (agent == 'offline'):
                    # Fall back to uniform play for unseen info sets.
                    default_strategy = {action: (1 / len(state.legal_actions)) for action in state.legal_actions}
                    this_state_strategy = offline_strategy.get(state.info_set, default_strategy)
                    # Re-normalise in case the stored strategy doesn't sum to 1.
                    total = sum(this_state_strategy.values())
                    this_state_strategy = {k: (v / total) for (k, v) in this_state_strategy.items()}
                    actions = list(this_state_strategy.keys())
                    probabilties = list(this_state_strategy.values())
                    action = np.random.choice(actions, p=probabilties)
                    time.sleep(0.8)
                log.info(f'{current_player_name} chose {action}')
                state: ShortDeckPokerState = state.apply_action(action)
def age(action, config):
    """Assemble the schema element list for the 'age' filter type, combining the
    fixed elements with the action/config-dependent ones from _age_elements."""
    logger = logging.getLogger('curator.defaults.filtertypes.age')
    builders = (
        filter_elements.direction,
        filter_elements.unit,
        filter_elements.unit_count,
        filter_elements.unit_count_pattern,
        filter_elements.epoch,
        filter_elements.exclude,
    )
    elements = [build() for build in builders]
    elements.extend(_age_elements(action, config))
    logger.debug('AGE FILTER = %s', elements)
    return elements
class SensorsTemperaturesCommandHandler(MethodCommandHandler):
    """Handles `sensors_temperatures` queries with `source;label;param` selectors."""

    def __init__(self) -> None:
        super().__init__('sensors_temperatures')
        return

    def handle(self, params: str) -> Payload:
        """Resolve a selector against psutil-style sensors_temperatures() output.

        Selector grammar (split by `split()`): `*` selects everything at that
        level; a trailing `;` requests string (joined) output instead of a
        structure; a label may be a numeric index or a sensor label.

        Raises:
            Exception: when the source or device label is unknown.
        """
        tup = self.get_value()
        assert isinstance(tup, dict)
        (source, param) = split(params)
        if ((source == '*') or (source == '*;')):
            # All sources -> {source: [current temps...]}.
            d = {k: [i.current for i in v] for (k, v) in tup.items()}
            return string_from_dict_optionally(d, source.endswith(';'))
        elif (source in tup):
            llist = tup[source]
            (label, param) = split(param)
            if ((label == '') and (param == '')):
                # Bare source -> list of current temperatures.
                return [i.current for i in llist]
            elif ((label == '*') or (label == '*;')):
                # All devices of this source as full dicts.
                llist = [i._asdict() for i in llist]
                return string_from_dict_optionally(llist, label.endswith(';'))
            else:
                # Label may be an index or a sensor label string.
                temps = (llist[int(label)] if label.isdigit() else next((x for x in llist if (x.label == label)), None))
                if (temps is None):
                    raise Exception((((("Device '" + label) + "' in '") + self.name) + "' is not supported"))
                if (param == ''):
                    return temps.current
                elif ((param == '*') or (param == '*;')):
                    return string_from_dict_optionally(temps._asdict(), param.endswith(';'))
                else:
                    # A single named field of the temperature reading.
                    return temps._asdict()[param]
        raise Exception((((("Sensor '" + source) + "' in '") + self.name) + "' is not supported"))
def filter_firewall_policy64_data(json):
    """Project the candidate payload onto the firewall policy64 option set,
    dropping unknown attributes and None values."""
    option_list = ['action', 'comments', 'dstaddr', 'dstintf', 'fixedport', 'ippool', 'logtraffic', 'logtraffic_start', 'name', 'per_ip_shaper', 'permit_any_host', 'policyid', 'poolname', 'schedule', 'service', 'srcaddr', 'srcintf', 'status', 'tcp_mss_receiver', 'tcp_mss_sender', 'traffic_shaper', 'traffic_shaper_reverse', 'uuid']
    cleaned = remove_invalid_fields(json)
    # Keep only known, non-None attributes (option_list order preserved).
    return {key: cleaned[key] for key in option_list if ((key in cleaned) and (cleaned[key] is not None))}
def ensure_prediction_column_is_string(*, prediction_column: Optional[Union[(str, Sequence)]], current_data: DataFrame, reference_data: DataFrame, threshold: float=0.5) -> Optional[str]:
    """Validate that the prediction column spec is a single column name.

    Returns the column name (or None) unchanged; any other type yields None.

    Raises:
        NotImplementedError: when a list of columns is given — multi-column
            predictions are not supported by the Spark engine.
    """
    if isinstance(prediction_column, list):
        raise NotImplementedError('SparkEngine do not support multiple prediction columns yet')
    if (prediction_column is None) or isinstance(prediction_column, str):
        return prediction_column
    # Any other spec type (e.g. tuple) is treated as "no prediction column".
    return None
# NOTE(review): decorator name stripped in extraction — likely the project's
# swagger `@doc(tags=..., description=...)` decorator; confirm.
(tags=['efiling'], description=docs.EFILE_REPORTS)
class EFilingHouseSenateSummaryView(views.ApiResource):
    """Efiling endpoint for house/senate (base F3) summary filings."""

    model = models.BaseF3Filing
    schema = schemas.BaseF3FilingSchema
    page_schema = schemas.BaseF3FilingPageSchema
    # Query-parameter -> column filter declarations consumed by ApiResource.
    filter_range_fields = [(('min_receipt_date', 'max_receipt_date'), models.BaseFiling.receipt_date)]
    filter_multi_fields = [('file_number', model.file_number), ('committee_id', model.committee_id)]
    filter_fulltext_fields = [('q_filer', models.CommitteeSearch.fulltxt)]

    # NOTE(review): `args` and `index_column` are typically @property-decorated
    # in this view framework; decorators look stripped by extraction — confirm.
    def args(self):
        return utils.extend(args.paging, args.efilings, args.make_sort_args(default='-receipt_date', validator=args.IndexValidator(self.model)))

    def build_query(self, **kwargs):
        query = super().build_query(**kwargs)
        if kwargs.get('q_filer'):
            # Full-text filer search requires joining the committee search table;
            # distinct() guards against row duplication from the join.
            query = query.join(models.CommitteeSearch, (self.model.committee_id == models.CommitteeSearch.id)).distinct()
        return query

    def index_column(self):
        return self.model.file_number
def organize_files(source_dir, target_dir):
    """Sort the files in *source_dir* into extension-named subfolders of *target_dir*.

    Each regular file is moved to ``target_dir/<ext>/<filename>`` where
    ``<ext>`` is the file's extension without the leading dot.  Files with
    no extension (including dotfiles like ``.bashrc``) are moved into a
    ``no_extension`` bucket; the previous ``filename.split('.')[-1]``
    approach created a folder named after the entire filename for those.
    Subdirectories of *source_dir* are left untouched.
    """
    for filename in os.listdir(source_dir):
        source_path = os.path.join(source_dir, filename)
        if not os.path.isfile(source_path):
            continue
        # splitext is extension-aware: 'a.tar.gz' -> '.gz', 'README' -> '',
        # '.bashrc' -> '' (treated as extensionless).
        _, ext = os.path.splitext(filename)
        bucket = ext[1:] if ext else 'no_extension'
        target_subdir = os.path.join(target_dir, bucket)
        os.makedirs(target_subdir, exist_ok=True)
        shutil.move(source_path, os.path.join(target_subdir, filename))
class ComplexType(Type):
    """Code-generation type for complex values stored as two doubles."""

    def __init__(self, **kwds):
        Type.__init__(self, **kwds)

    def init_val(self, var):
        # Unparameterized values become literal constants; parameterized ones
        # read two consecutive doubleval slots from the parameter block.
        if var.param_slot == -1:
            real_part = var.value[0]
            imag_part = var.value[1]
            return ['%.17f' % real_part, '%.17f' % imag_part]
        slot = var.param_slot
        return ['t__pfo->p[%d].doubleval' % slot,
                't__pfo->p[%d].doubleval' % (slot + 1)]
class OptionPlotoptionsPackedbubbleDatalabels(Options):
    """Accessors for Highcharts ``plotOptions.packedbubble.dataLabels`` options.

    Generated-style class: each option is a getter/setter pair with the same
    name.  NOTE(review): the @property / @name.setter decorators appear to
    have been stripped when this copy was flattened (as written, each setter
    def shadows its getter) -- confirm against the generator output.
    Defaults shown in each getter are Highcharts' documented defaults.
    """

    def align(self): return self._config_get('center')
    def align(self, text: str): self._config(text, js_type=False)
    def allowOverlap(self): return self._config_get(False)
    def allowOverlap(self, flag: bool): self._config(flag, js_type=False)
    def animation(self) -> 'OptionPlotoptionsPackedbubbleDatalabelsAnimation': return self._config_sub_data('animation', OptionPlotoptionsPackedbubbleDatalabelsAnimation)
    def attributes(self): return self._config_get(None)
    def attributes(self, value: Any): self._config(value, js_type=False)
    def backgroundColor(self): return self._config_get(None)
    def backgroundColor(self, text: str): self._config(text, js_type=False)
    def borderColor(self): return self._config_get(None)
    def borderColor(self, text: str): self._config(text, js_type=False)
    def borderRadius(self): return self._config_get(0)
    def borderRadius(self, num: float): self._config(num, js_type=False)
    def borderWidth(self): return self._config_get(0)
    def borderWidth(self, num: float): self._config(num, js_type=False)
    def className(self): return self._config_get(None)
    def className(self, text: str): self._config(text, js_type=False)
    def color(self): return self._config_get(None)
    def color(self, text: str): self._config(text, js_type=False)
    def crop(self): return self._config_get(True)
    def crop(self, flag: bool): self._config(flag, js_type=False)
    def defer(self): return self._config_get(True)
    def defer(self, flag: bool): self._config(flag, js_type=False)
    def enabled(self): return self._config_get(False)
    def enabled(self, flag: bool): self._config(flag, js_type=False)
    def filter(self) -> 'OptionPlotoptionsPackedbubbleDatalabelsFilter': return self._config_sub_data('filter', OptionPlotoptionsPackedbubbleDatalabelsFilter)
    def format(self): return self._config_get('point.value')
    def format(self, text: str): self._config(text, js_type=False)
    def formatter(self): return self._config_get(None)
    def formatter(self, value: Any): self._config(value, js_type=False)
    def inside(self): return self._config_get(True)
    def inside(self, flag: bool): self._config(flag, js_type=False)
    def nullFormat(self): return self._config_get(None)
    def nullFormat(self, flag: bool): self._config(flag, js_type=False)
    def nullFormatter(self): return self._config_get(None)
    def nullFormatter(self, value: Any): self._config(value, js_type=False)
    def overflow(self): return self._config_get('justify')
    def overflow(self, text: str): self._config(text, js_type=False)
    def padding(self): return self._config_get(0)
    def padding(self, num: float): self._config(num, js_type=False)
    def parentNodeFormat(self): return self._config_get(None)
    def parentNodeFormat(self, text: str): self._config(text, js_type=False)
    def parentNodeFormatter(self): return self._config_get(None)
    def parentNodeFormatter(self, value: Any): self._config(value, js_type=False)
    def parentNodeTextPath(self) -> 'OptionPlotoptionsPackedbubbleDatalabelsParentnodetextpath': return self._config_sub_data('parentNodeTextPath', OptionPlotoptionsPackedbubbleDatalabelsParentnodetextpath)
    def position(self): return self._config_get('center')
    def position(self, text: str): self._config(text, js_type=False)
    def rotation(self): return self._config_get(0)
    def rotation(self, num: float): self._config(num, js_type=False)
    def shadow(self): return self._config_get(False)
    def shadow(self, flag: bool): self._config(flag, js_type=False)
    def shape(self): return self._config_get('square')
    def shape(self, text: str): self._config(text, js_type=False)
    def style(self) -> 'OptionPlotoptionsPackedbubbleDatalabelsStyle': return self._config_sub_data('style', OptionPlotoptionsPackedbubbleDatalabelsStyle)
    def textPath(self) -> 'OptionPlotoptionsPackedbubbleDatalabelsTextpath': return self._config_sub_data('textPath', OptionPlotoptionsPackedbubbleDatalabelsTextpath)
    def useHTML(self): return self._config_get(False)
    def useHTML(self, flag: bool): self._config(flag, js_type=False)
    def verticalAlign(self): return self._config_get('middle')
    def verticalAlign(self, text: str): self._config(text, js_type=False)
    def x(self): return self._config_get(0)
    def x(self, num: float): self._config(num, js_type=False)
    def y(self): return self._config_get(0)
    def y(self, num: float): self._config(num, js_type=False)
    def zIndex(self): return self._config_get(6)
    def zIndex(self, num: float): self._config(num, js_type=False)
class OptionSeriesVennSonificationContexttracksMappingLowpassResonance(Options):
    """Accessors for Highcharts venn-series sonification lowpass resonance mapping.

    Generated-style getter/setter pairs; NOTE(review): property decorators
    appear stripped in this flattened copy -- confirm against the generator.
    """

    def mapFunction(self): return self._config_get(None)
    def mapFunction(self, value: Any): self._config(value, js_type=False)
    def mapTo(self): return self._config_get(None)
    def mapTo(self, text: str): self._config(text, js_type=False)
    def max(self): return self._config_get(None)
    def max(self, num: float): self._config(num, js_type=False)
    def min(self): return self._config_get(None)
    def min(self, num: float): self._config(num, js_type=False)
    def within(self): return self._config_get(None)
    def within(self, value: Any): self._config(value, js_type=False)
def extractMoonlightrevoltWordpressCom(item):
    """Build a release message for moonlightrevolt.wordpress.com feed items.

    Returns None for preview/untitled posts, a release message for known
    tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip posts with neither chapter nor volume info, and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('under the moonlight', 'under the moonlight', 'oel'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, translation_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=translation_type)
    return False
def main() -> int:
    """Regenerate the default_types function inside media_types_py.

    With --check, reports (exit 1) instead of writing.  Returns 0 when the
    file is already up to date or was rewritten.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--check', action='store_true')
    opts = parser.parse_args()

    current = media_types_py.read_text()
    # Replace everything from the def through the closing brace of its dict.
    updated = re.sub('def default_types.*\\}', get_default_types_function(), current, flags=re.DOTALL)
    if updated == current:
        return 0
    if opts.check:
        print('Would write changes')
        return 1
    print(f'Writing {media_types_py}')
    media_types_py.write_text(updated)
    return 0
def extractUnofficialTranslationsCom(item):
    """Build a release message for unofficialtranslations.com feed items.

    Returns None for preview/untitled posts, a release message for known
    tags, and False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Skip posts with neither chapter nor volume info, and preview posts.
    if not (chp or vol) or 'preview' in item['title'].lower():
        return None
    known_tags = [
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series_name, translation_type in known_tags:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series_name, vol, chp, frag=frag, postfix=postfix, tl_type=translation_type)
    return False
def test_unpacker_ext_hook():
    """ext_hook should convert code-1 ExtTypes to int and pass others through."""
    class IntExtUnpacker(Unpacker):
        def __init__(self):
            super(IntExtUnpacker, self).__init__(ext_hook=self._hook, raw=False)

        def _hook(self, code, data):
            # Only ext code 1 gets decoded; everything else stays an ExtType.
            return int(data) if code == 1 else ExtType(code, data)

    unpacker = IntExtUnpacker()
    unpacker.feed(packb({'a': 1}))
    assert unpacker.unpack() == {'a': 1}
    unpacker.feed(packb({'a': ExtType(1, b'123')}))
    assert unpacker.unpack() == {'a': 123}
    unpacker.feed(packb({'a': ExtType(2, b'321')}))
    assert unpacker.unpack() == {'a': ExtType(2, b'321')}
def personalize_template(template_contents, scene_dir, settings):
    """Write a subject/map-specific .scene file into scene_dir and return its path."""
    scene_name = '{}_{}.scene'.format(settings.subject, settings.map_name)
    scene_file = os.path.join(scene_dir, scene_name)
    with open(scene_file, 'w') as scene_stream:
        # The template is rewritten relative to the new scene file location.
        scene_stream.write(modify_template_contents(template_contents, scene_file, settings))
    return scene_file
class aggregate_stats_request(stats_request):
    """OpenFlow aggregate-stats multipart request (wire version 5).

    NOTE(review): this reads as LOXI-generated protocol binding code; pack()
    joins str chunks and pads with '\x00' strings (Python-2-era bytes
    handling) -- leave the mechanics untouched unless the whole binding is
    ported.
    """
    version = 5
    type = 18
    stats_type = 2

    def __init__(self, xid=None, flags=None, table_id=None, out_port=None, out_group=None, cookie=None, cookie_mask=None, match=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        if (flags != None):
            self.flags = flags
        else:
            self.flags = 0
        if (table_id != None):
            self.table_id = table_id
        else:
            self.table_id = 0
        if (out_port != None):
            self.out_port = out_port
        else:
            self.out_port = 0
        if (out_group != None):
            self.out_group = out_group
        else:
            self.out_group = 0
        if (cookie != None):
            self.cookie = cookie
        else:
            self.cookie = 0
        if (cookie_mask != None):
            self.cookie_mask = cookie_mask
        else:
            self.cookie_mask = 0
        if (match != None):
            self.match = match
        else:
            self.match = ofp.match()
        return

    def pack(self):
        """Serialize the message; the length field (index 2) is patched last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!H', self.stats_type))
        packed.append(struct.pack('!H', self.flags))
        packed.append(('\x00' * 4))  # pad
        packed.append(struct.pack('!B', self.table_id))
        packed.append(('\x00' * 3))  # pad
        packed.append(util.pack_port_no(self.out_port))
        packed.append(struct.pack('!L', self.out_group))
        packed.append(('\x00' * 4))  # pad
        packed.append(struct.pack('!Q', self.cookie))
        packed.append(struct.pack('!Q', self.cookie_mask))
        packed.append(self.match.pack())
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Deserialize from a reader; asserts fixed header/type fields."""
        obj = aggregate_stats_request()
        _version = reader.read('!B')[0]
        assert (_version == 5)
        _type = reader.read('!B')[0]
        assert (_type == 18)
        _length = reader.read('!H')[0]
        # Re-slice so reads are bounded by the declared message length.
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _stats_type = reader.read('!H')[0]
        assert (_stats_type == 2)
        obj.flags = reader.read('!H')[0]
        reader.skip(4)
        obj.table_id = reader.read('!B')[0]
        reader.skip(3)
        obj.out_port = util.unpack_port_no(reader)
        obj.out_group = reader.read('!L')[0]
        reader.skip(4)
        obj.cookie = reader.read('!Q')[0]
        obj.cookie_mask = reader.read('!Q')[0]
        obj.match = ofp.match.unpack(reader)
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)): return False
        if (self.xid != other.xid): return False
        if (self.flags != other.flags): return False
        if (self.table_id != other.table_id): return False
        if (self.out_port != other.out_port): return False
        if (self.out_group != other.out_group): return False
        if (self.cookie != other.cookie): return False
        if (self.cookie_mask != other.cookie_mask): return False
        if (self.match != other.match): return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer q."""
        q.text('aggregate_stats_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
                q.text(',')
                q.breakable()
                q.text('flags = ')
                value_name_map = {1: 'OFPSF_REQ_MORE'}
                q.text(util.pretty_flags(self.flags, value_name_map.values()))
                q.text(',')
                q.breakable()
                q.text('table_id = ')
                q.text(('%#x' % self.table_id))
                q.text(',')
                q.breakable()
                q.text('out_port = ')
                q.text(util.pretty_port(self.out_port))
                q.text(',')
                q.breakable()
                q.text('out_group = ')
                q.text(('%#x' % self.out_group))
                q.text(',')
                q.breakable()
                q.text('cookie = ')
                q.text(('%#x' % self.cookie))
                q.text(',')
                q.breakable()
                q.text('cookie_mask = ')
                q.text(('%#x' % self.cookie_mask))
                q.text(',')
                q.breakable()
                q.text('match = ')
                q.pp(self.match)
            q.breakable()
        q.text('}')
()  # NOTE(review): looks like the remnant of a stripped decorator (e.g. @pytest.fixture) -- confirm
def setup_to_pass():
    """Install time-change audit rules, yield to the test, then clean up.

    The same rule set is written to the persistent rules file and loaded
    into the running auditd; teardown dumps both for diagnostics and removes
    everything.
    """
    time_change_rules = [
        '-a always,exit -F arch=b64 -S adjtimex -S settimeofday -k time-change',
        '-a always,exit -F arch=b32 -S adjtimex -S settimeofday -S stime -k time-change',
        '-a always,exit -F arch=b64 -S clock_settime -k time-change',
        '-a always,exit -F arch=b32 -S clock_settime -k time-change',
        '-w /etc/localtime -p wa -k time-change',
    ]
    # Persist the rules for auditd restarts...
    for rule in time_change_rules:
        print(shellexec(f'echo "{rule}" >> /etc/audit/rules.d/pytest.rules'))
    # ...and load the identical set into the running daemon.
    for rule in time_change_rules:
        print(shellexec(f'auditctl {rule}'))
    yield None
    # Teardown: show current state, then remove the file and flush all rules.
    print(shellexec('cat /etc/audit/rules.d/pytest.rules'))
    print(shellexec('auditctl -l'))
    os.remove('/etc/audit/rules.d/pytest.rules')
    shellexec('auditctl -D')
def download_endpoint_schemas(target: str, overwrite: bool=True) -> None:
    """Download a custom endpoint schema, flatten it, and save it gzipped.

    Tries ``custom_<target>.yml`` at the schema base URL first, falling back
    to the per-target ``<target>/custom_<target>.yaml`` layout on 404.  The
    schema's fields (and nested multi_fields) are flattened into dotted
    ``<root>.<field>[.<sub>]`` keys mapped to their types, then written as
    ``endpoint_<target>.json.gz`` under ENDPOINT_SCHEMAS_DIR.
    """
    # NOTE(review): the base-URL string literal was truncated/garbled in this
    # copy of the file; this is the usual endpoint-package raw-content URL --
    # confirm against the original source before shipping.
    url = 'https://raw.githubusercontent.com/elastic/endpoint-package/main/custom_schemas'
    r = requests.get(f'{url}/custom_{target}.yml')
    if r.status_code == 404:
        # Newer layout nests the schema in a per-target folder with .yaml suffix.
        r = requests.get(f'{url}/{target}/custom_{target}.yaml')
    r.raise_for_status()
    schema = yaml.safe_load(r.text)[0]
    root_name = schema['name']
    fields = schema['fields']
    flattened = {}
    # Flatten nested multi_fields into dotted "<root>.<field>.<sub>" keys.
    for f in fields:
        if 'multi_fields' in f:
            for mf in f['multi_fields']:
                flattened[f"{root_name}.{f['name']}.{mf['name']}"] = mf['type']
        else:
            flattened[f"{root_name}.{f['name']}"] = f['type']
    Path(ENDPOINT_SCHEMAS_DIR).mkdir(parents=True, exist_ok=True)
    compressed = gzip_compress(json.dumps(flattened, sort_keys=True, cls=DateTimeEncoder))
    new_path = Path(ENDPOINT_SCHEMAS_DIR) / f'endpoint_{target}.json.gz'
    if overwrite:
        # Fix: shutil.rmtree cannot remove a regular file; unlink can.
        new_path.unlink(missing_ok=True)
    with open(new_path, 'wb') as f:
        f.write(compressed)
    print(f'Saved endpoint schema to {new_path}')
def norm_cgto_lmn(coeffs: NDArray[float], exps: NDArray[float], L: int):
    """Normalize a contracted GTO of angular momentum L.

    Accumulates the pairwise primitive overlap, derives per-primitive
    normalization factors, and returns the scaled coefficients together with
    the lmn factors for L.  The loop structure and operation order match the
    original exactly to keep floating-point results bit-identical.
    """
    overlap = 0.0
    for ci, alpha in zip(coeffs, exps):
        for cj, beta in zip(coeffs, exps):
            term = ((ci * cj) / ((alpha + beta) ** (L + 1.5)))
            term *= (np.sqrt((alpha * beta)) ** (L + 1.5))
            overlap += term
    norm_factors = np.sqrt(((exps ** (L + 1.5)) / (((np.pi ** 1.5) / (2 ** L)) * overlap)))
    scaled_coeffs = (norm_factors * coeffs)
    return (scaled_coeffs, get_lmn_factors(L))
def _list():
    """Print installed packages grouped by organisation folder.

    NOTE(review): several string literals in this copy appear to have lost a
    separator character ('' where a delimiter was likely intended): the
    `('' in i.name)` test is always True, and
    `path.name.rsplit('', maxsplit=1)` raises ValueError (empty separator).
    Confirm against the original source before relying on this function.
    """
    org_names = []
    for path in _get_data_folder().joinpath('packages').iterdir():
        if (not path.is_dir()):
            continue
        elif (not list((i for i in path.iterdir() if (i.is_dir() and ('' in i.name))))):
            # No versioned package folders left -- drop the empty org folder.
            shutil.rmtree(path)
        else:
            org_names.append(path)
    if (not org_names):
        print('No packages are currently installed.')
    else:
        print('The following packages are currently installed:')
        for org_path in org_names:
            packages = list(org_path.iterdir())
            print(f''' {color('bright magenta')}{org_path.name}{color}''')
            for path in packages:
                # u is the tree-drawing connector; its glyphs also look lost here.
                u = ('' if (path == packages[(- 1)]) else '')
                (name, version) = path.name.rsplit('', maxsplit=1)
                print(f" {color('bright black')}{u}{_format_pkg(org_path.name, name, version)}")
class bsn_controller_connections_request(bsn_header):
    """Big Switch (BSN) experimenter request for controller connections.

    NOTE(review): LOXI-style generated binding; pack() joins str chunks
    (Python-2-era bytes handling) -- leave mechanics untouched.
    """
    version = 6
    type = 4
    experimenter = 6035143
    subtype = 56

    def __init__(self, xid=None):
        if (xid != None):
            self.xid = xid
        else:
            self.xid = None
        return

    def pack(self):
        """Serialize the message; length field (index 2) is patched last."""
        packed = []
        packed.append(struct.pack('!B', self.version))
        packed.append(struct.pack('!B', self.type))
        packed.append(struct.pack('!H', 0))  # length placeholder
        packed.append(struct.pack('!L', self.xid))
        packed.append(struct.pack('!L', self.experimenter))
        packed.append(struct.pack('!L', self.subtype))
        length = sum([len(x) for x in packed])
        packed[2] = struct.pack('!H', length)
        return ''.join(packed)

    def unpack(reader):
        """Deserialize from a reader; asserts fixed header fields."""
        obj = bsn_controller_connections_request()
        _version = reader.read('!B')[0]
        assert (_version == 6)
        _type = reader.read('!B')[0]
        assert (_type == 4)
        _length = reader.read('!H')[0]
        orig_reader = reader
        reader = orig_reader.slice(_length, 4)
        obj.xid = reader.read('!L')[0]
        _experimenter = reader.read('!L')[0]
        assert (_experimenter == 6035143)
        _subtype = reader.read('!L')[0]
        assert (_subtype == 56)
        return obj

    def __eq__(self, other):
        if (type(self) != type(other)): return False
        if (self.xid != other.xid): return False
        return True

    def pretty_print(self, q):
        """Render a human-readable form via the pretty-printer q."""
        q.text('bsn_controller_connections_request {')
        with q.group():
            with q.indent(2):
                q.breakable()
                q.text('xid = ')
                if (self.xid != None):
                    q.text(('%#x' % self.xid))
                else:
                    q.text('None')
            q.breakable()
        q.text('}')
class OptionSeriesBubbleLabel(Options):
    """Accessors for Highcharts bubble-series ``label`` options.

    Generated-style getter/setter pairs; NOTE(review): property decorators
    appear stripped in this flattened copy -- confirm against the generator.
    """

    def boxesToAvoid(self): return self._config_get(None)
    def boxesToAvoid(self, value: Any): self._config(value, js_type=False)
    def connectorAllowed(self): return self._config_get(False)
    def connectorAllowed(self, flag: bool): self._config(flag, js_type=False)
    def connectorNeighbourDistance(self): return self._config_get(24)
    def connectorNeighbourDistance(self, num: float): self._config(num, js_type=False)
    def enabled(self): return self._config_get(True)
    def enabled(self, flag: bool): self._config(flag, js_type=False)
    def format(self): return self._config_get('undefined')
    def format(self, text: str): self._config(text, js_type=False)
    def formatter(self): return self._config_get('undefined')
    def formatter(self, value: Any): self._config(value, js_type=False)
    def maxFontSize(self): return self._config_get(None)
    def maxFontSize(self, num: float): self._config(num, js_type=False)
    def minFontSize(self): return self._config_get(None)
    def minFontSize(self, num: float): self._config(num, js_type=False)
    def onArea(self): return self._config_get(None)
    def onArea(self, flag: bool): self._config(flag, js_type=False)
    def style(self) -> 'OptionSeriesBubbleLabelStyle': return self._config_sub_data('style', OptionSeriesBubbleLabelStyle)
    def useHTML(self): return self._config_get(False)
    def useHTML(self, flag: bool): self._config(flag, js_type=False)
class EmailDomainRepository(BaseRepository[EmailDomain], UUIDRepositoryMixin[EmailDomain]):
    """Repository for EmailDomain rows, keyed by UUID with domain lookup."""

    model = EmailDomain

    async def get_by_domain(self, domain: str) -> (EmailDomain | None):
        """Return the EmailDomain with the given domain name, or None."""
        query = select(EmailDomain).where(EmailDomain.domain == domain)
        return await self.get_one_or_none(query)
def run_default_workflow(temp_dir, settings, meshes, expected_labels, fs_version):
    """Run the full anatomical conversion pipeline for one subject.

    Converts FreeSurfer outputs to NIfTI/GIFTI/CIFTI, registers to atlas
    space, builds spec files, and resamples native-space data to the
    high-res and each configured low-res mesh.

    NOTE(review): fs_version is accepted but not referenced in this body --
    confirm whether callers still need to pass it.
    """
    subject = settings.subject
    # Stage 1: output directory scaffolding.
    create_output_directories(meshes, settings.registration['xfms_dir'], os.path.join(subject.atlas_space_dir, 'ROIs'), os.path.join(subject.atlas_space_dir, 'Results'))
    T1w_nii = os.path.join(subject.T1w_dir, settings.registration['T1wImage'])
    wmparc = os.path.join(subject.T1w_dir, 'wmparc.nii.gz')
    # Stage 2: volume conversion and MNI registration.
    convert_T1_and_freesurfer_inputs(T1w_nii, subject, settings.ciftify_data_dir, T2_raw=settings.use_T2)
    prepare_T1_image(wmparc, T1w_nii, settings.registration)
    convert_inputs_to_MNI_space(settings.registration, settings.ciftify_data_dir, temp_dir, use_T2=settings.use_T2)
    add_anat_images_to_spec_files(meshes, subject.id)
    if settings.use_T2:
        add_anat_images_to_spec_files(meshes, subject.id, img_type='T2wImage')
    create_cifti_subcortical_ROIs(subject.atlas_space_dir, settings, temp_dir)
    # Stage 3: surface conversion and native-mesh processing.
    convert_FS_surfaces_to_gifti(subject.id, subject.fs_folder, meshes, settings.registration, temp_dir)
    process_native_meshes(subject, meshes, settings.dscalars, expected_labels)
    copy_atlas_roi_from_template(settings, meshes['HighResMesh'])
    copy_sphere_mesh_from_template(settings, meshes['HighResMesh'])
    reg_sphere = create_reg_sphere(settings, subject.id, meshes)
    logger.info(section_header('Importing HighRes Template Sphere and Medial Wall ROI'))
    merge_subject_medial_wall_with_atlas_template(subject.id, settings.high_res, meshes, reg_sphere, temp_dir)
    dilate_and_mask_metric(subject.id, meshes['AtlasSpaceNative'], settings.dscalars)
    # Stage 4: dense maps in native space.
    logger.info(section_header('Creating Native Space Dense Maps'))
    make_dense_map(subject.id, meshes['AtlasSpaceNative'], settings.dscalars, expected_labels)
    add_dense_maps_to_spec_file(subject.id, meshes['T1wNative'], settings.dscalars.keys(), expected_labels)
    # Stage 5: resample native data to the high-res mesh.
    logger.info(section_header('Resampling data from Native to {}'.format(meshes['HighResMesh']['meshname'])))
    copy_colin_flat_and_add_to_spec(subject.id, settings, meshes['HighResMesh'])
    deform_to_native(meshes['AtlasSpaceNative'], meshes['HighResMesh'], settings.dscalars, expected_labels, subject.id, sphere=reg_sphere)
    # Stage 6: resample to each configured low-res mesh.
    for res in settings.low_res:
        low_res_name = '{}k_fs_LR'.format(res)
        logger.info(section_header('Resampling data from Native to {}'.format(low_res_name)))
        populate_low_res_spec_file(meshes['AtlasSpaceNative'], meshes[low_res_name], subject, settings, reg_sphere, expected_labels)
class LiteEthMACPreambleChecker(Module):
    """Migen module that strips the Ethernet preamble from an incoming stream.

    In PREAMBLE state the sink is always ready; a beat whose data matches the
    relevant slice of the preamble constant (and is not also the last beat)
    switches to COPY, which forwards the payload to the source until the end
    of the frame.  A frame that ends while still in PREAMBLE pulses `error`.
    """

    def __init__(self, dw):
        # Only power-of-two byte-multiple datapath widths are supported.
        assert (dw in [8, 16, 32, 64])
        self.sink = sink = stream.Endpoint(eth_phy_description(dw))
        self.source = source = stream.Endpoint(eth_phy_description(dw))
        self.error = Signal()
        preamble = Signal(64, reset=eth_preamble)
        self.submodules.fsm = fsm = FSM(reset_state='PREAMBLE')
        # PREAMBLE: swallow beats; go to COPY when the preamble slice matches,
        # flag an error if the frame terminates before any payload.
        fsm.act('PREAMBLE', sink.ready.eq(1), If(((sink.valid & (~ sink.last)) & (sink.data == preamble[(- dw):])), NextState('COPY')), If((sink.valid & sink.last), self.error.eq(1)))
        # Data/last_be are wired combinatorially; handshake is connected in COPY.
        self.comb += [source.data.eq(sink.data), source.last_be.eq(sink.last_be)]
        fsm.act('COPY', sink.connect(source, omit={'data', 'last_be'}), If(((source.valid & source.last) & source.ready), NextState('PREAMBLE')))
def curl_to_grad(ele):
    """Recursively map an element to its gradient-compatible counterpart.

    Vector/tensor/mixed/restricted elements are rebuilt around recursively
    converted sub-elements.  For a plain element, families starting with
    'Sminus' become 'S' and everything else becomes 'CG', preserving cell,
    degree and variant; tensor-product cells with per-sub-cell degrees are
    rebuilt as a TensorProductElement of scalar elements.
    """
    if isinstance(ele, finat.ufl.VectorElement):
        return type(ele)(curl_to_grad(ele._sub_element), dim=ele.num_sub_elements)
    elif isinstance(ele, finat.ufl.TensorElement):
        return type(ele)(curl_to_grad(ele._sub_element), shape=ele.value_shape, symmetry=ele.symmetry())
    elif isinstance(ele, finat.ufl.MixedElement):
        return type(ele)(*(curl_to_grad(e) for e in ele.sub_elements))
    elif isinstance(ele, finat.ufl.RestrictedElement):
        return finat.ufl.RestrictedElement(curl_to_grad(ele._element), ele.restriction_domain())
    else:
        cell = ele.cell
        family = ele.family()
        variant = ele.variant()
        degree = ele.degree()
        # Serendipity-minus families map to 'S'; all others to continuous Galerkin.
        if family.startswith('Sminus'):
            family = 'S'
        else:
            family = 'CG'
        if (isinstance(degree, tuple) and isinstance(cell, ufl.TensorProductCell)):
            # Per-sub-cell degrees: rebuild as a tensor product of scalar elements.
            cells = ele.cell.sub_cells()
            elems = [finat.ufl.FiniteElement(family, cell=c, degree=d, variant=variant) for (c, d) in zip(cells, degree)]
            return finat.ufl.TensorProductElement(*elems, cell=cell)
        return finat.ufl.FiniteElement(family, cell=cell, degree=degree, variant=variant)
class OptionPlotoptionsBarSonificationTracksMappingHighpass(Options):
    """Accessors for bar-series sonification highpass filter mapping sub-options."""

    def frequency(self) -> 'OptionPlotoptionsBarSonificationTracksMappingHighpassFrequency': return self._config_sub_data('frequency', OptionPlotoptionsBarSonificationTracksMappingHighpassFrequency)
    def resonance(self) -> 'OptionPlotoptionsBarSonificationTracksMappingHighpassResonance': return self._config_sub_data('resonance', OptionPlotoptionsBarSonificationTracksMappingHighpassResonance)
class OptionPlotoptionsCylinderSonificationTracks(Options):
    """Accessors for cylinder-series sonification track options.

    Generated-style getter/setter pairs; NOTE(review): property decorators
    appear stripped in this flattened copy -- confirm against the generator.
    """

    def activeWhen(self) -> 'OptionPlotoptionsCylinderSonificationTracksActivewhen': return self._config_sub_data('activeWhen', OptionPlotoptionsCylinderSonificationTracksActivewhen)
    def instrument(self): return self._config_get('piano')
    def instrument(self, text: str): self._config(text, js_type=False)
    def mapping(self) -> 'OptionPlotoptionsCylinderSonificationTracksMapping': return self._config_sub_data('mapping', OptionPlotoptionsCylinderSonificationTracksMapping)
    def midiName(self): return self._config_get(None)
    def midiName(self, text: str): self._config(text, js_type=False)
    def pointGrouping(self) -> 'OptionPlotoptionsCylinderSonificationTracksPointgrouping': return self._config_sub_data('pointGrouping', OptionPlotoptionsCylinderSonificationTracksPointgrouping)
    def roundToMusicalNotes(self): return self._config_get(True)
    def roundToMusicalNotes(self, flag: bool): self._config(flag, js_type=False)
    def showPlayMarker(self): return self._config_get(True)
    def showPlayMarker(self, flag: bool): self._config(flag, js_type=False)
    def type(self): return self._config_get('instrument')
    def type(self, text: str): self._config(text, js_type=False)
class ObservationStackWrapper(ObservationWrapper[MazeEnv]):
    """Env wrapper that stacks (optionally differenced) past observations.

    Each entry of stack_config is a dict with keys 'observation',
    'keep_original', 'tag', 'delta' and 'stack_steps' describing one stacked
    output; the wrapper maintains a rolling history per observation key and
    rewrites the observation spaces accordingly.

    NOTE(review): the bare `(ObservationWrapper)` / `(Wrapper)` /
    `(SimulatedEnvMixin)` expressions before several methods look like
    stripped `@override(...)` decorators -- confirm against the original.
    """

    def __init__(self, env: StructuredEnvSpacesMixin, stack_config: List[Dict[(str, Any)]]):
        super().__init__(env)
        self.stack_config = stack_config
        self.drop_original = False
        # Keep a pristine copy of the spaces; _initialize_stacking mutates
        # self.observation_spaces_dict in place.
        self._original_observation_spaces_dict = copy.deepcopy(env.observation_spaces_dict)
        self._flat_observation_space = flat_structured_space(self._original_observation_spaces_dict)
        self._initialize_stacking()
        # History length is driven by the deepest configured stack.
        self.max_steps = max([c['stack_steps'] for c in self.stack_config])
        self._observation_stack: Dict[(str, List[np.ndarray])] = defaultdict(list)

    (ObservationWrapper)
    def observation(self, observation: Dict[(str, np.ndarray)]) -> Dict[(str, np.ndarray)]:
        """Update the per-key history and emit stacked (or delta) observations."""
        for config in self.stack_config:
            obs_key = config['observation']
            keep_original = config['keep_original']
            tag = config['tag']
            delta = config['delta']
            stack_steps = config['stack_steps']
            if (obs_key in observation):
                # Append newest observation and trim history to max_steps.
                self._observation_stack[obs_key].append(observation[obs_key])
                self._observation_stack[obs_key] = self._observation_stack[obs_key][(- self.max_steps):]
            if ((stack_steps < 2) or (obs_key not in observation)):
                continue
            if delta:
                # Replace all but the newest frame with successive differences.
                stacked = np.stack(self._observation_stack[obs_key])
                if (stacked.shape[0] > 1):
                    diff = np.diff(stacked, axis=0)
                    stacked[(- (len(diff) + 1)):(- 1)] = diff
            else:
                stacked = np.stack(self._observation_stack[obs_key])
            # Zero-pad at the front while the history is still shorter than
            # stack_steps (e.g. right after reset).
            shape = ([stack_steps] + list(self._flat_observation_space[obs_key].shape))
            observation_stack = np.zeros(shape=shape, dtype=np.float32)
            observation_stack[(- len(stacked)):] = stacked
            if (not keep_original):
                del observation[obs_key]
            full_tag = (obs_key if (tag is None) else f'{obs_key}-{tag}')
            observation[full_tag] = observation_stack
        return observation

    (ObservationWrapper)
    def reset(self) -> Dict[(str, np.ndarray)]:
        # Drop all history before delegating the reset.
        self._observation_stack: Dict[(str, List[np.ndarray])] = defaultdict(list)
        return super().reset()

    def _initialize_stacking(self) -> None:
        """Rewrite observation spaces to match the configured stacked outputs."""
        for mapping in self.stack_config:
            obs_key = mapping['observation']
            assert (obs_key in self._flat_observation_space.spaces), f'Observation {obs_key} not contained in flat observation space.'
            for (sub_step_key, sub_space) in self._original_observation_spaces_dict.items():
                if (obs_key in sub_space.spaces):
                    stack_steps = mapping['stack_steps']
                    if (stack_steps < 2):
                        continue
                    cur_space = self.observation_spaces_dict[sub_step_key][obs_key]
                    if mapping['delta']:
                        # Delta frames can range over [low - high, high + high],
                        # clipped to float32 limits; only finite bounds are widened.
                        float_max = np.finfo(np.float32).max
                        float_min = np.finfo(np.float32).min
                        mask = (cur_space.low > float_min)
                        delta_min = np.full(cur_space.low.shape, fill_value=float_min, dtype=cur_space.dtype)
                        delta_min[mask] = np.clip((cur_space.low[mask] - cur_space.high[mask]), float_min, None)
                        mask = (cur_space.high < float_max)
                        delta_max = np.full(cur_space.high.shape, fill_value=float_max, dtype=cur_space.dtype)
                        delta_max[mask] = np.clip((cur_space.high[mask] + cur_space.high[mask]), None, float_max)
                        low = np.stack((([delta_min] * (stack_steps - 1)) + [cur_space.low]))
                        high = np.stack((([delta_max] * (stack_steps - 1)) + [cur_space.high]))
                    else:
                        low = np.stack(([cur_space.low] * stack_steps))
                        high = np.stack(([cur_space.high] * stack_steps))
                    if (not mapping['keep_original']):
                        self.observation_spaces_dict[sub_step_key].spaces.pop(obs_key)
                    full_tag = (obs_key if (mapping['tag'] is None) else f"{obs_key}-{mapping['tag']}")
                    new_space = spaces.Box(low=low, high=high, shape=None, dtype=cur_space.dtype)
                    self.observation_spaces_dict[sub_step_key].spaces[full_tag] = new_space
                    # Stacking must have added exactly one leading dimension.
                    assert (cur_space.low.ndim == (new_space.low.ndim - 1))

    (Wrapper)
    def get_observation_and_action_dicts(self, maze_state: Optional[MazeStateType], maze_action: Optional[MazeActionType], first_step_in_episode: bool) -> Tuple[(Optional[Dict[(Union[(int, str)], Any)]], Optional[Dict[(Union[(int, str)], Any)]])]:
        # Episode boundaries reset the history, mirroring reset().
        if first_step_in_episode:
            self._observation_stack: Dict[(str, List[np.ndarray])] = defaultdict(list)
        return super().get_observation_and_action_dicts(maze_state, maze_action, first_step_in_episode)

    (SimulatedEnvMixin)
    def clone_from(self, env: 'ObservationStackWrapper') -> None:
        """Deep-copy the stack state from another wrapper and clone the inner env."""
        self._observation_stack = copy.deepcopy(env._observation_stack)
        self.env.clone_from(env)

    def set_observation_stack(self, observation_stack: Dict[(str, List[np.ndarray])]) -> None:
        # Direct setter used e.g. when restoring externally saved state.
        self._observation_stack = observation_stack

    def get_observation_stack(self) -> Dict[(str, List[np.ndarray])]:
        return self._observation_stack
class TestFigureSemanticExtractor():
    def test_should_extract_single_figure(self):
        """A label + figDesc pair should yield one SemanticFigure with both parts."""
        entity_blocks = [
            ('<label>', LayoutBlock.for_text('Figure 1')),
            ('<figDesc>', LayoutBlock.for_text('Caption 1')),
        ]
        results = list(FigureSemanticExtractor().iter_semantic_content_for_entity_blocks(entity_blocks))
        assert len(results) == 1
        figure = results[0]
        assert isinstance(figure, SemanticFigure)
        assert figure.view_by_type(SemanticLabel).get_text() == 'Figure 1'
        assert figure.view_by_type(SemanticCaption).get_text() == 'Caption 1'
def calc_hand(hand):
    """Return the blackjack value of *hand* (a list of card rank strings).

    Face cards ('J', 'Q', 'K') count 10, number cards their face value, and
    each ace counts 11 unless that would take the running total past 21
    (i.e. the pre-ace total exceeds 10), in which case it counts 1.

    The original shadowed the builtin ``sum``; this version uses it instead.
    """
    # Total of all non-ace cards first, so aces can be valued against it.
    total = sum(10 if card in 'JQK' else int(card) for card in hand if card != 'A')
    for _ in range(hand.count('A')):
        total += 11 if total <= 10 else 1
    return total
class OptionPlotoptionsSolidgaugeOnpoint(Options):
    """Accessors for solidgauge-series ``onPoint`` options.

    NOTE(review): the duplicate id getter/setter pair suggests stripped
    property decorators, as elsewhere in this flattened copy.
    """

    def connectorOptions(self) -> 'OptionPlotoptionsSolidgaugeOnpointConnectoroptions': return self._config_sub_data('connectorOptions', OptionPlotoptionsSolidgaugeOnpointConnectoroptions)
    def id(self): return self._config_get(None)
    def id(self, text: str): self._config(text, js_type=False)
    def position(self) -> 'OptionPlotoptionsSolidgaugeOnpointPosition': return self._config_sub_data('position', OptionPlotoptionsSolidgaugeOnpointPosition)
def reassign_formal_charge(mol, ref, mapping):
    """Copy formal charges from *ref* onto *mol* along the atom *mapping*.

    mapping maps ref atom indices to mol atom indices; any mapped pair where
    either side currently carries a nonzero formal charge has the ref
    atom's charge written onto the mol atom.  Returns the mutated mol.
    """
    charged_in_mol = [idx for idx, atom in enumerate(mol.GetAtoms()) if atom.GetFormalCharge() != 0]
    charged_in_ref = [idx for idx, atom in enumerate(ref.GetAtoms()) if atom.GetFormalCharge() != 0]
    for ref_idx, mol_idx in mapping.items():
        if ref_idx in charged_in_ref or mol_idx in charged_in_mol:
            mol.GetAtomWithIdx(mol_idx).SetFormalCharge(ref.GetAtomWithIdx(ref_idx).GetFormalCharge())
    return mol
class ZenotiCenter(Document):
    """Frappe doctype syncing employees, customers, items and categories
    from the Zenoti API for one center (self.name is the center id).

    NOTE(review): this copy was flattened, so the nesting level of the
    frappe.db.commit() calls in sync_customers/sync_items is inferred
    (per-page commit) -- confirm against the original file.  Also note
    sync_employees builds its URL as api_url + '/centers/' while the other
    methods use api_url + 'centers/' -- whether that is intentional depends
    on api_url's trailing slash, which is not visible here.
    """

    def sync_employees(self):
        """Page through center employees and create missing Employee docs."""
        employees = []
        for page in range(1, 100):
            url = ((((api_url + '/centers/') + self.name) + '/employees?size=100&page=') + str(page))
            all_emps = make_api_call(url)
            if all_emps.get('employees'):
                employees = (employees + all_emps.get('employees'))
            else:
                break
        for employee in employees:
            if (not frappe.db.exists('Employee', {'zenoti_employee_code': employee['code'], 'employee_name': employee['personal_info']['name']})):
                self.create_emp(employee)

    def sync_customers(self):
        """Page through center guests and create missing Customer docs."""
        url = ((api_url + 'guests?center_id=') + str(self.name))
        customers = make_api_call(url)
        if customers:
            total_page = (customers['page_Info']['total'] // 100)
            for page in range(1, (total_page + 2)):
                url_ = ((url + '&size=100&page=') + str(page))
                all_customers = make_api_call(url_)
                if all_customers:
                    for customer in all_customers['guests']:
                        if (not frappe.db.exists('Customer', {'zenoti_guest_id': customer['id']})):
                            customer_details = prepare_customer_details(customer)
                            create_customer(customer_details)
                    # Commit after each processed page (indentation inferred).
                    frappe.db.commit()

    def sync_items(self):
        """Page through services/products/packages and create missing Items."""
        item_types = ['services', 'products', 'packages']
        for item_type in item_types:
            url = ((((api_url + 'centers/') + str(self.name)) + '/') + item_type)
            products = make_api_call(url)
            if products:
                total_page = (products['page_info']['total'] // 100)
                for page in range(1, (total_page + 2)):
                    url_ = ((url + '?size=100&page=') + str(page))
                    all_products = make_api_call(url_)
                    if all_products:
                        for product in all_products[item_type]:
                            if (not frappe.db.exists('Item', {'zenoti_item_code': product['code'], 'item_name': product['name']})):
                                create_item({}, product, item_type, self.name)
                        # Commit after each processed page (indentation inferred).
                        frappe.db.commit()

    def sync_category(self):
        """Page through categories (with sub-categories) and create missing ones."""
        url = (((api_url + 'centers/') + str(self.name)) + '/categories?include_sub_categories=true')
        categories = make_api_call(url)
        if categories:
            total_page = (categories['page_info']['total'] // 100)
            for page in range(1, (total_page + 2)):
                url_ = ((url + '&size=100&page=') + str(page))
                all_categories = make_api_call(url_)
                if all_categories:
                    for category in all_categories['categories']:
                        if (not frappe.db.exists('Zenoti Category', {'category_id': category['id']})):
                            self.make_category(category)

    def create_emp(self, emp):
        """Create an Employee doc from a Zenoti employee payload.

        Joining date and birth date are placeholders (today / today - 25y),
        since Zenoti does not supply them here.
        """
        doc = frappe.new_doc('Employee')
        doc.zenoti_employee_id = emp['id']
        doc.zenoti_center = self.name
        doc.zenoti_employee_code = emp['code']
        doc.zenoti_employee_username = (emp['personal_info']['user_name'] if ('user_name' in emp['personal_info']) else '')
        doc.first_name = emp['personal_info']['first_name']
        doc.last_name = emp['personal_info']['last_name']
        doc.employee_name = emp['personal_info']['name']
        doc.gender = emp_gender_map[emp['personal_info']['gender']]
        doc.date_of_joining = today()
        doc.date_of_birth = add_to_date(today(), years=(- 25))
        doc.insert()

    def make_category(self, category):
        """Create a Zenoti Category doc from an API category payload."""
        frappe.get_doc({'doctype': 'Zenoti Category', 'category_id': category['id'], 'category_name': category['name'], 'code': category['code'], 'zenoti_center': self.name}).insert(ignore_permissions=True)
# NOTE(review): the two bare expressions below look like stripped decorators
# (likely @_patch_bwbuild_object(...) and something like
# @mock.patch('copr_backend.sign.SIGN_BINARY', 'tests/fake-bin-sign')) --
# confirm against the original test module.
_patch_bwbuild_object('CANCEL_CHECK_PERIOD', 0.5)
('copr_backend.sign.SIGN_BINARY', 'tests/fake-bin-sign')
def test_cancel_script_failure(f_build_rpm_sign_on, caplog):
    """A failing copr-rpmbuild-cancel script must not mask the cancellation.

    The build is canceled mid log collection; the cancel script exits 1, and
    the worker is expected to log the cancel failure, the cancellation
    itself, and the failed build outcome.
    """
    config = f_build_rpm_sign_on
    worker = config.bw
    # Cancel the worker while the log command runs, then make cancel fail (rc=1).
    config.ssh.set_command('copr-rpmbuild-log', 0, 'canceled stdout\n', 'canceled stderr\n', _CancelFunction(worker))
    config.ssh.set_command('copr-rpmbuild-cancel', 1, 'output', 'err output')
    worker.process()
    assert_logs_exist(["Can't cancel build\nout:\noutput\nerr:\nerr output", 'Build was canceled', COMMON_MSGS['not finished'], 'Worker failed build, took'], caplog)
def test_check_schema_version_false_when_no_entry(session):
    """Version check passes on a fresh schema, fails once its row is gone."""
    _setup_schema(session)
    assert _check_schema_version(session) is True

    # Remove the single recorded schema-version entry.
    row = session.query(SchemaVersion).one()
    session.delete(row)
    session.commit()

    assert _check_schema_version(session) is False
# NOTE(review): this bare '()' looks like a truncated decorator (probably
# '@pytest.fixture(...)') whose '@' and name were lost — confirm against the
# original test module; without the decorator 'app' is a plain generator.
()


def app(mocker):
    """Yield a fully mocked CallAttendant wired for call-screening tests.

    All hardware (modem, LED indicators) is patched out.  Screening stubs
    make CALLER1 whitelisted, CALLER2 blacklisted, CALLER3 both (whitelist
    wins), and logger.log_caller is replaced with assertions on the expected
    verdict.  Module-level globals (call_no, ignore_call_called,
    answer_call_called, play_audio_called, record_message_called,
    voice_messaging_menu_called) record which handlers were invoked.
    """
    # In-memory config: blocked and screened calls are answered immediately,
    # permitted calls ring through (4 rings) and are then ignored.
    config = Config()
    config['DEBUG'] = False
    config['TESTING'] = True
    config['BLOCKED_ACTIONS'] = ('answer', 'greeting', 'voice_mail')
    config['BLOCKED_RINGS_BEFORE_ANSWER'] = 0
    config['SCREENED_ACTIONS'] = ('answer', 'greeting', 'record_message')
    config['SCREENED_RINGS_BEFORE_ANSWER'] = 0
    config['PERMITTED_ACTIONS'] = ('ignore',)
    config['PERMITTED_RINGS_BEFORE_ANSWER'] = 4
    # Patch out every piece of hardware the app would otherwise touch.
    mocker.patch('hardware.modem.Modem._open_serial_port', return_value=True)
    mocker.patch('hardware.modem.Modem.start', return_value=True)
    mocker.patch('hardware.modem.Modem.pick_up', return_value=True)
    mocker.patch('hardware.modem.Modem.hang_up', return_value=True)
    mocker.patch('hardware.modem.Modem.play_audio', return_value=True)
    mocker.patch('hardware.indicators.ApprovedIndicator.__init__', return_value=None)
    mocker.patch('hardware.indicators.ApprovedIndicator.blink')
    mocker.patch('hardware.indicators.ApprovedIndicator.close')
    mocker.patch('hardware.indicators.BlockedIndicator.__init__', return_value=None)
    mocker.patch('hardware.indicators.BlockedIndicator.blink')
    mocker.patch('hardware.indicators.BlockedIndicator.close')
    mocker.patch('hardware.indicators.MessageIndicator.__init__', return_value=None)
    mocker.patch('hardware.indicators.MessageIndicator.pulse')
    mocker.patch('hardware.indicators.MessageIndicator.turn_on')
    mocker.patch('hardware.indicators.MessageIndicator.turn_off')
    mocker.patch('hardware.indicators.MessageIndicator.close')
    mocker.patch('hardware.indicators.MessageCountIndicator.__init__', return_value=None)
    mocker.patch('hardware.indicators.MessageCountIndicator.display')
    mocker.patch('hardware.indicators.MessageCountIndicator.decimal_point')
    mocker.patch('hardware.indicators.MessageCountIndicator.close')
    mocker.patch('hardware.indicators.RingIndicator.__init__', return_value=None)
    mocker.patch('hardware.indicators.RingIndicator.blink')
    mocker.patch('hardware.indicators.RingIndicator.close')

    def mock_is_whitelisted(caller):
        # CALLER1 and CALLER3 are whitelisted.
        if (caller['NAME'] in ['CALLER1', 'CALLER3']):
            return (True, 'whitelisted')
        else:
            return (False, None)

    def mock_is_blacklisted(caller):
        # CALLER2 and CALLER3 are blacklisted (whitelisting wins for CALLER3).
        if (caller['NAME'] in ['CALLER2', 'CALLER3']):
            return (True, 'blacklisted')
        else:
            return (False, None)

    def assert_log_caller_action(caller, action, reason):
        """Replacement for logger.log_caller: assert the screening verdict."""
        name = caller['NAME']
        print('{} {}'.format(name, action))
        if (name == 'CALLER1'):
            assert (action == 'Permitted')
        elif (name == 'CALLER2'):
            assert (action == 'Blocked')
        elif (name == 'CALLER3'):
            # Whitelisted AND blacklisted: whitelist takes precedence.
            assert (action == 'Permitted')
        else:
            # Unknown callers fall through to screening.
            assert (action == 'Screened')
        global call_no
        call_no += 1
        return call_no

    def mock_ignore_call(caller):
        print('Ignoring call')
        global ignore_call_called
        ignore_call_called = True

    def mock_pick_up():
        print('Answering call')
        global answer_call_called
        answer_call_called = True
        return True

    def mock_play_audio(audio_file):
        print('Playing audio')
        global play_audio_called
        play_audio_called = True
        return True

    def mock_record_message(call_no, caller, detect_silence=True):
        print('Recording audio')
        global record_message_called
        record_message_called = True
        return True

    def mock_voice_messaging_menu(call_no, caller):
        print('Entering voice mail system')
        global voice_messaging_menu_called
        voice_messaging_menu_called = True
        return True

    # Build the app against the mocked hardware, then patch its collaborators
    # with the recording stubs defined above.
    app = CallAttendant(config)
    mocker.patch.object(app.modem, 'pick_up', mock_pick_up)
    mocker.patch.object(app, 'ignore_call', mock_ignore_call)
    mocker.patch.object(app.screener, 'is_whitelisted', mock_is_whitelisted)
    mocker.patch.object(app.screener, 'is_blacklisted', mock_is_blacklisted)
    mocker.patch.object(app.logger, 'log_caller', assert_log_caller_action)
    mocker.patch.object(app.modem, 'play_audio', mock_play_audio)
    mocker.patch.object(app.voice_mail, 'record_message', mock_record_message)
    mocker.patch.object(app.voice_mail, 'voice_messaging_menu', mock_voice_messaging_menu)
    (yield app)
    # Teardown: stop the app after the test body finishes.
    app.shutdown()
class HueRotate(Filter):
    """Filter-effects 'hue-rotate': rotate the hue by ``amount`` degrees."""

    NAME = 'hue-rotate'
    ALLOWED_SPACES = ('srgb-linear', 'srgb')

    def filter(self, color: 'Color', amount: Optional[float], **kwargs: Any) -> None:
        """Rotate the color's hue in place; a None amount means 0 degrees."""
        theta = math.radians(0 if amount is None else amount)
        c = math.cos(theta)
        s = math.sin(theta)
        # Linear-combination matrix from the W3C filter-effects spec, built
        # around the luminance weights (0.213, 0.715, 0.072).
        row1 = [(0.213 + (c * 0.787)) - (s * 0.213), (0.715 - (c * 0.715)) - (s * 0.715), (0.072 - (c * 0.072)) + (s * 0.928)]
        row2 = [(0.213 - (c * 0.213)) + (s * 0.143), (0.715 + (c * 0.285)) + (s * 0.14), (0.072 - (c * 0.072)) - (s * 0.283)]
        row3 = [(0.213 - (c * 0.213)) - (s * 0.787), (0.715 - (c * 0.715)) + (s * 0.715), (0.072 + (c * 0.928)) + (s * 0.072)]
        # Transform only the color channels; the last (alpha) is untouched.
        color[:-1] = alg.dot([row1, row2, row3], color[:-1], dims=alg.D2_D1)
class CoercibleBytesTest(StringTest):
    """Exercise CoercibleBytesTrait: anything bytes() accepts is coerced."""

    _default_value = b'bytes'

    # Values bytes() accepts: bytes themselves, non-negative ints
    # (zero-filled buffers), and iterables of ints in range(256).
    _good_values = [b'', b'10', b'-10', 10, [10], (10,), {10}, {10: 'foo'}, True]

    # str needs an explicit encoding, negative/overflowing ints and
    # iterables containing them fail, as does None.
    _bad_values = [
        '', 'string', -10, 10.1,
        [b''], [b'bytes'], [-10], (-10,), {-10: 'foo'}, {-10},
        [256], (256,), {256: 'foo'}, {256},
        {b'ten': b'10'}, (b'',), None,
    ]

    def setUp(self):
        self.obj = CoercibleBytesTrait()

    def coerce(self, value):
        return bytes(value)
class MoveDifferential(MoveTank):
    """MoveTank subclass for two-wheel differential-drive robots.

    Adds distance/arc/turn primitives derived from the wheel geometry, plus
    an optional background odometry thread that tracks the robot's pose as
    (x_pos_mm, y_pos_mm, theta radians).
    """

    def __init__(self, left_motor_port, right_motor_port, wheel_class, wheel_distance_mm, desc=None, motor_class=LargeMotor):
        MoveTank.__init__(self, left_motor_port, right_motor_port, desc, motor_class)
        self.wheel = wheel_class()
        # Distance between the two wheels' midlines.
        self.wheel_distance_mm = wheel_distance_mm
        # Circumference of the circle traced when spinning in place
        # (wheels turning in opposite directions).
        self.circumference_mm = (self.wheel_distance_mm * math.pi)
        # Smallest arc radius the geometry allows (inner wheel at the pivot).
        self.min_circle_radius_mm = (self.wheel_distance_mm / 2)
        # Odometry state, updated by the thread started in odometry_start().
        self.x_pos_mm = 0.0
        self.y_pos_mm = 0.0
        self.odometry_thread_run = False
        self.theta = 0.0

    def on_for_distance(self, speed, distance_mm, brake=True, block=True):
        """Drive straight for distance_mm at the given speed."""
        rotations = (distance_mm / self.wheel.circumference_mm)
        log.debug(('%s: on_for_rotations distance_mm %s, rotations %s, speed %s' % (self, distance_mm, rotations, speed)))
        MoveTank.on_for_rotations(self, speed, speed, rotations, brake, block)

    def _on_arc(self, speed, radius_mm, distance_mm, brake, block, arc_right):
        """Drive distance_mm (measured at robot center) along an arc of radius_mm.

        arc_right=True curves clockwise.  Raises ValueError for radii the
        wheel geometry cannot achieve.
        """
        if (radius_mm < self.min_circle_radius_mm):
            raise ValueError('{}: radius_mm {} is less than min_circle_radius_mm {}'.format(self, radius_mm, self.min_circle_radius_mm))
        # Circles traced by the outer wheel, the robot center and the inner wheel.
        circle_outer_mm = ((2 * math.pi) * (radius_mm + (self.wheel_distance_mm / 2)))
        circle_middle_mm = ((2 * math.pi) * radius_mm)
        circle_inner_mm = ((2 * math.pi) * (radius_mm - (self.wheel_distance_mm / 2)))
        # Slow the inner wheel in proportion to its shorter path.
        if arc_right:
            left_speed = speed
            right_speed = (float((circle_inner_mm / circle_outer_mm)) * left_speed)
        else:
            right_speed = speed
            left_speed = (float((circle_inner_mm / circle_outer_mm)) * right_speed)
        log.debug(('%s: arc %s, radius %s, distance %s, left-speed %s, right-speed %s' % (self, ('right' if arc_right else 'left'), radius_mm, distance_mm, left_speed, right_speed)))
        log.debug(('%s: circle_outer_mm %s, circle_middle_mm %s, circle_inner_mm %s' % (self, circle_outer_mm, circle_middle_mm, circle_inner_mm)))
        # Convert the requested center-line distance into outer-wheel degrees.
        circle_middle_percentage = float((distance_mm / circle_middle_mm))
        circle_outer_final_mm = (circle_middle_percentage * circle_outer_mm)
        outer_wheel_rotations = float((circle_outer_final_mm / self.wheel.circumference_mm))
        outer_wheel_degrees = (outer_wheel_rotations * 360)
        log.debug(('%s: arc %s, circle_middle_percentage %s, circle_outer_final_mm %s, ' % (self, ('right' if arc_right else 'left'), circle_middle_percentage, circle_outer_final_mm)))
        log.debug(('%s: outer_wheel_rotations %s, outer_wheel_degrees %s' % (self, outer_wheel_rotations, outer_wheel_degrees)))
        MoveTank.on_for_degrees(self, left_speed, right_speed, outer_wheel_degrees, brake, block)

    def on_arc_right(self, speed, radius_mm, distance_mm, brake=True, block=True):
        """Drive clockwise along an arc."""
        self._on_arc(speed, radius_mm, distance_mm, brake, block, True)

    def on_arc_left(self, speed, radius_mm, distance_mm, brake=True, block=True):
        """Drive counter-clockwise along an arc."""
        self._on_arc(speed, radius_mm, distance_mm, brake, block, False)

    def turn_degrees(self, speed, degrees, brake=True, block=True, error_margin=2, use_gyro=False):
        """Spin in place by `degrees` (positive = right/clockwise).

        With use_gyro=True (requires brake and block), re-checks the gyro
        afterwards and recursively corrects any error beyond error_margin.
        NOTE(review): self._gyro is presumably set by a gyro property on the
        base class — not visible in this file; confirm.
        """

        def final_angle(init_angle, degrees):
            # Normalize the target heading into [0, 360).
            result = (init_angle - degrees)
            while (result <= (- 360)):
                result += 360
            while (result >= 360):
                result -= 360
            if (result < 0):
                result += 360
            return result

        # Gyro correction only makes sense for a blocking, braking turn.
        use_gyro = bool((use_gyro and block and brake))
        if (use_gyro and (not self._gyro)):
            raise DeviceNotDefined("The 'gyro' variable must be defined with a GyroSensor. Example: tank.gyro = GyroSensor()")

        if use_gyro:
            angle_init_degrees = self._gyro.circle_angle()
        else:
            angle_init_degrees = math.degrees(self.theta)

        angle_target_degrees = final_angle(angle_init_degrees, degrees)
        log.info(('%s: turn_degrees() %d degrees from %s to %s' % (self, degrees, angle_init_degrees, angle_target_degrees)))

        # Arc length each wheel must travel for the requested rotation.
        distance_mm = ((abs(degrees) / 360) * self.circumference_mm)
        rotations = (distance_mm / self.wheel.circumference_mm)

        # Positive degrees: left wheel forward, right backward (clockwise).
        if (degrees > 0):
            MoveTank.on_for_rotations(self, speed, (speed * (- 1)), rotations, brake, block)
        else:
            MoveTank.on_for_rotations(self, (speed * (- 1)), speed, rotations, brake, block)

        if use_gyro:
            angle_current_degrees = self._gyro.circle_angle()
            # Compute the signed error, handling wrap-around across 0/360.
            if ((90 >= angle_target_degrees >= 0) and (270 <= angle_current_degrees <= 360)):
                degrees_error = ((angle_target_degrees + (360 - angle_current_degrees)) * (- 1))
            elif ((360 >= angle_target_degrees >= 270) and (0 <= angle_current_degrees <= 90)):
                degrees_error = (angle_current_degrees + (360 - angle_target_degrees))
            elif (angle_current_degrees > angle_target_degrees):
                degrees_error = (angle_current_degrees - angle_target_degrees)
            else:
                degrees_error = ((angle_target_degrees - angle_current_degrees) * (- 1))
            log.info(('%s: turn_degrees() ended up at %s, error %s, error_margin %s' % (self, angle_current_degrees, degrees_error, error_margin)))
            # Recursively nudge until we land within the error margin.
            if (abs(degrees_error) > error_margin):
                self.turn_degrees(speed, degrees_error, brake, block, error_margin, use_gyro)

    def turn_right(self, speed, degrees, brake=True, block=True, error_margin=2, use_gyro=False):
        """Spin clockwise by |degrees|."""
        self.turn_degrees(speed, abs(degrees), brake, block, error_margin, use_gyro)

    def turn_left(self, speed, degrees, brake=True, block=True, error_margin=2, use_gyro=False):
        """Spin counter-clockwise by |degrees|."""
        self.turn_degrees(speed, (abs(degrees) * (- 1)), brake, block, error_margin, use_gyro)

    def turn_to_angle(self, speed, angle_target_degrees, brake=True, block=True, error_margin=2, use_gyro=False):
        """Rotate in place to an absolute heading, taking the shorter direction.

        Requires the odometry thread (odometry_start()) to know the current
        heading; raises ThreadNotRunning otherwise.
        """
        if (not self.odometry_thread_run):
            raise ThreadNotRunning('odometry_start() must be called to track robot coordinates')

        # Normalize both headings into [0, 360).
        while (angle_target_degrees < 0):
            angle_target_degrees += 360
        angle_current_degrees = math.degrees(self.theta)
        while (angle_current_degrees < 0):
            angle_current_degrees += 360

        if (angle_current_degrees > angle_target_degrees):
            turn_right = True
            angle_delta = (angle_current_degrees - angle_target_degrees)
        else:
            turn_right = False
            angle_delta = (angle_target_degrees - angle_current_degrees)

        # If going the long way round, flip direction and take the complement.
        if (angle_delta > 180):
            angle_delta = (360 - angle_delta)
            turn_right = (not turn_right)

        log.debug(('%s: turn_to_angle %s, current angle %s, delta %s, turn_right %s' % (self, angle_target_degrees, angle_current_degrees, angle_delta, turn_right)))
        self.odometry_coordinates_log()

        if turn_right:
            self.turn_degrees(speed, abs(angle_delta), brake, block, error_margin, use_gyro)
        else:
            self.turn_degrees(speed, (abs(angle_delta) * (- 1)), brake, block, error_margin, use_gyro)

        self.odometry_coordinates_log()

    def odometry_coordinates_log(self):
        """Debug-log the current odometry pose."""
        log.debug(('%s: odometry angle %s at (%d, %d)' % (self, math.degrees(self.theta), self.x_pos_mm, self.y_pos_mm)))

    def odometry_start(self, theta_degrees_start=90.0, x_pos_start=0.0, y_pos_start=0.0, sleep_time=0.005):
        """Launch the background thread that integrates wheel encoder ticks
        into (x_pos_mm, y_pos_mm, theta).  Blocks until the thread is live.
        """

        def _odometry_monitor():
            left_previous = 0
            right_previous = 0
            self.theta = math.radians(theta_degrees_start)
            self.x_pos_mm = x_pos_start
            self.y_pos_mm = y_pos_start
            TWO_PI = (2 * math.pi)
            self.odometry_thread_run = True

            while self.odometry_thread_run:
                left_current = self.left_motor.position
                right_current = self.right_motor.position
                left_ticks = (left_current - left_previous)
                right_ticks = (right_current - right_previous)

                # Nothing moved: idle and poll again.
                if ((not left_ticks) and (not right_ticks)):
                    if sleep_time:
                        time.sleep(sleep_time)
                    continue

                left_previous = left_current
                right_previous = right_current

                # Convert encoder ticks to wheel travel in mm.
                left_rotations = float((left_ticks / self.left_motor.count_per_rot))
                right_rotations = float((right_ticks / self.right_motor.count_per_rot))
                left_mm = float((left_rotations * self.wheel.circumference_mm))
                right_mm = float((right_rotations * self.wheel.circumference_mm))

                # Standard differential-drive dead-reckoning update.
                mm = ((left_mm + right_mm) / 2.0)
                self.theta += ((right_mm - left_mm) / self.wheel_distance_mm)
                # Keep theta within one full revolution.
                self.theta -= float((int((self.theta / TWO_PI)) * TWO_PI))
                self.x_pos_mm += (mm * math.cos(self.theta))
                self.y_pos_mm += (mm * math.sin(self.theta))

                if sleep_time:
                    time.sleep(sleep_time)

        _thread.start_new_thread(_odometry_monitor, ())

        # Busy-wait until the monitor thread has flipped the run flag.
        while (not self.odometry_thread_run):
            pass

    def odometry_stop(self):
        """Signal the odometry thread (if running) to exit."""
        if self.odometry_thread_run:
            self.odometry_thread_run = False

    def on_to_coordinates(self, speed, x_target_mm, y_target_mm, brake=True, block=True):
        """Turn toward (x_target_mm, y_target_mm) and drive straight there.

        Requires the odometry thread to be running for the current pose.
        """
        if (not self.odometry_thread_run):
            raise ThreadNotRunning('odometry_start() must be called to track robot coordinates')

        # Stop and hold before computing the heading to the target.
        self.off(brake='hold')
        x_delta = (x_target_mm - self.x_pos_mm)
        y_delta = (y_target_mm - self.y_pos_mm)
        angle_target_radians = math.atan2(y_delta, x_delta)
        angle_target_degrees = math.degrees(angle_target_radians)
        self.turn_to_angle(speed, angle_target_degrees, brake=True, block=True)

        distance_mm = math.sqrt((pow((self.x_pos_mm - x_target_mm), 2) + pow((self.y_pos_mm - y_target_mm), 2)))
        self.on_for_distance(speed, distance_mm, brake, block)
def genome_scatter(cnarr, segments=None, variants=None, do_trend=False, y_min=None, y_max=None, title=None, segment_color=SEG_COLOR):
    """Plot coverage bins/segments and/or SNV allele frequencies genome-wide.

    With CNV data (cnarr or segments) AND variants, draws a two-panel figure
    sharing the x axis: copy number on top (3/5 height), allele frequencies
    below.  With only one kind of input, draws a single panel.

    Returns the matplotlib Figure containing the plot(s).
    """
    if ((cnarr or segments) and variants):
        # Two stacked panels on a 5-row grid: rows 0-2 for CNV, 3-4 for SNVs.
        axgrid = pyplot.GridSpec(5, 1, hspace=0.85)
        axis = pyplot.subplot(axgrid[:3])
        axis2 = pyplot.subplot(axgrid[3:], sharex=axis)
        # NOTE(review): labelbottom=False hides the x tick labels of the
        # *bottom* panel — with a shared x axis one would usually hide the
        # top panel's labels instead; confirm this is intentional.
        axis2.tick_params(labelbottom=False)
        chrom_sizes = plots.chromosome_sizes((cnarr or segments))
        axis2 = snv_on_genome(axis2, variants, chrom_sizes, segments, do_trend, segment_color)
    else:
        (_fig, axis) = pyplot.subplots()
    if (title is None):
        # Default title: sample ID of whichever input was provided.
        title = (cnarr or segments or variants).sample_id
    if (cnarr or segments):
        axis.set_title(title)
        axis = cnv_on_genome(axis, cnarr, segments, do_trend, y_min, y_max, segment_color)
    else:
        # Variants only: single SNV panel with chromosome extents derived
        # from the variants themselves.
        axis.set_title(f'Variant allele frequencies: {title}')
        chrom_sizes = collections.OrderedDict(((chrom, subarr['end'].max()) for (chrom, subarr) in variants.by_chromosome()))
        axis = snv_on_genome(axis, variants, chrom_sizes, segments, do_trend, segment_color)
    return axis.get_figure()
class PluginHandle(object):
    """Base handle for a service plugin: typed config application plus an
    optional out-of-process RPC backend.

    Subclasses may set RPC_SERVER_NAME (module name of the RPC server) and
    RPC_CONFIG_MEMBERS (attribute names forwarded to the RPC server's
    plugin_init).
    """

    # Map config 'type' strings to coercion callables; unknown types pass
    # the raw string value through unchanged (see config_update).
    typefns = {'integer': int, 'float': float, 'bool': (lambda x: (x == 'true')), 'boolean': (lambda x: (x == 'true')), 'string': str, 'enumeration': str}

    def __init__(self, service_name, cbinfo=(None, None)):
        (self.ingest_ref, self.callback) = cbinfo
        self.logger = logger.setup(service_name)
        # BUGFIX: rpc_setup() and shutdown() read self.rpc, but it was never
        # initialized, so the first rpc_setup() (or a shutdown() before any
        # setup) raised AttributeError.
        self.rpc = None

    def config_update(self, udict):
        """Apply {name: {'type': ..., 'value': ...}} config entries as typed
        snake_case attributes on this handle (e.g. 'maxSize' -> self.max_size).
        """

        def snake_case(name):
            # BUGFIX: the character class was "(['A-Z'])", which also matched
            # literal apostrophes in key names; only uppercase letters should
            # be prefixed with an underscore.
            return re.sub('([A-Z])', '_\\1', name).lower()

        def get_typed_value(k):
            _t = udict[k]['type']

            def ident_fn(x):
                # Unknown types: keep the raw string value.
                return x

            _typefn = (ident_fn if (_t not in PluginHandle.typefns) else PluginHandle.typefns[_t])
            return _typefn(udict[k]['value'])

        for k in udict.keys():
            _v = get_typed_value(k)
            setattr(self, snake_case(k), _v)

    def rpc_setup(self, config, module_dir='', restart_rpc=False):
        """Start (or restart, if restart_rpc) the RPC client for the module
        named by RPC_SERVER_NAME and initialize it with _rpc_config().

        Raises ValueError if the subclass did not set RPC_SERVER_NAME.
        NOTE: the 'config' parameter is currently unused; kept for interface
        compatibility with existing callers.
        """
        _server_module = getattr(self, 'RPC_SERVER_NAME', None)
        if (_server_module is None):
            raise ValueError('RPC_SERVER_NAME class variable must be set to the name of the RPC server module')
        if ((self.rpc is None) or restart_rpc):
            self.rpc = iprpc.IPCModuleClient(_server_module, module_dir)
        self.rpc.plugin_init(self._rpc_config())

    def _rpc_config(self):
        # Collect the attribute subset declared in RPC_CONFIG_MEMBERS (if any).
        _params = getattr(self, 'RPC_CONFIG_MEMBERS', [])
        return {k: getattr(self, k) for k in _params}

    def shutdown(self):
        """Tear down the RPC backend if one was started."""
        if (self.rpc is not None):
            self.rpc.plugin_shutdown()
# NOTE(review): '_in_both(js=False)' looks like a truncated decorator
# (plausibly '@run_in_both(js=False)') whose '@' and prefix were lost —
# confirm against the original test module.
_in_both(js=False)


def test_mutate_array2():
    """mutate_array() 'replace' mutations on numpy arrays (1-D and 2-D).

    Prints array contents before/after each mutation; the surrounding test
    framework presumably compares the printed output.
    """
    try:
        import numpy as np
    except ImportError:
        skip('No numpy')
    # 1-D: replace 4 elements starting at flat index 3 with zeros.
    a = np.arange(12)
    print(list(a.flat))
    mutate_array(a, dict(mutation='replace', index=3, objects=np.zeros((4,))))
    print(list(a.flat))
    # 2-D: replace a 2x2 sub-block anchored at (row 1, col 2) with zeros.
    a = np.arange(12)
    a.shape = (3, 4)
    mutate_array(a, dict(mutation='replace', index=(1, 2), objects=np.zeros((2, 2))))
    print(list(a.flat))
def test_auto_load_and_overriding(yaml_config_file):
    """A value passed at container creation overrides the auto-loaded YAML."""

    class ContainerWithConfig(containers.DeclarativeContainer):
        # YAML files are loaded automatically when the container is built.
        config = providers.Configuration(yaml_files=[yaml_config_file])

    instance = ContainerWithConfig(config={'section1': {'value1': 'overridden'}})

    assert instance.config.section1.value1() == 'overridden'
def read_enum(decoder, writer_schema, named_schemas, reader_schema=None, options=None):
    """Decode an Avro enum value via the writer schema's symbol table.

    When a reader schema is given and the decoded symbol is not among its
    symbols, fall back to the reader's declared default symbol; raise
    SchemaResolutionError if there is none.

    NOTE: 'options' is accepted for signature parity with the other read_*
    functions and is unused here.  (BUGFIX: its default was a mutable ``{}``.)
    """
    symbol = writer_schema['symbols'][decoder.read_enum()]
    if (reader_schema and (symbol not in reader_schema['symbols'])):
        default = reader_schema.get('default')
        # BUGFIX: explicit None check so a falsy-but-present default would
        # still be honored (the old truthiness test raised instead).
        if default is not None:
            return default
        else:
            symlist = reader_schema['symbols']
            msg = f"{symbol} not found in reader symbol list {reader_schema['name']}, known symbols: {symlist}"
            raise SchemaResolutionError(msg)
    return symbol
def _calculate_expected_base_fee_per_gas(parent_block) -> int:
    """EIP-1559 base fee update rule applied to the parent block's fields.

    Returns the next block's expected base fee in wei, derived from how far
    the parent's gas usage deviated from its target (gas_limit / elasticity).
    """
    base_fee = parent_block['base_fee_per_gas']
    gas_target = parent_block['gas_limit'] // BLOCK_ELASTICITY_MULTIPLIER
    gas_used = parent_block['gas_used']

    # Exactly on target: base fee carries over unchanged.
    if gas_used == gas_target:
        return base_fee

    if gas_used > gas_target:
        # Over target: raise the base fee, by at least 1 wei.
        delta = gas_used - gas_target
        increase = max(base_fee * delta // gas_target // BASE_FEE_MAX_CHANGE_DENOMINATOR, 1)
        return base_fee + increase

    # Under target: lower the base fee, floored at zero.
    delta = gas_target - gas_used
    decrease = base_fee * delta // gas_target // BASE_FEE_MAX_CHANGE_DENOMINATOR
    return max(base_fee - decrease, 0)
class OptionPlotoptionsArearangeSonificationDefaultinstrumentoptionsPointgrouping(Options):
    """Generated option wrapper for
    plotOptions.arearange.sonification.defaultInstrumentOptions.pointGrouping.

    NOTE(review): every option below appears twice — a no-arg getter
    followed by a one-arg setter with the same name.  In plain Python the
    second definition shadows the first, making the getters unreachable as
    written; generators for this pattern normally emit '@property' /
    '@<name>.setter' decorators — confirm they were not lost when this
    file was produced.
    """

    def algorithm(self):
        # Grouping algorithm; config default is 'minmax'.
        return self._config_get('minmax')

    def algorithm(self, text: str):
        self._config(text, js_type=False)

    def enabled(self):
        # Whether point grouping is enabled; config default is True.
        return self._config_get(True)

    def enabled(self, flag: bool):
        self._config(flag, js_type=False)

    def groupTimespan(self):
        # Time span (ms) grouped into one unit; config default is 15.
        return self._config_get(15)

    def groupTimespan(self, num: float):
        self._config(num, js_type=False)

    def prop(self):
        # Point property used for grouping; config default is 'y'.
        return self._config_get('y')

    def prop(self, text: str):
        self._config(text, js_type=False)