code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def pty_wrapper_main():
    """Main function of the pty wrapper script.

    Makes the script's own directory importable, then hands the remaining
    command-line arguments to ``_pty.spawn`` to run inside a pseudo-terminal.
    """
    # Ensure the sibling ``_pty`` module is importable regardless of CWD.
    sys.path.insert(0, os.path.dirname(__file__))
    import _pty
    # argv[0] is this wrapper itself; everything after it is the command.
    _pty.spawn(sys.argv[1:])
Main function of the pty wrapper script
def list_entitlements_options(f):
    """Options for list entitlements subcommand.

    Stacks the shared CLI/API option decorators plus an OWNER/REPO
    argument on top of the wrapped command function *f*.
    """
    @common_entitlements_options
    @decorators.common_cli_config_options
    @decorators.common_cli_list_options
    @decorators.common_cli_output_options
    @decorators.common_api_auth_options
    @decorators.initialise_api
    @click.argument(
        "owner_repo", metavar="OWNER/REPO", callback=validators.validate_owner_repo
    )
    @click.pass_context
    @functools.wraps(f)
    def wrapper(ctx, *args, **kwargs):
        # Delegate through the click context so click-level processing
        # (types, defaults) still applies to the wrapped command.
        return ctx.invoke(f, *args, **kwargs)
    return wrapper
Options for list entitlements subcommand.
def update_settings(self, kwargs_model=None, kwargs_constraints=None,
                    kwargs_likelihood=None, lens_add_fixed=None,
                    source_add_fixed=None, lens_light_add_fixed=None,
                    ps_add_fixed=None, cosmo_add_fixed=None,
                    lens_remove_fixed=None, source_remove_fixed=None,
                    lens_light_remove_fixed=None, ps_remove_fixed=None,
                    cosmo_remove_fixed=None, change_source_lower_limit=None,
                    change_source_upper_limit=None):
    """Updates lenstronomy settings "on the fly".

    :param kwargs_model: kwargs, specified keyword arguments overwrite the existing ones
    :param kwargs_constraints: kwargs, specified keyword arguments overwrite the existing ones
    :param kwargs_likelihood: kwargs, specified keyword arguments overwrite the existing ones
    :param lens_add_fixed: [[i_model, ['param1', 'param2', ...]], [...]]
    :param source_add_fixed: [[i_model, ['param1', 'param2', ...]], [...]]
    :param lens_light_add_fixed: [[i_model, ['param1', 'param2', ...]], [...]]
    :param ps_add_fixed: [[i_model, ['param1', 'param2', ...]], [...]]
    :param cosmo_add_fixed: ['param1', 'param2', ...]
    :param lens_remove_fixed: [[i_model, ['param1', 'param2', ...]], [...]]
    :param source_remove_fixed: [[i_model, ['param1', 'param2', ...]], [...]]
    :param lens_light_remove_fixed: [[i_model, ['param1', 'param2', ...]], [...]]
    :param ps_remove_fixed: [[i_model, ['param1', 'param2', ...]], [...]]
    :param cosmo_remove_fixed: ['param1', 'param2', ...]
    :param change_source_lower_limit: new lower limits for source parameters, or None
    :param change_source_upper_limit: new upper limits for source parameters, or None
    :return: 0, the settings are overwritten for the next fitting step to come
    """
    # BUG FIX: the defaults were mutable objects ({} / []), which Python
    # shares across calls; use None sentinels and materialize fresh objects
    # so one call can never leak state into the next.
    kwargs_model = {} if kwargs_model is None else kwargs_model
    kwargs_constraints = {} if kwargs_constraints is None else kwargs_constraints
    kwargs_likelihood = {} if kwargs_likelihood is None else kwargs_likelihood
    lens_add_fixed = [] if lens_add_fixed is None else lens_add_fixed
    source_add_fixed = [] if source_add_fixed is None else source_add_fixed
    lens_light_add_fixed = [] if lens_light_add_fixed is None else lens_light_add_fixed
    ps_add_fixed = [] if ps_add_fixed is None else ps_add_fixed
    cosmo_add_fixed = [] if cosmo_add_fixed is None else cosmo_add_fixed
    lens_remove_fixed = [] if lens_remove_fixed is None else lens_remove_fixed
    source_remove_fixed = [] if source_remove_fixed is None else source_remove_fixed
    lens_light_remove_fixed = [] if lens_light_remove_fixed is None else lens_light_remove_fixed
    ps_remove_fixed = [] if ps_remove_fixed is None else ps_remove_fixed
    cosmo_remove_fixed = [] if cosmo_remove_fixed is None else cosmo_remove_fixed
    self._updateManager.update_options(kwargs_model, kwargs_constraints, kwargs_likelihood)
    self._updateManager.update_fixed(self._lens_temp, self._source_temp, self._lens_light_temp,
                                     self._ps_temp, self._cosmo_temp, lens_add_fixed,
                                     source_add_fixed, lens_light_add_fixed, ps_add_fixed,
                                     cosmo_add_fixed, lens_remove_fixed, source_remove_fixed,
                                     lens_light_remove_fixed, ps_remove_fixed,
                                     cosmo_remove_fixed)
    self._updateManager.update_limits(change_source_lower_limit, change_source_upper_limit)
    return 0
updates lenstronomy settings "on the fly" :param kwargs_model: kwargs, specified keyword arguments overwrite the existing ones :param kwargs_constraints: kwargs, specified keyword arguments overwrite the existing ones :param kwargs_likelihood: kwargs, specified keyword arguments overwrite the existing ones :param lens_add_fixed: [[i_model, ['param1', 'param2',...], [...]] :param source_add_fixed: [[i_model, ['param1', 'param2',...], [...]] :param lens_light_add_fixed: [[i_model, ['param1', 'param2',...], [...]] :param ps_add_fixed: [[i_model, ['param1', 'param2',...], [...]] :param cosmo_add_fixed: ['param1', 'param2',...] :param lens_remove_fixed: [[i_model, ['param1', 'param2',...], [...]] :param source_remove_fixed: [[i_model, ['param1', 'param2',...], [...]] :param lens_light_remove_fixed: [[i_model, ['param1', 'param2',...], [...]] :param ps_remove_fixed: [[i_model, ['param1', 'param2',...], [...]] :param cosmo_remove_fixed: ['param1', 'param2',...] :return: 0, the settings are overwritten for the next fitting step to come
def almutem(sign, lon):
    """Return the almutem (highest-scoring planet) for a given sign and longitude.

    Ties are broken in favor of the earlier planet in
    ``const.LIST_SEVEN_PLANETS``; returns None if no planet scores above 0.
    """
    best_planet = None
    best_score = 0
    for planet_id in const.LIST_SEVEN_PLANETS:
        current = score(planet_id, sign, lon)
        if current > best_score:
            best_planet = planet_id
            best_score = current
    return best_planet
Returns the almutem for a given sign and longitude.
async def become(self, layer_type: Type[L], request: 'Request'):
    """Transform the translatable string into an actual string and put it
    inside a ``RawText`` layer.

    Other layer types are delegated to the parent implementation.
    NOTE(review): the parent ``become`` presumably raises for unsupported
    types, since control otherwise falls through to the RawText return —
    confirm against the base class.
    """
    if layer_type != RawText:
        super(Text, self).become(layer_type, request)
    # Render the translatable text for this request before wrapping it.
    return RawText(await render(self.text, request))
Transforms the translatable string into an actual string and put it inside a RawText.
def startAlertListener(self, callback=None):
    """Create a websocket connection to the Plex Server to optionally
    receive notifications (media scans, running Transcode Sessions, ...).

    NOTE: requires websocket-client (``pip install websocket-client``).

    Parameters:
        callback (func): Callback function to call on received messages.

    Returns:
        The started :class:`AlertListener` instance.
    """
    notifier = AlertListener(self, callback)
    notifier.start()
    return notifier
Creates a websocket connection to the Plex Server to optionally receive notifications. These often include messages from Plex about media scans as well as updates to currently running Transcode Sessions. NOTE: You need websocket-client installed in order to use this feature. >> pip install websocket-client Parameters: callback (func): Callback function to call on received messages. raises: :class:`plexapi.exception.Unsupported`: Websocket-client not installed.
def _isValidQuery(self, query, mode="phonefy"): try: validator = self.modes[mode].get("query_validator") if validator: try: compiledRegexp = re.compile( "^{expr}$".format( expr=validator ) ) return compiledRegexp.match(query) except AttributeError as e: return True except AttributeError as e: compiledRegexp = re.compile("^{r}$".format(r=self.validQuery[mode])) return compiledRegexp.match(query)
Method to verify if a given query is processable by the platform. The system looks for the forbidden characters in self.Forbidden list. Args: ----- query: The query to be launched. mode: To be chosen amongst mailfy, phonefy, usufy, searchfy. Return: ------- True | False
def import_pipeline(url, pipeline_id, auth, json_payload, verify_ssl, overwrite = False):
    """Import a pipeline, completely overwriting the existing pipeline.

    Args:
        url (str): the host url in the form 'http://host:port/'.
        pipeline_id (str): the ID of the exported pipeline.
        auth (tuple): a tuple of username, and password.
        json_payload (dict): the exported json payload as a dictionary.
        verify_ssl (bool): whether to verify ssl certificates.
        overwrite (bool): overwrite existing pipeline.

    Returns:
        dict: the response json
    """
    response = requests.post(
        url + '/' + pipeline_id + '/import',
        params={'overwrite': overwrite},
        headers=X_REQ_BY,
        auth=auth,
        verify=verify_ssl,
        json=json_payload,
    )
    if response.status_code != 200:
        logging.error('Import error response: ' + response.text)
        response.raise_for_status()
    logging.info('Pipeline import successful.')
    return response.json()
Import a pipeline. This will completely overwrite the existing pipeline. Args: url (str): the host url in the form 'http://host:port/'. pipeline_id (str): the ID of of the exported pipeline. auth (tuple): a tuple of username, and password. json_payload (dict): the exported json payload as a dictionary. overwrite (bool): overwrite existing pipeline verify_ssl (bool): whether to verify ssl certificates Returns: dict: the response json
def _init_nxapi(opts):
    """Open a connection to the NX-OS switch over NX-API.

    NX-API is HTTP(S) based, so there is no persistent connection to
    maintain; a cheap ``show clock`` request is issued purely as a
    connectivity probe before marking the proxy minion as up.
    """
    proxy_dict = opts.get('proxy', {})
    conn_args = copy.deepcopy(proxy_dict)
    conn_args.pop('proxytype', None)
    opts['multiprocessing'] = conn_args.pop('multiprocessing', True)
    try:
        # Probe request; the reply itself is not inspected.
        rpc_reply = __utils__['nxos.nxapi_request']('show clock', **conn_args)
        DEVICE_DETAILS['conn_args'] = conn_args
        DEVICE_DETAILS['initialized'] = True
        DEVICE_DETAILS['up'] = True
        DEVICE_DETAILS['no_save_config'] = opts['proxy'].get('no_save_config', False)
    except Exception as ex:
        log.error('Unable to connect to %s', conn_args['host'])
        log.error('Please check the following:\n')
        log.error('-- Verify that "feature nxapi" is enabled on your NX-OS device: %s', conn_args['host'])
        log.error('-- Verify that nxapi settings on the NX-OS device and proxy minion config file match')
        log.error('-- Exception Generated: %s', ex)
        # Abort the minion process: without connectivity the proxy is useless.
        exit()
    log.info('nxapi DEVICE_DETAILS info: %s', DEVICE_DETAILS)
    return True
Open a connection to the NX-OS switch over NX-API. As the communication is HTTP(S) based, there is no connection to maintain, however, in order to test the connectivity and make sure we are able to bring up this Minion, we are executing a very simple command (``show clock``) which doesn't come with much overhead and it's sufficient to confirm we are indeed able to connect to the NX-API endpoint as configured.
async def start_all_linking(self, linkcode, group, address=None):
    """Start the All-Linking process with the IM and device.

    :param linkcode: link code (controller/responder/delete).
    :param group: All-Link group number.
    :param address: optional device address; when given the device is put
        into linking mode directly, otherwise a manual button press on the
        device is awaited.
    """
    _LOGGING.info('Starting the All-Linking process')
    if address:
        linkdevice = self.plm.devices[Address(address).id]
        if not linkdevice:
            linkdevice = create(self.plm, address, None, None)
        _LOGGING.info('Attempting to link the PLM to device %s. ', address)
        self.plm.start_all_linking(linkcode, group)
        # BUG FIX: asyncio.sleep() was called without ``await``, so the
        # coroutine was never executed and the intended half-second pause
        # before entering linking mode never happened.
        await asyncio.sleep(.5, loop=self.loop)
        linkdevice.enter_linking_mode(group=group)
    else:
        _LOGGING.info('Starting All-Linking on PLM. '
                      'Waiting for button press')
        self.plm.start_all_linking(linkcode, group)
        await asyncio.sleep(self.wait_time, loop=self.loop)
    _LOGGING.info('%d devices added to the All-Link Database',
                  len(self.plm.devices))
    await asyncio.sleep(.1, loop=self.loop)
Start the All-Linking process with the IM and device.
def decorator(func):
    r"""Makes the passed decorators to support optional args."""
    def wrapper(__decorated__=None, *Args, **KwArgs):
        # Called with option args only (no target yet): return a
        # decorator that will receive the target function later.
        if __decorated__ is None:
            return lambda _func: func(_func, *Args, **KwArgs)
        else:
            # Called directly on the target function.
            return func(__decorated__, *Args, **KwArgs)
    return wrap(wrapper, func)
r"""Makes the passed decorators support optional args.
def list(self, request, *args, **kwargs):
    """Filter services by type
    ^^^^^^^^^^^^^^^^^^^^^^^

    It is possible to filter services by their types. Example:

        /api/services/?service_type=DigitalOcean&service_type=OpenStack
    """
    # Pure documentation hook: behavior is entirely the parent's.
    return super(ServicesViewSet, self).list(request, *args, **kwargs)
Filter services by type ^^^^^^^^^^^^^^^^^^^^^^^ It is possible to filter services by their types. Example: /api/services/?service_type=DigitalOcean&service_type=OpenStack
def execute(func, handler, args, kwargs):
    """Wrap the handler ``_execute`` method to trace incoming requests,
    extracting the context from the headers, if available.
    """
    tracing = handler.settings.get('opentracing_tracing')
    # Run inside a fresh stack context so the span is scoped to this request.
    with tracer_stack_context():
        if tracing._trace_all:
            attrs = handler.settings.get('opentracing_traced_attributes', [])
            tracing._apply_tracing(handler, attrs)
        return func(*args, **kwargs)
Wrap the handler ``_execute`` method to trace incoming requests, extracting the context from the headers, if available.
def noEmptyNests(node):
    """Recursively remove empty ``"children"`` lists from every dict
    nested inside *node* (which may be a dict, a list, or anything else).

    :param node: arbitrarily nested structure of dicts/lists.
    :return: *node*, modified in place.
    """
    if isinstance(node, list):
        for item in node:
            noEmptyNests(item)
    elif isinstance(node, dict):
        # Recurse first so nested empty lists are pruned bottom-up.
        for value in node.values():
            noEmptyNests(value)
        # BUG FIX: use .get() so dicts without a "children" key are
        # tolerated (the original indexed unconditionally -> KeyError).
        if node.get("children") == []:
            node.pop("children")
    return node
recursively make sure that no dictionaries inside node contain empty children lists
def as_string(self, chars, show_leaf=True, current_linkable=False, class_current="active_link"):
    """Return the breadcrumb rendered as a string.

    :param chars: separator placed between breadcrumb entries.
    :param show_leaf: include the final (leaf) entry.
    :param current_linkable: render the current entry as a link.
    :param class_current: CSS class applied to the current entry.
    """
    return self.__do_menu("as_string", show_leaf, current_linkable, class_current, chars)
It returns breadcrumb as string
def get_create_index_sql(self, index, table):
    """Returns the SQL to create an index on a table on this platform.

    :param index: The index
    :type index: Index

    :param table: The table
    :type table: Table or str

    :rtype: str
    """
    if isinstance(table, Table):
        table = table.get_quoted_name(self)
    name = index.get_quoted_name(self)
    columns = index.get_quoted_columns(self)
    if not columns:
        raise DBALException('Incomplete definition. "columns" required.')
    # Primary keys have their own dedicated statement form.
    if index.is_primary():
        return self.get_create_primary_key_sql(index, table)
    query = "CREATE %sINDEX %s ON %s" % (
        self.get_create_index_sql_flags(index),
        name,
        table,
    )
    query += " (%s)%s" % (
        self.get_index_field_declaration_list_sql(columns),
        self.get_partial_index_sql(index),
    )
    return query
Returns the SQL to create an index on a table on this platform. :param index: The index :type index: Index :param table: The table :type table: Table or str :rtype: str
def get_object_name(obj):
    """Return the name of a given AST object.

    Follows each node's name-bearing attribute (e.g. ``Name.id``,
    ``Attribute.attr``) until a plain string is reached.
    """
    name_attr_by_type = {
        ast.Name: "id",
        ast.Attribute: "attr",
        ast.Call: "func",
        ast.FunctionDef: "name",
        ast.ClassDef: "name",
        ast.Subscript: "value",
    }
    # Python 3 represents function arguments as ``ast.arg`` nodes.
    if hasattr(ast, "arg"):
        name_attr_by_type[ast.arg] = "arg"
    current = obj
    while not isinstance(current, str):
        assert type(current) in name_attr_by_type
        current = getattr(current, name_attr_by_type[type(current)])
    return current
Return the name of a given object
def get_curricula_by_term(term, view_unpublished=False):
    """Returns a list of restclients.Curriculum models, for the passed
    Term model.

    :param term: Term model providing ``quarter`` and ``year``.
    :param view_unpublished: also include unpublished curricula.
    """
    # The web service expects the flag as a lowercase string literal.
    view_unpublished = "true" if view_unpublished else "false"
    url = "{}?{}".format(
        curriculum_search_url_prefix,
        urlencode([
            ("quarter", term.quarter.lower(),),
            ("year", term.year,),
            ("view_unpublished", view_unpublished,)]))
    return _json_to_curricula(get_resource(url))
Returns a list of restclients.Curriculum models, for the passed Term model.
def add_adjust(self, data, prehashed=False):
    """Add a new leaf, and adjust the tree, without rebuilding the whole
    thing.

    :param data: leaf payload (hashed unless *prehashed* is True).
    :param prehashed: treat *data* as an already-computed hash.
    """
    subtrees = self._get_whole_subtrees()
    new_node = Node(data, prehashed=prehashed)
    self.leaves.append(new_node)
    # Merge the new node upward with each complete subtree, smallest
    # (rightmost) first, wiring parent/sibling/side links as we go.
    for node in reversed(subtrees):
        new_parent = Node(node.val + new_node.val)
        node.p, new_node.p = new_parent, new_parent
        new_parent.l, new_parent.r = node, new_node
        node.sib, new_node.sib = new_node, node
        node.side, new_node.side = 'L', 'R'
        new_node = new_node.p
    self.root = new_node
Add a new leaf, and adjust the tree, without rebuilding the whole thing.
def get_application_parser(commands):
    """Builds an argument parser for the application's CLI.

    :param commands: iterable/mapping of sub-command names.
    :return: ArgumentParser
    """
    parser = argparse.ArgumentParser(
        description=configuration.APPLICATION_DESCRIPTION,
        usage=configuration.EXECUTABLE_NAME + ' [sub-command] [options]',
        add_help=False)
    parser.add_argument(
        'sub_command',
        # Idiom fix: list(commands) instead of an identity comprehension.
        choices=list(commands),
        nargs="?")
    # -h is handled manually so sub-commands can define their own help.
    parser.add_argument("-h", "--help", action="store_true")
    return parser
Builds an argument parser for the application's CLI. :param commands: :return: ArgumentParser
def create_time_subscription(self, instance, on_data=None, timeout=60):
    """Create a new subscription for receiving time updates of an
    instance. Time updates are emitted at 1Hz.

    :param str instance: A Yamcs instance name
    :param on_data: Function that gets called with
        :class:`~datetime.datetime` updates.
    :type on_data: Optional[Callable[~datetime.datetime])
    :param timeout: The amount of seconds to wait for the request to
        complete.
    :type timeout: Optional[float]
    :return: Future that can be used to manage the background websocket
        subscription.
    :rtype: .TimeSubscription
    """
    manager = WebSocketSubscriptionManager(self, resource='time')
    subscription = TimeSubscription(manager)
    wrapped_callback = functools.partial(
        _wrap_callback_parse_time_info, subscription, on_data)
    manager.open(wrapped_callback, instance)
    # Block until the server acknowledges the subscription (or timeout).
    subscription.reply(timeout=timeout)
    return subscription
Create a new subscription for receiving time updates of an instance. Time updates are emitted at 1Hz. This method returns a future, then returns immediately. Stop the subscription by canceling the future. :param str instance: A Yamcs instance name :param on_data: Function that gets called with :class:`~datetime.datetime` updates. :type on_data: Optional[Callable[~datetime.datetime]) :param timeout: The amount of seconds to wait for the request to complete. :type timeout: Optional[float] :return: Future that can be used to manage the background websocket subscription. :rtype: .TimeSubscription
def create_client_with_lazy_load(api_key, cache_time_to_live_seconds=60, config_cache_class=None, base_url=None):
    """Create an instance of ConfigCatClient and set up Lazy Load mode
    with custom options.

    :param api_key: ConfigCat ApiKey to access your configuration.
    :param cache_time_to_live_seconds: The cache TTL (clamped to >= 1s).
    :param config_cache_class: Optional ConfigCache implementation to use
        instead of the default InMemoryConfigCache.
    :param base_url: Optional base_url for a proxy server between your
        application and ConfigCat.
    """
    if api_key is None:
        raise ConfigCatClientException('API Key is required.')
    # Clamp the TTL to at least one second.
    ttl = max(cache_time_to_live_seconds, 1)
    # Poll interval / max-age of 0 selects lazy-load behavior.
    return ConfigCatClient(api_key, 0, 0, None, ttl,
                           config_cache_class, base_url)
Create an instance of ConfigCatClient and setup Lazy Load mode with custom options :param api_key: ConfigCat ApiKey to access your configuration. :param cache_time_to_live_seconds: The cache TTL. :param config_cache_class: If you want to use custom caching instead of the client's default InMemoryConfigCache, You can provide an implementation of ConfigCache. :param base_url: You can set a base_url if you want to use a proxy server between your application and ConfigCat
def set_defaults(config, cluster_name):
    """Fill in some basic configuration parameters if they are not set
    in the config file.

    :param config: configuration dict with 'postgresql' and 'restapi' sections.
    :param cluster_name: default value for the postgresql name/scope.
    """
    postgresql = config['postgresql']
    postgresql.setdefault('name', cluster_name)
    postgresql.setdefault('scope', cluster_name)
    postgresql.setdefault('listen', '127.0.0.1')
    # Replication credentials are always reset here (filled in elsewhere).
    postgresql['authentication'] = {'replication': None}
    # Idiom fix: the old ``cond and a or b`` construct is error-prone;
    # keep the listen address only if it already carries a port.
    if ':' not in config['restapi']['listen']:
        config['restapi']['listen'] = '127.0.0.1:8008'
fill in some basic configuration parameters if they are not set in the config file
def get_function_for_cognito_trigger(self, trigger):
    """Get the associated function to execute for a cognito trigger.

    :param trigger: trigger name as delivered by Cognito.
    :return: the mapped function, or None when no mapping exists.
    """
    # Removed a leftover debugging print() that dumped the entire
    # COGNITO_TRIGGER_MAPPING to stdout on every lookup.
    return self.settings.COGNITO_TRIGGER_MAPPING.get(trigger)
Get the associated function to execute for a cognito trigger
def save_params(model_name: str):
    """Save current global listener params (``pr``) to
    ``<model_name>.params`` as JSON.
    """
    with open(model_name + '.params', 'w') as f:
        json.dump(pr.__dict__, f)
Save current global listener params to a file
def remove_accounts_from_institute(accounts_query, institute):
    """Remove every non-deleted account in *accounts_query* from *institute*."""
    # Deleted accounts are skipped; they were never part of the institute.
    active_accounts = accounts_query.filter(date_deleted__isnull=True)
    for account in active_accounts:
        remove_account_from_institute(account, institute)
Remove accounts from institute.
def clear_data(self):
    """Clear menu data from previous menu generation.

    Resets header, prologue, epilogue and items to their empty state so
    a new menu can be built from scratch.
    """
    self.__header.title = None
    self.__header.subtitle = None
    self.__prologue.text = None
    self.__epilogue.text = None
    self.__items_section.items = None
Clear menu data from previous menu generation.
def transformFromNative(self):
    """Return self transformed into a ContentLine or Component if needed.

    May have side effects.  If it does, transformFromNative and
    transformToNative MUST have perfectly inverse side effects.  Note
    that it isn't always possible for transformFromNative to be a perfect
    inverse of transformToNative; in such cases it should return a new
    object, not self after modifications.
    """
    if self.isNative and self.behavior and self.behavior.hasNative:
        try:
            return self.behavior.transformFromNative(self)
        except Exception as e:
            # Annotate NativeErrors with the source line number and
            # re-raise; wrap anything else into a NativeError.
            lineNumber = getattr(self, 'lineNumber', None)
            if isinstance(e, NativeError):
                if lineNumber is not None:
                    e.lineNumber = lineNumber
                raise
            else:
                msg = "In transformFromNative, unhandled exception on line %s %s: %s"
                msg = msg % (lineNumber, sys.exc_info()[0], sys.exc_info()[1])
                raise NativeError(msg, lineNumber)
    else:
        return self
Return self transformed into a ContentLine or Component if needed. May have side effects. If it does, transformFromNative and transformToNative MUST have perfectly inverse side effects. Allowing such side effects is convenient for objects whose transformations only change a few attributes. Note that it isn't always possible for transformFromNative to be a perfect inverse of transformToNative, in such cases transformFromNative should return a new object, not self after modifications.
def move_roles_to_base_role_config_group(resource_root, service_name, role_names, cluster_name="default"):
    """Moves roles to the base role config group.

    The roles can be moved from any role config group belonging to the
    same service; each role is moved to the base group matching its type.

    @param role_names: The names of the roles to move.
    @return: List of roles which have been moved successfully.
    @since: API v3
    """
    return call(resource_root.put,
                _get_role_config_groups_path(cluster_name, service_name) + '/roles',
                ApiRole, True, data=role_names, api_version=3)
Moves roles to the base role config group. The roles can be moved from any role config group belonging to the same service. The role type of the roles may vary. Each role will be moved to its corresponding base group depending on its role type. @param role_names: The names of the roles to move. @return: List of roles which have been moved successfully. @since: API v3
def _collapse_subitems(base, items):
    """Collapse full data representations relative to a standard base.

    :param base: reference dict every item is diffed against.
    :param items: iterable of full item dicts.
    :return: list of dicts holding only each item's differences from *base*.
    """
    # Idiom fix: replaced the manual append loop with a comprehension.
    return [_diff_dict(base, item) for item in items]
Collapse full data representations relative to a standard base.
def passwordReset1to2(old):
    """Power down and delete the item.

    Upgrades the item from schema version 1 to 2 first, then removes all
    of its powerups from the store before deleting it.
    """
    new = old.upgradeVersion(old.typeName, 1, 2, installedOn=None)
    for iface in new.store.interfacesFor(new):
        new.store.powerDown(new, iface)
    new.deleteFromStore()
Power down and delete the item
def _emit_message(cls, message):
    """Print a message to STDOUT and flush immediately so it is visible
    even when output is buffered.
    """
    sys.stdout.write(message)
    sys.stdout.flush()
Print a message to STDOUT.
def save_source(driver, name):
    """Save the rendered HTML of the browser.

    The location of the source can be configured by the environment
    variable `SAVED_SOURCE_DIR`.  If not set, this defaults to the
    current working directory (BUG FIX: previously an unset variable
    produced ``None`` and ``os.path.join`` raised a TypeError, contrary
    to this documented behavior).

    Args:
        driver (selenium.webdriver): The Selenium-controlled browser.
        name (str): A name to use in the output file name.
            Note that ".html" is appended automatically.

    Returns:
        None
    """
    source = driver.page_source
    saved_source_dir = os.environ.get('SAVED_SOURCE_DIR') or os.getcwd()
    file_name = os.path.join(saved_source_dir,
                             '{name}.html'.format(name=name))
    try:
        with open(file_name, 'wb') as output_file:
            output_file.write(source.encode('utf-8'))
    except Exception:
        # Best-effort: failing to save the page must not fail the caller.
        msg = u"Could not save the browser page source to {}.".format(file_name)
        LOGGER.warning(msg)
Save the rendered HTML of the browser. The location of the source can be configured by the environment variable `SAVED_SOURCE_DIR`. If not set, this defaults to the current working directory. Args: driver (selenium.webdriver): The Selenium-controlled browser. name (str): A name to use in the output file name. Note that ".html" is appended automatically Returns: None
def _get_acquisition(self, model, space):
    """Imports and builds the acquisition (and its optimizer) from config."""
    from copy import deepcopy
    # Copy so deleting 'name' does not mutate the stored configuration.
    acqOpt_config = deepcopy(self.config['acquisition']['optimizer'])
    acqOpt_name = acqOpt_config['name']
    # 'name' selects the optimizer class; the remaining keys are kwargs.
    del acqOpt_config['name']
    from ..optimization import AcquisitionOptimizer
    acqOpt = AcquisitionOptimizer(space, acqOpt_name, **acqOpt_config)
    from ..acquisitions import select_acquisition
    return select_acquisition(self.config['acquisition']['type']).fromConfig(model, space, acqOpt, None, self.config['acquisition'])
Imports the acquisition
def heatmap(z, x=None, y=None, colorscale='Viridis'):
    """Create a heatmap.

    Parameters
    ----------
    z : array-like of heatmap values
    x : array-like of x-axis values, optional
    y : array-like of y-axis values, optional
    colorscale : str, optional

    Returns
    -------
    Chart
    """
    # Guarantee at least a 1-D array so scalars also render.
    z = np.atleast_1d(z)
    data = [go.Heatmap(z=z, x=x, y=y, colorscale=colorscale)]
    return Chart(data=data)
Create a heatmap. Parameters ---------- z : TODO x : TODO, optional y : TODO, optional colorscale : TODO, optional Returns ------- Chart
def _loadable_get_(name, self):
    "Used to lazily-evaluate & memoize an attribute."
    # NOTE: parameter order is (name, self) because this is bound through
    # functools.partial with the attribute name pre-applied.
    func = getattr(self._attr_func_, name)
    ret = func()
    setattr(self._attr_data_, name, ret)
    # Replace this loader with a plain memoized property on the class so
    # subsequent accesses skip evaluation entirely.
    setattr(
        type(self), name,
        property(
            functools.partial(self._simple_get_, name)
        )
    )
    # Drop the factory: it must run at most once.
    delattr(self._attr_func_, name)
    return ret
Used to lazily-evaluate & memoize an attribute.
def do_PUT(self):
    """Perform a PUT request: forward the HTTP payload to the CoAP
    resource and translate the CoAP response back to HTTP.
    """
    self.do_initial_operations()
    payload = self.coap_uri.get_payload()
    if payload is None:
        # A PUT without a payload is malformed.
        logger.error("BAD PUT REQUEST")
        self.send_error(BAD_REQUEST)
        return
    logger.debug(payload)
    coap_response = self.client.put(self.coap_uri.path, payload)
    self.client.stop()
    logger.debug("Server response: %s", coap_response.pretty_print())
    self.set_http_response(coap_response)
Perform a PUT request
def compute_taxes(self, precision=None):
    """Returns the total amount of taxes for this line with a specific
    number of decimals.

    @param precision: int Number of decimal places
    @return: Decimal
    """
    # Taxes are computed on the gross amount net of discounts.
    base = self.gross - self.total_discounts
    return quantize(sum([t.compute(base, precision) for t in self.__taxes]), precision)
Returns the total amount of taxes for this line with a specific number of decimals @param precision: int Number of decimal places @return: Decimal
def change(img):
    """Set the wallpaper to *img*.

    Silently does nothing when *img* is not an existing file; dispatches
    to the platform-specific setter based on the ``OS`` constant.
    """
    if not os.path.isfile(img):
        return
    desktop = get_desktop_env()
    if OS == "Darwin":
        set_mac_wallpaper(img)
    elif OS == "Windows":
        set_win_wallpaper(img)
    else:
        # Linux/BSD: the desktop environment determines the mechanism.
        set_desktop_wallpaper(desktop, img)
    logging.info("Set the new wallpaper.")
Set the wallpaper.
def codes_match_any(self, codes):
    """Return True if any of this object's code selectors matches a code
    in *codes*.

    :param codes: container of codes tested for membership.
    :return: bool
    """
    # Idiom fix: manual loop with True/False returns replaced by any().
    return any(selector.code in codes for selector in self.code_selectors)
Match any code.
def hash_pair(first: Keccak256, second: Optional[Keccak256]) -> Keccak256:
    """Computes the keccak hash of the two elements in topological order.

    A merkle proof only carries the path from leaf to root, so the proof
    checker does not know the original ordering; hashing the pair in a
    deterministic (sorted) order keeps the smart contract verification
    and this python code compatible.  A lone element hashes to itself.
    """
    assert first is not None
    if second is None:
        return first
    # Deterministic ordering: smaller element first.
    low, high = (second, first) if first > second else (first, second)
    return sha3(low + high)
Computes the keccak hash of the elements ordered topologically. Since a merkle proof will not include all the elements, but only the path starting from the leaves up to the root, the order of the elements is not known by the proof checker. The topological order is used as a deterministic way of ordering the elements making sure the smart contract verification and the python code are compatible.
def handle(cls, value, **kwargs):
    """Decrypt the specified value with a master key in KMS.

    Expected format: ``[<region>@]<base64 encrypted value>``.  The region
    is optional and defaults to the environment's AWS_DEFAULT_REGION.
    The value may also be read from a file via ``file://`` paths.
    """
    value = read_value_from_path(value)
    region = None
    # Optional "<region>@" prefix selects the KMS region.
    if "@" in value:
        region, value = value.split("@", 1)
    kms = get_session(region).client('kms')
    # KMS expects the raw ciphertext blob, so base64-decode first.
    value = value.encode('utf-8')
    decoded = codecs.decode(value, 'base64')
    return kms.decrypt(CiphertextBlob=decoded)["Plaintext"]
Decrypt the specified value with a master key in KMS. kmssimple field types should be in the following format: [<region>@]<base64 encrypted value> Note: The region is optional, and defaults to the environment's `AWS_DEFAULT_REGION` if not specified. For example: # We use the aws cli to get the encrypted value for the string # "PASSWORD" using the master key called "myStackerKey" in # us-east-1 $ aws --region us-east-1 kms encrypt --key-id alias/myStackerKey \ --plaintext "PASSWORD" --output text --query CiphertextBlob CiD6bC8t2Y<...encrypted blob...> # In stacker we would reference the encrypted value like: conf_key: ${kms us-east-1@CiD6bC8t2Y<...encrypted blob...>} You can optionally store the encrypted value in a file, ie: kms_value.txt us-east-1@CiD6bC8t2Y<...encrypted blob...> and reference it within stacker (NOTE: the path should be relative to the stacker config file): conf_key: ${kms file://kms_value.txt} # Both of the above would resolve to conf_key: PASSWORD
def load_config(data, *models, **kwargs):
    """Generate and load the config on the device using the OpenConfig or
    IETF models and device profiles.

    data: dictionary structured with respect to the models referenced.
    models: a list of models used when generating the config (a single
        list argument is also accepted).
    test / debug / commit / replace: passed through to net.load_config.
    """
    # Allow callers to pass one list of models instead of varargs.
    if isinstance(models, tuple) and isinstance(models[0], list):
        models = models[0]
    config = get_config(data, *models, **kwargs)
    # Pop the loader options so they are not forwarded to get_config's
    # profile handling on subsequent use of kwargs.
    test = kwargs.pop('test', False)
    debug = kwargs.pop('debug', False)
    commit = kwargs.pop('commit', True)
    replace = kwargs.pop('replace', False)
    return __salt__['net.load_config'](text=config,
                                       test=test,
                                       debug=debug,
                                       commit=commit,
                                       replace=replace,
                                       inherit_napalm_device=napalm_device)
Generate and load the config on the device using the OpenConfig or IETF models and device profiles. data Dictionary structured with respect to the models referenced. models A list of models to be used when generating the config. profiles: ``None`` Use certain profiles to generate the config. If not specified, will use the platform default profile(s). test: ``False`` Dry run? If set as ``True``, will apply the config, discard and return the changes. Default: ``False`` and will commit the changes on the device. commit: ``True`` Commit? Default: ``True``. debug: ``False`` Debug mode. Will insert a new key under the output dictionary, as ``loaded_config`` containing the raw configuration loaded on the device. replace: ``False`` Should replace the config with the new generate one? CLI Example: .. code-block:: bash salt '*' napalm_yang.load_config {} models.openconfig_interfaces test=True debug=True Output Example: .. code-block:: jinja device1: ---------- already_configured: False comment: diff: [edit interfaces ge-0/0/0] - mtu 1400; [edit interfaces ge-0/0/0 unit 0 family inet] - dhcp; [edit interfaces lo0] - unit 0 { - description lo0.0; - } + unit 1 { + description "new loopback"; + } loaded_config: <configuration> <interfaces replace="replace"> <interface> <name>ge-0/0/0</name> <unit> <name>0</name> <family> <inet/> </family> <description>ge-0/0/0.0</description> </unit> <description>management interface</description> </interface> <interface> <name>ge-0/0/1</name> <disable/> <description>ge-0/0/1</description> </interface> <interface> <name>ae0</name> <unit> <name>0</name> <vlan-id>100</vlan-id> <family> <inet> <address> <name>192.168.100.1/24</name> </address> <address> <name>172.20.100.1/24</name> </address> </inet> </family> <description>a description</description> </unit> <vlan-tagging/> <unit> <name>1</name> <vlan-id>1</vlan-id> <family> <inet> <address> <name>192.168.101.1/24</name> </address> </inet> </family> <disable/> <description>ae0.1</description> 
</unit> <vlan-tagging/> <unit> <name>2</name> <vlan-id>2</vlan-id> <family> <inet> <address> <name>192.168.102.1/24</name> </address> </inet> </family> <description>ae0.2</description> </unit> <vlan-tagging/> </interface> <interface> <name>lo0</name> <unit> <name>1</name> <description>new loopback</description> </unit> <description>lo0</description> </interface> </interfaces> </configuration> result: True
def retrieve_loadbalancer_status(self, loadbalancer, **_params):
    """Retrieves status for a certain load balancer.

    :param loadbalancer: load balancer identifier interpolated into the
        status path.
    :param _params: extra query parameters forwarded to the GET request.
    """
    return self.get(self.lbaas_loadbalancer_path_status % (loadbalancer),
                    params=_params)
Retrieves status for a certain load balancer.
def setCachedDataKey(engineVersionHash, key, value):
    """Sets the cached data value for the specified engine version hash
    and dictionary key.
    """
    cacheFile = CachedDataManager._cacheFileForHash(engineVersionHash)
    return JsonDataManager(cacheFile).setKey(key, value)
Sets the cached data value for the specified engine version hash and dictionary key
def pole_from_endpoints(coord1, coord2):
    """Compute the pole from a great circle that connects the two
    specified coordinates.

    This assumes a right-handed rule from coord1 to coord2: the pole is
    the north pole under that assumption.

    Parameters
    ----------
    coord1 : `~astropy.coordinates.SkyCoord`
        Coordinate of one point on a great circle.
    coord2 : `~astropy.coordinates.SkyCoord`
        Coordinate of the other point on a great circle.

    Returns
    -------
    pole : `~astropy.coordinates.SkyCoord`
        The coordinates of the pole.
    """
    # Unit vector of each endpoint; coord2 is first converted into
    # coord1's frame so the cross product is taken in a common frame.
    c1 = coord1.cartesian / coord1.cartesian.norm()
    coord2 = coord2.transform_to(coord1.frame)
    c2 = coord2.cartesian / coord2.cartesian.norm()
    # Right-hand-rule cross product gives the (north) pole direction.
    pole = c1.cross(c2)
    pole = pole / pole.norm()
    return coord1.frame.realize_frame(pole)
Compute the pole from a great circle that connects the two specified coordinates. This assumes a right-handed rule from coord1 to coord2: the pole is the north pole under that assumption. Parameters ---------- coord1 : `~astropy.coordinates.SkyCoord` Coordinate of one point on a great circle. coord2 : `~astropy.coordinates.SkyCoord` Coordinate of the other point on a great circle. Returns ------- pole : `~astropy.coordinates.SkyCoord` The coordinates of the pole.
def write_byte(self, address, value):
    """Writes *value* to the unaddressed register in the device at
    *address*, delegating to the underlying driver.
    """
    LOGGER.debug("Writing byte %s to device %s!", bin(value), hex(address))
    return self.driver.write_byte(address, value)
Writes the byte to unaddressed register in a device.
def getControllerStateWithPose(self, eOrigin, unControllerDeviceIndex, unControllerStateSize=sizeof(VRControllerState_t)):
    """Fills the supplied struct with the current state of the controller
    and the provided pose with the pose of the controller when the
    controller state was updated most recently.

    Use this form if you need a precise controller pose as input when
    the user presses or releases a button.  This function is deprecated
    in favor of the new IVRInput system.
    """
    fn = self.function_table.getControllerStateWithPose
    # Output structs are allocated here and filled by the C call.
    pControllerState = VRControllerState_t()
    pTrackedDevicePose = TrackedDevicePose_t()
    result = fn(eOrigin, unControllerDeviceIndex, byref(pControllerState), unControllerStateSize, byref(pTrackedDevicePose))
    return result, pControllerState, pTrackedDevicePose
fills the supplied struct with the current state of the controller and the provided pose with the pose of the controller when the controller state was updated most recently. Use this form if you need a precise controller pose as input to your application when the user presses or releases a button. This function is deprecated in favor of the new IVRInput system.
def reinit_index(index=INDEX_NAME):
    """Delete and then re-create the given index name.

    Uses settings from the mappings module when they exist for *index*.

    :param index: name of the Elasticsearch index to reset.
    :raises Exception: when index creation fails.
    """
    # ignore=404: deleting a non-existent index is fine.
    es_conn.indices.delete(index, ignore=404)
    try:
        es_conn.indices.create(index, INDEX_SETTINGS.get(index, None))
    except TransportError as e:
        # Message typo fixed: "Failed to created" -> "Failed to create".
        raise Exception('Failed to create index, got: {}'.format(e.error))
Delete and then initialise the given index name Gets settings if they exist in the mappings module. https://elasticsearch-py.readthedocs.io/en/master/api.html#elasticsearch.client.IndicesClient.create https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html https://elasticsearch-py.readthedocs.io/en/master/api.html#elasticsearch.client.IndicesClient.delete https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-delete-index.html
def from_dict(cls, d):
    """Restores its state from a dictionary, used in de-JSONification.

    :param d: the object dictionary
    :type d: dict
    """
    conf = {}
    for k in d["config"]:
        v = d["config"][k]
        if isinstance(v, dict):
            # NOTE(review): the handler is chosen from d["config"]["type"],
            # not from the nested value v itself — confirm this is the
            # intended lookup and not a bug.
            conf[str(k)] = get_dict_handler(d["config"]["type"])(v)
        else:
            conf[str(k)] = v
    return get_class(str(d["class"]))(config=conf)
Restores its state from a dictionary, used in de-JSONification. :param d: the object dictionary :type d: dict
def conforms(self, json: str, name: str = "", verbose: bool=False) -> ValidationResult:
    """Determine whether json conforms with the JSG specification.

    :param json: JSON string, URI to JSON or file name with JSON
    :param name: Test name for ValidationResult -- printed in dx if present
    :param verbose: True means print the response
    :return: pass/fail + fail reason
    """
    # Resolve URIs / file names into the raw JSON text first.
    json = self._to_string(json) if not self.is_json(json) else json
    try:
        self.json_obj = loads(json, self.module)
    except ValueError as v:
        # Parse failure: report the parser's message verbatim.
        return ValidationResult(False, str(v), name, None)
    logfile = StringIO()
    logger = Logger(cast(TextIO, logfile))
    if not is_valid(self.json_obj, logger):
        # Validation failure: the collected log is the failure reason.
        return ValidationResult(False, logfile.getvalue().strip('\n'), name, None)
    return ValidationResult(True, "", name, type(self.json_obj).__name__)
Determine whether json conforms with the JSG specification :param json: JSON string, URI to JSON or file name with JSON :param name: Test name for ValidationResult -- printed in dx if present :param verbose: True means print the response :return: pass/fail + fail reason
def get_postalcodes_around_radius(self, pc, radius):
    """Return postal codes within *radius* km of postal code *pc* using a
    latitude/longitude bounding box (calculations adapted from pyzipcode).

    :param pc: the postal code at the center of the search.
    :param radius: search radius in kilometers.
    :raises PostalCodeNotFoundException: when *pc* is unknown.
    """
    postalcodes = self.get(pc)
    if postalcodes is None:
        raise PostalCodeNotFoundException("Could not find postal code you're searching for.")
    else:
        pc = postalcodes[0]
    radius = float(radius)
    earth_radius = 6371
    # Angular deltas (radians) covering the radius at this latitude.
    dlat = radius / earth_radius
    dlon = asin(sin(dlat) / cos(radians(pc.latitude)))
    lat_delta = degrees(dlat)
    lon_delta = degrees(dlon)
    if lat_delta < 0:
        lat_range = (pc.latitude + lat_delta, pc.latitude - lat_delta)
    else:
        lat_range = (pc.latitude - lat_delta, pc.latitude + lat_delta)
    # BUG FIX: the lower longitude bound previously subtracted lat_delta
    # instead of lon_delta, producing a skewed bounding box.
    long_range = (pc.longitude - lon_delta, pc.longitude + lon_delta)
    return format_result(self.conn_manager.query(PC_RANGE_QUERY % (
        long_range[0], long_range[1], lat_range[0], lat_range[1]
    )))
Bounding box calculations updated from pyzipcode
def find_faces(self, image, draw_box=False):
    """Uses a haarcascade to detect faces inside an image.

    Args:
        image: The image.
        draw_box: If True, the image will be marked with a rectangle.

    Return:
        The faces as returned by OpenCV's detectMultiScale method for
        cascades.
    """
    # The cascade classifier operates on grayscale input.
    frame_gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
    faces = self.cascade.detectMultiScale(
        frame_gray,
        scaleFactor=1.3,
        minNeighbors=5,
        minSize=(50, 50),
        flags=0)
    if draw_box:
        # Draw directly on the caller's image (in place).
        for x, y, w, h in faces:
            cv2.rectangle(image, (x, y), (x + w, y + h), (0, 255, 0), 2)
    return faces
Uses a haarcascade to detect faces inside an image. Args: image: The image. draw_box: If True, the image will be marked with a rectangle. Return: The faces as returned by OpenCV's detectMultiScale method for cascades.
def on_apply(self, event):
    """Called on apply: push every changed setting to the parent process."""
    for label in self.setting_map.keys():
        setting = self.setting_map[label]
        ctrl = self.controls[label]
        value = ctrl.GetValue()
        # Compare as strings since controls may hold non-string types.
        if str(value) != str(setting.value):
            oldvalue = setting.value
            if not setting.set(value):
                print("Invalid value %s for %s" % (value, setting.name))
                continue
            # Only notify when the effective value actually changed
            # (setting.set may normalize back to the old value).
            if str(oldvalue) != str(setting.value):
                self.parent_pipe.send(setting)
called on apply
def write(editor, location, force=False):
    """Write the active buffer to *location*.

    Refuses to overwrite an existing file unless *force* is set; when no
    location is given and the buffer has none either, shows an error.

    :param editor: the editor whose active buffer is written.
    :param location: target path, or None to use the buffer's location.
    :param force: overwrite an existing file (the ``!`` variant).
    """
    if location and not force and os.path.exists(location):
        # Typo fixed in the user-facing message: 'overriwe' -> 'overwrite'.
        editor.show_message('File exists (add ! to overwrite)')
    else:
        eb = editor.window_arrangement.active_editor_buffer
        if location is None and eb.location is None:
            editor.show_message(_NO_FILE_NAME)
        else:
            eb.write(location)
Write file.
def get_safe_type(self):
    """Determines the type of ESA product.

    In 2016 ESA changed structure and naming of data.  Therefore the
    class must distinguish between old product type and compact (new)
    product type.

    :return: type of ESA product
    :rtype: constants.EsaSafeType
    :raises: ValueError
    """
    # The second '_'-separated token of the product id encodes the type.
    product_type = self.product_id.split('_')[1]
    if product_type.startswith('MSI'):
        return EsaSafeType.COMPACT_TYPE
    if product_type in ['OPER', 'USER']:
        return EsaSafeType.OLD_TYPE
    raise ValueError('Unrecognized product type of product id {}'.format(self.product_id))
Determines the type of ESA product. In 2016 ESA changed structure and naming of data. Therefore the class must distinguish between old product type and compact (new) product type. :return: type of ESA product :rtype: constants.EsaSafeType :raises: ValueError
def check_declared(self, node):
    """Update the state of this Identifiers with the undeclared and
    declared identifiers of the given node.

    'context' is always considered declared and is never recorded as
    undeclared.
    """
    # locally_declared does not change while scanning undeclared names,
    # so the union can be computed once up front.
    known = self.declared.union(self.locally_declared)
    for ident in node.undeclared_identifiers():
        if ident != 'context' and ident not in known:
            self.undeclared.add(ident)
    self.locally_declared.update(node.declared_identifiers())
update the state of this Identifiers with the undeclared and declared identifiers of the given node.
def press_keycode(self, keycode, metastate=None):
    """Sends a press of keycode to the device. Android only.

    Possible keycodes & meta states can be found in
    http://developer.android.com/reference/android/view/KeyEvent.html

    :param keycode: the keycode to be sent to the device.
    :param metastate: bitmask of pressed meta keys (e.g. 1=Shift, 2=Alt,
        3=Shift+Alt).
    """
    driver = self._current_application()
    driver.press_keycode(keycode, metastate)
Sends a press of keycode to the device. Android only. Possible keycodes & meta states can be found in http://developer.android.com/reference/android/view/KeyEvent.html Meta state describe the pressed state of key modifiers such as Shift, Ctrl & Alt keys. The Meta State is an integer in which each bit set to 1 represents a pressed meta key. For example - META_SHIFT_ON = 1 - META_ALT_ON = 2 | metastate=1 --> Shift is pressed | metastate=2 --> Alt is pressed | metastate=3 --> Shift+Alt is pressed - _keycode- - the keycode to be sent to the device - _metastate- - status of the meta keys
def close(self):
    """Ensure that all spans from the queue are submitted.

    Marks the reporter as stopped (under ``stop_lock`` so concurrent
    writers observe the flag consistently) and schedules a final flush.

    :return: Future that will be completed once the queue is empty.
    """
    with self.stop_lock:
        self.stopped = True
    # _flush must run on the IO loop; submit returns a Future the caller
    # can wait on.
    return ioloop_util.submit(self._flush, io_loop=self.io_loop)
Ensure that all spans from the queue are submitted. Returns Future that will be completed once the queue is empty.
def in_navitem(self, resources, nav_href):
    """Given href of nav item, determine if this resource is in it."""
    # "/index" pages represent their containing section, so strip the
    # suffix before comparing paths.
    href = nav_href[:-6] if nav_href.endswith('/index') else nav_href
    return self.docname.startswith(href)
Given href of nav item, determine if resource is in it
def disallow(self):
    """Description of disallowed objects.

    Disallow must be a type name, a nested schema or a list of those.
    Type name must be one of ``string``, ``number``, ``integer``,
    ``boolean``, ``object``, ``array``, ``null`` or ``any``.

    :return: the disallow specification normalized to a list, or None
        when the schema has no "disallow" entry.
    :raises SchemaError: if the value is malformed or contains duplicates.
    """
    value = self._schema.get("disallow", None)
    if value is None:
        return
    # NOTE(review): ``basestring`` exists only on Python 2 -- this module
    # appears to target Python 2; confirm before running under Python 3.
    if not isinstance(value, (basestring, dict, list)):
        raise SchemaError(
            "disallow value {0!r} is not a simple type name, nested "
            "schema nor a list of those".format(value))
    # Normalize a single entry into a one-element list.
    if isinstance(value, list):
        disallow_list = value
    else:
        disallow_list = [value]
    seen = set()
    for js_disallow in disallow_list:
        if isinstance(js_disallow, dict):
            # Nested schemas are accepted as-is (not validated here).
            pass
        else:
            # Simple type names must be unique...
            if js_disallow in seen:
                raise SchemaError(
                    "disallow value {0!r} contains duplicate element"
                    " {1!r}".format(value, js_disallow))
            else:
                seen.add(js_disallow)
            # ...and drawn from the fixed set of JSON-schema type names.
            if js_disallow not in (
                    "string", "number", "integer", "boolean", "object",
                    "array", "null", "any"):
                raise SchemaError(
                    "disallow value {0!r} is not a simple type"
                    " name".format(js_disallow))
    return disallow_list
Description of disallowed objects. Disallow must be a type name, a nested schema or a list of those. Type name must be one of ``string``, ``number``, ``integer``, ``boolean``, ``object``, ``array``, ``null`` or ``any``.
def merged(cls, *flatterms: 'FlatTerm') -> 'FlatTerm':
    """Concatenate the given flatterms into a single flatterm.

    Args:
        *flatterms: The flatterms which are concatenated.

    Returns:
        The concatenated flatterms.
    """
    combined = sum(flatterms, cls.empty())
    return cls(cls._combined_wildcards_iter(combined))
Concatenate the given flatterms to a single flatterm. Args: *flatterms: The flatterms which are concatenated. Returns: The concatenated flatterms.
def to_ufo_background_image(self, ufo_glyph, layer):
    """Copy the background image from the GSLayer to the UFO Glyph."""
    background = layer.backgroundImage
    if background is None:
        return
    target = ufo_glyph.image
    target.fileName = background.path
    target.transformation = background.transform
    # Non-standard image attributes are stored in the glyph lib.
    ufo_glyph.lib[CROP_KEY] = list(background.crop)
    ufo_glyph.lib[LOCKED_KEY] = background.locked
    ufo_glyph.lib[ALPHA_KEY] = background.alpha
Copy the backgound image from the GSLayer to the UFO Glyph.
def unicode_wrapper(self, property, default=ugettext('Untitled')):
    """Return a unicode representation of ``property`` on this object.

    If the property cannot be resolved, or resolves to a falsy value,
    ``default`` is returned instead.

    Example::

        def __unicode__(self):
            return self.unicode_wrapper('name', default='Unnamed')
    """
    try:
        value = getattr(self, property)
    except ValueError:
        # logger.warn() is deprecated in the stdlib logging module;
        # use the documented warning() instead.
        logger.warning(
            u'ValueError rendering unicode for %s object.',
            self._meta.object_name
        )
        value = None
    if not value:
        value = default
    return value
Wrapper to allow for easy unicode representation of an object by the specified property. If this wrapper is not able to find the right translation of the specified property, it will return the default value instead. Example:: def __unicode__(self): return self.unicode_wrapper('name', default='Unnamed')
def from_class_name(name, theme_element):
    """Create a themeable by name.

    Parameters
    ----------
    name : str
        Class name
    theme_element : element object
        One of :class:`element_line`, :class:`element_rect`,
        :class:`element_text` or :class:`element_blank`

    Returns
    -------
    out : Themeable
    """
    try:
        klass = themeable._registry[name]
    except KeyError:
        raise PlotnineError("No such themeable element {}".format(name))
    # Guard against unrelated registry entries shadowing the name.
    if not issubclass(klass, themeable):
        raise PlotnineError("No such themeable element {}".format(name))
    return klass(theme_element)
Create an themeable by name Parameters ---------- name : str Class name theme_element : element object One of :class:`element_line`, :class:`element_rect`, :class:`element_text` or :class:`element_blank` Returns ------- out : Themeable
def min_heapify(arr, start, simulation, iteration):
    """Min heapify helper for min_heap_sort.

    Sifts every parent of the subarray ``arr[start:]`` down so that the
    region satisfies the heap property, optionally printing the array
    after each swap.

    :param arr: list being sorted in place
    :param start: index of the first element of the heap region
    :param simulation: when True, print the array after every swap
    :param iteration: running iteration counter (for simulation output)
    :return: the updated iteration counter
    """
    end = len(arr) - 1
    last_parent = (end - start - 1) // 2
    # Sift each parent down, from the last parent up to the root.
    for parent in range(last_parent, -1, -1):
        current_parent = parent
        while current_parent <= last_parent:
            child = 2 * current_parent + 1
            # Pick the smaller of the two children (when a right child exists).
            if child + 1 <= end - start and arr[child + start] > arr[
                    child + 1 + start]:
                child = child + 1
            if arr[child + start] < arr[current_parent + start]:
                # Child is smaller than parent: swap and continue sifting down.
                arr[current_parent + start], arr[child + start] = \
                    arr[child + start], arr[current_parent + start]
                current_parent = child
                if simulation:
                    iteration = iteration + 1
                    print("iteration", iteration, ":", *arr)
            else:
                # Heap property holds below this node; stop sifting.
                break
    return iteration
Min heapify helper for min_heap_sort
def as_plural(result_key):
    """Given a result key, return it in the plural form."""
    if result_key.endswith('y'):
        return result_key[:-1] + 'ies'
    if result_key.endswith('address'):
        return result_key + 'es'
    if result_key.endswith('us'):
        return result_key[:-2] + 'uses'
    if result_key.endswith('s'):
        # Already plural.
        return result_key
    return result_key + 's'
Given a result key, return in the plural form.
def average_gradients(tower_gradients):
    r'''
    A routine for computing each variable's average of the gradients
    obtained from the GPUs. Note also that this code acts as a
    synchronization point as it requires all GPUs to be finished with
    their mini-batch before it can run to completion.
    '''
    # List of averaged (gradient, variable) pairs over all towers.
    average_grads = []
    # Averaging runs on the CPU device.
    with tf.device(Config.cpu_device):
        # grad_and_vars: one (gradient, variable) pair per tower, all for
        # the same variable.
        for grad_and_vars in zip(*tower_gradients):
            grads = []
            for g, _ in grad_and_vars:
                # Add a leading "tower" dimension so the gradients can be
                # concatenated and averaged along it.
                expanded_g = tf.expand_dims(g, 0)
                grads.append(expanded_g)
            grad = tf.concat(grads, 0)
            grad = tf.reduce_mean(grad, 0)
            # The variable is shared across towers; take it from the first.
            grad_and_var = (grad, grad_and_vars[0][1])
            average_grads.append(grad_and_var)
    return average_grads
r''' A routine for computing each variable's average of the gradients obtained from the GPUs. Note also that this code acts as a synchronization point as it requires all GPUs to be finished with their mini-batch before it can run to completion.
def get_historical_data(self, uuid, start, end, average_by=0):
    """Get the data from one device for a specified time range.

    .. note:: Can fetch a maximum of 42 days of data. To speed up query
        processing, use an average factor that is a multiple of 3600
        seconds together with o'clock start and end times.

    :param uuid: Id of the device
    :type uuid: str
    :param start: start of the range
    :type start: datetime
    :param end: end of the range
    :type end: datetime
    :param average_by: amount of seconds to average data over. 0 or 300
        for no average.
    :type average_by: int
    :returns: list of datapoints
    :raises: ClientError, AuthFailure, BadFormat, ForbiddenAccess,
        TooManyRequests, InternalError

    .. seealso:: :func:`parse_data` for return data syntax
    """
    # Timestamps are truncated to whole seconds. NOTE(review): naive
    # datetimes are relabelled as UTC via replace(tzinfo=...) -- this
    # assumes callers pass UTC wall-clock times; confirm with callers.
    return self.parse_data((yield from self._get(
        HISTORICAL_DATA_URL.format(
            uuid=uuid,
            start=trunc(start.replace(tzinfo=timezone.utc).timestamp()),
            end=trunc(end.replace(tzinfo=timezone.utc).timestamp()),
            average_by=trunc(average_by)))))
Get the data from one device for a specified time range. .. note:: Can fetch a maximum of 42 days of data. To speed up query processing, you can use a combination of average factor multiple of 1H in seconds (e.g. 3600), and o'clock start and end times :param uuid: Id of the device :type uuid: str :param start: start of the range :type start: datetime :param end: end of the range :type end: datetime :param average_by: amount of seconds to average data over. 0 or 300 for no average. Use 3600 (average hourly) or a multiple for long range requests (e.g. more than 1 day) :type average_by: integer :returns: list of datapoints :raises: ClientError, AuthFailure, BadFormat, ForbiddenAccess, TooManyRequests, InternalError .. seealso:: :func:`parse_data` for return data syntax
def get_documents_count(self):
    """Counts documents in the database.

    :return: total number of documents across all collections
    """
    return sum(
        self.database[name].count()
        for name in self.get_collection_names()
    )
Counts documents in database :return: Number of documents in db
def load_image(name, n, m=None, gpu=None, square=None):
    """Load an image with a given size via the Julia Shearlab bindings.

    :param name: path of the image file
    :param n: number of rows
    :param m: number of columns (defaults to ``n``)
    :param gpu: GPU flag passed to Julia (defaults to 0)
    :param square: square flag passed to Julia (defaults to 0)
    """
    m = n if m is None else m
    gpu = 0 if gpu is None else gpu
    square = 0 if square is None else square
    command = 'Shearlab.load_image("{}", {}, {}, {}, {})'.format(
        name, n, m, gpu, square)
    return j.eval(command)
Function to load images with certain size.
def update(self, networkipv4s):
    """Update network-ipv4's.

    :param networkipv4s: list of dicts describing the network-ipv4's to
        update; each entry must carry an ``id``.
    :return: response of the PUT request
    """
    ids = ';'.join(str(net.get('id')) for net in networkipv4s)
    payload = {'networks': networkipv4s}
    return super(ApiNetworkIPv4, self).put(
        'api/v3/networkv4/%s/' % ids, payload)
Method to update network-ipv4's :param networkipv4s: List containing network-ipv4's desired to updated :return: None
def get_instance(self, payload):
    """Build an instance of SyncListItemInstance

    :param dict payload: Payload response from the API

    :returns: twilio.rest.preview.sync.service.sync_list.sync_list_item.SyncListItemInstance
    :rtype: twilio.rest.preview.sync.service.sync_list.sync_list_item.SyncListItemInstance
    """
    solution = self._solution
    return SyncListItemInstance(
        self._version,
        payload,
        service_sid=solution['service_sid'],
        list_sid=solution['list_sid'],
    )
Build an instance of SyncListItemInstance :param dict payload: Payload response from the API :returns: twilio.rest.preview.sync.service.sync_list.sync_list_item.SyncListItemInstance :rtype: twilio.rest.preview.sync.service.sync_list.sync_list_item.SyncListItemInstance
def isdicom(fn):
    """True if ``fn`` points to a DICOM image.

    Files ending in ``.dcm`` are trusted by extension; anything else is
    sniffed for the "DICM" magic bytes at offset 0x80.
    """
    fn = str(fn)
    if fn.endswith('.dcm'):
        return True
    with open(fn, 'rb') as fh:
        fh.seek(0x80)
        magic = fh.read(4)
    return magic == b'DICM'
True if the fn points to a DICOM image
def hypot(x, y, context=None):
    """Return the Euclidean norm of x and y, i.e., the square root of
    the sum of the squares of x and y."""
    operands = (
        BigFloat._implicit_convert(x),
        BigFloat._implicit_convert(y),
    )
    return _apply_function_in_current_context(
        BigFloat, mpfr.mpfr_hypot, operands, context)
Return the Euclidean norm of x and y, i.e., the square root of the sum of the squares of x and y.
def _consumers(self): app_config = self.lti_kwargs['app'].config config = app_config.get('PYLTI_CONFIG', dict()) consumers = config.get('consumers', dict()) return consumers
Gets consumer's map from app config :return: consumers map
def clean(file_, imports):
    """Remove modules that aren't imported in project from ``file_``.

    :param file_: path of the requirements file to clean up in place
    :param imports: modules actually imported by the project
    """
    modules_not_imported = compare_modules(file_, imports)
    # BUG FIX: with nothing to remove, the old code compiled an empty
    # pattern ``re.compile("")`` which matches every line and wiped the
    # whole file. Bail out early instead.
    if not modules_not_imported:
        logging.info("Nothing to clean in " + file_)
        return
    re_remove = re.compile("|".join(modules_not_imported))
    try:
        f = open_func(file_, "r+")
    except OSError:
        logging.error("Failed on file: {}".format(file_))
        raise
    # BUG FIX: the old ``finally: f.close()`` raised NameError when the
    # open itself failed; only guard the file once it is actually open.
    try:
        to_write = [line for line in f.readlines()
                    if re_remove.match(line) is None]
        f.seek(0)
        f.truncate()
        for line in to_write:
            f.write(line)
    finally:
        f.close()
    logging.info("Successfully cleaned up requirements in " + file_)
Remove modules that aren't imported in project from file.
def random_expr(depth, vlist, ops):
    """Generate a random expression tree.

    Args:
        depth: At least one leaf will be this many levels down from the top.
        vlist: A list of chars. These chars are randomly selected as leaf values.
        ops: A list of ExprOp instances.

    Returns:
        An ExprNode instance which is the root of the generated expression tree.
    """
    if not depth:
        # Leaf: a random value from vlist.
        return str(vlist[random.randrange(len(vlist))])
    # Exactly one side carries the full remaining depth; the other side
    # gets a random (smaller) depth.
    deep_side_is_left = random.randrange(2)
    shallow_depth = random.randrange(depth)
    left = random_expr(depth - 1 if deep_side_is_left else shallow_depth,
                       vlist, ops)
    right = random_expr(depth - 1 if not deep_side_is_left else shallow_depth,
                        vlist, ops)
    op = ops[random.randrange(len(ops))]
    return ExprNode(left, right, op)
Generate a random expression tree. Args: depth: At least one leaf will be this many levels down from the top. vlist: A list of chars. These chars are randomly selected as leaf values. ops: A list of ExprOp instances. Returns: An ExprNode instance which is the root of the generated expression tree.
def getItalianAccentedVocal(vocal, acc_type="g"):
    """Return the given vowel with a grave ("g") or acute ("a") accent."""
    accents = {
        'a': {'g': u'\xe0', 'a': u'\xe1'},
        'e': {'g': u'\xe8', 'a': u'\xe9'},
        'i': {'g': u'\xec', 'a': u'\xed'},
        'o': {'g': u'\xf2', 'a': u'\xf3'},
        'u': {'g': u'\xf9', 'a': u'\xfa'},
    }
    return accents[vocal][acc_type]
It returns given vocal with grave or acute accent
def use_sequestered_composition_view(self):
    """Pass through to provider CompositionLookupSession.use_sequestered_composition_view"""
    self._containable_views['composition'] = SEQUESTERED
    # Forward to every provider session that supports the call; sessions
    # lacking the method are skipped.
    for provider_session in self._get_provider_sessions():
        try:
            provider_session.use_sequestered_composition_view()
        except AttributeError:
            pass
Pass through to provider CompositionLookupSession.use_sequestered_composition_view
def eventFilter(self, object, event):
    """Resizes this overlay as the watched widget resizes.

    :param object: <QtCore.QObject>
    :param event: <QtCore.QEvent>

    :return <bool> -- always False, so the event continues to propagate
        to the watched object.
    """
    # Keep the overlay the same size as its parent widget.
    if object == self.parent() and event.type() == QtCore.QEvent.Resize:
        self.resize(event.size())
    # A close of a watched object marks this overlay's result as 0.
    elif event.type() == QtCore.QEvent.Close:
        self.setResult(0)
    return False
Resizes this overlay as the widget resizes. :param object | <QtCore.QObject> event | <QtCore.QEvent> :return <bool>
def version(self, api_version=True):
    """Returns version information from the server.

    Similar to the ``docker version`` command.

    Returns:
        (dict): The server version information

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    """
    endpoint = self._url("/version", versioned_api=api_version)
    response = self._get(endpoint)
    return self._result(response, json=True)
Returns version information from the server. Similar to the ``docker version`` command. Returns: (dict): The server version information Raises: :py:class:`docker.errors.APIError` If the server returns an error.
def export_draco(mesh):
    """Export a mesh using Google's Draco compressed format.

    Only works if draco_encoder is in your PATH:
    https://github.com/google/draco

    Parameters
    ----------
    mesh : Trimesh object

    Returns
    ----------
    data : str or bytes
        DRC file bytes
    """
    # Write the mesh to a temporary PLY file that draco_encoder can read.
    with tempfile.NamedTemporaryFile(suffix='.ply') as temp_ply:
        temp_ply.write(export_ply(mesh))
        temp_ply.flush()
        # Encode into a second temporary file, then read the result back.
        with tempfile.NamedTemporaryFile(suffix='.drc') as encoded:
            # '-qp 28': quantization bits for positions.
            subprocess.check_output([draco_encoder, '-qp', '28', '-i',
                                     temp_ply.name, '-o', encoded.name])
            encoded.seek(0)
            data = encoded.read()
    return data
Export a mesh using Google's Draco compressed format. Only works if draco_encoder is in your PATH: https://github.com/google/draco Parameters ---------- mesh : Trimesh object Returns ---------- data : str or bytes DRC file bytes
def _format_credentials(self): tenant_name = self.tenant_name or self.username tenant_id = self.tenant_id or self.username return {"auth": {"passwordCredentials": {"username": tenant_name, "password": self.password, }, "tenantId": tenant_id}}
Returns the current credentials in the format expected by the authentication service.
def describe_api_models(restApiId, region=None, key=None, keyid=None, profile=None):
    """Get all models for a given API

    CLI Example:

    .. code-block:: bash

        salt myminion boto_apigateway.describe_api_models restApiId

    :return: dict with a ``models`` list on success, or an ``error``
        entry when the AWS call fails.
    """
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        # get_models is paginated; _multi_call gathers every 'items' page.
        models = _multi_call(conn.get_models, 'items', restApiId=restApiId)
        # datetime values are converted to strings for serializable output.
        return {'models': [_convert_datetime_str(model) for model in models]}
    except ClientError as e:
        return {'error': __utils__['boto3.get_error'](e)}
Get all models for a given API CLI Example: .. code-block:: bash salt myminion boto_apigateway.describe_api_models restApiId
def remove_handler(self, name):
    """Remove a handler given a name

    Note, if multiple handlers have the same name the last matching
    instance in the handler list will be removed.

    Args:
        name: The name of the handler to remove
    """
    # Scan backwards so the *last* matching handler is the one removed.
    for index in range(len(self.capture_handlers) - 1, -1, -1):
        handler = self.capture_handlers[index]
        if handler['name'] == name:
            handler['logger'].close()
            del self.capture_handlers[index]
            return
Remove a handler given a name Note, if multiple handlers have the same name the last matching instance in the handler list will be removed. Args: name: The name of the handler to remove
def inflate_context_tuple(ast_rootpath, root_env):
    """Instantiate a Tuple from a TupleNode.

    Walking the AST tree upwards, evaluate from the root down again.

    :param ast_rootpath: path of AST nodes from the root to the node of
        interest.
    :param root_env: environment in which the root node is evaluated.
    :return: the innermost tuple successfully inflated along the path.
    """
    with util.LogTime('inflate_context_tuple'):
        # Evaluate the root node; this is the outermost tuple.
        inflated = ast_rootpath[0].eval(root_env)
        current = inflated
        env = root_env
        try:
            for node in ast_rootpath[1:]:
                if is_tuple_member_node(node):
                    assert framework.is_tuple(current)
                    with util.LogTime('into tuple'):
                        # Descend into the named member of the current tuple.
                        thunk, env = inflated.get_thunk_env(node.name)
                        current = framework.eval(thunk, env)
                elif framework.is_list(current):
                    with util.LogTime('eval thing'):
                        current = framework.eval(node, env)
                # Only tuples become the new "inflated" context.
                if framework.is_tuple(current):
                    inflated = current
        except (gcl.EvaluationError, ast.UnparseableAccess):
            # Evaluation may legitimately fail part-way; return the deepest
            # tuple successfully inflated so far.
            pass
        return inflated
Instantiate a Tuple from a TupleNode. Walking the AST tree upwards, evaluate from the root down again.
def url_unescape(
    value: Union[str, bytes], encoding: Optional[str] = "utf-8", plus: bool = True
) -> Union[str, bytes]:
    """Decodes the given value from a URL.

    The argument may be either a byte or unicode string.

    If encoding is None, the result will be a byte string.  Otherwise,
    the result is a unicode string in the specified encoding.

    If ``plus`` is true (the default), plus signs will be interpreted as
    spaces (literal plus signs must be represented as "%2B").  This is
    appropriate for query strings and form-encoded values but not for the
    path component of a URL.  Note that this default is the reverse of
    Python's urllib module.

    .. versionadded:: 3.1
       The ``plus`` argument
    """
    if encoding is not None:
        unquote = urllib.parse.unquote_plus if plus else urllib.parse.unquote
        return unquote(to_basestring(value), encoding=encoding)
    # Bytes result requested: handle "+" manually, then unquote to bytes.
    if plus:
        value = to_basestring(value).replace("+", " ")
    return urllib.parse.unquote_to_bytes(value)
Decodes the given value from a URL. The argument may be either a byte or unicode string. If encoding is None, the result will be a byte string. Otherwise, the result is a unicode string in the specified encoding. If ``plus`` is true (the default), plus signs will be interpreted as spaces (literal plus signs must be represented as "%2B"). This is appropriate for query strings and form-encoded values but not for the path component of a URL. Note that this default is the reverse of Python's urllib module. .. versionadded:: 3.1 The ``plus`` argument
def _get_sorted(self, resources): tmp = [] for resource in resources: path = resource._path priority = path.count('/') * 10 - path.count('{') tmp.append((priority, resource)) return [resource for prio, resource in reversed(sorted(tmp))]
Order the resources by priority - the most specific paths come first. :param resources: List of :class:`wsgiservice.resource.Resource` classes to be served by this application.
def execute_template(self, template_name, variables, args=None):
    """Execute a script built from a template.

    :param template_name: script template to implement
    :param variables: mapping used to fill in the template
    :param args: dictionary representing command line args
    :return: True on success, False when the driver raises
        WebDriverException
    """
    script = self.build_js_from_template(template_name, variables)
    try:
        self.execute_script(script, args)
    except WebDriverException:
        return False
    return True
Execute script from a template @type template_name: str @value template_name: Script template to implement @type args: dict @value args: Dictionary representing command line args @rtype: bool @rtype: Success or failure
def _parallel_downloader(voxforge_url, archive_dir, total, counter):
    """Generate a function to download a file based on given parameters

    This works by currying the above given arguments into a closure in the
    form of the following function.

    :param voxforge_url: the base voxforge URL
    :param archive_dir: the location to store the downloaded file
    :param total: the total number of files to download
    :param counter: an atomic counter to keep track of # of downloaded files
    :return: a function that actually downloads a file given these params
    """
    def download(d):
        """Download a single (index, filename) pair."""
        (i, file) = d
        download_url = voxforge_url + '/' + file
        # Bump the shared counter for progress reporting.
        c = counter.increment()
        print('Downloading file {} ({}/{})...'.format(i+1, c, total))
        maybe_download(filename_of(download_url), archive_dir, download_url)
    return download
Generate a function to download a file based on given parameters This works by currying the above given arguments into a closure in the form of the following function. :param voxforge_url: the base voxforge URL :param archive_dir: the location to store the downloaded file :param total: the total number of files to download :param counter: an atomic counter to keep track of # of downloaded files :return: a function that actually downloads a file given these params
def dimensions(self, dims):
    """Sets the dataset dimensions.

    Pass a length three tuple of integers ``(nx, ny, nz)``.
    """
    self.SetDimensions(dims[0], dims[1], dims[2])
    self.Modified()
Sets the dataset dimensions. Pass a length three tuple of integers
def mkdir(dirname):
    """Make directory tree in vospace.

    Walks upward from ``dirname`` collecting missing ancestors, then
    creates them top-down (like ``mkdir -p``).

    @param dirname: name of the directory to make
    """
    dir_list = []
    # Collect every missing directory from dirname up to an existing root.
    while not client.isdir(dirname):
        dir_list.append(dirname)
        dirname = os.path.dirname(dirname)
    # Create directories shallowest-first (the list is deepest-first, so
    # pop from the end).
    while len(dir_list) > 0:
        logging.info("Creating directory: %s" % (dir_list[-1]))
        try:
            client.mkdir(dir_list.pop())
        except IOError as e:
            # A concurrent creator may have won the race; ignore EEXIST.
            if e.errno == errno.EEXIST:
                pass
            else:
                raise e
make directory tree in vospace. @param dirname: name of the directory to make
def get_outcome(self, outcome):
    """Returns the details of the outcome with the given id.

    :calls: `GET /api/v1/outcomes/:id \
    <https://canvas.instructure.com/doc/api/outcomes.html#method.outcomes_api.show>`_

    :param outcome: The outcome object or ID to return.
    :type outcome: :class:`canvasapi.outcome.Outcome` or int

    :returns: An Outcome object.
    :rtype: :class:`canvasapi.outcome.Outcome`
    """
    # Imported locally to avoid a circular import at module load time.
    from canvasapi.outcome import Outcome

    outcome_id = obj_or_id(outcome, "outcome", (Outcome,))
    path = 'outcomes/{}'.format(outcome_id)
    response = self.__requester.request('GET', path)
    return Outcome(self.__requester, response.json())
Returns the details of the outcome with the given id. :calls: `GET /api/v1/outcomes/:id \ <https://canvas.instructure.com/doc/api/outcomes.html#method.outcomes_api.show>`_ :param outcome: The outcome object or ID to return. :type outcome: :class:`canvasapi.outcome.Outcome` or int :returns: An Outcome object. :rtype: :class:`canvasapi.outcome.Outcome`
def tree_to_xml(self):
    """Traverses the treeview and generates an ElementTree object."""
    # Temporarily drop any active filter so every item is serialized.
    self.filter_remove(remember=True)
    root = ET.Element('interface')
    for item in self.treeview.get_children():
        root.append(self.tree_node_to_xml('', item))
    self.filter_restore()
    return ET.ElementTree(root)
Traverses treeview and generates a ElementTree object
def getall(self, key, default=_marker):
    """Return a list of all values matching the key."""
    identity = self._title(key)
    values = [v for i, k, v in self._impl._items if i == identity]
    if values:
        return values
    if default is not _marker:
        return default
    raise KeyError('Key not found: %r' % key)
Return a list of all values matching the key.
def get_item_ids_for_assessment_part(self, assessment_part_id):
    """Convenience method: return the item ids associated with an
    assessment_part_id."""
    part_id_str = str(assessment_part_id)
    return [Id(question_map['itemId'])
            for question_map in self._my_map['questions']
            if question_map['assessmentPartId'] == part_id_str]
convenience method returns item ids associated with an assessment_part_id
def get_edit_url_link(self, text=None, cls=None, icon_class=None, **attrs):
    """Gets the html edit link for the object."""
    return build_link(href=self.get_edit_url(),
                      text='Edit' if text is None else text,
                      cls=cls,
                      icon_class=icon_class,
                      **attrs)
Gets the html edit link for the object.
def external2internal(xe, bounds):
    """Convert a series of external variables to internal variables.

    Each external value is mapped according to its (lower, upper) bound
    pair:

    * ``(None, None)`` -- identity,
    * ``(a, None)``    -- sqrt((v - a + 1)^2 - 1),
    * ``(None, b)``    -- sqrt((b - v + 1)^2 - 1),
    * ``(a, b)``       -- arcsin(2 (v - a) / (b - a) - 1).

    :param xe: sequence of external variable values
    :param bounds: sequence of (lower, upper) pairs; either may be None
    :return: numpy array of internal variable values
    """
    # Force a float result; np.empty_like(xe) would inherit an integer
    # dtype from integer input and silently truncate the transforms.
    xi = np.empty_like(xe, dtype=float)
    for i, (v, bound) in enumerate(zip(xe, bounds)):
        a, b = bound
        # Identity comparison with None instead of the old "== None".
        if a is None and b is None:  # unconstrained
            xi[i] = v
        elif b is None:  # lower bound only
            xi[i] = np.sqrt((v - a + 1.) ** 2. - 1)
        elif a is None:  # upper bound only
            xi[i] = np.sqrt((b - v + 1.) ** 2. - 1)
        else:  # both bounds
            xi[i] = np.arcsin((2. * (v - a) / (b - a)) - 1.)
    return xi
Convert a series of external variables to internal variables
def unset(self, key):
    """Removes the rules config for a given key.

    Args:
        key (str): rules config key to remove

    See: https://auth0.com/docs/api/management/v2#!/Rules_Configs/delete_rules_configs_by_key
    """
    return self.client.delete(self._url(), params={'key': key})
Removes the rules config for a given key. Args: key (str): rules config key to remove See: https://auth0.com/docs/api/management/v2#!/Rules_Configs/delete_rules_configs_by_key