code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def drawAsInfinite(requestContext, seriesList):
    """Mark each series to be rendered as an on/off (infinite) line.

    Sets the ``drawAsInfinite`` option on every series and wraps the
    series name in ``drawAsInfinite(...)``.  Useful for displaying
    on/off metrics such as exit codes.

    Returns the same (mutated) series list.
    """
    for s in seriesList:
        s.options['drawAsInfinite'] = True
        s.name = 'drawAsInfinite(%s)' % s.name
    return seriesList
Takes one metric or a wildcard seriesList. If the value is zero, draw the line at 0. If the value is above zero, draw the line at infinity. If the value is null or less than zero, do not draw the line. Useful for displaying on/off metrics, such as exit codes. (0 = success, anything else = failure.) Example:: drawAsInfinite(Testing.script.exitCode)
def dfs_present(path):
    """Check whether a file or directory is present on the distributed FS.

    Runs ``hadoop dfs -stat <path>`` and inspects the output for the
    "No such file or directory" error marker.

    :param path: HDFS path to check.
    :return: True if the path is present, False otherwise.

    CLI Example:

    .. code-block:: bash

        salt '*' hadoop.dfs_present /some_random_file
    """
    cmd_return = _hadoop_cmd('dfs', 'stat', path)
    # Return the membership test directly instead of
    # `False if match in cmd_return else True`.
    return 'No such file or directory' not in cmd_return
Check if a file or directory is present on the distributed FS. CLI Example: .. code-block:: bash salt '*' hadoop.dfs_present /some_random_file Returns True if the file is present
def activate_program(self, program):
    """Register *program* as an active manipulator of this actuator.

    Called by a program which desires to manipulate this actuator when
    it is activated.  No-op if the program is already on the stack;
    otherwise it is appended under the program lock and the stack is
    re-applied.
    """
    self.logger.debug("activate_program %s", program)
    already_active = program in self.program_stack
    if not already_active:
        with self._program_lock:
            self.logger.debug("activate_program got through %s", program)
            self.program_stack.append(program)
            self._update_program_stack()
Called by program which desires to manipulate this actuator, when it is activated.
def can_solve(cls, resource):
    """Tell whether this solver class can resolve the given resource.

    Arguments
    ---------
    resource : subclass of ``dataql.resources.Resource``
        The resource to check against ``cls.solvable_resources``.

    Returns
    -------
    boolean
        ``True`` if the resource is an instance of a solvable type,
        ``False`` otherwise.
    """
    return any(isinstance(resource, klass) for klass in cls.solvable_resources)
Tells if the solver is able to resolve the given resource. Arguments --------- resource : subclass of ``dataql.resources.Resource`` The resource to check if it is solvable by the current solver class Returns ------- boolean ``True`` if the current solver class can solve the given resource, ``False`` otherwise. Example ------- >>> AttributeSolver.solvable_resources (<class 'dataql.resources.Field'>,) >>> AttributeSolver.can_solve(Field('foo')) True >>> AttributeSolver.can_solve(Object('bar')) False
def verify(self, keys=None):
    """Verify that the response is syntactically correct and, when a
    signature is present, that it is correct.

    :param keys: If not the default key file should be used then use one
        of these.
    :return: ``self`` on success; ``None`` when there is nothing to
        verify or the assertion could not be parsed.  Re-raises the
        ``AssertionError`` from ``_verify`` on verification failure.
    """
    try:
        res = self._verify()
    except AssertionError as err:
        # Verification failed outright: log and propagate to the caller.
        logger.error("Verification error on the response: %s", err)
        raise
    else:
        if res is None:
            return None
    # Only samlp.Response messages carry an assertion to parse.
    if not isinstance(self.response, samlp.Response):
        return self
    if self.parse_assertion(keys):
        return self
    else:
        logger.error("Could not parse the assertion")
        return None
Verify that the assertion is syntactically correct and the signature is correct if present. :param keys: If not the default key file should be used then use one of these.
def init_hierarchy(cls, model_admin):
    """Initialize the model admin with hierarchy data.

    Normalizes ``model_admin.hierarchy``: a truthy value that is not a
    ``Hierarchy`` instance becomes an ``AdjacencyList``; a falsy value
    becomes ``NoHierarchy``.
    """
    # Plain attribute access: getattr with a constant name is pointless.
    hierarchy = model_admin.hierarchy
    if hierarchy:
        if not isinstance(hierarchy, Hierarchy):
            hierarchy = AdjacencyList()
    else:
        hierarchy = NoHierarchy()
    model_admin.hierarchy = hierarchy
Initializes model admin with hierarchy data.
def polynomial(img, mask, inplace=False, replace_all=False, max_dev=1e-5,
               max_iter=20, order=2):
    """Replace masked values with a 2D polynomial fit of the image.

    Iteratively fits a polynomial of the given *order* to the unmasked
    pixels, re-masking high-gradient areas after each pass, until the
    mean change between iterations falls below *max_dev* or *max_iter*
    is reached.  When *replace_all* is True the whole image is replaced
    by the fit after a single pass.

    :param img: 2D image array (values expected in [0, 1] — the result
        is clipped to that range).
    :param mask: boolean mask of pixels to replace.
    :param inplace: modify *img* in place instead of copying.
    :return: the fitted flat-field image, clipped to [0, 1].
    """
    if inplace:
        out = img
    else:
        out = img.copy()
    lastm = 0
    for _ in range(max_iter):
        out2 = polyfit2dGrid(out, mask, order=order, copy=not inplace,
                             replace_all=replace_all)
        if replace_all:
            out = out2
            break
        res = (np.abs(out2 - out)).mean()
        print('residuum: ', res)
        # Converged: the fit no longer changes meaningfully.
        if res < max_dev:
            out = out2
            break
        out = out2
        # Re-derive the mask from high-gradient regions of the new fit.
        mask = _highGrad(out)
        m = mask.sum()
        # Stop when the mask stabilizes or covers the whole image.
        if m == lastm or m == img.size:
            break
        lastm = m
    out = np.clip(out, 0, 1, out=out)
    return out
Replace all masked values: calculate the flatField from a 2d-polynomial fit, filling all high-gradient areas within the averaged fit-image. Returns: flatField (the fitted image, clipped to [0, 1]).
def main(args=None):
    """Create a private key and a certificate and write them to a file.

    :param args: command-line arguments; defaults to ``sys.argv[1:]``.
    :raises SystemExit: with the usage message on bad options.
    """
    if args is None:
        args = sys.argv[1:]
    o = Options()
    try:
        o.parseOptions(args)
    except usage.UsageError as e:
        # Python 3 compatible exception syntax (was `except E, e`,
        # which is a SyntaxError on Python 3).
        raise SystemExit(str(e))
    else:
        return createSSLCertificate(o)
Create a private key and a certificate and write them to a file.
def get_static_lib_paths():
    """Return the linker arguments for the required static libraries.

    On Linux the raw library list is wrapped in
    ``-Wl,--start-group`` / ``-Wl,--end-group`` so the linker can
    resolve circular dependencies between the archives.
    """
    on_linux = sys.platform.startswith('linux')
    group_start = ['-Wl,--start-group'] if on_linux else []
    group_end = ['-Wl,--end-group'] if on_linux else []
    return group_start + get_raw_static_lib_path() + group_end
Return the required static libraries path
def set_deployment_run_name(self):
    """Read the deployment run name property and store it on self.

    Sets ``self.deployment_run_name`` from the
    ``cons3rt.deploymentRun.name`` deployment property.

    :return: None
    """
    log = logging.getLogger(self.cls_logger + '.set_deployment_run_name')
    run_name = self.get_value('cons3rt.deploymentRun.name')
    self.deployment_run_name = run_name
    log.info('Found deployment run name: {n}'.format(n=run_name))
Sets the deployment run name from deployment properties :return: None
def _element_find_from_root( root, element_path ): element = None element_names = element_path.split('/') if element_names[0] == root.tag: if len(element_names) > 1: element = root.find('/'.join(element_names[1:])) else: element = root return element
Find the element specified by the given path starting from the root element of the document. The first component of the element path is expected to be the name of the root element. Return None if the element is not found.
def get_trial(self, trial_id):
    """Return trial information for *trial_id*.

    Issues a GET to ``trials/<trial_id>`` under the client path and
    deserializes the response.
    """
    url = urljoin(self._path, "trials/{}".format(trial_id))
    return self._deserialize(requests.get(url))
Returns trial information by trial_id.
def check(branch: str = 'master'):
    """Detect the current CI environment, if any, and run its checks.

    :param branch: The branch that should be the current branch.
    """
    env = os.environ
    if env.get('TRAVIS') == 'true':
        travis(branch)
    elif env.get('SEMAPHORE') == 'true':
        semaphore(branch)
    elif env.get('FRIGG') == 'true':
        frigg(branch)
    elif env.get('CIRCLECI') == 'true':
        circle(branch)
    elif env.get('GITLAB_CI') == 'true':
        gitlab(branch)
    elif 'BITBUCKET_BUILD_NUMBER' in env:
        bitbucket(branch)
Detects the current CI environment, if any, and performs necessary environment checks. :param branch: The branch that should be the current branch.
def Jobs(self, crawlId=None):
    """Create a JobClient for listing and creating jobs.

    The JobClient inherits the confId from this Nutch client.

    :param crawlId: crawl id for the client; when not provided (or
        falsy) one is generated via ``defaultCrawlId()``.
    :return: a JobClient
    """
    if not crawlId:
        crawlId = defaultCrawlId()
    return JobClient(self.server, crawlId, self.confId)
Create a JobClient for listing and creating jobs. The JobClient inherits the confId from the Nutch client. :param crawlId: crawlIds to use for this client. If not provided, will be generated by nutch.defaultCrawlId() :return: a JobClient
def _new_alloc_handle(shape, ctx, delay_alloc, dtype=mx_real_t):
    """Return a new NDArray handle with the specified shape and context.

    The handle is empty and is only used to hold results.

    Parameters
    ----------
    shape : tuple of int
        Shape of the array.
    ctx : Context
        Device context; its type id and device id are passed to the C API.
    delay_alloc : bool
        Whether memory allocation should be delayed.
    dtype : numpy dtype, optional
        Element type, mapped to the MXNet type code.

    Returns
    -------
    handle
        A new empty `NDArray` handle.
    """
    hdl = NDArrayHandle()
    # Marshal everything to C types for the MXNet FFI call; check_call
    # raises on a non-zero return code.
    check_call(_LIB.MXNDArrayCreateEx(
        c_array_buf(mx_uint, native_array('I', shape)),
        mx_uint(len(shape)),
        ctypes.c_int(ctx.device_typeid),
        ctypes.c_int(ctx.device_id),
        ctypes.c_int(int(delay_alloc)),
        ctypes.c_int(int(_DTYPE_NP_TO_MX[np.dtype(dtype).type])),
        ctypes.byref(hdl)))
    return hdl
Return a new handle with specified shape and context. Empty handle is only used to hold results. Returns ------- handle A new empty `NDArray` handle.
def handle(cls, value, **kwargs):
    """Resolve *value* from the environment with a fallback default.

    *value* must be in ``<env_var>::<default value>`` format.  When the
    variable is defined in the context environment its value is
    returned, otherwise the default.

    :raises ValueError: if *value* is not in the expected format.
    """
    try:
        env_var_name, default_val = value.split("::", 1)
    except ValueError:
        raise ValueError("Invalid value for default: %s. Must be in "
                         "<env_var>::<default value> format." % value)
    environment = kwargs['context'].environment
    if env_var_name in environment:
        return environment[env_var_name]
    return default_val
Use a value from the environment or fall back to a default if the environment doesn't contain the variable. Format of value: <env_var>::<default value> For example: Groups: ${default app_security_groups::sg-12345,sg-67890} If `app_security_groups` is defined in the environment, its defined value will be returned. Otherwise, `sg-12345,sg-67890` will be the returned value. This allows defaults to be set at the config file level.
def _load_data(batch, targets, major_axis):
    """Load data into the sliced target arrays.

    A list-typed *batch* means multiple entries per target group: the
    i-th data item of each batch element feeds the i-th target group.
    """
    if not isinstance(batch, list):
        _load_general(batch.data, targets, major_axis)
        return
    new_batch = [[b.data[i] for b in batch] for i in range(len(targets))]
    new_targets = [[dst for _, dst in d_target] for d_target in targets]
    _load_general(new_batch, new_targets, major_axis)
Load data into sliced arrays.
def move_to_next_bit_address(self):
    """Move to the next available bit address position.

    Updates ``self._current_bit_address`` and marks the register part
    of the new address (the portion before the '.') as used.
    """
    next_addr = self.next_bit_address()
    self._current_bit_address = next_addr
    register_part = next_addr.split('.')[0]
    self.mark_address(register_part, self._size_of_current_register_address)
Moves to next available bit address position
def irreducible_causes(self):
    """The set of irreducible causes in this |Account|.

    :return: tuple of the links whose direction is ``Direction.CAUSE``.
    """
    cause_links = [link for link in self
                   if link.direction is Direction.CAUSE]
    return tuple(cause_links)
The set of irreducible causes in this |Account|.
def set_attr(self, **kwargs):
    """Set attributes on the Booster.

    Parameters
    ----------
    **kwargs
        The attributes to set.  A value of None deletes the attribute;
        only string values are accepted otherwise.

    Returns
    -------
    self : Booster
        Booster with the attributes applied.
    """
    for name, val in kwargs.items():
        if val is None:
            self.__attr.pop(name, None)
            continue
        if not isinstance(val, string_type):
            raise ValueError("Only string values are accepted")
        self.__attr[name] = val
    return self
Set attributes to the Booster. Parameters ---------- **kwargs The attributes to set. Setting a value to None deletes an attribute. Returns ------- self : Booster Booster with set attributes.
def qualified_class_name(o):
    """Return the full class name of *o*, including its module.

    Builtins (whose module matches ``str``'s) are returned bare.
    """
    cls = o.__class__
    module = cls.__module__
    if module is None or module == str.__class__.__module__:
        return cls.__name__
    return '.'.join((module, cls.__name__))
Full name of an object, including the module
def max(self, axis=None, skipna=True):
    """Return the maximum value of the Index.

    Parameters
    ----------
    axis : int, optional
        For compatibility with NumPy. Only 0 or None are allowed.
    skipna : bool, default True
        Exclude NA/null values when computing the result.

    Returns
    -------
    scalar
        Maximum value.

    See Also
    --------
    Index.min : Return the minimum value in an Index.
    """
    # Reject axis values other than 0/None (NumPy-compat validation).
    nv.validate_minmax_axis(axis)
    return nanops.nanmax(self._values, skipna=skipna)
Return the maximum value of the Index. Parameters ---------- axis : int, optional For compatibility with NumPy. Only 0 or None are allowed. skipna : bool, default True Returns ------- scalar Maximum value. See Also -------- Index.min : Return the minimum value in an Index. Series.max : Return the maximum value in a Series. DataFrame.max : Return the maximum values in a DataFrame. Examples -------- >>> idx = pd.Index([3, 2, 1]) >>> idx.max() 3 >>> idx = pd.Index(['c', 'b', 'a']) >>> idx.max() 'c' For a MultiIndex, the maximum is determined lexicographically. >>> idx = pd.MultiIndex.from_product([('a', 'b'), (2, 1)]) >>> idx.max() ('b', 2)
def substitute(self, values: Dict[str, Any]) -> str:
    """Generate a URL by substituting *values* into the URL template."""
    template = self.template
    return template.substitute(values)
Generate a URL by substituting the given values into the URL template.
def shell_source(script):
    """Emulate the bash ``source`` builtin: run *script* in a shell and
    import the resulting environment variables into ``os.environ``.

    :param script: path of the shell script to source.
    """
    pipe = subprocess.Popen(
        ". %s; env" % script, stdout=subprocess.PIPE, shell=True)
    output = pipe.communicate()[0].decode()
    env = {}
    for line in output.splitlines():
        # Only well-formed KEY=VALUE lines are imported.  The original
        # used a bare `except:` here, which also swallowed unrelated
        # errors (including KeyboardInterrupt).
        key, sep, value = line.partition("=")
        if sep:
            env[key] = value
    os.environ.update(env)
Sometime you want to emulate the action of "source" in bash, settings some environment variables. Here is a way to do it.
def _handle_sigusr2(self, signum: int, frame: Any) -> None:
    """SIGUSR2 handler: drop the task currently being processed.

    Raises ``Discard`` so the worker abandons the current task, when
    there is one.
    """
    logger.warning("Catched SIGUSR2")
    if not self.current_task:
        return
    logger.warning("Dropping current task...")
    raise Discard
Drop current task.
def get_printer(colors: bool = True, width_limit: bool = True,
                disabled: bool = False) -> Printer:
    """Return an already initialized instance of the printer.

    A cached module-level ``Printer`` is reused unless its settings
    differ from the requested ones, in which case a new one replaces it.

    :param colors: If False, no colors will be printed.
    :param width_limit: If True, printing width is limited by console width.
    :param disabled: If True, nothing will be printed.
    """
    global _printer
    global _colors
    # Module-level _colors acts as a global kill-switch for colors.
    colors = colors and _colors
    # NOTE(review): `disabled` only takes effect when a new Printer is
    # built (it is not part of the staleness check) — confirm intended.
    if not _printer or (colors != _printer._colors) or (width_limit != _printer._width_limit):
        _printer = Printer(DefaultWriter(disabled=disabled), colors=colors,
                           width_limit=width_limit)
    return _printer
Returns an already initialized instance of the printer. :param colors: If False, no colors will be printed. :param width_limit: If True, printing width will be limited by console width. :param disabled: If True, nothing will be printed.
def load_fixture(fixture_path: str, fixture_key: str,
                 normalize_fn: Callable[..., Any]=identity) -> Dict[str, Any]:
    """Load one fixture from a fixture file.

    The file is read with ``load_json_fixture`` and the entry at
    *fixture_key* is passed through *normalize_fn* (identity by default).
    """
    all_fixtures = load_json_fixture(fixture_path)
    return normalize_fn(all_fixtures[fixture_key])
Loads a specific fixture from a fixture file, optionally passing it through a normalization function.
def exception_set(self, exception=None):
    """Record an exception to be raised at the appropriate time.

    Falls back to ``sys.exc_info()`` when no exception is supplied, and
    switches ``exception_raise`` to the method that actually raises the
    recorded exception.
    """
    self.exception = exception if exception else sys.exc_info()
    self.exception_raise = self._exception_raise
Records an exception to be raised at the appropriate time. This also changes the "exception_raise" attribute to point to the method that will, in fact, raise the recorded exception.
def _smooth_the_residuals(self):
    """Smooth the residuals of the primary smooths with MID_SPAN.

    "For stability reasons, it turns out to be a little better to smooth
    |r_{i}(J)| against xi" - [1]
    """
    for primary in self._primary_smooths:
        result = smoother.perform_smooth(
            self.x, primary.cross_validated_residual, MID_SPAN)
        self._residual_smooths.append(result.smooth_result)
Apply the MID_SPAN to the residuals of the primary smooths. "For stability reasons, it turns out to be a little better to smooth |r_{i}(J)| against xi" - [1]
def traverse(self, traverser, **kwargs):
    """Traverse the child rule tree and convert its result.

    Calls the ``traverse`` method of the child rule tree, then applies
    this node's ``conversion`` to the returned value.  The optional
    *kwargs* are passed down to the traverser callback.

    :param traverser: Traverser object providing the appropriate interface.
    """
    child_result = self.rule.traverse(traverser, **kwargs)
    return self.conversion(child_result)
Implementation of mandatory interface for traversing the whole rule tree. This method will call the ``traverse`` method of child rule tree and then perform arbitrary conversion of the result before returning it back. The optional ``kwargs`` are passed down to traverser callback as additional arguments and can be used to provide additional data or context. :param pynspect.rules.RuleTreeTraverser traverser: Traverser object providing appropriate interface. :param dict kwargs: Additional optional keyword arguments to be passed down to traverser callback.
def _solve(self, sense=None):
    """Remove old constraints and then solve the current problem.

    Args:
        sense: Minimize or maximize the objective
            (:class:`.lp.ObjectiveSense`).

    Returns:
        The Result object for the solved LP problem.
    """
    # Delete constraints queued for removal before re-solving.
    while len(self._remove_constr) > 0:
        self._remove_constr.pop().delete()
    try:
        return self._prob.solve(sense=sense)
    except lp.SolverError as e:
        raise_from(MOMAError(text_type(e)), e)
    finally:
        # The queue is always reset, even when the solver fails.
        self._remove_constr = []
Remove old constraints and then solve the current problem. Args: sense: Minimize or maximize the objective. (:class:`.lp.ObjectiveSense) Returns: The Result object for the solved LP problem
def load_config(args, config_path=".inlineplz.yml"):
    """Load inline-plz config from a YAML config file with reasonable
    defaults.

    Reads *config_path*, merges it into *args*, fills in the default
    ``ignore_paths`` when none are configured, and — when the default
    path yielded no config — retries once from ``args.config_dir``.
    """
    config = {}
    try:
        with open(config_path) as configfile:
            config = yaml.safe_load(configfile) or {}
            if config:
                print("Loaded config from {}".format(config_path))
                pprint.pprint(config)
    except (IOError, OSError, yaml.parser.ParserError):
        # A missing or malformed config is non-fatal; report it and
        # continue with defaults.
        traceback.print_exc()
    args = update_from_config(args, config)
    args.ignore_paths = args.__dict__.get("ignore_paths") or [
        "node_modules",
        ".git",
        ".tox",
        "godeps",
        "vendor",
        "site-packages",
        "venv",
        ".env",
        "spec",
        "migrate",
        "bin",
        "fixtures",
        "cassettes",
        ".cache",
        ".idea",
        ".pytest_cache",
        "__pycache__",
        "dist",
    ]
    # A non-default path means this is already the recursive retry.
    if config_path != ".inlineplz.yml":
        return args
    if args.config_dir and not config:
        new_config_path = os.path.join(args.config_dir, config_path)
        if os.path.exists(new_config_path):
            # Retry once with the config found in the configured directory.
            return load_config(args, new_config_path)
    return args
Load inline-plz config from yaml config file with reasonable defaults.
def check_url(self, url, is_image_src=False):
    """Return :obj:`True` when *url* matches the allowed-URL pattern.

    Only URLs accepted by ``self._allowed_url_re`` are considered
    "safe".  *is_image_src* is part of the interface but unused by this
    default implementation; subclasses may honor it.
    """
    match = self._allowed_url_re.match(url)
    return match is not None
This method is used to check a URL. Returns :obj:`True` if the URL is "safe", :obj:`False` otherwise. The default implementation only allows HTTP and HTTPS links. That means no ``mailto:``, no ``xmpp:``, no ``ftp:``, etc. This method exists specifically to allow easy customization of link filtering through subclassing, so don't hesitate to write your own. If you're thinking of implementing a blacklist approach, see "`Which URL schemes are dangerous (XSS exploitable)? <http://security.stackexchange.com/q/148428/37409>`_".
def rsync(local_path, remote_path, exclude=None, extra_opts=None):
    """Mirror *local_path* to *remote_path*, skipping build artifacts.

    Ensures the remote directory exists, then rsyncs the local tree
    (with --delete), excluding VCS metadata and packaging byproducts in
    addition to any caller-supplied *exclude* patterns.
    """
    if not local_path.endswith('/'):
        local_path += '/'
    exclude = exclude or []
    exclude.extend(['*.egg-info', '*.pyc', '.git', '.gitignore',
                    '.gitmodules', '/build/', '/dist/'])
    opts = '-i --omit-dir-times -FF ' + (extra_opts if extra_opts else '')
    with hide('running'):
        run("mkdir -p '{}'".format(remote_path))
        return rsync_project(
            remote_path, local_path, delete=True,
            extra_opts=opts,
            ssh_opts='-o StrictHostKeyChecking=no',
            exclude=exclude)
Helper to rsync submodules across
def _pct_diff(self, best, other):
    """Return the percent difference between *best* and *other*,
    right-justified to 10 characters and colorized red."""
    pct = round(((best - other) / best) * 100, 2)
    text = "{}%".format(pct).rjust(10)
    return colorize(text, "red")
Calculates and colorizes the percent difference between @best and @other
def profile_path(self, path, must_exist=False):
    """Return *path* resolved against the current profile directory.

    :param must_exist: when True, raise FileNotFoundError if the
        resulting path does not exist.
    """
    resolved = self.session.profile / path
    if must_exist and not resolved.exists():
        raise FileNotFoundError(
            errno.ENOENT,
            os.strerror(errno.ENOENT),
            PurePath(resolved).name,
        )
    return resolved
Return path from current profile.
def _convert_value(self, column, value):
    """Coerce *value* to a GObject.Value of the column's expected type.

    Values that already are GObject.Value instances pass through.
    """
    if isinstance(value, GObject.Value):
        return value
    expected_type = self.get_column_type(column)
    return GObject.Value(expected_type, value)
Convert value to a GObject.Value of the expected type
def label(self):
    """Return the first child column that is marked as a label.

    Returns self when this column itself is a label, or None when no
    labelled child exists.
    """
    if self.valuetype_class.is_label():
        return self
    for child in self.table.columns:
        if child.parent == self.name and child.valuetype_class.is_label():
            return child
    return None
Return first child of the column that is marked as a label. Returns self if the column is a label
def _convert_punctuation(punctuation, conversion_table): if punctuation in conversion_table: return conversion_table[punctuation] return re.escape(punctuation)
Return a regular expression for a punctuation string.
def safe_args(args, options, max_args=None, argfile=None, delimiter='\n',
              quoter=None, delete=True):
    """Yield *args* unchanged when below the limit, otherwise write them
    to an argfile and yield a one-element argument list referencing it.

    :param args: The args to work with.
    :param options: Scoped options object providing ``max_subprocess_args``.
    :param max_args: Maximum number of args to pass through directly;
        defaults to ``options.max_subprocess_args``.
    :param argfile: The file to write args to when there are too many;
        defaults to a temporary file.
    :param delimiter: The delimiter inserted between args in the argfile.
    :param quoter: Function mapping the argfile path to a single argument
        value; defaults to prefixing the path with '@'.
    :param delete: If True, delete any created arg files on exit.
    """
    max_args = max_args or options.max_subprocess_args
    if len(args) > max_args:
        def create_argfile(f):
            # Write all args to the open file and hand back the single
            # replacement argument that points at it.
            logger.debug('Creating argfile {} with contents {}'.format(f.name, ' '.join(args)))
            f.write(delimiter.join(args))
            f.close()
            return [quoter(f.name) if quoter else '@{}'.format(f.name)]
        if argfile:
            try:
                with safe_open(argfile, 'w') as fp:
                    yield create_argfile(fp)
            finally:
                # Explicit argfiles are cleaned up here; temporary files
                # are cleaned by temporary_file's own context manager.
                if delete and os.path.exists(argfile):
                    os.unlink(argfile)
        else:
            with temporary_file(cleanup=delete, binary_mode=False) as fp:
                yield create_argfile(fp)
    else:
        yield args
Yields args if there are less than a limit otherwise writes args to an argfile and yields an argument list with one argument formed from the path of the argfile. :param args: The args to work with. :param OptionValueContainer options: scoped options object for this task :param max_args: The maximum number of args to let though without writing an argfile. If not specified then the maximum will be loaded from the --max-subprocess-args option. :param argfile: The file to write args to when there are too many; defaults to a temporary file. :param delimiter: The delimiter to insert between args written to the argfile, defaults to '\n' :param quoter: A function that can take the argfile path and return a single argument value; defaults to: <code>lambda f: '@' + f<code> :param delete: If True deletes any arg files created upon exit from this context; defaults to True.
def compute_average_oxidation_state(site): try: avg_oxi = sum([sp.oxi_state * occu for sp, occu in site.species.items() if sp is not None]) return avg_oxi except AttributeError: pass try: return site.charge except AttributeError: raise ValueError("Ewald summation can only be performed on structures " "that are either oxidation state decorated or have " "site charges.")
Calculates the average oxidation state of a site Args: site: Site to compute average oxidation state Returns: Average oxidation state of site.
def mpim_history(self, *, channel: str, **kwargs) -> SlackResponse:
    """Fetch history of messages and events from a multiparty direct
    message.

    Args:
        channel (str): Multiparty direct message to fetch history for,
            e.g. 'G1234567890'.
    """
    params = dict(kwargs, channel=channel)
    return self.api_call("mpim.history", http_verb="GET", params=params)
Fetches history of messages and events from a multiparty direct message. Args: channel (str): Multiparty direct message to fetch history for. e.g. 'G1234567890'
def get_error(self):
    """Format the current exception, or return None when there is none.

    The traceback is included only in verbose mode.
    """
    err_type, err_value, err_tb = sys.exc_info()
    if err_type is None:
        return None
    trace = err_tb if self.verbose else None
    return format_error(err_type, err_value, trace)
Properly formats the current error.
def set_trace(host='', port=5555, patch_stdstreams=False):
    """Start the debugger.

    Suspends execution of the current script and starts a PDB debugging
    session served through a web interface on the specified port.
    Subsequent :func:`set_trace` calls can be used as hardcoded
    breakpoints.

    :param host: web-UI hostname or IP-address
    :type host: str
    :param port: web-UI port; ``-1`` selects a random port.
    :type port: int
    :param patch_stdstreams: redirect standard streams to the web-UI.
    :type patch_stdstreams: bool
    """
    pdb = WebPdb.active_instance
    if pdb is None:
        # No active session: create one with the requested settings.
        pdb = WebPdb(host, port, patch_stdstreams)
    else:
        # Reuse the running session; new host/port arguments are ignored.
        pdb.remove_trace()
    # Break in the caller's frame, not inside this helper.
    pdb.set_trace(sys._getframe().f_back)
Start the debugger This method suspends execution of the current script and starts a PDB debugging session. The web-interface is opened on the specified port (default: ``5555``). Example:: import web_pdb;web_pdb.set_trace() Subsequent :func:`set_trace` calls can be used as hardcoded breakpoints. :param host: web-UI hostname or IP-address :type host: str :param port: web-UI port. If ``port=-1``, choose a random port value between 32768 and 65536. :type port: int :param patch_stdstreams: redirect all standard input and output streams to the web-UI. :type patch_stdstreams: bool
def FinalizeTransferUrl(self, url):
    """Modify *url* for a given transfer, propagating the API key if set."""
    builder = _UrlBuilder.FromUrl(url)
    key = self.global_params.key
    if key:
        builder.query_params['key'] = key
    return builder.url
Modify the url for a given transfer, based on auth and version.
def three_cornered_hat_phase(phasedata_ab, phasedata_bc, phasedata_ca,
                             rate, taus, function):
    """Three-cornered-hat estimate of clock A's stability.

    Given phase differences between clock pairs AB, BC and CA, the
    deviation attributable to clock A alone follows from
    ``var_A = (var_AB + var_CA - var_BC) / 2``; negative variance
    estimates are clamped to zero.

    :param phasedata_ab: phase measurements between clocks A and B, seconds
    :param phasedata_bc: phase measurements between clocks B and C, seconds
    :param phasedata_ca: phase measurements between clocks C and A, seconds
    :param rate: sampling rate for phase, in Hz
    :param taus: tau values for deviations, in seconds
    :param function: allantools deviation function (e.g. ``oadev``)
    :return: (tau_ab, dev_a, err_a, ns_ab)
    """
    results = [function(data, data_type='phase', rate=rate, taus=taus)
               for data in (phasedata_ab, phasedata_bc, phasedata_ca)]
    (tau_ab, dev_ab, err_ab, ns_ab) = results[0]
    dev_bc = results[1][1]
    dev_ca = results[2][1]

    var_ab = dev_ab * dev_ab
    var_bc = dev_bc * dev_bc
    var_ca = dev_ca * dev_ca
    assert len(var_ab) == len(var_bc) == len(var_ca)

    var_a = 0.5 * (var_ab + var_ca - var_bc)
    var_a[var_a < 0] = 0  # a variance estimate cannot be negative
    dev_a = np.sqrt(var_a)
    err_a = [d / np.sqrt(nn) for (d, nn) in zip(dev_a, ns_ab)]
    return tau_ab, dev_a, err_a, ns_ab
Three Cornered Hat Method Given three clocks A, B, C, we seek to find their variances :math:`\\sigma^2_A`, :math:`\\sigma^2_B`, :math:`\\sigma^2_C`. We measure three phase differences, assuming no correlation between the clocks, the measurements have variances: .. math:: \\sigma^2_{AB} = \\sigma^2_{A} + \\sigma^2_{B} \\sigma^2_{BC} = \\sigma^2_{B} + \\sigma^2_{C} \\sigma^2_{CA} = \\sigma^2_{C} + \\sigma^2_{A} Which allows solving for the variance of one clock as: .. math:: \\sigma^2_{A} = {1 \\over 2} ( \\sigma^2_{AB} + \\sigma^2_{CA} - \\sigma^2_{BC} ) and similarly cyclic permutations for :math:`\\sigma^2_B` and :math:`\\sigma^2_C` Parameters ---------- phasedata_ab: np.array phase measurements between clock A and B, in seconds phasedata_bc: np.array phase measurements between clock B and C, in seconds phasedata_ca: np.array phase measurements between clock C and A, in seconds rate: float The sampling rate for phase, in Hz taus: np.array The tau values for deviations, in seconds function: allantools deviation function The type of statistic to compute, e.g. allantools.oadev Returns ------- tau_ab: np.array Tau values corresponding to output deviations dev_a: np.array List of computed values for clock A References ---------- http://www.wriley.com/3-CornHat.htm
def html_entity_decode_codepoint(self, m, defs=htmlentities.codepoint2name):
    """Rewrite a numeric entity match as its named entity, if known.

    Falls back to the original matched text when the codepoint has no
    entry in *defs*.
    """
    try:
        return "&{char};".format(char=defs[m.group(1)])
    except (ValueError, KeyError):
        return m.group(0)
decode html entity into one of the codepoint2name
def _send(self, line):
    """Write a single line of data to the server, normalizing to CRLF.

    Args:
        line -- A single line of data to write to the socket.
    """
    if not line.endswith('\r\n'):
        if line.endswith('\n'):
            logger.debug('Fixing bare LF before sending data to socket')
            line = line[:-1] + '\r\n'
        else:
            logger.debug(
                'Fixing missing CRLF before sending data to socket')
            line += '\r\n'
    logger.debug('Client sent: ' + line.rstrip())
    self._socket.send(line)
Write a line of data to the server. Args: line -- A single line of data to write to the socket.
def request_token(self):
    """Get an OAuth request token from the server.

    Signs the request with the consumer credentials cached for this
    client's server.

    :return: dict with 'token' and 'token_secret'.
    """
    client = OAuth1(
        client_key=self._server_cache[self.client.server].key,
        client_secret=self._server_cache[self.client.server].secret,
        callback_uri=self.callback,
    )
    request = {"auth": client}
    response = self._requester(
        requests.post, "oauth/request_token", **request
    )
    # The response body is form-encoded; parse_qs yields lists of values,
    # so take the first element of each.
    data = parse.parse_qs(response.text)
    data = {
        'token': data[self.PARAM_TOKEN][0],
        'token_secret': data[self.PARAM_TOKEN_SECRET][0]
    }
    return data
Gets OAuth request token
def generate_epochs_info(epoch_list):
    """Flatten per-subject epoch masks into (cond, sid, start, end) tuples.

    Each entry of *epoch_list* is a 3D binary array shaped
    [condition, nEpochs, nTRs].  An epoch is assumed to be one
    continuous run of ones, so its extent is recovered from the first
    nonzero TR and the total count; `end` is exclusive.

    :return: list of (condition label, subject id, start TR, end TR).
    """
    time1 = time.time()
    epoch_info = []
    for sid, epoch in enumerate(epoch_list):
        n_conditions = epoch.shape[0]
        n_epochs = epoch.shape[1]
        for cond in range(n_conditions):
            cond_mask = epoch[cond, :, :]
            for eid in range(n_epochs):
                length = np.sum(cond_mask[eid, :])
                if length <= 0:
                    continue
                start = np.nonzero(cond_mask[eid, :])[0][0]
                epoch_info.append((cond, sid, start, start + length))
    time2 = time.time()
    logger.debug(
        'epoch separation done, takes %.2f s' % (time2 - time1)
    )
    return epoch_info
use epoch_list to generate epoch_info defined below Parameters ---------- epoch_list: list of 3D (binary) array in shape [condition, nEpochs, nTRs] Contains specification of epochs and conditions, assuming 1. all subjects have the same number of epochs; 2. len(epoch_list) equals the number of subjects; 3. an epoch is always a continuous time course. Returns ------- epoch_info: list of tuple (label, sid, start, end). label is the condition labels of the epochs; sid is the subject id, corresponding to the index of raw_data; start is the start TR of an epoch (inclusive); end is the end TR of an epoch(exclusive). Assuming len(labels) labels equals the number of epochs and the epochs of the same sid are adjacent in epoch_info
def cache(self, dependency: Dependency, value):
    """Store an instance of *dependency* in the appropriate cache.

    Thread-local dependencies are stored on the local object, singletons
    in the singleton map; anything else is not cached.

    :param dependency: The ``Dependency`` to cache.
    :param value: The value to cache for the dependency.
    """
    if dependency.threadlocal:
        setattr(self._local, dependency.name, value)
        return
    if dependency.singleton:
        self._singleton[dependency.name] = value
Store an instance of dependency in the cache. Does nothing if dependency is NOT a threadlocal or a singleton. :param dependency: The ``Dependency`` to cache :param value: The value to cache for dependency :type dependency: Dependency
def asserted(self):
    """True if the fact has been asserted within CLIPS."""
    if self.index == 0:
        return False
    exists = lib.EnvFactExistp(self._env, self._fact)
    return bool(exists)
True if the fact has been asserted within CLIPS.
def untrace_module(module):
    """Untrace the given module's members (functions and classes).

    :param module: Module to untrace.
    :type module: ModuleType
    :return: Definition success.
    :rtype: bool
    """
    for _, function in inspect.getmembers(module, inspect.isfunction):
        untrace_function(module, function)
    for _, klass in inspect.getmembers(module, inspect.isclass):
        untrace_class(klass)
    set_untraced(module)
    return True
Untraces given module members. :param module: Module to untrace. :type module: ModuleType :return: Definition success. :rtype: bool
def get_label(self, label_name):
    """Return the user's label that has the given name.

    :param label_name: The name to search for.
    :type label_name: str
    :return: The matching label, or ``None`` when not found.
    """
    matches = (lbl for lbl in self.get_labels() if lbl.name == label_name)
    return next(matches, None)
Return the user's label that has a given name. :param label_name: The name to search for. :type label_name: str :return: A label that has a matching name or ``None`` if not found. :rtype: :class:`pytodoist.todoist.Label` >>> from pytodoist import todoist >>> user = todoist.login('john.doe@gmail.com', 'password') >>> label = user.get_label('family')
def set_title(self, index, title):
    """Set the title of a container page.

    Parameters
    ----------
    index : int
        Index of the container page.
    title : unicode
        New title.
    """
    key = unicode_type(int(index))
    self._titles[key] = title
    self.send_state('_titles')
Sets the title of a container page. Parameters ---------- index : int Index of the container page title : unicode New title
def from_file(cls, filename):
    """Construct an APIDefinition by parsing the given *filename*.

    YAML files (``.yaml``/``.yml``) are supported when PyYAML is
    installed; JSON files are always supported.

    :param filename: The filename to read.
    :rtype: APIDefinition
    """
    is_yaml = filename.endswith('.yaml') or filename.endswith('.yml')
    with open(filename) as infp:
        if is_yaml:
            import yaml
            data = yaml.safe_load(infp)
        else:
            import json
            data = json.load(infp)
    return cls.from_data(data)
Construct an APIDefinition by parsing the given `filename`. If PyYAML is installed, YAML files are supported. JSON files are always supported. :param filename: The filename to read. :rtype: APIDefinition
def _create_tcex_dirs():
    """Create the tcex.d directory and its data/profiles subdirectories."""
    for d in ('tcex.d', 'tcex.d/data', 'tcex.d/profiles'):
        # exist_ok avoids the check-then-create race of the previous
        # `isdir` + `makedirs` pair.
        os.makedirs(d, exist_ok=True)
Create tcex.d directory and sub directories.
def _validate_complex_fault_geometry(self, node, _float_re):
    """Validate a node representation of a complex fault geometry.

    This check merely verifies that the format is correct: each edge
    must parse as a 3D LineString and "spacing" must be set.  Whether
    the geometry conforms to the Aki & Richards convention is NOT
    verified here; that is raised when the surface is created.

    :raises LogicTreeError: when the node is not a valid geometry.
    """
    valid_edges = []
    for edge_node in node.nodes:
        try:
            coords = split_coords_3d(edge_node.LineString.posList.text)
            edge = geo.Line([geo.Point(*p) for p in coords])
        except ValueError:
            # Unparseable coordinates count as an invalid (empty) edge.
            edge = []
        if len(edge):
            valid_edges.append(True)
        else:
            valid_edges.append(False)
    if node["spacing"] and all(valid_edges):
        return
    raise LogicTreeError(
        node, self.filename, "'complexFaultGeometry' node is not valid")
Validates a node representation of a complex fault geometry - this check merely verifies that the format is correct. If the geometry does not conform to the Aki & Richards convention this will not be verified here, but will raise an error when the surface is created.
def get_public_key(self, key_id, is_search_embedded=False):
    """Look up a public key by index or by id.

    *key_id* can be a string id, or an int index into the key list.

    :param is_search_embedded: also search keys embedded in
        authentication entries.
    :return: the matching key, or None.
    """
    if isinstance(key_id, int):
        return self._public_keys[key_id]
    for candidate in self._public_keys:
        if candidate.get_id() == key_id:
            return candidate
    if is_search_embedded:
        for auth in self._authentications:
            if auth.get_public_key_id() == key_id:
                return auth.get_public_key()
    return None
Key_id can be a string, or int. If int then the index in the list of keys.
def between(y, z):
    """Matcher: greater than or equal to *y* and less than *z*."""
    def predicate(x):
        return (y <= x < z) or _equal_or_float_equal(x, y)
    return _combinable(predicate)
Greater than or equal to y and less than z.
def parse_server_addr(str_addr, default_port=26000):
    """Parse "host[:port]" (IPv4, hostname, or bracketed IPv6).

    Args:
        str_addr --- string containing a server ip or hostname and
            optionally a port.

    Returns:
        (host, port) tuple; *default_port* is used when no port is given.

    Raises:
        ValueError: on a malformed address string or a zero port.
    """
    match = ADDR_STR_RE.match(str_addr)
    if match is None:
        raise ValueError('Bad address string "{0}"'.format(str_addr))
    groups = match.groupdict()
    raw_port = groups.get('port')
    port = default_port if raw_port is None else int(raw_port)
    if port == 0:
        raise ValueError("Port can't be zero")
    host = groups['host'] if groups['host'] else groups['host6']
    return host, port
Parse address and returns host and port Args: str_addr --- string that contains server ip or hostname and optionaly port Returns: tuple (host, port) Examples: >>> parse_server_addr('127.0.0.1:26006') ('127.0.0.1', 26006) >>> parse_server_addr('[2001:db8:85a3:8d3:1319:8a2e:370:7348]:26006') ('2001:db8:85a3:8d3:1319:8a2e:370:7348', 26006) >>> parse_server_addr('[2001:db8:85a3:8d3:1319:8a2e:370:7348]') ('2001:db8:85a3:8d3:1319:8a2e:370:7348', 26000) >>> parse_server_addr('localhost:123') ('localhost', 123) >>> parse_server_addr('localhost:1d23') Traceback (most recent call last): ... ValueError: Bad address string "localhost:1d23"
def get_rec_dtype(self, **keys):
    """Get the numpy record dtype for the specified columns.

    parameters
    ----------
    colnums: integer array
        The column numbers, 0 offset.  Defaults to all columns.
    vstorage: string, optional
        See docs in read_columns.

    returns
    -------
    (dtype, offsets, isvararray): the record dtype, per-field byte
    offsets, and a boolean array flagging variable-length columns.
    """
    colnums = keys.get('colnums', None)
    vstorage = keys.get('vstorage', self._vstorage)
    if colnums is None:
        colnums = self._extract_colnums()

    descr = []
    # numpy.bool (the deprecated alias) was removed in NumPy 1.24;
    # the builtin bool produces the same dtype.
    isvararray = numpy.zeros(len(colnums), dtype=bool)
    for i, colnum in enumerate(colnums):
        dt, isvar = self.get_rec_column_descr(colnum, vstorage)
        descr.append(dt)
        isvararray[i] = isvar
    dtype = numpy.dtype(descr)

    offsets = numpy.zeros(len(colnums), dtype='i8')
    for i, n in enumerate(dtype.names):
        offsets[i] = dtype.fields[n][1]
    return dtype, offsets, isvararray
Get the dtype for the specified columns parameters ---------- colnums: integer array The column numbers, 0 offset vstorage: string, optional See docs in read_columns
def get_jwt_decrypt_keys(self, jwt, **kwargs):
    """Get decryption keys from this keyjar based on a JWE's headers.

    The returned keys should be usable to decrypt an encrypted JWT.

    :param jwt: A cryptojwt.jwt.JWT instance
    :param kwargs: Other keyword arguments ('aud', 'allow_missing_kid',
        'no_kid_issuer')
    :return: list of usable keys
    """
    # Map the JWE 'alg' header to the key type it requires, if present.
    try:
        _key_type = jwe_alg2keytype(jwt.headers['alg'])
    except KeyError:
        _key_type = ''
    try:
        _kid = jwt.headers['kid']
    except KeyError:
        logger.info('Missing kid')
        _kid = ''
    # Start with this party's own encryption keys (owner='').
    keys = self.get(key_use='enc', owner='', key_type=_key_type)
    try:
        _aud = kwargs['aud']
    except KeyError:
        _aud = ''
    if _aud:
        try:
            allow_missing_kid = kwargs['allow_missing_kid']
        except KeyError:
            allow_missing_kid = False
        try:
            nki = kwargs['no_kid_issuer']
        except KeyError:
            nki = {}
        # Extend with the audience's keys, honoring the kid policy.
        keys = self._add_key(keys, _aud, 'enc', _key_type, _kid, nki,
                             allow_missing_kid)
    # Keep only keys actually capable of decryption.
    keys = [k for k in keys if k.appropriate_for('decrypt')]
    return keys
Get decryption keys from this keyjar based on information carried in a JWE. These keys should be usable to decrypt an encrypted JWT. :param jwt: A cryptojwt.jwt.JWT instance :param kwargs: Other key word arguments :return: list of usable keys
def _create(self, **kwargs):
    """Create the resource on the device (wrapped by ``create``).

    Override this in subclasses to customize.  Validates the supplied
    parameters, POSTs to the container's URI, and returns a new instance
    built from the response.

    :raises URICreationCollision: when this object already has a URI,
        i.e. was already created or loaded.
    """
    if 'uri' in self._meta_data:
        # An existing uri means this resource already exists server-side;
        # creating again would clobber it.
        error = "There was an attempt to assign a new uri to this "\
            "resource, the _meta_data['uri'] is %s and it should"\
            " not be changed." % (self._meta_data['uri'])
        raise URICreationCollision(error)
    # Parameter validation and normalization, in order.
    self._check_exclusive_parameters(**kwargs)
    requests_params = self._handle_requests_params(kwargs)
    self._minimum_one_is_missing(**kwargs)
    self._check_create_parameters(**kwargs)
    kwargs = self._check_for_python_keywords(kwargs)
    # Collapse paired boolean flags (e.g. enabled/disabled) into one.
    for key1, key2 in self._meta_data['reduction_forcing_pairs']:
        kwargs = self._reduce_boolean_pair(kwargs, key1, key2)
    _create_uri = self._meta_data['container']._meta_data['uri']
    session = self._meta_data['bigip']._meta_data['icr_session']
    kwargs = self._prepare_request_json(kwargs)
    response = session.post(_create_uri, json=kwargs, **requests_params)
    result = self._produce_instance(response)
    return result
wrapped by `create` override that in subclasses to customize
def _get_image_url(self, image_id):
    """Get the API url for the specified image.

    Unfortunately this only works for images uploaded by the user; the
    images provided by Google will not be found.

    :param str image_id: image identifier
    :return: str - api url of the image
    :raises ImageError: when no image with that id exists.
    """
    gce = self._connect()
    # Renamed from `filter`, which shadowed the builtin.
    name_filter = "name eq %s" % image_id
    request = gce.images().list(project=self._project_id,
                                filter=name_filter)
    response = self._execute_request(request)
    response = self._wait_until_done(response)
    image_url = None
    if "items" in response:
        image_url = response["items"][0]["selfLink"]
    if image_url:
        return image_url
    raise ImageError("Could not find given image id `%s`" % image_id)
Gets the url for the specified image. Unfortunatly this only works for images uploaded by the user. The images provided by google will not be found. :param str image_id: image identifier :return: str - api url of the image
def prompt_for(self, next_param, intent_name):
    """Decorates a function to prompt for an action's required parameter.

    The wrapped function is called if next_param was not received with the
    given intent's request and is required for the fulfillment of the
    intent's action.

    Arguments:
        next_param {str} -- name of the parameter required for action function
        intent_name {str} -- name of the intent the dependent action belongs to
    """
    def decorator(f):
        # Register f as the prompt handler for this intent/parameter pair,
        # creating the per-intent dict on first use.
        prompts = self._intent_prompts.get(intent_name)
        if prompts:
            prompts[next_param] = f
        else:
            self._intent_prompts[intent_name] = {}
            self._intent_prompts[intent_name][next_param] = f

        @wraps(f)
        def wrapper(*args, **kw):
            self._flask_assitant_view_func(*args, **kw)
        # NOTE(review): the decorator returns the original f, not `wrapper`,
        # so `wrapper` is never the bound callable -- confirm this is
        # intentional (registration above appears to be the real effect).
        return f
    return decorator
Decorates a function to prompt for an action's required parameter.

The wrapped function is called if next_param was not received with the
given intent's request and is required for the fulfillment of the
intent's action.

Arguments:
    next_param {str} -- name of the parameter required for action function
    intent_name {str} -- name of the intent the dependent action belongs to
def bitstring_to_bytes(bitstr):
    """Convert a pyasn1 univ.BitString-like sequence of bits to ``bytes``.

    The bit string is interpreted big-endian and is left-padded with 0
    bits so that its length becomes a multiple of 8.
    """
    bits = list(bitstr)
    # Left-pad with zero bits up to the next byte boundary.
    bits = [0] * ((8 - len(bits) % 8) % 8) + bits
    out = bytearray()
    for start in range(0, len(bits), 8):
        value = 0
        for bit in bits[start:start + 8]:
            value = (value << 1) | bit
        out.append(value)
    return bytes(out)
Converts a pyasn1 univ.BitString instance to byte sequence of type 'bytes'. The bit string is interpreted big-endian and is left-padded with 0 bits to form a multiple of 8.
def respond_webhook(self, environ):
    """Passes the request onto a bot with a webhook if the webhook path
    is requested.

    Returns the bot's response, or an HTTP-style status code: 404 when no
    bot handles webhooks, 500 when the handler raised, 200 when the
    handler returned a falsy value.
    """
    request = FieldStorage(fp=environ["wsgi.input"], environ=environ)
    url = environ["PATH_INFO"]
    params = dict([(k, request[k].value) for k in request])
    try:
        if self.bot is None:
            raise NotImplementedError
        response = self.bot.handle_webhook_event(environ, url, params)
    except NotImplementedError:
        return 404
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; handler errors still log and return 500.
        self.logger.debug(format_exc())
        return 500
    return response or 200
Passes the request onto a bot with a webhook if the webhook path is requested.
def clear(self):
    """Clears all data from the data store permanently.

    Deletes every file and directory under (and including)
    ``self._root_dir``, then re-initializes the store from the parent
    directory so it is ready for use again.
    """
    # topdown=False yields leaves first, so each directory is empty by
    # the time os.rmdir(root) runs; the walk root itself is removed last.
    for root, dirs, files in os.walk(self._root_dir, topdown=False):
        for file in files:
            os.unlink(os.path.join(root, file))
        os.rmdir(root)
    root_dir = os.path.abspath(
        os.path.join(self._root_dir, os.pardir))
    # Re-run __init__ against the parent so the store recreates its
    # directory structure from scratch.
    self.__init__(root_dir)
Clears all data from the data store permanently
def _set_child(self, name, child):
    """Attach a child object under the given name.

    :param name: Child name.
    :param child: Parentable object; its parent is set to ``self``.
    :raises ValueError: if ``child`` is not a Parentable instance.
    """
    if isinstance(child, Parentable):
        child._set_parent(self)
        self._store_child(name, child)
    else:
        raise ValueError('Parentable child object expected, not {child}'.format(child=child))
Set child. :param name: Child name. :param child: Parentable object.
def sources_add(source_uri, ruby=None, runas=None, gem_bin=None):
    """Add a gem source.

    :param source_uri: string
        The source URI to add.
    :param ruby: string : None
        If RVM or rbenv are installed, the ruby version and gemset to use.
        Ignored if ``gem_bin`` is specified.
    :param runas: string : None
        The user to run gem as.
    :param gem_bin: string : None
        Full path to ``gem`` binary to use.

    CLI Example:

    .. code-block:: bash

        salt '*' gem.sources_add http://rubygems.org/
    """
    command = ['sources', '--add', source_uri]
    return _gem(command, ruby, gem_bin=gem_bin, runas=runas)
Add a gem source. :param source_uri: string The source URI to add. :param gem_bin: string : None Full path to ``gem`` binary to use. :param ruby: string : None If RVM or rbenv are installed, the ruby version and gemset to use. Ignored if ``gem_bin`` is specified. :param runas: string : None The user to run gem as. CLI Example: .. code-block:: bash salt '*' gem.sources_add http://rubygems.org/
def text_cleanup(data, key, last_type):
    """Strip surrounding whitespace from a finished multi-line string value.

    Only acts when ``key`` exists in ``data`` and the last parsed type was
    a string; otherwise ``data`` is returned untouched.
    """
    if key in data:
        if last_type == STRING_TYPE:
            data[key] = data[key].strip()
    return data
I strip extra whitespace off multi-line strings if they are ready to be stripped!
def as_event_class(obj):
    """Convert ``obj`` into a subclass of AbinitEvent.

    ``obj`` can be either a class or a string holding the class name or
    the YAML tag of the event.
    """
    if not is_string(obj):
        # Already a class: it must be a registered AbinitEvent subclass.
        assert obj in all_subclasses(AbinitEvent)
        return obj
    # Resolve the string against class names and YAML tags.
    for cls in all_subclasses(AbinitEvent):
        if obj in (cls.__name__, cls.yaml_tag):
            return cls
    raise ValueError("Cannot find event class associated to %s" % obj)
Convert obj into a subclass of AbinitEvent. obj can be either a class or a string with the class name or the YAML tag
def restart(self):
    """Restart the device.

    :return: bool -- True on success.
    :raises ZKErrorResponse: if the device rejects the restart command.
    """
    cmd_response = self.__send_command(const.CMD_RESTART)
    if not cmd_response.get('status'):
        raise ZKErrorResponse("can't restart device")
    # A restart drops the connection and resets the UID sequence.
    self.is_connect = False
    self.next_uid = 1
    return True
restart the device :return: bool
def cmd_list(options):
    """Gather data for instances matching args and call display func.

    Args:
        options (object): contains args and data from parser.
    """
    (i_info, param_str) = gather_data(options)
    if not i_info:
        print("No instances found with parameters: {}".format(param_str))
        return
    awsc.get_all_aminames(i_info)
    list_instances(i_info, "Instance List - " + param_str + "\n")
Gather data for instances matching args and call display func. Args: options (object): contains args and data from parser.
def get_marshaller_for_type(self, tp):
    """Gets the appropriate marshaller for a type.

    Retrieves the marshaller, if any, that can be used to read/write a
    Python object with type ``tp``, importing the modules it requires
    when available and caching the outcome.

    Parameters
    ----------
    tp : type or str
        Python object ``type`` (class reference) or its dotted string
        representation like ``'collections.deque'``.

    Returns
    -------
    marshaller : marshaller or None
        The marshaller for the type, or ``None`` if none is registered.
    has_required_modules : bool
        Whether the required modules for reading the type are present.
    """
    # Normalize a class reference to its dotted-name lookup key.
    if not isinstance(tp, str):
        tp = tp.__module__ + '.' + tp.__name__
    try:
        index = self._types[tp]
    except KeyError:
        return None, False
    m = self._marshallers[index]
    if self._imported_required_modules[index]:
        return m, True
    if not self._has_required_modules[index]:
        return m, False
    # Modules are claimed available but not yet imported -- try now and
    # cache the result for subsequent lookups.
    success = self._import_marshaller_modules(m)
    self._has_required_modules[index] = success
    self._imported_required_modules[index] = success
    return m, success
Gets the appropriate marshaller for a type. Retrieves the marshaller, if any, that can be used to read/write a Python object with type 'tp'. The modules it requires, if available, will be loaded. Parameters ---------- tp : type or str Python object ``type`` (which would be the class reference) or its string representation like ``'collections.deque'``. Returns ------- marshaller : marshaller or None The marshaller that can read/write the type to file. ``None`` if no appropriate marshaller is found. has_required_modules : bool Whether the required modules for reading the type are present or not. See Also -------- hdf5storage.Marshallers.TypeMarshaller.types
def checkConfig():
    """If the config.py file exists, back it up as config.py.oldbak."""
    config_path = os.path.join(cwd, "config.py")
    if not os.path.exists(config_path):
        return
    print("Making a backup of your config file!")
    backup_path = os.path.join(cwd, "config.py.oldbak")
    copyfile(config_path, backup_path)
If the config.py file exists, back it up
def _compute_mod_regs(self, regs_init, regs_fini): assert regs_init.keys() == regs_fini.keys() modified_regs = [] for reg in regs_init: if regs_init[reg] != regs_fini[reg]: modified_regs.append(reg) return modified_regs
Compute modified registers.
def wait_for_event(self, event_name, predicate, timeout=DEFAULT_TIMEOUT, *args, **kwargs):
    """Wait for an event that satisfies a predicate to appear.

    Continuously pops events named ``event_name`` and checks each against
    ``predicate`` until one matches or the deadline passes. Events of the
    same name that do not satisfy the predicate are consumed (removed) in
    the process.

    Args:
        event_name: Name of the event to be popped.
        predicate: A function that takes an event and returns True if the
            predicate is satisfied, False otherwise.
        timeout: Number of seconds to wait.
        *args: Optional positional args passed to predicate().
        **kwargs: Optional keyword args passed to predicate().

    Returns:
        The event that satisfies the predicate.

    Raises:
        queue.Empty: Raised if no event that satisfies the predicate was
            found before time out.
    """
    deadline = time.time() + timeout
    while True:
        event = None
        try:
            # Block at most 1s per attempt so the deadline is re-checked
            # regularly; a timed-out pop simply leaves event as None.
            event = self.pop_event(event_name, 1)
        except queue.Empty:
            pass
        if event and predicate(event, *args, **kwargs):
            return event
        # NOTE(review): because each pop may block up to 1s, the total
        # wait can overshoot `timeout` by up to ~1s.
        if time.time() > deadline:
            raise queue.Empty(
                'Timeout after {}s waiting for event: {}'.format(
                    timeout, event_name))
Wait for an event that satisfies a predicate to appear. Continuously pop events of a particular name and check against the predicate until an event that satisfies the predicate is popped or timed out. Note this will remove all the events of the same name that do not satisfy the predicate in the process. Args: event_name: Name of the event to be popped. predicate: A function that takes an event and returns True if the predicate is satisfied, False otherwise. timeout: Number of seconds to wait. *args: Optional positional args passed to predicate(). **kwargs: Optional keyword args passed to predicate(). Returns: The event that satisfies the predicate. Raises: queue.Empty: Raised if no event that satisfies the predicate was found before time out.
def _bsecurate_cli_print_component_file(args):
    """Handle the print-component-file subcommand.

    Reads the JSON component basis file named by ``args.file`` and renders
    it as a string, restricted to ``args.elements``.
    """
    component = fileio.read_json_basis(args.file)
    return printing.component_basis_str(component, elements=args.elements)
Handles the print-component-file subcommand
def refweights(self):
    """A |numpy| |numpy.ndarray| assigning equal weight (``1/n``) to all
    segment junctions.

    >>> from hydpy.models.hstream import *
    >>> parameterstep('1d')
    >>> states.qjoints.shape = 5
    >>> states.qjoints.refweights
    array([ 0.2,  0.2,  0.2,  0.2,  0.2])
    """
    return numpy.ones(self.shape, dtype=float) / self.shape[0]
A |numpy| |numpy.ndarray| with equal weights for all segment junctions.

>>> from hydpy.models.hstream import *
>>> parameterstep('1d')
>>> states.qjoints.shape = 5
>>> states.qjoints.refweights
array([ 0.2,  0.2,  0.2,  0.2,  0.2])
def get_buffer_size_in_pages(cls, address, size):
    """Get the number of pages in use by the given buffer.

    @type  address: int
    @param address: Memory address; with a negative ``size`` this is the
        end of the buffer and the start is computed below it.

    @type  size: int
    @param size: Buffer size.

    @rtype:  int
    @return: Buffer size in number of pages.
    """
    if size < 0:
        size = -size
        address = address - size
    begin, end = cls.align_address_range(address, address + size)
    # Integer division instead of float math: converting to float loses
    # precision for addresses above 2**53 on 64-bit targets, and the
    # aligned range is an exact multiple of the page size anyway.
    return (end - begin) // cls.pageSize
Get the number of pages in use by the given buffer. @type address: int @param address: Aligned memory address. @type size: int @param size: Buffer size. @rtype: int @return: Buffer size in number of pages.
def run():
    """This client generates customer reports on all the samples in
    workbench.

    NOTE(review): Python 2 syntax (`print` statement) -- this module is
    not Python 3 compatible as written.
    """
    # Grab server host/port from the CLI helper.
    args = client_helper.grab_server_args()
    # Connect to the workbench zerorpc server.
    workbench = zerorpc.Client(timeout=300, heartbeat=60)
    workbench.connect('tcp://'+args['server']+':'+args['port'])
    # Build a sample set over all samples and run the 'view_customer'
    # worker against it, printing each resulting customer record.
    all_set = workbench.generate_sample_set()
    results = workbench.set_work_request('view_customer', all_set)
    for customer in results:
        print customer['customer']
This client generates customer reports on all the samples in workbench.
def _on_disconnect(_loop, adapter, _adapter_id, conn_id):
    """Callback fired when a device disconnects unexpectedly.

    Tears down the adapter-side connection state and notifies listeners
    with a generic 'disconnection' event.
    """
    conn_string = adapter._get_property(conn_id, 'connection_string')
    if conn_string is None:
        # Unknown connection id: nothing to tear down, just log and bail.
        adapter._logger.debug("Dropping disconnect notification with unknown conn_id=%s", conn_id)
        return
    adapter._teardown_connection(conn_id, force=True)
    payload = dict(reason='no reason passed from legacy adapter', expected=False)
    adapter.notify_event_nowait(conn_string, 'disconnection', payload)
Callback when a device disconnects unexpectedly.
def command_repo_remove(self):
    """Remove the custom repository named by the second CLI argument."""
    args = self.args
    if len(args) == 2 and args[0] == "repo-remove":
        Repo().remove(args[1])
    else:
        usage("")
Remove custom repositories
def _acceptpeak(peak, amp, definitive_peaks, spk1, rr_buffer): definitive_peaks_out = definitive_peaks definitive_peaks_out = numpy.append(definitive_peaks_out, peak) spk1 = 0.125 * amp + 0.875 * spk1 if len(definitive_peaks_out) > 1: rr_buffer.pop(0) rr_buffer += [definitive_peaks_out[-1] - definitive_peaks_out[-2]] return numpy.array(definitive_peaks_out), spk1, rr_buffer
Private function intended to insert a new RR interval in the buffer. ---------- Parameters ---------- peak : int Sample where the peak under analysis is located. amp : int Amplitude of the peak under analysis. definitive_peaks : list List with the definitive_peaks stored until the present instant. spk1 : float Actual value of SPK1 parameter defined in Pan-Tompkins real-time R peak detection algorithm (named signal peak). rr_buffer : list Data structure that stores the duration of the last eight RR intervals. Returns ------- definitive_peaks_out : list Definitive peaks list. spk1 : float Updated value of SPK1 parameter. rr_buffer : list Buffer after appending a new RR interval and excluding the oldest one.
def _add_inline_definition(item, statement):
    """Adds an inline (unnamed) definition to ``statement``.

    Temporarily redirects the module-global ``_current_statement`` to a
    new UnnamedStatement while its options are parsed, then attaches it
    as a child of ``statement``.
    """
    global _current_statement

    backup = _current_statement
    type_, options = _expand_one_key_dictionary(item)
    _current_statement = UnnamedStatement(type=type_)
    try:
        _parse_statement(options)
        statement.add_child(_current_statement)
    finally:
        # Restore the previous statement even if parsing fails, so one
        # bad inline definition cannot corrupt the global parser state.
        _current_statement = backup
Adds an inline definition to statement.
def TextInfo(filename=None, editable=False, **kwargs):
    """Show the text of a file to the user.

    This raises a Zenity Text Information Dialog presenting the user with
    the contents of a file and returns the contents of the text box (or
    None if the dialog was not confirmed).

    filename - The path to the file to show.
    editable - True if the text should be editable.
    kwargs   - Optional command line parameters for Zenity such as
               height, width, etc.
    """
    args = []
    if filename:
        args.append('--filename=%s' % filename)
    if editable:
        args.append('--editable')
    args.extend('--%s=%s' % pair for pair in kwargs_helper(kwargs))
    p = run_zenity('--text-info', *args)
    if p.wait() == 0:
        return p.stdout.read()
Show the text of a file to the user. This will raise a Zenity Text Information Dialog presenting the user with the contents of a file. It returns the contents of the text box. filename - The path to the file to show. editable - True if the text should be editable. kwargs - Optional command line parameters for Zenity such as height, width, etc.
def parse_feeds(self, message_channel=True):
    """Iterate over the feed URLs, parse their items, and send any items
    not previously seen to the channel.

    When ``message_channel`` is False, new items are only recorded as
    seen without being messaged.
    """
    if not parse:
        # feedparser is unavailable -- nothing to do.
        return
    for feed_url in self.feeds:
        feed = parse(feed_url)
        for entry in feed.entries:
            if entry["id"] in self.feed_items:
                continue
            self.feed_items.add(entry["id"])
            if message_channel:
                self.message_channel(self.format_item_message(feed, entry))
Iterates through each of the feed URLs, parses their items, and sends
any items to the channel that have not previously been parsed.
def run(self):
    """Queue all services to be polled. Should be run via beat."""
    queued = Service.objects.all()
    for svc in queued:
        poll_service.apply_async(kwargs={"service_id": str(svc.id)})
    return "Queued <%s> Service(s) for Polling" % queued.count()
Queues all services to be polled. Should be run via beat.
def destroy(self, folder=None, as_coro=False):
    """Destroy the environment.

    Does the following:

    1. calls :py:meth:`~creamas.core.Environment.save_info`
    2. for each agent: calls :py:meth:`close`
    3. Shuts down its RPC-service.

    :param folder: Optional folder passed to ``save_info`` and each
        agent's ``close``.
    :param as_coro: When True, return the coroutine instead of running
        it to completion immediately.
    :return: The value returned by ``save_info`` (or a coroutine that
        yields it when ``as_coro`` is True).
    """
    async def _destroy(folder):
        # Save environment info first so state is captured before the
        # agents are closed and the RPC service shuts down.
        ret = self.save_info(folder)
        for a in self.get_agents(addr=False):
            a.close(folder=folder)
        await self.shutdown(as_coro=True)
        return ret
    return run_or_coro(_destroy(folder), as_coro)
Destroy the environment. Does the following: 1. calls :py:meth:`~creamas.core.Environment.save_info` 2. for each agent: calls :py:meth:`close` 3. Shuts down its RPC-service.
def is_nash(self, action_profile, tol=None):
    """Return True if `action_profile` is a Nash equilibrium.

    Parameters
    ----------
    action_profile : array_like(int or array_like(float))
        An array of N objects, where each object must be an integer
        (pure action) or an array of floats (mixed action).
    tol : scalar(float)
        Tolerance level used in determining best responses. If None,
        default to each player's `tol` attribute value.

    Returns
    -------
    bool
        True if `action_profile` is a Nash equilibrium; False otherwise.
    """
    if self.N < 2:
        # Single-player game: only the player's own action matters.
        return self.players[0].is_best_response(action_profile[0],
                                                None, tol)
    for i, player in enumerate(self.players):
        own_action = action_profile[i]
        if self.N == 2:
            opponents_actions = action_profile[1-i]
        else:
            # Opponents' actions, rotated so the order starts after i.
            opponents_actions = \
                tuple(action_profile[i+1:]) + tuple(action_profile[:i])
        if not player.is_best_response(own_action, opponents_actions,
                                       tol):
            return False
    return True
Return True if `action_profile` is a Nash equilibrium. Parameters ---------- action_profile : array_like(int or array_like(float)) An array of N objects, where each object must be an integer (pure action) or an array of floats (mixed action). tol : scalar(float) Tolerance level used in determining best responses. If None, default to each player's `tol` attribute value. Returns ------- bool True if `action_profile` is a Nash equilibrium; False otherwise.
def user_defined_symbols(self):
    """Return the set of symbols added to symtable after construction.

    I.e., the names in ``self.symtable`` that are not listed in
    ``self.no_deepcopy``.

    Returns
    -------
    set
        Symbols in symtable that are not in no_deepcopy.
    """
    return set(self.symtable) - set(self.no_deepcopy)
Return a set of symbols that have been added to symtable after construction. I.e., the symbols from self.symtable that are not in self.no_deepcopy. Returns ------- unique_symbols : set symbols in symtable that are not in self.no_deepcopy
def _check_ndim(self, values, ndim): if ndim is None: ndim = values.ndim if self._validate_ndim and values.ndim != ndim: msg = ("Wrong number of dimensions. values.ndim != ndim " "[{} != {}]") raise ValueError(msg.format(values.ndim, ndim)) return ndim
ndim inference and validation. Infers ndim from 'values' if not provided to __init__. Validates that values.ndim and ndim are consistent if and only if the class variable '_validate_ndim' is True. Parameters ---------- values : array-like ndim : int or None Returns ------- ndim : int Raises ------ ValueError : the number of dimensions do not match
def getTamilWords( tweet ):
    """Extract the Tamil words from a tweet.

    Cleans punctuation, splits on whitespace, and keeps only the tokens
    that pass ``TamilTweetParser.isTamilPredicate``.

    NOTE(review): returns whatever ``filter`` yields -- a list on
    Python 2, a lazy filter object on Python 3; confirm which the
    callers expect.
    """
    tweet = TamilTweetParser.cleanupPunct( tweet );
    # Drop empty tokens produced by the whitespace split.
    nonETwords = filter( lambda x: len(x) > 0 , re.split(r'\s+',tweet) );
    tamilWords = filter( TamilTweetParser.isTamilPredicate, nonETwords );
    return tamilWords
word needs to all be in the same tamil language
def _trim_dict_in_dict(data, max_val_size, replace_with): for key in data: if isinstance(data[key], dict): _trim_dict_in_dict(data[key], max_val_size, replace_with) else: if sys.getsizeof(data[key]) > max_val_size: data[key] = replace_with
Takes a dictionary, max_val_size and replace_with and recursively loops through and replaces any values that are greater than max_val_size.
def _submatch(self, node, results=None):
    """Match the pattern's content to the node's children.

    This assumes the node type matches and self.content is not None.

    Returns True if it matches, False if not.

    If results is not None, it must be a dict which will be updated with
    the nodes matching named subpatterns.

    When returning False, the results dict may still be updated.
    """
    if self.wildcards:
        # Wildcard pattern: accept any alignment generated by
        # generate_matches that consumes ALL of the node's children.
        for c, r in generate_matches(self.content, node.children):
            if c == len(node.children):
                if results is not None:
                    results.update(r)
                return True
        return False
    # Exact pattern: lengths must agree and each subpattern must match
    # its corresponding child one-to-one.
    if len(self.content) != len(node.children):
        return False
    for subpattern, child in zip(self.content, node.children):
        if not subpattern.match(child, results):
            return False
    return True
Match the pattern's content to the node's children. This assumes the node type matches and self.content is not None. Returns True if it matches, False if not. If results is not None, it must be a dict which will be updated with the nodes matching named subpatterns. When returning False, the results dict may still be updated.
def decrypt(data, digest=True):
    """Decrypt provided data.

    The payload is expected as ``"<alg>$<ciphertext>"``; data without an
    algorithm prefix is returned unchanged. When ``digest`` is True the
    ciphertext is first decoded from its hex-digest form.

    :raises CryptError: if no decryption implementation is registered
        for the algorithm.
    """
    alg, _, payload = data.rpartition("$")
    if not alg:
        # No algorithm prefix -- nothing to decrypt.
        return payload
    if digest:
        payload = _from_hex_digest(payload)
    try:
        decrypt_impl = implementations["decryption"][alg]
        return decrypt_impl(payload, implementations["get_key"]())
    except KeyError:
        raise CryptError("Can not decrypt key for algorithm: %s" % alg)
Decrypt provided data.
def invoke(self, function_name, raw_python=False, command=None, no_color=False):
    """Invoke a remote Lambda function and print its log output.

    :param function_name: name/command string sent to the remote handler
    :param raw_python: when True, send the payload under the
        'raw_command' key instead
    :param command: optional key to use for the payload in place of
        'command'
    :param no_color: when True, print the raw log output without
        formatting/colorization
    :raises ClickException: if the invocation reports a FunctionError
    """
    # Choose the payload key: an explicit `command` key, or 'command'.
    key = command if command is not None else 'command'
    if raw_python:
        command = {'raw_command': function_name}
    else:
        command = {key: function_name}
    import json as json
    response = self.zappa.invoke_lambda_function(
        self.lambda_name,
        json.dumps(command),
        invocation_type='RequestResponse',
    )
    if 'LogResult' in response:
        if no_color:
            # NOTE(review): prints the base64-decoded bytes object
            # directly; on Python 3 this shows the bytes repr -- confirm
            # this is the intended no-color output.
            print(base64.b64decode(response['LogResult']))
        else:
            decoded = base64.b64decode(response['LogResult']).decode()
            formatted = self.format_invoke_command(decoded)
            colorized = self.colorize_invoke_command(formatted)
            print(colorized)
    else:
        print(response)
    # Only a FunctionError field marks an actual remote failure.
    if 'FunctionError' in response:
        raise ClickException(
            "{} error occurred while invoking command.".format(response['FunctionError'])
        )
Invoke a remote function.
def correctly_signed_response(self, decoded_xml, must=False, origdoc=None,
                              only_valid_cert=False,
                              require_response_signature=False, **kwargs):
    """Check if an instance is correctly signed, if we have metadata for
    the IdP that sent the info use that, if not use the key that are in
    the message if any.

    :param decoded_xml: The SAML message as a XML string
    :param must: Whether there must be a signature
    :param origdoc: The original XML document, used for signature
        verification when provided
    :param only_valid_cert: accepted for API symmetry; not used in this
        body
    :param require_response_signature: whether a missing response-level
        signature is an error
    :return: The parsed response instance
    :raises TypeError: if the XML does not parse as a samlp Response
    :raises SignatureError: if a required signature is missing (or fails
        verification inside ``_check_signature``)
    """
    response = samlp.any_response_from_string(decoded_xml)
    if not response:
        raise TypeError('Not a Response')
    if response.signature:
        # Callers may explicitly opt out of verification with the
        # 'do_not_verify' keyword.
        if 'do_not_verify' in kwargs:
            pass
        else:
            self._check_signature(decoded_xml, response,
                                  class_name(response), origdoc)
    elif require_response_signature:
        raise SignatureError('Signature missing for response')
    return response
Check if a instance is correctly signed, if we have metadata for the IdP that sent the info use that, if not use the key that are in the message if any. :param decoded_xml: The SAML message as a XML string :param must: Whether there must be a signature :param origdoc: :param only_valid_cert: :param require_response_signature: :return: None if the signature can not be verified otherwise an instance