code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def argmin(self, axis=None, skipna=True): nv.validate_minmax_axis(axis) return nanops.nanargmin(self._values, skipna=skipna)
Return a ndarray of the minimum argument indexer. Parameters ---------- axis : {None} Dummy argument for consistency with Series skipna : bool, default True Returns ------- numpy.ndarray See Also -------- numpy.ndarray.argmin
def parse_result(cls, result): if len(result) == 3: items, count, context = result else: context = {} items, count = result return items, count, context
Parse an items + count tuple result. May either be three item tuple containing items, count, and a context dictionary (see: relation convention) or a two item tuple containing only items and count.
def __get_request_auth(username, password): def __request_auth(credentials, user_data): for credential in credentials: if credential[0] == libvirt.VIR_CRED_AUTHNAME: credential[4] = username if username else \ __salt__['config.get']('virt:connection:auth:username', credential[3]) elif credential[0] == libvirt.VIR_CRED_NOECHOPROMPT: credential[4] = password if password else \ __salt__['config.get']('virt:connection:auth:password', credential[3]) else: log.info('Unhandled credential type: %s', credential[0]) return 0
Get libvirt.openAuth callback with username, password values overriding the configuration ones.
def run(self): notice('Starting output thread', self.color) o = Thread(target=self.__output_thread, name='output') o.start() self.threads.append(o) try: notice('Starting input thread', self.color) self.__input_thread() except KeyboardInterrupt: self.__shutdown()
Run self on provided screen
def isPeregrine(ID, sign, lon): info = getInfo(sign, lon) for dign, objID in info.items(): if dign not in ['exile', 'fall'] and ID == objID: return False return True
Returns if an object is peregrine on a sign and longitude.
async def _wait_for_data(self, current_command, number_of_bytes): while number_of_bytes: next_command_byte = await self.read() current_command.append(next_command_byte) number_of_bytes -= 1 return current_command
This is a private utility method. This method accumulates the requested number of bytes and then returns the full command :param current_command: command id :param number_of_bytes: how many bytes to wait for :returns: command
def backup_list(self, query, detail): import csv from wal_e.storage.base import BackupInfo bl = self._backup_list(detail) if query is None: bl_iter = bl else: bl_iter = bl.find_all(query) w_csv = csv.writer(sys.stdout, dialect='excel-tab') w_csv.writerow(BackupInfo._fields) for bi in bl_iter: w_csv.writerow([getattr(bi, k) for k in BackupInfo._fields]) sys.stdout.flush()
Lists base backups and basic information about them
def recurse_up(directory, filename): directory = osp.abspath(directory) while True: searchfile = osp.join(directory, filename) if osp.isfile(searchfile): return directory if directory == '/': break else: directory = osp.dirname(directory) return False
Recursive walk a directory up to root until it contains `filename`
def _prepareSObjects(sObjects): sObjectsCopy = copy.deepcopy(sObjects) if isinstance(sObjectsCopy, dict): _doPrep(sObjectsCopy) else: for listitems in sObjectsCopy: _doPrep(listitems) return sObjectsCopy
Prepare a SObject
def apply_mapping(raw_row, mapping): row = {target: mapping_func(raw_row[source_key]) for target, (mapping_func, source_key) in mapping.fget().items()} return row
Override this to hand craft conversion of row.
def validate(self, metadata, path, value): if isinstance(value, Requirement): if metadata.testing and self.mock_value is not None: value = self.mock_value elif self.default_value is not None: value = self.default_value elif not value.required: return None else: raise ValidationError(f"Missing required configuration for: {'.'.join(path)}") try: return self.type(value) except ValueError: raise ValidationError(f"Missing required configuration for: {'.'.join(path)}: {value}")
Validate this requirement.
def to_cloudformation(self, **kwargs): function = kwargs.get('function') if not function: raise TypeError("Missing required keyword argument: function") source_arn = self.get_source_arn() permission = self._construct_permission(function, source_arn=source_arn) subscription_filter = self.get_subscription_filter(function, permission) resources = [permission, subscription_filter] return resources
Returns the CloudWatch Logs Subscription Filter and Lambda Permission to which this CloudWatch Logs event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this push event expands :rtype: list
def standard_exc_info(self): tb = self.frames[0] if type(tb) is not TracebackType: tb = tb.tb return self.exc_type, self.exc_value, tb
Standard python exc_info for re-raising
def update(self, other): for new in other.values(): new_key = new.__class__.__name__ for child in new.__class__.mro()[1:-2]: child_key = child.__name__ try: self[child_key].merge(new) except KeyError: pass except ValueError: del self[child_key] try: self[new_key].merge(new) except (KeyError, ValueError): self[new_key] = new
Update themeables with those from `other` This method takes care of inserting the `themeable` into the underlying dictionary. Before doing the insertion, any existing themeables that will be affected by a new from `other` will either be merged or removed. This makes sure that a general themeable of type :class:`text` can be added to override an existing specific one of type :class:`axis_text_x`.
def lazy(func): try: frame = sys._getframe(1) except Exception: _locals = None else: _locals = frame.f_locals func_name = func.func_name if six.PY2 else func.__name__ return LazyStub(func_name, func, _locals)
Decorator, which can be used for lazy imports @lazy def yaml(): import yaml return yaml
def clear_search_defaults(self, args=None): if args is None: self._search_defaults.clear() else: for arg in args: if arg in self._search_defaults: del self._search_defaults[arg]
Clear all search defaults specified by the list of parameter names given as ``args``. If ``args`` is not given, then clear all existing search defaults. Examples:: conn.set_search_defaults(scope=ldap.SCOPE_BASE, attrs=['cn']) conn.clear_search_defaults(['scope']) conn.clear_search_defaults()
def freeze_extensions(self): output_path = os.path.join(_registry_folder(), 'frozen_extensions.json') with open(output_path, "w") as outfile: json.dump(self._dump_extensions(), outfile)
Freeze the set of extensions into a single file. Freezing extensions can speed up the extension loading process on machines with slow file systems since it requires only a single file to store all of the extensions. Calling this method will save a file into the current virtual environment that stores a list of all currently found extensions that have been installed as entry_points. Future calls to `load_extensions` will only search the one single file containing frozen extensions rather than enumerating all installed distributions.
def input_password(self, locator, text): self._info("Typing password into text field '%s'" % locator) self._element_input_text_by_locator(locator, text)
Types the given password into text field identified by `locator`. Difference between this keyword and `Input Text` is that this keyword does not log the given password. See `introduction` for details about locating elements.
def build_doctype(qualifiedName, publicId=None, systemId=None, internalSubset=None): doctype = ElifeDocumentType(qualifiedName) doctype._identified_mixin_init(publicId, systemId) if internalSubset: doctype.internalSubset = internalSubset return doctype
Instantiate an ElifeDocumentType, a subclass of minidom.DocumentType, with some properties so it is more testable
def load_repo(client, path=None, index='git'): path = dirname(dirname(abspath(__file__))) if path is None else path repo_name = basename(path) repo = git.Repo(path) create_git_index(client, index) for ok, result in streaming_bulk( client, parse_commits(repo.refs.master.commit, repo_name), index=index, doc_type='doc', chunk_size=50 ): action, result = result.popitem() doc_id = '/%s/doc/%s' % (index, result['_id']) if not ok: print('Failed to %s document %s: %r' % (action, doc_id, result)) else: print(doc_id)
Parse a git repository with all it's commits and load it into elasticsearch using `client`. If the index doesn't exist it will be created.
def clear(self): self.sam |= KEY_WRITE for v in list(self.values()): del self[v.name] for k in list(self.subkeys()): self.del_subkey(k.name)
Remove all subkeys and values from this key.
def show_menu(title, options, default=None, height=None, width=None, multiselect=False, precolored=False): plugins = [FilterPlugin()] if any(isinstance(opt, OptionGroup) for opt in options): plugins.append(OptionGroupPlugin()) if title: plugins.append(TitlePlugin(title)) if precolored: plugins.append(PrecoloredPlugin()) menu = Termenu(options, default=default, height=height, width=width, multiselect=multiselect, plugins=plugins) return menu.show()
Shows an interactive menu in the terminal. Arguments: options: list of menu options default: initial option to highlight height: maximum height of the menu width: maximum width of the menu multiselect: allow multiple items to be selected? precolored: allow strings with embedded ANSI commands Returns: * If multiselect is True, returns a list of selected options. * If mutliselect is False, returns the selected option. * If an option is a 2-tuple, the first item will be displayed and the second item will be returned. * If menu is cancelled (Esc pressed), returns None. * Notes: * You can pass OptionGroup objects to `options` to create sub-headers in the menu.
def prepare_xml(args, parser): if args.source == constants.TEI_SOURCE_CBETA_GITHUB: corpus_class = tacl.TEICorpusCBETAGitHub else: raise Exception('Unsupported TEI source option provided') corpus = corpus_class(args.input, args.output) corpus.tidy()
Prepares XML files for stripping. This process creates a single, normalised TEI XML file for each work.
def from_dict(cls, d): return IonEntry(Ion.from_dict(d["ion"]), d["energy"], d.get("name", None))
Returns an IonEntry object from a dict.
def _PrintDictAsTable(self, src_dict): key_list = list(src_dict.keys()) key_list.sort() print('|', end='') for key in key_list: print(' {0:s} |'.format(key), end='') print('') print('|', end='') for key in key_list: print(' :---: |', end='') print('') print('|', end='') for key in key_list: print(' {0!s} |'.format(src_dict[key]), end='') print('\n')
Prints a table of artifact definitions. Args: src_dict (dict[str, ArtifactDefinition]): artifact definitions by name.
def _configure_logger_for_production(logger): stderr_handler = logging.StreamHandler(sys.stderr) stderr_handler.setLevel(logging.INFO) if 'STDERR' in app.config: logger.addHandler(stderr_handler) file_handler = logging.handlers.RotatingFileHandler( app.config.get('LOG_FILE'), maxBytes=67108864, backupCount=5) file_handler.setLevel(logging.INFO) if 'LOG_FILE' in app.config: logger.addHandler(file_handler) mail_handler = logging.handlers.SMTPHandler( '127.0.0.1', app.config.get('FROM_EMAIL'), app.config.get('ADMINS', []), 'CKAN Service Error') mail_handler.setLevel(logging.ERROR) if 'FROM_EMAIL' in app.config: logger.addHandler(mail_handler)
Configure the given logger for production deployment. Logs to stderr and file, and emails errors to admins.
def jit_load(self): try: model = importlib.import_module('.' + self.model, 'andes.models') device = getattr(model, self.device) self.system.__dict__[self.name] = device(self.system, self.name) g = self.system.__dict__[self.name]._group self.system.group_add(g) self.system.__dict__[g].register_model(self.name) self.system.devman.register_device(self.name) self.loaded = 1 logger.debug('Imported model <{:s}.{:s}>.'.format( self.model, self.device)) except ImportError: logger.error( 'non-JIT model <{:s}.{:s}> import error' .format(self.model, self.device)) except AttributeError: logger.error( 'model <{:s}.{:s}> not exist. Check models/__init__.py' .format(self.model, self.device))
Import and instantiate this JIT object Returns -------
def _read_mode_tr(self, size, kind): if size != 12: raise ProtocolError(f'{self.alias}: [Optno {kind}] invalid format') _idnm = self._read_unpack(2) _ohcn = self._read_unpack(2) _rhcn = self._read_unpack(2) _ipad = self._read_ipv4_addr() data = dict( kind=kind, type=self._read_opt_type(kind), length=size, id=_idnm, ohc=_ohcn, rhc=_rhcn, ip=_ipad, ) return data
Read Traceroute option. Positional arguments: size - int, length of option kind - int, 82 (TR) Returns: * dict -- extracted Traceroute (TR) option Structure of Traceroute (TR) option [RFC 1393][RFC 6814]: 0 8 16 24 +-+-+-+-+-+-+-+-+---------------+---------------+---------------+ |F| C | Number | Length | ID Number | +-+-+-+-+-+-+-+-+---------------+---------------+---------------+ | Outbound Hop Count | Return Hop Count | +---------------+---------------+---------------+---------------+ | Originator IP Address | +---------------+---------------+---------------+---------------+ Octets Bits Name Description 0 0 ip.tr.kind Kind (82) 0 0 ip.tr.type.copy Copied Flag (0) 0 1 ip.tr.type.class Option Class (0) 0 3 ip.tr.type.number Option Number (18) 1 8 ip.tr.length Length (12) 2 16 ip.tr.id ID Number 4 32 ip.tr.ohc Outbound Hop Count 6 48 ip.tr.rhc Return Hop Count 8 64 ip.tr.ip Originator IP Address
def get_points_within_r(center_points, target_points, r): r tree = cKDTree(target_points) indices = tree.query_ball_point(center_points, r) return tree.data[indices].T
r"""Get all target_points within a specified radius of a center point. All data must be in same coordinate system, or you will get undetermined results. Parameters ---------- center_points: (X, Y) ndarray location from which to grab surrounding points within r target_points: (X, Y) ndarray points from which to return if they are within r of center_points r: integer search radius around center_points to grab target_points Returns ------- matches: (X, Y) ndarray A list of points within r distance of, and in the same order as, center_points
def get(cls, action, suffix=None): action_id = _action_id(action, suffix) if action_id not in cls._HANDLERS: if LOG_OPTS['register']: hookenv.log('Registering reactive handler for %s' % _short_action_id(action, suffix), level=hookenv.DEBUG) cls._HANDLERS[action_id] = cls(action, suffix) return cls._HANDLERS[action_id]
Get or register a handler for the given action. :param func action: Callback that is called when invoking the Handler :param func suffix: Optional suffix for the handler's ID
def get_constraint(self, twig=None, **kwargs): if twig is not None: kwargs['twig'] = twig kwargs['context'] = 'constraint' return self.get(**kwargs)
Filter in the 'constraint' context :parameter str constraint: name of the constraint (optional) :parameter **kwargs: any other tags to do the filter (except constraint or context) :return: :class:`phoebe.parameters.parameters.ParameterSet`
def exportUsufy(data, ext, fileH): if ext == "csv": usufyToCsvExport(data, fileH+"."+ext) elif ext == "gml": usufyToGmlExport(data, fileH+"."+ext) elif ext == "json": usufyToJsonExport(data, fileH+"."+ext) elif ext == "ods": usufyToOdsExport(data, fileH+"."+ext) elif ext == "png": usufyToPngExport(data, fileH+"."+ext) elif ext == "txt": usufyToTextExport(data, fileH+"."+ext) elif ext == "xls": usufyToXlsExport(data, fileH+"."+ext) elif ext == "xlsx": usufyToXlsxExport(data, fileH+"."+ext)
Method that exports the different structures onto different formats. Args: ----- data: Data to export. ext: One of the following: csv, excel, json, ods. fileH: Fileheader for the output files. Returns: -------- Performs the export as requested by parameter.
def client_credentials(self, client_id, client_secret, audience, grant_type='client_credentials'): return self.post( 'https://{}/oauth/token'.format(self.domain), data={ 'client_id': client_id, 'client_secret': client_secret, 'audience': audience, 'grant_type': grant_type, }, headers={'Content-Type': 'application/json'} )
Client credentials grant This is the OAuth 2.0 grant that server processes utilize in order to access an API. Use this endpoint to directly request an access_token by using the Application Credentials (a Client Id and a Client Secret). Args: grant_type (str): Denotes the flow you're using. For client credentials use client_credentials client_id (str): your application's client Id client_secret (str): your application's client Secret audience (str): The unique identifier of the target API you want to access. Returns: access_token
def update(self, historics_id, name): return self.request.post('update', data=dict(id=historics_id, name=name))
Update the name of the given Historics query. Uses API documented at http://dev.datasift.com/docs/api/rest-api/endpoints/historicsupdate :param historics_id: playback id of the job to start :type historics_id: str :param name: new name of the stream :type name: str :return: dict of REST API output with headers attached :rtype: :class:`~datasift.request.DictResponse` :raises: :class:`~datasift.exceptions.DataSiftApiException`, :class:`requests.exceptions.HTTPError`
def get_valid_https_verify(value): http_verify_value = value bool_values = {'false': False, 'true': True} if isinstance(value, bool): http_verify_value = value elif (isinstance(value, str) or isinstance(value, unicode)) and value.lower() in bool_values.keys(): http_verify_value = bool_values[value.lower()] return http_verify_value
Get a value that can be the boolean representation of a string or a boolean itself and returns It as a boolean. If this is not the case, It returns a string. :value: The HTTPS_verify input value. A string can be passed as a path to a CA_BUNDLE certificate :returns: True, False or a string.
def _hashed_key(self): return abs(int(hashlib.md5( self.key_prefix.encode('utf8') ).hexdigest(), 16)) % (10 ** ( self._size_mod if hasattr(self, '_size_mod') else 5))
Returns 16-digit numeric hash of the redis key
def no_content_response(response): "Cautious assessment of the response body for no content." if not hasattr(response, '_container'): return True if response._container is None: return True if isinstance(response._container, (list, tuple)): if len(response._container) == 1 and not response._container[0]: return True return False
Cautious assessment of the response body for no content.
def clone_and_update(self, **kwargs): cloned = self.clone() cloned.update(**kwargs) return cloned
Clones the object and updates the clone with the args @param kwargs: Keyword arguments to set @return: The cloned copy with updated values
def run_evaluation(self, stream_name: str) -> None: def prediction(): logging.info('Running prediction') self._run_zeroth_epoch([stream_name]) logging.info('Prediction done\n\n') self._try_run(prediction)
Run the main loop with the given stream in the prediction mode. :param stream_name: name of the stream to be evaluated
def _get_imports_h(self, data_types): if not isinstance(data_types, list): data_types = [data_types] import_classes = [] for data_type in data_types: if is_user_defined_type(data_type): import_classes.append(fmt_class_prefix(data_type)) for field in data_type.all_fields: data_type, _ = unwrap_nullable(field.data_type) while is_list_type(data_type) or is_map_type(data_type): data_type = (data_type.value_data_type if is_map_type(data_type) else data_type.data_type) if is_user_defined_type(data_type): import_classes.append(fmt_class_prefix(data_type)) import_classes = list(set(import_classes)) import_classes.sort() return import_classes
Emits all necessary header file imports for the given Stone data type.
def create(callback=None, path=None, method=Method.POST, resource=None, tags=None, summary="Create a new resource", middleware=None): def inner(c): op = ResourceOperation(c, path or NoPath, method, resource, tags, summary, middleware) op.responses.add(Response(HTTPStatus.CREATED, "{name} has been created")) op.responses.add(Response(HTTPStatus.BAD_REQUEST, "Validation failed.", Error)) return op return inner(callback) if callback else inner
Decorator to configure an operation that creates a resource.
def fit(self, volumes, energies): eos_fit = self.model(np.array(volumes), np.array(energies)) eos_fit.fit() return eos_fit
Fit energies as function of volumes. Args: volumes (list/np.array) energies (list/np.array) Returns: EOSBase: EOSBase object
def all(self, order_by=None, limit=0): with rconnect() as conn: try: query = self._base() if order_by is not None: query = self._order_by(query, order_by) if limit > 0: query = self._limit(query, limit) log.debug(query) rv = query.run(conn) except Exception as e: log.warn(e) raise else: data = [self._model(_) for _ in rv] return data
Fetch all items. :param limit: How many rows to fetch. :param order_by: column on which to order the results. \ To change the sort, prepend with < or >.
def random_int_generator(maxrange): try: return random.randint(0,maxrange) except: line, filename, synerror = trace() raise ArcRestHelperError({ "function": "random_int_generator", "line": line, "filename": filename, "synerror": synerror, } ) finally: pass
Generates a random integer from 0 to `maxrange`, inclusive. Args: maxrange (int): The upper range of integers to randomly choose. Returns: int: The randomly generated integer from :py:func:`random.randint`. Examples: >>> arcresthelper.common.random_int_generator(15) 9
def _add_updated_at_column(self, values): if not self._model.uses_timestamps(): return values column = self._model.get_updated_at_column() if "updated_at" not in values: values.update({column: self._model.fresh_timestamp_string()}) return values
Add the "updated_at" column to a dictionary of values. :param values: The values to update :type values: dict :return: The new dictionary of values :rtype: dict
def read_lock(self): me = self._current_thread() if me in self._pending_writers: raise RuntimeError("Writer %s can not acquire a read lock" " while waiting for the write lock" % me) with self._cond: while True: if self._writer is None or self._writer == me: try: self._readers[me] = self._readers[me] + 1 except KeyError: self._readers[me] = 1 break self._cond.wait() try: yield self finally: with self._cond: try: me_instances = self._readers[me] if me_instances > 1: self._readers[me] = me_instances - 1 else: self._readers.pop(me) except KeyError: pass self._cond.notify_all()
Context manager that grants a read lock. Will wait until no active or pending writers. Raises a ``RuntimeError`` if a pending writer tries to acquire a read lock.
async def _notify(self, message: BaseMessage, responder: Responder): for cb in self._listeners: coro = cb(message, responder, self.fsm_creates_task) if not self.fsm_creates_task: self._register = await coro
Notify all callbacks that a message was received.
def get_version(cls): cmd_pieces = [cls.tool, '--version'] process = Popen(cmd_pieces, stdout=PIPE, stderr=PIPE) out, err = process.communicate() if err: return '' else: return out.splitlines()[0].strip()
Return the version number of the tool.
def use_size(self): "Return the total used size, including children." if self._nodes is None: return self._use_size return sum(i.use_size() for i in self._nodes)
Return the total used size, including children.
def query(self, event, pk, ts=None): key = self._keygen(event, ts) pk_ts = self.r.zscore(key, pk) return int(pk_ts) if pk_ts else None
Query the last update timestamp of an event pk. You can pass a timestamp to only look for events later than that within the same namespace. :param event: the event name. :param pk: the pk value for query. :param ts: query event pk after ts, default to None which will query all span of current namespace.
def AddStatEntry(self, stat_entry, timestamp): if timestamp in self._stat_entries: message = ("Duplicated stat entry write for path '%s' of type '%s' at " "timestamp '%s'. Old: %s. New: %s.") message %= ("/".join(self._components), self._path_type, timestamp, self._stat_entries[timestamp], stat_entry) raise db.Error(message) if timestamp not in self._path_infos: path_info = rdf_objects.PathInfo( path_type=self._path_type, components=self._components, timestamp=timestamp, stat_entry=stat_entry) self.AddPathInfo(path_info) else: self._path_infos[timestamp].stat_entry = stat_entry
Registers stat entry at a given timestamp.
def initializable(self): return bool(lib.EnvSlotInitableP(self._env, self._cls, self._name))
True if the Slot is initializable.
def _range2cols(areas): cols = [] for rng in areas.split(","): if ":" in rng: rng = rng.split(":") cols.extend(lrange(_excel2num(rng[0]), _excel2num(rng[1]) + 1)) else: cols.append(_excel2num(rng)) return cols
Convert comma separated list of column names and ranges to indices. Parameters ---------- areas : str A string containing a sequence of column ranges (or areas). Returns ------- cols : list A list of 0-based column indices. Examples -------- >>> _range2cols('A:E') [0, 1, 2, 3, 4] >>> _range2cols('A,C,Z:AB') [0, 2, 25, 26, 27]
def _LoadAuditEvents(handlers, get_report_args, actions=None, token=None, transformers=None): if transformers is None: transformers = {} if data_store.RelationalDBEnabled(): entries = data_store.REL_DB.ReadAPIAuditEntries( min_timestamp=get_report_args.start_time, max_timestamp=get_report_args.start_time + get_report_args.duration, router_method_names=list(handlers.keys())) rows = [_EntryToEvent(entry, handlers, transformers) for entry in entries] else: entries = report_utils.GetAuditLogEntries( offset=get_report_args.duration, now=get_report_args.start_time + get_report_args.duration, token=token) if actions is None: actions = set(handlers.values()) rows = [entry for entry in entries if entry.action in actions] rows.sort(key=lambda row: row.timestamp, reverse=True) return rows
Returns AuditEvents for given handlers, actions, and timerange.
def pack_data(self, remaining_size): payload = self.part_struct.pack(self.locator_id, self.readoffset + 1, self.readlength, b' ') return 4, payload
Pack data. readoffset has to be increased by one, seems like HANA starts from 1, not zero.
def get_object_class(object_type, vendor_id=0): if _debug: get_object_class._debug("get_object_class %r vendor_id=%r", object_type, vendor_id) cls = registered_object_types.get((object_type, vendor_id)) if _debug: get_object_class._debug(" - direct lookup: %s", repr(cls)) if (not cls) and vendor_id: cls = registered_object_types.get((object_type, 0)) if _debug: get_object_class._debug(" - default lookup: %s", repr(cls)) return cls
Return the class associated with an object type.
def get_metrics(self, slug_list): keys = ['seconds', 'minutes', 'hours', 'day', 'week', 'month', 'year'] key_mapping = {gran: key for gran, key in zip(GRANULARITIES, keys)} keys = [key_mapping[gran] for gran in self._granularities()] results = [] for slug in slug_list: metrics = self.r.mget(*self._build_keys(slug)) if any(metrics): results.append((slug, dict(zip(keys, metrics)))) return results
Get the metrics for multiple slugs. Returns a list of two-tuples containing the metric slug and a dictionary like the one returned by ``get_metric``:: ( some-metric, { 'seconds': 0, 'minutes': 0, 'hours': 0, 'day': 0, 'week': 0, 'month': 0, 'year': 0 } )
def createissue(self, project_id, title, **kwargs): data = {'id': id, 'title': title} if kwargs: data.update(kwargs) request = requests.post( '{0}/{1}/issues'.format(self.projects_url, project_id), headers=self.headers, data=data, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout) if request.status_code == 201: return request.json() else: return False
Create a new issue :param project_id: project id :param title: title of the issue :return: dict with the issue created
def get_stored_files(self): method = 'GET' endpoint = '/rest/v1/storage/{}'.format(self.client.sauce_username) return self.client.request(method, endpoint)
Check which files are in your temporary storage.
def convert_to_utf8(string): if (isinstance(string, unicode)): return string.encode('utf-8') try: u = unicode(string, 'utf-8') except TypeError: return str(string) utf8 = u.encode('utf-8') return utf8
Convert string to UTF8
def mse(x, xhat): buf_ = x - xhat np.square(buf_, out=buf_) sum_ = np.sum(buf_) sum_ /= x.size return sum_
Calcualte mse between vector or matrix x and xhat
def db_url(self, var=DEFAULT_DATABASE_ENV, default=NOTSET, engine=None): return self.db_url_config(self.get_value(var, default=default), engine=engine)
Returns a config dictionary, defaulting to DATABASE_URL. :rtype: dict
def get_url_array(self): urlarray = [] for urlobjects in self.__json_object["base_urls"]: urlarray.append(urlobjects["url"]) return urlarray
Get all url-objects in an array :return sites (array): The sites from the JSON-file
def scan_and_connect(self, devnames, timeout=DEF_TIMEOUT, calibration=True): responses = self.scan_devices(devnames, timeout) for dev in devnames: if dev not in responses: logger.error('Failed to find device {} during scan'.format(dev)) return (False, []) return self.connect([responses.get_device(dev) for dev in devnames], calibration)
Scan for and then connect to a set of one or more SK8s. This method is intended to be a simple way to combine the steps of running a BLE scan, checking the results and connecting to one or more devices. When called, a scan is started for a period equal to `timeout`, and a list of devices is collected. If at any point during the scan all of the supplied devices are detected, the scan will be ended immediately. After the scan has completed, the method will only proceed to creating connections if the scan results contain all the specified devices. Args: devnames (list): a list of device names (1 or more) timeout (float): a time period in seconds to run the scanning process (will be terminated early if all devices in `devnames` are discovered) Returns: Returns the same results as :meth:`connect`.
def my_permissions(self, projectKey=None, projectId=None, issueKey=None, issueId=None, ): params = {} if projectKey is not None: params['projectKey'] = projectKey if projectId is not None: params['projectId'] = projectId if issueKey is not None: params['issueKey'] = issueKey if issueId is not None: params['issueId'] = issueId return self._get_json('mypermissions', params=params)
Get a dict of all available permissions on the server. :param projectKey: limit returned permissions to the specified project :type projectKey: Optional[str] :param projectId: limit returned permissions to the specified project :type projectId: Optional[str] :param issueKey: limit returned permissions to the specified issue :type issueKey: Optional[str] :param issueId: limit returned permissions to the specified issue :type issueId: Optional[str] :rtype: Dict[str, Dict[str, Dict[str, str]]]
def is_acquired(self): values = self.client.get(self.key) return six.b(self._uuid) in values
Check if the lock is acquired
def _table_exists(self): self.cursor.execute("SHOW TABLES") for table in self.cursor.fetchall(): if table[0].lower() == self.name.lower(): return True return False
Database-specific method to see if the table exists
def draw_label_path(context, width, height, arrow_height, distance_to_port, port_offset): c = context c.rel_move_to(0, port_offset) c.rel_line_to(0, distance_to_port) c.rel_line_to(-width / 2., arrow_height) c.rel_line_to(0, height - arrow_height) c.rel_line_to(width, 0) c.rel_line_to(0, -(height - arrow_height)) c.rel_line_to(-width / 2., -arrow_height) c.close_path()
Draws the path for an upright label :param context: The Cairo context :param float width: Width of the label :param float height: Height of the label :param float distance_to_port: Distance to the port related to the label :param float port_offset: Distance from the port center to its border :param bool draw_connection_to_port: Whether to draw a line from the tip of the label to the port
def _spikes_in_clusters(spike_clusters, clusters): if len(spike_clusters) == 0 or len(clusters) == 0: return np.array([], dtype=np.int) return np.nonzero(np.in1d(spike_clusters, clusters))[0]
Return the ids of all spikes belonging to the specified clusters.
def check_valid_ip_or_cidr(val, return_as_cidr=False):
    """Check that the value is a valid IPv4 address or a valid CIDR.

    Returns the specified value. If 'return_as_cidr' is set then the
    return value will always be in the form of a CIDR, even if a plain
    IP address was specified.

    :param val: String holding an IPv4 address or CIDR.
    :param return_as_cidr: If True, a plain address is converted to its
        /32 CIDR form ("0.0.0.0" becomes "0.0.0.0/0").
    :raises ArgsError: If the value is not a valid address or network.
    """
    is_ip = True
    if "/" in val:
        ip_check(val, netmask_expected=True)
        is_ip = False
    else:
        ip_check(val, netmask_expected=False)

    if return_as_cidr and is_ip:
        # Convert the plain address into CIDR notation.
        if val == "0.0.0.0":
            val = "0.0.0.0/0"
        else:
            val = "%s/32" % val

    try:
        # Bug fix: the bare `unicode` builtin only exists on Python 2 and
        # raises NameError on Python 3; `u"%s" % val` works on both.
        ipaddress.IPv4Network(u"%s" % val)
    except Exception as e:
        raise ArgsError("Not a valid network: %s" % str(e))

    return val
Checks that the value is a valid IP address or a valid CIDR. Returns the specified value. If 'return_as_cidr' is set then the return value will always be in the form of a CIDR, even if a plain IP address was specified.
async def generate_access_token(self, user):
    """Generate a signed JWT access token for the given user."""
    payload = await self._get_payload(user)
    token = jwt.encode(
        payload, self._get_secret(True), algorithm=self._get_algorithm()
    )
    return token.decode("utf-8")
Generate an access token for a given user.
def _find_dirs(metadata):
    """Collect all directory paths implied by the S3 bucket cache metadata.

    Supports trailing '/' keys (as created by the S3 console) as well as
    directories discovered in the path components of file keys. Directory
    sets for the same bucket are merged across metadata entries.
    """
    # bucket name -> set of directory prefixes; `order` preserves the order
    # in which buckets were first encountered.
    dirs_by_bucket = {}
    order = []
    for bucket_dict in metadata:
        for bucket_name, data in six.iteritems(bucket_dict):
            if bucket_name not in dirs_by_bucket:
                dirs_by_bucket[bucket_name] = set()
                order.append(bucket_name)
            dir_set = dirs_by_bucket[bucket_name]
            for entry in data:
                prefix = ''
                # Every path component except the final one is a directory.
                for part in entry['Key'].split('/')[:-1]:
                    prefix = prefix + part + '/'
                    dir_set.add(prefix)
    return [{name: list(dirs_by_bucket[name])} for name in order]
Looks for all the directories in the S3 bucket cache metadata. Supports trailing '/' keys (as created by S3 console) as well as directories discovered in the path of file keys.
def activate_conf_set(self, set_name):
    """Activate a configuration set by name.

    @raises NoSuchConfSetError
    """
    with self._mutex:
        if set_name not in self.conf_sets:
            raise exceptions.NoSuchConfSetError(set_name)
        self._conf.activate_configuration_set(set_name)
Activate a configuration set by name. @raises NoSuchConfSetError
def create(self, identity, role_sid=values.unset):
    """Create a new InviteInstance.

    :param unicode identity: The `identity` value that identifies the new resource's User
    :param unicode role_sid: The Role assigned to the new member

    :returns: Newly created InviteInstance
    :rtype: twilio.rest.chat.v2.service.channel.invite.InviteInstance
    """
    payload = self._version.create(
        'POST',
        self._uri,
        data=values.of({'Identity': identity, 'RoleSid': role_sid, }),
    )
    return InviteInstance(
        self._version,
        payload,
        service_sid=self._solution['service_sid'],
        channel_sid=self._solution['channel_sid'],
    )
Create a new InviteInstance :param unicode identity: The `identity` value that identifies the new resource's User :param unicode role_sid: The Role assigned to the new member :returns: Newly created InviteInstance :rtype: twilio.rest.chat.v2.service.channel.invite.InviteInstance
def authenticate(self, environ):
    """Validate the credentials in the request's Authorization header.

    Returns True if the digest response is valid, False if the header is
    missing or cannot be parsed.
    """
    try:
        auth = parse_dict_header(environ['HTTP_AUTHORIZATION'])
    except (KeyError, ValueError):
        return False

    return self.credentials_valid(
        auth['response'],
        environ['REQUEST_METHOD'],
        environ['httpauth.uri'],
        auth['nonce'],
        auth['Digest username'],
    )
Returns True if the credentials passed in the Authorization header are valid, False otherwise.
def get_firmware_image(self, image_id):
    """Get a firmware image with the provided image_id.

    :param str image_id: The firmware ID for the image to retrieve (Required)
    :return: FirmwareImage
    """
    update_api = self._get_api(update_service.DefaultApi)
    raw_image = update_api.firmware_image_retrieve(image_id)
    return FirmwareImage(raw_image)
Get a firmware image with provided image_id. :param str image_id: The firmware ID for the image to retrieve (Required) :return: FirmwareImage
def search(self, *filters, **kwargs):
    """Generate a temporary search report from the given filters and return
    the matching records as a list.

    Args:
        *filters (tuple): Zero or more filter tuples of
            (field_name, operator, field_value). All filters are AND'ed
            together.

    Keyword Args:
        keywords (list(str)): Keywords to use in the report search.
        limit (int): Maximum number of returned Records; defaults to
            `Report.default_limit`. Set to 0 to return all records.

    Returns:
        list of Record: Records returned from the search results.
    """
    # A randomly named temporary report backs the search; iterating it
    # pulls every page of results into a single list.
    temp_report = self._app.reports.build(
        'search-' + random_string(8),
        keywords=kwargs.pop('keywords', []),
        limit=kwargs.pop('limit', Report.default_limit),
    )
    for filter_tuple in filters:
        temp_report.filter(*filter_tuple)
    return list(temp_report)
Shortcut to generate a new temporary search report using provided filters and return the resulting records Args: *filters (tuple): Zero or more filter tuples of (field_name, operator, field_value) Keyword Args: keywords (list(str)): List of strings of keywords to use in report search limit (int): Set maximum number of returned Records, defaults to `Report.default_limit`. Set to 0 to return all records Notes: Uses a temporary Report instance with a random name to facilitate search. Records are normally paginated, but are returned as a single list here, potentially causing performance issues with large searches. All provided filters are AND'ed together Filter operators are available as constants in `swimlane.core.search` Examples: :: # Return records matching all filters with default limit from swimlane.core import search records = app.records.search( ('field_name', 'equals', 'field_value'), ('other_field', search.NOT_EQ, 'value') ) :: # Run keyword search with multiple keywords records = app.records.search(keywords=['example', 'test']) :: # Return all records from app records = app.records.search(limit=0) Returns: :class:`list` of :class:`~swimlane.core.resources.record.Record`: List of Record instances returned from the search results
def int_to_gematria(num, gershayim=True):
    """Convert an integer between 1 and 999 to Hebrew numerals.

    - set gershayim flag to False to omit gershayim
    """
    if num in specialnumbers['specials']:
        text = specialnumbers['specials'][num]
    else:
        digits = str(num)
        pieces = []
        for position, char in enumerate(digits):
            digit = int(char)
            if digit == 0:
                continue
            # Weight of this digit: 1, 10 or 100 depending on position.
            power = 10 ** (len(digits) - position - 1)
            pieces.append(specialnumbers['numerals'][power * digit])
        text = ''.join(pieces)
    return _add_gershayim(text) if gershayim else text
Convert an integer between 1 and 999 to Hebrew numerals.

- set gershayim flag to False to omit gershayim
def cmd_extract_email(infile, verbose, jsonout):
    """Extract email addresses from a file or stdin and print them.

    Prints one address per line, or a JSON array when ``jsonout`` is set.

    Example:

    \b
    $ cat /var/log/auth.log | habu.extract.email
    john@securetia.com
    raven@acmecorp.net
    nmarks@fimax.com
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    result = extract_email(infile.read())

    if jsonout:
        print(json.dumps(result, indent=4))
    else:
        print('\n'.join(result))
Extract email addresses from a file or stdin. Example: \b $ cat /var/log/auth.log | habu.extract.email john@securetia.com raven@acmecorp.net nmarks@fimax.com
def delete_snl(self, snl_ids):
    """Delete earlier submitted SNLs.

    .. note:: As of now, this MP REST feature is open only to a select
        group of users. Opening up submissions to all users is being
        planned for the future.

    Args:
        snl_ids: List of SNL ids.

    Raises:
        MPRestError
    """
    try:
        response = self.session.post(
            "{}/snl/delete".format(self.preamble),
            data={"ids": json.dumps(snl_ids)})

        if response.status_code in [200, 400]:
            resp = json.loads(response.text, cls=MontyDecoder)
            if not resp["valid_response"]:
                raise MPRestError(resp["error"])
            if resp.get("warning"):
                warnings.warn(resp["warning"])
            return resp

        raise MPRestError("REST error with status code {} and error {}"
                          .format(response.status_code, response.text))
    except Exception as ex:
        # Any failure (including the MPRestErrors above) is re-raised as
        # an MPRestError carrying the original message.
        raise MPRestError(str(ex))
Delete earlier submitted SNLs. .. note:: As of now, this MP REST feature is open only to a select group of users. Opening up submissions to all users is being planned for the future. Args: snl_ids: List of SNL ids. Raises: MPRestError
def create_area(self, area_uuid):
    """Create an Upload Area.

    :param str area_uuid: A RFC4122-compliant ID for the upload area
    :return: a dict of the form { "uri": "s3://<bucket_name>/<upload-area-id>/" }
    :rtype: dict
    :raises UploadApiException: if the Upload Area was not created
    """
    response = self._make_request(
        'post',
        path="/area/{id}".format(id=area_uuid),
        headers={'Api-Key': self.auth_token},
    )
    return response.json()
Create an Upload Area :param str area_uuid: A RFC4122-compliant ID for the upload area :return: a dict of the form { "uri": "s3://<bucket_name>/<upload-area-id>/" } :rtype: dict :raises UploadApiException: if the an Upload Area was not created
def load_rml(self, rml_name):
    """Load an RML mapping into memory, caching it on disk.

    On a cache miss the mapping is fetched from the RML triplestore and
    written to the cache directory; on a hit it is read from disk.

    args:
        rml_name(str): the name of the rml file
    """
    conn = CFG.rml_tstore
    cache_path = os.path.join(CFG.CACHE_DATA_PATH, 'rml_files', rml_name)
    if not os.path.exists(cache_path):
        results = get_graph(NSM.uri(getattr(NSM.kdr, rml_name), False), conn)
        with open(cache_path, "w") as file_obj:
            file_obj.write(json.dumps(results, indent=4))
    else:
        # Bug fix: the original `open(cache_path).read()` leaked the file
        # handle; use a context manager so it is always closed.
        with open(cache_path) as file_obj:
            results = json.loads(file_obj.read())
    self.rml[rml_name] = RdfDataset(results)
    return self.rml[rml_name]
loads an rml mapping into memory args: rml_name(str): the name of the rml file
def incoming_connections(self):
    """Return a list of this peer's incoming connections.

    NOTE(review): this uses ``takewhile``, which stops at the FIRST
    connection whose direction is not INCOMING. It therefore only
    returns *all* incoming connections if ``self.connections`` keeps
    incoming connections ordered first — confirm that invariant holds;
    otherwise ``filter`` may have been intended instead.
    """
    return list(
        takewhile(lambda c: c.direction == INCOMING, self.connections)
    )
Returns a list of all incoming connections for this peer.
def loads(cls, pickle_string):
    """Equivalent to pickle.loads except that the HoloViews tree is
    restored appropriately.

    The id offset is published on the class for the duration of the
    unpickling so restored objects pick up non-clashing ids, then reset.
    """
    cls.load_counter_offset = StoreOptions.id_offset()
    restored = pickle.loads(pickle_string)
    cls.load_counter_offset = None
    return restored
Equivalent to pickle.loads except that the HoloViews tree is restored appropriately.
def converge(self, playbook=None, **kwargs):
    """Execute ``ansible-playbook`` against the converge playbook unless
    specified otherwise and return the output string.

    :param playbook: An optional string containing an absolute path to a
        playbook.
    :param kwargs: Optional keyword arguments passed through to the
        playbook factory.
    :return: str
    """
    target = self.playbooks.converge if playbook is None else playbook
    return self._get_ansible_playbook(target, **kwargs).execute()
Executes ``ansible-playbook`` against the converge playbook unless specified otherwise and returns a string.

:param playbook: An optional string containing an absolute path to a playbook.
:param kwargs: Optional keyword arguments passed to the playbook factory.
:return: str
def fetch(self):
    """Run the request and fetch all paginated results.

    Repeatedly calls ``_fetch`` against the ``v2`` path, advancing the
    ``page`` GET parameter after each successful page until the server
    reports no next page, accumulating every page into a single
    :class:`pyspacegdn.Response`.

    NOTE(review): when a request fails (``resp.success`` falsy),
    ``has_next`` is never updated, so a persistently failing endpoint
    would loop forever — confirm whether ``_fetch`` guarantees eventual
    success or termination.
    """
    response = Response()
    has_next = True
    while has_next:
        resp = self._fetch(default_path='v2')
        results = None
        if resp.success:
            results = resp.data['results']
            # Advance to the next page for the following iteration.
            self.add_get_param('page', resp.data['pagination']['page'] + 1)
            has_next = resp.data['pagination']['has_next']
        # Failed pages are recorded with results=None.
        response.add(results, resp.status_code, resp.status_reason)
    return response
Run the request and fetch the results. This method will compile the request, send it to the SpaceGDN endpoint defined with the `SpaceGDN` object and wrap the results in a :class:`pyspacegdn.Response` object. Returns a :class:`pyspacegdn.Response` object.
def read_gain_from_frames(frame_filenames, gain_channel_name,
                          start_time, end_time):
    """Return the gain read from the given frame files.

    Reads ``gain_channel_name`` between ``start_time`` and ``end_time``
    and returns the first element of the resulting series.
    """
    series = frame.read_frame(frame_filenames, gain_channel_name,
                              start_time=start_time,
                              end_time=end_time)
    return series[0]
Returns the gain from the file.
def is_satisfied(self):
    """Return whether the double has been satisfied.

    Stubs are always satisfied, but mocks are only satisfied if they've
    been called as declared, or if the call is expected not to happen.

    :return: Whether or not the double is satisfied.
    :rtype: bool
    """
    counter = self._call_counter
    if not counter.has_correct_call_count():
        return False
    return counter.never() or self._is_satisfied
Returns a boolean indicating whether or not the double has been satisfied. Stubs are always satisfied, but mocks are only satisfied if they've been called as was declared, or if call is expected not to happen. :return: Whether or not the double is satisfied. :rtype: bool
def map_overlay_obs(self):
    """Return capabilities data for observation map overlays."""
    raw = self._query(LAYER, OBSERVATIONS, ALL, CAPABILITIES, "")
    return json.loads(raw.decode(errors="replace"))
Returns capabilities data for observation map overlays.
def _is_last_child(self, tagname, attributes=None): children = self.cur_node.getchildren() if children: result = self._is_node(tagname, attributes, node=children[-1]) return result return False
Check if last child of cur_node is tagname with attributes
def set_loglevel(self, level):
    """Set the minimum loglevel for all components.

    Stores the level on the instance and reconfigures the stdio logging
    handlers to use it as the default.

    Args:
        level (int): e.g. logging.DEBUG or logging.ERROR. See also
            https://docs.python.org/2/library/logging.html#logging-levels
    """
    self.log_level = level
    log_manager.config_stdio(default_level=level)
Set the minimum loglevel for all components Args: level (int): eg. logging.DEBUG or logging.ERROR. See also https://docs.python.org/2/library/logging.html#logging-levels
def get(self, key, default=None, as_int=False, setter=None):
    """Get a value from the cache, optionally computing it on a miss.

    :param str|unicode key: The cache key to get value for.
    :param default: Value to return if none found in cache.
    :param bool as_int: Return 64bit number instead of str.
    :param callable setter: Setter callable to automatically set cache
        value if not already cached. Accepts a key and returns a value
        that will be cached.
    :rtype: str|unicode|int
    """
    if as_int:
        cached = uwsgi.cache_num(key, self.name)
    else:
        cached = decode(uwsgi.cache_get(key, self.name))

    if cached is not None:
        return cached
    if setter is None:
        return default

    computed = setter(key)
    if computed is None:
        return default
    # Populate the cache so the next lookup hits.
    self.set(key, computed)
    return computed
Gets a value from the cache. :param str|unicode key: The cache key to get value for. :param default: Value to return if none found in cache. :param bool as_int: Return 64bit number instead of str. :param callable setter: Setter callable to automatically set cache value if not already cached. Required to accept a key and return a value that will be cached. :rtype: str|unicode|int
def get_student_enrollments(self):
    """Return an Enrollments object with the user's enrollments.

    Returns:
        Enrollments: object representing the student enrollments
    """
    enrollments_url = urljoin(self.base_url, self.enrollment_url)
    response = self.requester.get(enrollments_url)
    response.raise_for_status()
    return Enrollments(response.json())
Returns an Enrollments object with the user enrollments Returns: Enrollments: object representing the student enrollments
def rm_subsets(ctx, dataset, kwargs):
    """Remove the dataset's training-set and test-set folders if they exist."""
    parsed = parse_kwargs(kwargs)
    data(dataset, **ctx.obj).rm_subsets(**parsed)
Removes the dataset's training-set and test-set folders if they exist.
def cmd_part(self, connection, sender, target, payload):
    """Ask the bot to leave a channel.

    :raises ValueError: if no channel name is given in the payload.
    """
    if not payload:
        raise ValueError("No channel given")
    connection.part(payload)
Asks the bot to leave a channel
async def full_dispatch_request(
    self,
    request_context: Optional[RequestContext]=None,
) -> Response:
    """Add pre and post processing to the request dispatching.

    Fires the first-request hooks and the ``request_started`` signal,
    runs the before-request functions (which may short-circuit with a
    response), dispatches to the view otherwise, routes any exception
    through the user error handlers, and finalizes the response.

    Arguments:
        request_context: The request context, optional as Flask omits
            this argument.
    """
    await self.try_trigger_before_first_request_functions()
    await request_started.send(self)
    try:
        outcome = await self.preprocess_request(request_context)
        if outcome is None:
            outcome = await self.dispatch_request(request_context)
    except Exception as error:
        outcome = await self.handle_user_exception(error)
    return await self.finalize_request(outcome, request_context)
Adds pre and post processing to the request dispatching. Arguments: request_context: The request context, optional as Flask omits this argument.
def _dump(file_obj, options, out=sys.stdout):
    """Dump rows from ``file_obj`` to ``out`` using the given options.

    Rows are read via ``DictReader`` using ``options.col`` as the column
    spec and written either as tab-separated CSV or as JSON depending on
    ``options.format``. At most ``options.limit`` rows are written
    (-1 means unlimited); a CSV header is emitted unless
    ``options.no_headers`` is set.

    NOTE(review): for any format other than 'csv' or 'json' the writer
    stays None and ``writer.writerow`` would raise AttributeError on the
    first row — confirm the caller validates ``options.format``.
    """
    total_count = 0
    writer = None
    keys = None
    for row in DictReader(file_obj, options.col):
        # Column names are captured from the first row.
        if not keys:
            keys = row.keys()
        # The writer is created lazily so `keys` is known.
        if not writer:
            writer = csv.DictWriter(out, keys, delimiter=u'\t',
                                    quotechar=u'\'',
                                    quoting=csv.QUOTE_MINIMAL) \
                if options.format == 'csv' \
                else JsonWriter(out) if options.format == 'json' \
                else None
        # Header goes out once, before the first data row.
        if total_count == 0 and options.format == "csv" and not options.no_headers:
            writer.writeheader()
        if options.limit != -1 and total_count >= options.limit:
            return
        # Normalize bytes values to text before writing.
        row_unicode = {k: v.decode("utf-8") if isinstance(v, bytes) else v
                       for k, v in row.items()}
        writer.writerow(row_unicode)
        total_count += 1
Dump to fo with given options.
def select_functions(expr):
    """Create the function expressions usable in a selection.

    Builds the alternation of the timestamp-style functions (which take
    ``expr`` as an argument) and the zero-argument now/utcnow functions,
    all matched case-insensitively, under the result name "function".
    """
    body = Group(expr)
    alternatives = [
        function(name, body, caseless=True)
        for name in ("timestamp", "ts", "utctimestamp", "utcts")
    ]
    alternatives.append(function("now", caseless=True))
    alternatives.append(function("utcnow", caseless=True))
    combined = alternatives[0]
    for alternative in alternatives[1:]:
        combined = combined | alternative
    return Group(combined).setResultsName("function")
Create the function expressions for selection
def aborting_function():
    """Randomly abort-and-restart or complete.

    There is a 50% chance that this function raises AbortAndRestart and
    a 50% chance it returns normally, simulating a process that fails
    half the time and succeeds half the time.
    """
    import random

    logging.info('In aborting_function')

    should_restart = random.random() < .5
    if should_restart:
        from furious.errors import AbortAndRestart
        logging.info('Getting ready to restart')
        raise AbortAndRestart()

    logging.info('No longer restarting')
There is a 50% chance that this function will AbortAndRestart or complete successfully. The 50% chance simply represents a process that will fail half the time and succeed half the time.
def starts_with(self, other: 'Key') -> bool:
    """Check whether this key starts with the other key provided.

    Returns False if key_type, identity or group differ. For
    ``KeyType.TIMESTAMP`` returns True. For ``KeyType.DIMENSION`` does
    a prefix match between the two keys' dimensions.
    """
    if self.key_type != other.key_type:
        return False
    if self.identity != other.identity or self.group != other.group:
        return False

    if self.key_type == KeyType.TIMESTAMP:
        return True

    if self.key_type == KeyType.DIMENSION:
        prefix = other.dimensions
        if len(self.dimensions) < len(prefix):
            return False
        return self.dimensions[:len(prefix)] == prefix
Checks if this key starts with the other key provided. Returns False if key_type, identity or group are different. For `KeyType.TIMESTAMP` returns True. For `KeyType.DIMENSION` does prefix match between the two dimensions property.