code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def close(self):
    """Terminate the network connection to the remote end, if open.

    If no connection is open, this method does nothing.

    .. versionadded:: 2.0
    """
    if not self.is_connected:
        return
    self.client.close()
    # Tear down the SSH agent forwarding handler, if one was set up.
    if self.forward_agent and self._agent_handler is not None:
        self._agent_handler.close()
Terminate the network connection to the remote end, if open. If no connection is open, this method does nothing. .. versionadded:: 2.0
def attach(func, params): sig = inspect.signature(func) params = Projection(sig.parameters.keys(), params) return functools.partial(func, **params)
Given a function and a namespace of possible parameters, bind any params matching the signature of the function to that function.
def request_data(key, url, file, string_content, start, end, fix_apple): data = [] try: data += events(url=url, file=file, string_content=string_content, start=start, end=end, fix_apple=fix_apple) finally: update_events(key, data) request_finished(key)
Request data, update the local data cache and remove this Thread from the queue. :param key: key for data source to get result later :param url: iCal URL :param file: iCal file path :param string_content: iCal content as string :param start: start date :param end: end date :param fix_apple: fix known Apple iCal issues
def intern(obj, timeout): core.gear.timeout = timeout core.gear.pool.append(obj)
Tell untwisted to process an extern event loop.
def text(self, prompt, default=None): prompt = prompt if prompt is not None else 'Enter some text' prompt += " [{0}]: ".format(default) if default is not None else ': ' return self.input(curry(filter_text, default=default), prompt)
Prompts the user for some text, with optional default
def reshape(self, *shape): if prod(self.shape) != prod(shape): raise ValueError("Reshaping must leave the number of elements unchanged") if self.shape[-1] != shape[-1]: raise ValueError("Reshaping cannot change the size of the constituent series (last dimension)") if self.labels is not None: newlabels = self.labels.reshape(*shape[:-1]) else: newlabels = None return self._constructor(self.values.reshape(shape), labels=newlabels).__finalize__(self, noprop=('labels',))
Reshape the Series object Cannot change the last dimension. Parameters ---------- shape: one or more ints New shape
def apply_default_constraints(self): try: self.apply_secthresh(pipeline_weaksec(self.koi)) except NoWeakSecondaryError: logging.warning('No secondary eclipse threshold set for {}'.format(self.koi)) self.set_maxrad(default_r_exclusion(self.koi))
Applies default secthresh & exclusion radius constraints
def future_set_exception_unless_cancelled(
    future: "Union[futures.Future[_T], Future[_T]]", exc: BaseException
) -> None:
    """Set the given ``exc`` as the `Future`'s exception.

    If the Future is already cancelled, logs the exception instead of
    raising ``asyncio.InvalidStateError`` from ``set_exception()``. If
    this logging is not desired, the caller should check the Future's
    state explicitly and call ``Future.set_exception`` directly.

    .. versionadded:: 6.0
    """
    if future.cancelled():
        # Setting an exception on a cancelled Future would raise; log it
        # so the error is not silently lost.
        app_log.error("Exception after Future was cancelled", exc_info=exc)
    else:
        future.set_exception(exc)
Set the given ``exc`` as the `Future`'s exception. If the Future is already canceled, logs the exception instead. If this logging is not desired, the caller should explicitly check the state of the Future and call ``Future.set_exception`` instead of this wrapper. Avoids ``asyncio.InvalidStateError`` when calling ``set_exception()`` on a cancelled `asyncio.Future`. .. versionadded:: 6.0
def image_url(self): return construct_api_url(self.input, 'image', self.resolvers, False, self.get3d, False, **self.kwargs)
URL of a GIF image.
def drop(self, *column_or_columns): exclude = _varargs_labels_as_list(column_or_columns) return self.select([c for (i, c) in enumerate(self.labels) if i not in exclude and c not in exclude])
Return a Table with only columns other than selected label or labels. Args: ``column_or_columns`` (string or list of strings): The header names or indices of the columns to be dropped. ``column_or_columns`` must be an existing header name, or a valid column index. Returns: An instance of ``Table`` with given columns removed. >>> t = Table().with_columns( ... 'burgers', make_array('cheeseburger', 'hamburger', 'veggie burger'), ... 'prices', make_array(6, 5, 5), ... 'calories', make_array(743, 651, 582)) >>> t burgers | prices | calories cheeseburger | 6 | 743 hamburger | 5 | 651 veggie burger | 5 | 582 >>> t.drop('prices') burgers | calories cheeseburger | 743 hamburger | 651 veggie burger | 582 >>> t.drop(['burgers', 'calories']) prices 6 5 5 >>> t.drop('burgers', 'calories') prices 6 5 5 >>> t.drop([0, 2]) prices 6 5 5 >>> t.drop(0, 2) prices 6 5 5 >>> t.drop(1) burgers | calories cheeseburger | 743 hamburger | 651 veggie burger | 582
def get_comment_collection(cmt_id): query = recid = run_sql(query, (cmt_id,)) record_primary_collection = guess_primary_collection_of_a_record( recid[0][0]) return record_primary_collection
Extract the collection where the comment is written
def sign(self, payload):
    """Sign payload using the supplied authenticator.

    When no authenticator is configured, the payload is returned
    unchanged.
    """
    authenticator = self.authenticator
    return authenticator.signed(payload) if authenticator else payload
Sign payload using the supplied authenticator
def _clean_schema_fields(self, fields): fields_sorted = sorted(fields, key=lambda field: field["name"]) return [ {"name": field["name"], "type": field["type"]} for field in fields_sorted ]
Return a sanitized version of the schema for comparisons.
def missing_db_response(func): @wraps(func) def with_exception_handling(*args, **kwargs): try: return func(*args, **kwargs) except ConnectionError as error: return (dict(error='Unable to connect to Configuration Db.', error_message=str(error), links=dict(root='{}'.format(get_root_url()))), HTTPStatus.NOT_FOUND) return with_exception_handling
Decorator to check connection exceptions
def wait(self):
    """Wait for the request thread to finish.

    :returns: the stored error if one occurred, otherwise the result
    """
    self.thread.join()
    if self.error is None:
        return self.result
    return self.error
Wait for the request to finish and return the result or error when finished :returns: result or error :type: result type or Error
def languages(self, **kwargs): path = '/projects/%s/languages' % self.get_id() return self.manager.gitlab.http_get(path, **kwargs)
Get languages used in the project with percentage value. Args: **kwargs: Extra options to send to the server (e.g. sudo) Raises: GitlabAuthenticationError: If authentication is not correct GitlabGetError: If the server failed to perform the request
def pngout(ext_args): args = _PNGOUT_ARGS + [ext_args.old_filename, ext_args.new_filename] extern.run_ext(args) return _PNG_FORMAT
Run the external program pngout on the file.
def specs_to_ir(specs, version='0.1b1', debug=False, route_whitelist_filter=None):
    """Convert a collection of Stone specifications into the intermediate
    representation used by Stone backends.

    Pipeline: Lexer -> Parser -> Semantic Analyzer -> IR Generator, where
    the lexer is embedded in the parser and the semantic analyzer in the
    IR generator.

    :param specs: list of ``(path, text)`` tuples. ``path`` is only used
        to report the location of a bad spec; ``text`` is the contents of
        a spec (.stone) file.
    :raises InvalidSpec: when any spec fails to parse
    :returns: stone.ir.Api
    """
    parser_factory = ParserFactory(debug=debug)
    partial_asts = []
    for path, text in specs:
        logger.info('Parsing spec %s', path)
        parser = parser_factory.get_parser()
        if debug:
            parser.test_lexing(text)
        partial_ast = parser.parse(text, path)
        if parser.got_errors_parsing():
            # Surface only the first parse error to the user.
            msg, lineno, err_path = parser.get_errors()[0]
            raise InvalidSpec(msg, lineno, err_path)
        if not partial_ast:
            logger.info('Empty spec: %s', path)
        else:
            partial_asts.append(partial_ast)
    generator = IRGenerator(partial_asts, version, debug=debug,
                            route_whitelist_filter=route_whitelist_filter)
    return generator.generate_IR()
Converts a collection of Stone specifications into the intermediate representation used by Stone backends. The process is: Lexer -> Parser -> Semantic Analyzer -> IR Generator. The code is structured as: 1. Parser (Lexer embedded within) 2. IR Generator (Semantic Analyzer embedded within) :type specs: List[Tuple[path: str, text: str]] :param specs: `path` is never accessed and is only used to report the location of a bad spec to the user. `spec` is the text contents of a spec (.stone) file. :raises: InvalidSpec :returns: stone.ir.Api
def _alter_umask(self): if self.umask is None: yield else: prev_umask = os.umask(self.umask) try: yield finally: os.umask(prev_umask)
Temporarily alter umask to custom setting, if applicable
def remove_column(table, remove_index):
    """Remove the column at ``remove_index`` from every row of ``table``.

    The table is modified in place (each row is replaced by a copy that
    omits the removed column) and also returned for convenience.
    """
    for i, row in enumerate(table):
        table[i] = [cell for j, cell in enumerate(row) if j != remove_index]
    return table
Removes the specified column from the table.
def setTargetRange(self, targetRange, padding=None): if self.axisNumber == X_AXIS: xRange, yRange = targetRange, None else: xRange, yRange = None, targetRange self.viewBox.setRange(xRange = xRange, yRange=yRange, padding=padding, update=False, disableAutoRange=False)
Sets the range of the target.
def iter_items(self, depth: int = 1): if depth is not None and not isinstance(depth, int): raise TypeError def itor(root, d): if d is not None: d -= 1 if d < 0: return for name in os.listdir(root): path = os.path.join(root, name) node = NodeInfo.from_path(path) yield node if isinstance(node, DirectoryInfo): yield from itor(path, d) yield from itor(self._path, depth)
get items from directory.
def __check_right_side_conflict(x, y, dfs_data):
    """Check whether the frond xy will conflict with a frond on the
    right side of the embedding.
    """
    # The right-side frond currently at the top of the right stack.
    right_frond = dfs_data['RF'][dfs_data['FG']['r']]
    w, z = right_frond
    return __check_conflict_fronds(x, y, w, z, dfs_data)
Checks to see if the frond xy will conflict with a frond on the right side of the embedding.
def maybe_coroutine(obj): if six.PY3 and asyncio.iscoroutine(obj): return defer.ensureDeferred(obj) return obj
If 'obj' is a coroutine and we're using Python3, wrap it in ensureDeferred. Otherwise return the original object. (This is to insert in all callback chains from user code, in case that user code is Python3 and used 'async def')
def get_tags(self, use_cached=True):
    """Get the list of tags for this device.

    Tags are stored as a comma-separated string under the ``dpTags``
    key of the device JSON; empty entries are dropped.
    """
    raw = self.get_device_json(use_cached).get("dpTags")
    if not raw:
        return []
    return [tag for tag in raw.split(",") if tag]
Get the list of tags for this device
def threeD_gridplot(nodes, **kwargs): from mpl_toolkits.mplot3d import Axes3D import matplotlib.pyplot as plt lats = [] longs = [] depths = [] for node in nodes: lats.append(float(node[0])) longs.append(float(node[1])) depths.append(float(node[2])) fig = plt.figure() ax = fig.add_subplot(111, projection='3d') ax.scatter(lats, longs, depths) ax.set_ylabel("Latitude (deg)") ax.set_xlabel("Longitude (deg)") ax.set_zlabel("Depth(km)") ax.get_xaxis().get_major_formatter().set_scientific(False) ax.get_yaxis().get_major_formatter().set_scientific(False) fig = _finalise_figure(fig=fig, **kwargs) return fig
Plot in a series of grid points in 3D. :type nodes: list :param nodes: List of tuples of the form (lat, long, depth) :returns: :class:`matplotlib.figure.Figure` .. rubric:: Example >>> from eqcorrscan.utils.plotting import threeD_gridplot >>> nodes = [(-43.5, 170.4, 4), (-43.3, 170.8, 12), (-43.4, 170.3, 8)] >>> threeD_gridplot(nodes=nodes) # doctest: +SKIP .. plot:: from eqcorrscan.utils.plotting import threeD_gridplot nodes = [(-43.5, 170.4, 4), (-43.3, 170.8, 12), (-43.4, 170.3, 8)] threeD_gridplot(nodes=nodes)
def temperature(self):
    """Read the sensor and return the temperature in degrees Celsius.

    Implements the integer temperature-compensation formula from the
    BMP180 datasheet using the chip's calibration constants.
    """
    raw = self.get_raw_temp()
    cal = self.cal
    x1 = ((raw - cal['AC6']) * cal['AC5']) >> 15
    x2 = (cal['MC'] << 11) // (x1 + cal['MD'])
    b5 = x1 + x2
    # b5 is in units of 0.1 degC after the +8 rounding and >>4 scaling.
    return ((b5 + 8) >> 4) / 10
Get the temperature from the sensor. :returns: The temperature in degrees Celsius as a float :example: >>> sensor = BMP180(gw) >>> sensor.load_calibration() >>> sensor.temperature() 21.4
def _save_subimports(self, code, top_level_dependencies): for x in top_level_dependencies: if isinstance(x, types.ModuleType) and hasattr(x, '__package__') and x.__package__: prefix = x.__name__ + '.' for name, module in sys.modules.items(): if name is not None and name.startswith(prefix): tokens = set(name[len(prefix):].split('.')) if not tokens - set(code.co_names): self.save(module) self.write(pickle.POP)
Ensure de-pickler imports any package child-modules that are needed by the function
def list_adb_devices_by_usb_id(): out = adb.AdbProxy().devices(['-l']) clean_lines = new_str(out, 'utf-8').strip().split('\n') results = [] for line in clean_lines: tokens = line.strip().split() if len(tokens) > 2 and tokens[1] == 'device': results.append(tokens[2]) return results
List the usb id of all android devices connected to the computer that are detected by adb. Returns: A list of strings that are android device usb ids. Empty if there's none.
def check_tx(self, raw_transaction): self.abort_if_abci_chain_is_not_synced() logger.debug('check_tx: %s', raw_transaction) transaction = decode_transaction(raw_transaction) if self.bigchaindb.is_valid_transaction(transaction): logger.debug('check_tx: VALID') return ResponseCheckTx(code=CodeTypeOk) else: logger.debug('check_tx: INVALID') return ResponseCheckTx(code=CodeTypeError)
Validate the transaction before entry into the mempool. Args: raw_transaction: a raw string (in bytes) transaction.
def set_impact_state(self): cls = self.__class__ if cls.enable_problem_impacts_states_change: logger.debug("%s is impacted and goes UNREACHABLE", self) self.state_before_impact = self.state self.state_id_before_impact = self.state_id self.state_changed_since_impact = False self.set_unreachable()
We just got an impact, so we go unreachable. But only if we enable this state change in the conf. :return: None
def _encode_dask_array(values, uniques=None, encode=False, onehot_dtype=None): if uniques is None: if encode and onehot_dtype: raise ValueError("Cannot use 'encode` and 'onehot_dtype' simultaneously.") if encode: uniques, encoded = da.unique(values, return_inverse=True) return uniques, encoded else: return da.unique(values) if encode: if onehot_dtype: dtype = onehot_dtype new_axis = 1 chunks = values.chunks + (len(uniques),) else: dtype = np.dtype("int") new_axis = None chunks = values.chunks return ( uniques, values.map_blocks( _check_and_search_block, uniques, onehot_dtype=onehot_dtype, dtype=dtype, new_axis=new_axis, chunks=chunks, ), ) else: return uniques
One-hot or label encode a dask array. Parameters ---------- values : da.Array, shape [n_samples,] unqiques : np.ndarray, shape [n_uniques,] encode : bool, default False Whether to encode the values (True) or just discover the uniques. onehot_dtype : np.dtype, optional Optional dtype for the resulting one-hot encoded array. This changes the shape, dtype, and underlying storage of the returned dask array. ======= ================= ========================= thing onehot_dtype=None onehot_dtype=onehot_dtype ======= ================= ========================= shape (n_samples,) (n_samples, len(uniques)) dtype np.intp onehot_dtype storage np.ndarray scipy.sparse.csr_matrix ======= ================= ========================= Returns ------- uniques : ndarray The discovered uniques (uniques=None) or just `uniques` encoded : da.Array, optional The encoded values. Only returend when ``encode=True``.
def delete_cached_branch_info(self): if os.path.isfile(constants.cached_branch_info): logger.debug('Deleting cached branch_info file...') os.remove(constants.cached_branch_info) else: logger.debug('Cached branch_info file does not exist.')
Deletes cached branch_info file
def connect(self, signal, **kwargs):
    """Connect a specific signal type to this receiver.

    The connection is also recorded in ``self.connections`` so it can
    be inspected or torn down later.
    """
    record = (signal, kwargs)
    signal.connect(self, **kwargs)
    self.connections.append(record)
Connect a specific signal type to this receiver.
def _get_debug_context(debug_port, debug_args, debugger_path): if debug_port and debugger_path: try: debugger = Path(debugger_path).resolve(strict=True) except OSError as error: if error.errno == errno.ENOENT: raise DebugContextException("'{}' could not be found.".format(debugger_path)) else: raise error if not debugger.is_dir(): raise DebugContextException("'{}' should be a directory with the debugger in it.".format(debugger_path)) debugger_path = str(debugger) return DebugContext(debug_port=debug_port, debug_args=debug_args, debugger_path=debugger_path)
Creates a DebugContext if the InvokeContext is in a debugging mode Parameters ---------- debug_port int Port to bind the debugger to debug_args str Additional arguments passed to the debugger debugger_path str Path to the directory of the debugger to mount on Docker Returns ------- samcli.commands.local.lib.debug_context.DebugContext Object representing the DebugContext Raises ------ samcli.commands.local.cli_common.user_exceptions.DebugContext When the debugger_path is not valid
def _add_event_in_element(self, element, event): if not self.main_script_added: self._generate_main_scripts() if self.script_list is not None: self.id_generator.generate_id(element) self.script_list.append_text( event + "Elements.push('" + element.get_attribute('id') + "');" )
Add a type of event in element. :param element: The element. :type element: hatemile.util.html.htmldomelement.HTMLDOMElement :param event: The type of event. :type event: str
def forward(self, actions, batch_info): while len(self.processes) < actions.shape[0]: len_action_space = self.action_space.shape[-1] self.processes.append( OrnsteinUhlenbeckNoiseProcess( np.zeros(len_action_space), float(self.std_dev) * np.ones(len_action_space) ) ) noise = torch.from_numpy(np.stack([x() for x in self.processes])).float().to(actions.device) return torch.min(torch.max(actions + noise, self.low_tensor), self.high_tensor)
Return model step after applying noise
def register_token(self, token_class, regexp=None): if regexp is None: regexp = token_class.regexp self.tokens.register(token_class, regexp)
Register a token class. Args: token_class (tdparser.Token): the token class to register regexp (optional str): the regexp for elements of that token. Defaults to the `regexp` attribute of the token class.
def plotnoisecum(noisepkl, fluxscale=1, plot_width=450, plot_height=400): noises = read_noise(noisepkl) imnoise = np.sort(fluxscale*noises[4]) frac = [float(count)/len(imnoise) for count in reversed(range(1, len(imnoise)+1))] noiseplot = Figure(plot_width=plot_width, plot_height=plot_height, toolbar_location="above", x_axis_label='Image noise (Jy; cal scaling {0:.3})'.format(fluxscale), y_axis_label='Cumulative fraction', tools='pan, wheel_zoom, reset') noiseplot.line(imnoise, frac) if fluxscale != 1: return noiseplot, imnoise else: return noiseplot
Merged noise pkl converted to interactive cumulative histogram noisepkl is standard noise pickle file. fluxscale is scaling applied by gain calibrator. telcal solutions have fluxscale=1. also returns corrected imnoise values if non-unity fluxscale provided
def monitor(self, listener): for line in self._stream(): self._record.append(line) if self.verbose: self.out.blather(line) if listener(line) is self.MONITOR_STOP: return
Relay the stream to listener until told to stop.
def ApplyPluginToTypedCollection(plugin, type_names, fetch_fn): for chunk in plugin.Start(): yield chunk def GetValues(tn): for v in fetch_fn(tn): yield v for type_name in sorted(type_names): stored_cls = rdfvalue.RDFValue.classes[type_name] for chunk in plugin.ProcessValues(stored_cls, functools.partial(GetValues, type_name)): yield chunk for chunk in plugin.Finish(): yield chunk
Applies instant output plugin to a collection of results. Args: plugin: InstantOutputPlugin instance. type_names: List of type names (strings) to be processed. fetch_fn: Function that takes a type name as an argument and returns available items (FlowResult) corresponding to this type. Items are returned as a generator Yields: Bytes chunks, as generated by the plugin.
def indent(self, value):
    """Validate and set the indent width.

    Accepts either a whitespace-only string (used verbatim) or a
    nonnegative integer number of spaces.

    :raises ValueError: if a string contains non-whitespace characters,
        or an integer is negative
    :raises TypeError: for any other type
    """
    if isinstance(value, str):
        if value and not value.isspace():
            raise ValueError('String indentation can only contain '
                             'whitespace.')
        self._indent = value
    elif isinstance(value, int):
        if value < 0:
            raise ValueError('Indentation spacing must be nonnegative.')
        self._indent = value * ' '
    else:
        raise TypeError('Indentation must be specified by string or space '
                        'width.')
Validate and set the indent width.
def add_layer_item(self, layer): if not layer.is_draft_version: raise ValueError("Layer isn't a draft version") self.items.append(layer.latest_version)
Adds a Layer to the publish group.
def print_status(self):
    """Log the current tweet receive rate (tweets/sec) and reset the
    counter and timestamp for the next interval.
    """
    count = self.received
    now = time.time()
    elapsed = now - self.since
    self.since = now
    self.received = 0
    # Guard against a zero-length interval to avoid division by zero.
    if elapsed > 0:
        logger.info("Receiving tweets at %s tps", count / elapsed)
Print out the current tweet rate and reset the counter
def save(self): result = yield gen.Task(RedisSession._redis_client.set, self._key, self.dumps()) LOGGER.debug('Saved session %s (%r)', self.id, result) raise gen.Return(result)
Store the session data in redis :param method callback: The callback method to invoke when done
def this(func, cache_obj=CACHE_OBJ, key=None, ttl=None, *args, **kwargs):
    """Store the output of ``func`` in the cache and return the cached
    value on later invocations without rerunning it.

    By default the cache key is derived from the function name plus its
    arguments, so distinct invocations are cached separately. Supplying
    an explicit ``key`` makes all invocations share one cached value —
    use with care.

    :param func: (expensive?) function whose result should be cached
    :param cache_obj: cache object to operate on (defaults to the
        global cache)
    :param key: optional explicit key to store the value under
    :param ttl: optional expiry to apply to the cached value
    :param args: positional arguments passed through to ``func``
    :param kwargs: keyword arguments passed through to ``func``
    """
    cache_key = key or (func.__name__ + str(args) + str(kwargs))
    if cache_obj.has(cache_key):
        return cache_obj.get(cache_key)
    value = func(*args, **kwargs)
    cache_obj.upsert(cache_key, value, ttl)
    return value
Store the output from the decorated function in the cache and pull it from the cache on future invocations without rerunning. Normally, the value will be stored under a key which takes into account all of the parameters that are passed into it, thereby caching different invocations separately. If you specify a key, all invocations will be cached under that key, and different invocations will return the same value, which may be unexpected. So, be careful! If the cache is disabled, the decorated function will just run normally. Unlike the other functions in this module, you must pass a custom cache_obj to this() in order to operate on the non-global cache. This is because of wonky behavior when using decorator.decorator from a class method. :param func: (expensive?) function to decorate :param cache_obj: cache to a specific object (for use from the cache object itself) :param key: optional key to store the value under :param ttl: optional expiry to apply to the cached value :param *args: arg tuple to pass to the decorated function :param **kwargs: kwarg dict to pass to the decorated function
def group_required(groups=None): def wrapper(func): @wraps(func) def wrapped(*args, **kwargs): if g.user is None: return redirect( url_for(current_app.config['LDAP_LOGIN_VIEW'], next=request.path)) match = [group for group in groups if group in g.ldap_groups] if not match: abort(401) return func(*args, **kwargs) return wrapped return wrapper
When applied to a view function, any unauthenticated requests will be redirected to the view named in LDAP_LOGIN_VIEW. Authenticated requests are only permitted if they belong to one of the listed groups. The login view is responsible for asking for credentials, checking them, and setting ``flask.g.user`` to the name of the authenticated user and ``flask.g.ldap_groups`` to the authenticated user's groups if the credentials are acceptable. :param list groups: List of groups that should be able to access the view function.
def version_info(self):
    """Return API version information for the HMC as ``(major, minor)``.

    The HMC is queried lazily on first use; this operation does not
    require authentication.

    :raises: :exc:`~zhmcclient.HTTPError`,
        :exc:`~zhmcclient.ParseError`,
        :exc:`~zhmcclient.ConnectionError`
    """
    if self._api_version is None:
        self.query_api_version()
    version = self._api_version
    return (version['api-major-version'], version['api-minor-version'])
Returns API version information for the HMC. This operation does not require authentication. Returns: :term:`HMC API version`: The HMC API version supported by the HMC. Raises: :exc:`~zhmcclient.HTTPError` :exc:`~zhmcclient.ParseError` :exc:`~zhmcclient.ConnectionError`
def get_did_providers(self, did): register_values = self.contract_concise.getDIDRegister(did) if register_values and len(register_values) == 5: return DIDRegisterValues(*register_values).providers return None
Return the list of providers registered on-chain for the given did. :param did: hex str the id of an asset on-chain :return: list of addresses, or None if the asset has no registered providers
def compile(self, db):
    """Build and return the SQL expression string.

    :param db: the database instance, used to quote the alias
    """
    if not self.alias:
        return self.expression
    return self.expression + ' AS ' + db.quote_column(self.alias)
Building the sql expression :param db: the database instance
def _is_readable(self, obj): try: read = getattr(obj, 'read') except AttributeError: return False else: return is_method(read, max_arity=1)
Check if the argument is a readable file-like object.
def text(self, text):
    """Process a pyth text element and return the formatted reST string.

    Joins the element's content runs and wraps the result in the inline
    markup for the first matching property (url, bold, italic, sub,
    super, in that order); plain text is returned unchanged.
    """
    ret = u"".join(text.content)
    if 'url' in text.properties:
        return u"`%s`_" % ret
    if 'bold' in text.properties:
        return u"**%s**" % ret
    if 'italic' in text.properties:
        return u"*%s*" % ret
    if 'sub' in text.properties:
        # FIX: the original used Python-2-only ``ur"..."`` literals,
        # which are a syntax error on Python 3; ``r"..."`` produces the
        # byte-identical string here.
        return r"\ :sub:`%s`\ " % ret
    if 'super' in text.properties:
        return r"\ :sup:`%s`\ " % ret
    return ret
process a pyth text and return the formatted string
def threadsafe_call(self, fn, *args, **kwargs): def handler(): try: fn(*args, **kwargs) except Exception: warn("error caught while excecuting async callback\n%s\n", format_exc()) def greenlet_wrapper(): gr = greenlet.greenlet(handler) gr.switch() self._async_session.threadsafe_call(greenlet_wrapper)
Wrapper around `AsyncSession.threadsafe_call`.
def soldOutForRole(event, role): if not isinstance(event, Event) or not isinstance(role, DanceRole): return None return event.soldOutForRole(role)
This tag allows one to determine whether any event is sold out for any particular role.
def connectRelay(self): self.protocol = self.connector.buildProtocol(None) self.connected = True self.protocol.makeConnection(self)
Builds the target protocol and connects it to the relay transport.
def broadcast(self, fcn, args): results = self.map(_lockstep_fcn, [(len(self), fcn, args)] * len(self)) _numdone.value = 0 return results
Do a function call on every worker. Parameters ---------- fcn: function Function to call. args: tuple The arguments for Pool.map
def dispatch(self, request, *args, **kwargs): if request.GET.get(self.xeditable_fieldname_param): return self.get_ajax_xeditable_choices(request, *args, **kwargs) return super(XEditableMixin, self).dispatch(request, *args, **kwargs)
Introduces the ``ensure_csrf_cookie`` decorator and handles xeditable choices ajax.
def get_env_str(env):
    """Render ``env`` as shell-style ``KEY='value'`` assignments.

    :param env: mapping of environment variable names to values
    :returns: a single space-separated string
    """
    parts = ["{0}='{1}'".format(name, value) for name, value in env.items()]
    return ' '.join(parts)
Gets a string representation of a dict as though it contained environment variable values.
def tzname(self, dt):
    """datetime -> string name of the local time zone at that moment."""
    timestamp = _mktime((dt.year, dt.month, dt.day, dt.hour, dt.minute,
                         dt.second, dt.weekday(), 0, -1))
    tt = _localtime(timestamp)
    # tm_isdst > 0 selects the DST name from the (std, dst) pair.
    return _time.tzname[tt.tm_isdst > 0]
datetime -> string name of time zone.
def upload_GitHub_deploy_key(deploy_repo, ssh_key, *, read_only=False, title="Doctr deploy key for pushing to gh-pages from Travis", **login_kwargs): DEPLOY_KEY_URL = "https://api.github.com/repos/{deploy_repo}/keys".format(deploy_repo=deploy_repo) data = { "title": title, "key": ssh_key, "read_only": read_only, } return GitHub_post(data, DEPLOY_KEY_URL, **login_kwargs)
Uploads a GitHub deploy key to ``deploy_repo``. If ``read_only=True``, the deploy_key will not be able to write to the repo.
def load_plugin_classes(classes, category=None, overwrite=False): load_errors = [] for klass in classes: for pcat, pinterface in _plugins_interface.items(): if category is not None and not pcat == category: continue if all([hasattr(klass, attr) for attr in pinterface]): if klass.plugin_name in _all_plugins[pcat] and not overwrite: err = '{0} is already set for {1}'.format( klass.plugin_name, pcat) load_errors.append((klass.__name__, '{}'.format(err))) continue _all_plugins[pcat][klass.plugin_name] = klass() else: load_errors.append(( klass.__name__, 'does not match {} interface: {}'.format(pcat, pinterface) )) return load_errors
load plugins from class objects Parameters ---------- classes: list list of classes category : None or str if str, apply for single plugin category overwrite : bool if True, allow existing plugins to be overwritten Examples -------- >>> from pprint import pprint >>> pprint(view_plugins()) {'decoders': {}, 'encoders': {}, 'parsers': {}} >>> class DecoderPlugin(object): ... plugin_name = 'example' ... plugin_descript = 'a decoder for dicts containing _example_ key' ... dict_signature = ('_example_',) ... >>> errors = load_plugin_classes([DecoderPlugin]) >>> pprint(view_plugins()) {'decoders': {'example': 'a decoder for dicts containing _example_ key'}, 'encoders': {}, 'parsers': {}} >>> unload_all_plugins()
def get_version(self, service_id, version_number): content = self._fetch("/service/%s/version/%d" % (service_id, version_number)) return FastlyVersion(self, content)
Get the version for a particular service.
def compute_statistic(self, layout): data = self.data if not len(data): return type(data)() params = self.stat.setup_params(data) data = self.stat.use_defaults(data) data = self.stat.setup_data(data) data = self.stat.compute_layer(data, params, layout) self.data = data
Compute & return statistics for this layer
def load(file): with open(file, 'r') as f: contents = f.read() lambder.load_events(contents)
Load events from a json file
def get_special_location(self, special_location=0): try: return(self.process.GetSpecialLocation(special_location)) except Exception as e: print(e) print("Could not retreive special location")
SpecialLocation 0 - Gets the path to the Backup Folders folder location. 1 - Gets the path to the Unfiled Notes folder location. 2 - Gets the path to the Default Notebook folder location.
def add(self, type, orig, replace): ret = libxml2mod.xmlACatalogAdd(self._o, type, orig, replace) return ret
Add an entry in the catalog, it may overwrite existing but different entries.
def clear_knowledge_category(self): if (self.get_knowledge_category_metadata().is_read_only() or self.get_knowledge_category_metadata().is_required()): raise errors.NoAccess() self._my_map['knowledgeCategoryId'] = self._knowledge_category_default
Clears the knowledge category. raise: NoAccess - ``Metadata.isRequired()`` or ``Metadata.isReadOnly()`` is ``true`` *compliance: mandatory -- This method must be implemented.*
def _get_editable(self, request): try: editable_settings = self._editable_caches[request] except KeyError: editable_settings = self._editable_caches[request] = self._load() return editable_settings
Get the dictionary of editable settings for a given request. Settings are fetched from the database once per request and then stored in ``_editable_caches``, a WeakKeyDictionary that will automatically discard each entry when no more references to the request exist.
def render(self, rect, data): num_elements = len(self.elements) col_width = (rect.w-self.margin*(num_elements-1)) / float(num_elements) x = rect.x for element in self.elements: if element is not None: element.render(datatypes.Rectangle( x, rect.y, col_width, rect.h ), data) x += col_width + self.margin
Draws the columns.
def set_brightness(self, brightness): brightness = min([1.0, max([brightness, 0.0])]) self.state.brightness = brightness self._repeat_last_frame() sequence_number = self.zmq_publisher.publish_brightness(brightness) logging.debug("Set brightness to {brightPercent:05.1f}%".format(brightPercent=brightness*100)) return (True, sequence_number, "OK")
set general brightness in range 0...1
def attach_issue(resource_id, table, user_id): data = schemas.issue.post(flask.request.json) issue = _get_or_create_issue(data) if table.name == 'jobs': join_table = models.JOIN_JOBS_ISSUES else: join_table = models.JOIN_COMPONENTS_ISSUES key = '%s_id' % table.name[0:-1] query = join_table.insert().values({ 'user_id': user_id, 'issue_id': issue['id'], key: resource_id }) try: flask.g.db_conn.execute(query) except sa_exc.IntegrityError: raise dci_exc.DCICreationConflict(join_table.name, '%s, issue_id' % key) result = json.dumps({'issue': dict(issue)}) return flask.Response(result, 201, content_type='application/json')
Attach an issue to a specific job.
def publish_properties(self): publish = self.publish publish(b"$homie", b"3.0.1") publish(b"$name", self.settings.DEVICE_NAME) publish(b"$state", b"init") publish(b"$fw/name", b"Microhomie") publish(b"$fw/version", __version__) publish(b"$implementation", bytes(sys.platform, "utf-8")) publish(b"$localip", utils.get_local_ip()) publish(b"$mac", utils.get_local_mac()) publish(b"$stats", b"interval,uptime,freeheap") publish(b"$stats/interval", self.stats_interval) publish(b"$nodes", b",".join(self.node_ids)) for node in self.nodes: try: for propertie in node.get_properties(): if propertie: publish(*propertie) except NotImplementedError: raise except Exception as error: self.node_error(node, error)
publish device and node properties
def _get_fields_info(self, cols, model_schema, filter_rel_fields, **kwargs):
    """Collect field detail info for a list of columns.

    For each column, optional rison pagination arguments are looked up in
    ``kwargs`` and forwarded to ``_get_field_info`` together with the
    marshmallow field and any related-field filters.

    :param cols: list of column names to describe
    :param model_schema: marshmallow model schema
    :param filter_rel_fields: expects add_query_rel_fields or
        edit_query_rel_fields
    :param kwargs: rison arguments, possibly holding per-column pagination
    :return: list with one field-detail structure per column
    """
    infos = []
    for name in cols:
        pagination = kwargs.get(name, {})
        page = pagination.get(API_PAGE_INDEX_RIS_KEY, None) if pagination else None
        page_size = pagination.get(API_PAGE_SIZE_RIS_KEY, None) if pagination else None
        infos.append(
            self._get_field_info(
                model_schema.fields[name],
                filter_rel_fields.get(name, []),
                page=page,
                page_size=page_size,
            )
        )
    return infos
Returns a list with field details from a marshmallow schema :param cols: list of columns to show info for :param model_schema: Marshmallow model schema :param filter_rel_fields: expects add_query_rel_fields or edit_query_rel_fields :param kwargs: Receives all rison arguments for pagination :return: list with all fields details
def resume(self, obj):
    """Load a saved Adventure game from *obj*.

    *obj* may be a filename or an already-open binary file object. The
    payload is a zlib-compressed pickle; after loading, the saved RNG
    state is rehydrated into a fresh ``random.Random`` instance.

    NOTE(review): ``pickle.loads`` can execute arbitrary code — only
    resume save files from trusted sources.

    :returns: the restored game object.
    """
    opened_here = isinstance(obj, str)
    source = open(obj, 'rb') if opened_here else obj
    game = pickle.loads(zlib.decompress(source.read()))
    if opened_here:
        source.close()
    # Rebuild the RNG from the persisted state, then drop the raw state.
    rng = random.Random()
    rng.setstate(game.random_state)
    game.random_generator = rng
    del game.random_state
    return game
Returns an Adventure game saved to the given file.
def get_next_slip(raw):
    """Extract the next SLIP packet from *raw*.

    :param raw: byte stream that may start with a SLIP frame.
    :returns: ``(packet, remainder)`` — the decoded packet plus whatever
        raw data follows it, or ``(None, raw)`` when *raw* does not start
        with a SLIP frame.
    """
    if not is_slip(raw):
        return None, raw
    # Position of the closing SLIP_END, relative to raw[1:].
    end = raw[1:].index(SLIP_END)
    packet = decode(raw[1: end + 1])
    # Skip the leading END, the payload, and the trailing END.
    remainder = raw[end + 2:]
    return packet, remainder
Get the next SLIP packet from raw data. Returns the extracted packet together with the remaining raw data stream.
def draw(self, labels):
    """Draw one silhouette wedge per cluster plus the average-score line.

    Parameters
    ----------
    labels : array-like
        Cluster label for each silhouette sample, usually computed with
        ``predict()``. Labels are not stored on the visualizer so that
        the figure can be redrawn with new data.

    Returns
    -------
    The matplotlib Axes the silhouettes were drawn on.
    """
    # Running vertical offset; start at 10 to leave a bottom gap.
    y_lower = 10
    colors = color_palette(self.colormap, self.n_clusters_)
    for idx in range(self.n_clusters_):
        # Silhouette coefficients of the samples in cluster idx, sorted
        # so each cluster renders as a smooth wedge.
        values = self.silhouette_samples_[labels == idx]
        values.sort()
        size = values.shape[0]
        y_upper = y_lower + size
        color = colors[idx]
        self.ax.fill_betweenx(
            np.arange(y_lower, y_upper), 0, values,
            facecolor=color, edgecolor=color, alpha=0.5
        )
        # Label the cluster at the vertical middle of its wedge.
        self.ax.text(-0.05, y_lower + 0.5 * size, str(idx))
        # Gap of 10 y-units between consecutive clusters.
        y_lower = y_upper + 10
    # Vertical reference line at the mean silhouette score.
    self.ax.axvline(
        x=self.silhouette_score_, color="red", linestyle="--"
    )
    return self.ax
Draw the silhouettes for each sample and the average score. Parameters ---------- labels : array-like An array with the cluster label for each silhouette sample, usually computed with ``predict()``. Labels are not stored on the visualizer so that the figure can be redrawn with new data.
def make(parser):
    """Register the CLI sub-commands ('gen-pass' and 'cmd') on *parser*.

    Each sub-parser stores its handler under ``func`` so the caller can
    dispatch with ``args.func(args)``.
    """
    subcommands = parser.add_subparsers(
        title='commands',
        metavar='COMMAND',
        help='description',
    )

    def run_gen_pass(args):
        gen_pass()

    sub = subcommands.add_parser('gen-pass', help='generate the password')
    sub.set_defaults(func=run_gen_pass)

    def run_cmd(args):
        cmd(args.user, args.hosts.split(','), args.key_filename,
            args.password, args.run)

    sub = subcommands.add_parser('cmd',
                                 help='run command line on the target host')
    sub.add_argument('--run',
                     help='the command running on the remote node',
                     action='store', default=None, dest='run')
    sub.set_defaults(func=run_cmd)
DEPRECATED. Registers the command-line sub-commands ('gen-pass', 'cmd') used to prepare an OpenStack basic environment.
def get_graph(cls, response):
    """Find the rdflib Graph in a Flask response.

    Accepts either a Graph itself or an indexable container whose first
    element is a Graph; returns ``None`` when neither applies.
    """
    if cls.is_graph(response):
        return response
    # A sequence-like response: look at its first element, if any.
    indexable = hasattr(response, '__getitem__')
    if indexable and len(response) > 0 and cls.is_graph(response[0]):
        return response[0]
    return None
Given a Flask response, find the rdflib Graph
def enable(self):
    """Enable this cloud via the mist API and refresh the local cloud list.

    POSTs ``{"new_state": "1"}`` to the cloud's endpoint, marks the cloud
    as enabled locally, then asks the client to re-sync its clouds.
    """
    body = json.dumps({"new_state": "1"})
    endpoint = self.mist_client.uri + '/clouds/' + self.id
    self.request(endpoint, data=body).post()
    self.enabled = True
    self.mist_client.update_clouds()
Enable the Cloud. :returns: A list of mist.clients' updated clouds.
def _normalize(parsed, **options): if options.get("exact"): return parsed if isinstance(parsed, time): now = options["now"] or datetime.now() return datetime( now.year, now.month, now.day, parsed.hour, parsed.minute, parsed.second, parsed.microsecond, ) elif isinstance(parsed, date) and not isinstance(parsed, datetime): return datetime(parsed.year, parsed.month, parsed.day) return parsed
Normalizes the parsed element. :param parsed: The parsed elements. :type parsed: Parsed :rtype: Parsed
def get_default_runner(udf_class, input_col_delim=',', null_indicator='NULL', stdin=None):
    """Create a default runner for the given UDF class.

    Reads the UDF's prototype annotation to determine input/output column
    types, builds an argument parser over *stdin* (``sys.stdin`` by
    default), and wires a stdin feed plus a stdout collector into the
    runner class that matches *udf_class*.

    :param udf_class: the UDF class to run.
    :param input_col_delim: delimiter between input columns.
    :param null_indicator: token that represents NULL in the input.
    :param stdin: input stream; defaults to ``sys.stdin``.
    :returns: a runner instance reading from stdin and writing to stdout.
    """
    proto = udf.get_annotation(udf_class)
    in_types, out_types = parse_proto(proto)
    stdin = stdin or sys.stdin
    arg_parser = ArgParser(in_types, stdin, input_col_delim, null_indicator)
    stdin_feed = make_feed(arg_parser)
    collector = StdoutCollector(out_types)
    # Runner class is selected based on the UDF class itself.
    ctor = _get_runner_class(udf_class)
    return ctor(udf_class, stdin_feed, collector)
Create a default runner with specified udf class.
def _ReceiveOp(self):
    """Receive a single GrrMessage through the Fleetspeak connection.

    Reads one message, validates that it wraps a ``grr.GrrMessage`` proto,
    records the received byte count, marks the message authenticated
    (messages arriving over the local Fleetspeak channel are trusted), and
    queues it on the worker thread.

    :raises ValueError: if the payload is not a grr.GrrMessage.
    """
    try:
        fs_msg, received_bytes = self._fs.Recv()
    except (IOError, struct.error):
        # Broken pipe on the read end is unrecoverable: log and propagate.
        logging.critical("Broken local Fleetspeak connection (read end).")
        raise
    received_type = fs_msg.data.TypeName()
    if not received_type.endswith("grr.GrrMessage"):
        raise ValueError(
            "Unexpected proto type received through Fleetspeak: %r; expected "
            "grr.GrrMessage." % received_type)
    stats_collector_instance.Get().IncrementCounter("grr_client_received_bytes",
                                                    received_bytes)
    grr_msg = rdf_flows.GrrMessage.FromSerializedString(fs_msg.data.value)
    # Mark as authenticated: the local Fleetspeak channel is trusted here.
    grr_msg.auth_state = jobs_pb2.GrrMessage.AUTHENTICATED
    self._threads["Worker"].QueueMessages([grr_msg])
Receives a single message through Fleetspeak.
def _extract_local_archive(working_dir, cleanup_functions, env_name, local_archive):
    """Extract *local_archive* (a zipfile) into *working_dir* and queue cleanup.

    A cleanup callback that removes every extracted top-level entry is
    appended to *cleanup_functions*.

    Parameters
    ----------
    working_dir : str
    cleanup_functions : List[() -> NoneType]
    env_name : str
    local_archive : str
    """
    with zipfile.ZipFile(local_archive) as archive:
        archive.extractall(working_dir)
        members = archive.namelist()

    # Only top-level names need removing; everything else nests inside them.
    roots = {name.split(posixpath.sep, 1)[0] for name in members}
    extracted_paths = [os.path.abspath(os.path.join(working_dir, root))
                       for root in roots]

    def cleanup():
        # Remove every extracted top-level file or directory tree.
        for path in extracted_paths:
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.unlink(path)

    cleanup_functions.append(cleanup)

    env_dir = os.path.join(working_dir, env_name)
    _fix_permissions(env_dir)
    return env_dir
Helper internal function for extracting a zipfile and ensure that a cleanup is queued. Parameters ---------- working_dir : str cleanup_functions : List[() -> NoneType] env_name : str local_archive : str
def launch_run(self, command, project=None, entity=None, run_id=None):
    """Launch a run in the cloud.

    Builds a GraphQL mutation with the command, the current git diff
    (when the working tree is dirty), and the cwd relative to the repo
    root, then submits it.

    NOTE(review): ``gql()`` is called with no query string here — the
    query text appears to have been lost; confirm against upstream source.

    Args:
        command (str): The command to run
        project (str): The project to scope the runs to
        entity (str, optional): The entity to scope this project to.
            Defaults to public models
        run_id (str, optional): The run_id to scope to

    Returns:
        [{"podName","status"}]
    """
    query = gql(
    )
    patch = BytesIO()
    if self.git.dirty:
        # Capture uncommitted changes as a patch to ship with the run.
        self.git.repo.git.execute(['git', 'diff'], output_stream=patch)
        patch.seek(0)
    cwd = "."
    if self.git.enabled:
        # cwd expressed relative to the repository root.
        cwd = cwd + os.getcwd().replace(self.git.repo.working_dir, "")
    return self.gql(query, variable_values={
        'entity': entity or self.settings('entity'),
        'model': project or self.settings('project'),
        'command': command,
        'runId': run_id,
        'patch': patch.read().decode("utf8"),
        'cwd': cwd
    })
Launch a run in the cloud. Args: command (str): The command to run program (str): The file to run project (str): The project to scope the runs to entity (str, optional): The entity to scope this project to. Defaults to public models run_id (str, optional): The run_id to scope to Returns: [{"podName","status"}]
def close(self):
    """Close the gzip stream, finishing the current member when writing.

    Safe to call more than once: a second call is a no-op because
    ``fileobj`` is cleared on the first close. Also closes the underlying
    file object when this instance opened it itself.
    """
    if self.fileobj is None:
        return  # already closed
    if self.mode == WRITE:
        # Terminate the member being written before detaching.
        self.close_member()
        self.fileobj = None
    elif self.mode == READ:
        self.fileobj = None
    # Close the file only if we own it (we opened it, not the caller).
    if self.myfileobj:
        self.myfileobj.close()
        self.myfileobj = None
Closes the gzip with care to handle multiple members.
def can_create_objective_bank_with_record_types(self, objective_bank_record_types):
    """Test if an ``ObjectiveBank`` can be created with these record types.

    Delegates to the underlying catalog session when one is configured;
    otherwise creation is always permitted.

    arg: objective_bank_record_types (osid.type.Type[]): array of
        objective bank record types
    return: (boolean) - ``true`` if ``ObjectiveBank`` creation using the
        specified ``Types`` is supported, ``false`` otherwise
    raise: NullArgument - ``objective_bank_record_types`` is ``null``
    *compliance: mandatory -- This method must be implemented.*
    """
    session = self._catalog_session
    if session is None:
        return True
    return session.can_create_catalog_with_record_types(
        catalog_record_types=objective_bank_record_types)
Tests if this user can create a single ``ObjectiveBank`` using the desired record types. While ``LearningManager.getObjectiveBankRecordTypes()`` can be used to examine which records are supported, this method tests which record(s) are required for creating a specific ``ObjectiveBank``. Providing an empty array tests if an ``ObjectiveBank`` can be created with no records. arg: objective_bank_record_types (osid.type.Type[]): array of objective bank record types return: (boolean) - ``true`` if ``ObjectiveBank`` creation using the specified ``Types`` is supported, ``false`` otherwise raise: NullArgument - ``objective_bank_record_types`` is ``null`` *compliance: mandatory -- This method must be implemented.*
def declareProvisioner(self, *args, **kwargs):
    """Update (declare) a provisioner's properties.

    Thin wrapper that forwards to the generated API call described by
    ``self.funcinfo["declareProvisioner"]``.

    This method takes input: ``v1/update-provisioner-request.json#``
    This method gives output: ``v1/provisioner-response.json#``

    This method is ``experimental``
    """
    func_info = self.funcinfo["declareProvisioner"]
    return self._makeApiCall(func_info, *args, **kwargs)
Update a provisioner Declare a provisioner, supplying some details about it. `declareProvisioner` allows updating one or more properties of a provisioner as long as the required scopes are possessed. For example, a request to update the `aws-provisioner-v1` provisioner with a body `{description: 'This provisioner is great'}` would require you to have the scope `queue:declare-provisioner:aws-provisioner-v1#description`. The term "provisioner" is taken broadly to mean anything with a provisionerId. This does not necessarily mean there is an associated service performing any provisioning activity. This method takes input: ``v1/update-provisioner-request.json#`` This method gives output: ``v1/provisioner-response.json#`` This method is ``experimental``
def _read_config_file(args):
    """Load and decrypt the YAML config file.

    Reads ``args.config``, records the stages in the module-level STATE,
    decrypts (and renders) the config section for ``args.stage``, and
    returns a ``(stages, config)`` tuple.
    """
    with open(args.config, 'rt') as handle:
        raw = yaml.safe_load(handle.read())

    stages = raw['stages']
    STATE['stages'] = stages
    # Decrypt the config payload for the requested stage.
    raw['config'] = _decrypt_item(raw['config'], stage=args.stage,
                                  key='', render=True)
    return stages, raw['config']
Decrypt config file, returns a tuple with stages and config.
def recv(self, timeout=None):
    """Receive one log record, with interrupt-safe timeout handling.

    Overrides the standard ``recv`` so that ``select`` calls interrupted
    by a signal (EINTR) are treated as a timeout instead of raising.

    :param timeout: seconds to wait; falsy delegates to the parent
        implementation's receive.
    :returns: a ``LogRecord``, or ``None`` when the wait timed out.
    """
    if timeout:
        try:
            # Wait until the socket is readable or the timeout elapses.
            testsock = self._zmq.select([self.socket], [], [], timeout)[0]
        except zmq.ZMQError as e:
            if e.errno == errno.EINTR:
                # Interrupted by a signal: behave like a plain timeout.
                testsock = None
            else:
                raise
        if not testsock:
            return
        rv = self.socket.recv(self._zmq.NOBLOCK)
        return LogRecord.from_dict(json.loads(rv))
    else:
        return super(ZeroMQPullSubscriber, self).recv(timeout)
Overwrite standard recv for timeout calls to catch interrupt errors.
def ot_tnrs_match_names(name_list, context_name=None, do_approximate_matching=True, include_dubious=False, include_deprecated=True, tnrs_wrapper=None):
    """Match taxon names to OTT IDs via the Open Tree TNRS web service.

    Thin wrapper around a peyotl TNRS wrapper: when *tnrs_wrapper* is
    ``None`` the default wrapper from ``peyotl.sugar`` is used. All other
    arguments map directly onto the web-service call; the wrapper raises
    ValueError for an invalid *context_name*. The ``wrap_response`` flag
    is passed so the result comes back as a TNRSResponse object.

    :returns: the wrapped TNRS match response.
    """
    if tnrs_wrapper is None:
        # Lazy import: only load the default wrapper when it is needed.
        from peyotl.sugar import tnrs
        tnrs_wrapper = tnrs
    return tnrs_wrapper.match_names(name_list,
                                    context_name=context_name,
                                    do_approximate_matching=do_approximate_matching,
                                    include_deprecated=include_deprecated,
                                    include_dubious=include_dubious,
                                    wrap_response=True)
Uses a peyotl wrapper around an Open Tree web service to get a list of OTT IDs matching the `name_list`. The tnrs_wrapper can be None (in which case the default wrapper from peyotl.sugar will be used. All other arguments correspond to the arguments of the web-service call. A ValueError will be raised if the `context_name` does not match one of the valid names for a taxonomic context. This uses the wrap_response option to create and return a TNRSRespose object around the response.
def get_service_resources(cls, model):
    """Return the resource models registered for *model*'s service.

    Looks up the service key for the model, then fetches the resources
    registered under that key.
    """
    service_key = cls.get_model_key(model)
    return cls.get_service_name_resources(service_key)
Get resource models by service model
def paginate(self, request, offset=0, limit=None):
    """Slice the collection for one page.

    :param request: the incoming request (unused; kept for the
        pagination interface).
    :param offset: number of items to skip.
    :param limit: maximum number of items to return (``None`` = no cap).
    :returns: ``(page_queryset, total_count)`` tuple.
    """
    page = self.collection.offset(offset).limit(limit)
    total = self.collection.count()
    return page, total
Paginate queryset.
def set(self, attr_dict):
    """Set attributes of this object from *attr_dict*.

    The id attribute (named by ``self._id_attribute``) is stored under its
    own name; every other key is stored with a leading underscore.

    :type attr_dict: dict
    :param attr_dict: parameters to set, keyed by attribute name.
    :rtype: :class:`.Base`
    :return: the current object (for chaining).
    """
    id_name = self._id_attribute
    for key, value in attr_dict.items():
        target = id_name if key == id_name else u"_" + key
        setattr(self, target, value)
    return self
Sets attributes of this user object. :type attr_dict: dict :param attr_dict: Parameters to set, with attribute keys. :rtype: :class:`.Base` :return: The current object.
def _processChanges(self, unused_output):
    """Send info about pulled changes to the master, branch by branch.

    For each configured branch and bookmark, look up its current head
    revision and, when one exists, hand it to ``_processBranchChanges``.
    Written in generator style: each ``yield`` appears to wait on a
    Deferred (Twisted inlineCallbacks convention) — confirm the decorator
    at the definition site.
    """
    for branch in self.branches + self.bookmarks:
        rev = yield self._getHead(branch)
        if rev is None:
            # No head for this branch -- nothing to process.
            continue
        yield self._processBranchChanges(rev, branch)
Send info about pulled changes to the master and record current. HgPoller does the recording by moving the working dir to the head of the branch. We don't update the tree (unnecessary treatment and waste of space) instead, we simply store the current rev number in a file. Recall that hg rev numbers are local and incremental.
def strip_command(self, command_string, output):
    """Strip the echoed command from *output*.

    Cisco IOS inserts backspaces into the echo of long (line-wrapped)
    commands. When backspaces are present, drop them and discard the
    whole first line (the echoed command); otherwise simply cut the first
    ``len(command_string)`` characters.

    :param command_string: The command string sent to the device
    :type command_string: str
    :param output: The returned output containing the command echo
    :type output: str
    """
    BACKSPACE = "\x08"
    if BACKSPACE not in output:
        # Simple case: the echo is an exact prefix of the output.
        return output[len(command_string):]
    cleaned = output.replace(BACKSPACE, "")
    # First line is the (mangled) command echo; keep everything after it.
    lines = cleaned.split(self.RESPONSE_RETURN)
    return self.RESPONSE_RETURN.join(lines[1:])
Strip command_string from output string Cisco IOS adds backspaces into output for long commands (i.e. for commands that line wrap) :param command_string: The command string sent to the device :type command_string: str :param output: The returned output as a result of the command string sent to the device :type output: str
def _handle_uninitialized_read(self, addr, inspect=True, events=True):
    """Handle a read from an uninitialized address: return symbolic bytes.

    Default behavior: produce an unconstrained symbolic value named after
    this region and the address, sized ``self.width * byte_width`` bits,
    with bytes reversed for little-endian regions. When a custom handler
    was installed, defer to it instead.

    :param addr: the address being read.
    :param inspect: forwarded to the solver/handler (breakpoint support).
    :param events: forwarded to the solver/handler (event recording).
    :returns: a symbolic value for the uninitialized bytes.
    """
    if self._uninitialized_read_handler is None:
        v = self.state.solver.Unconstrained("%s_%s" % (self.id, addr),
                                            self.width*self.state.arch.byte_width,
                                            key=self.variable_key_prefix + (addr,),
                                            inspect=inspect,
                                            events=events)
        # Little-endian regions expect the byte order reversed.
        return v.reversed if self.endness == "Iend_LE" else v
    else:
        return self._uninitialized_read_handler(self, addr, inspect=inspect, events=events)
The default uninitialized read handler. Returns symbolic bytes.
def get_shiftfile_row(self):
    """Return this image's shiftfile row (IRAF MultiDrizzle compatibility).

    :returns: formatted row string with name, offsets, rotation, scale and
        rms values, or ``None`` when no fit is available.
    """
    fit = self.fit
    if fit is None:
        return None
    return '%s %0.6f %0.6f %0.6f %0.6f %0.6f %0.6f\n' % (
        self.name,
        fit['offset'][0], fit['offset'][1],
        fit['rot'],
        fit['scale'][0],
        fit['rms'][0], fit['rms'][1],
    )
Return the information for a shiftfile for this image to provide compatability with the IRAF-based MultiDrizzle.
def random_split(valid_pct:float, *arrs:NPArrayableList)->SplitArrayList:
    "Randomly split `arrs` with `valid_pct` ratio. good for creating validation set."
    assert 0 <= valid_pct <= 1, 'Validation set percentage should be between 0 and 1'
    # Each sample lands in the training split with probability 1 - valid_pct.
    mask = np.random.uniform(size=(len(arrs[0]),)) > valid_pct
    return arrays_split(mask, *arrs)
Randomly split `arrs` with `valid_pct` ratio. good for creating validation set.
def WaitForSnapshotCompleted(snapshot):
    """Block until *snapshot* reports status 'completed'.

    Polls every 5 seconds, printing a dot per poll as a progress
    indicator. (Python 2 code: uses the print statement.)
    """
    print 'Waiting for snapshot %s to be completed...' % (snapshot)
    while True:
        snapshot.update()  # refresh status from the service
        sys.stdout.write('.')
        sys.stdout.flush()
        if snapshot.status == 'completed':
            break
        time.sleep(5)
    return
Blocks until snapshot is complete.
def rename(self, name):
    """Rename this app to *name*.

    Issues a PUT against the app resource and reports whether the API
    accepted the change.

    :returns: ``True`` on HTTP success, ``False`` otherwise.
    """
    response = self._h._http_resource(
        method='PUT',
        resource=('apps', self.name),
        data={'app[name]': name},
    )
    return response.ok
Renames app to given name.