code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def on_service_add(self, service):
    """Handle a newly added service by spawning its periodic check worker.

    The thread is named after the service and runs ``check_loop(service)``.
    """
    self.launch_thread(service.name, self.check_loop, service)
When a new service is added, a worker thread is launched to periodically run the checks for that service.
def make_server( server_class, handler_class, authorizer_class, filesystem_class, host_port, file_access_user=None, **handler_options): from . import compat if isinstance(handler_class, compat.string_type): handler_class = import_class(handler_class) if isinstance(authorizer_class, c...
make server instance :host_port: (host, port) :file_access_user: 'spam' handler_options: * timeout * passive_ports * masquerade_address * certfile * keyfile
def get_playlist_songs(self, playlist_id, limit=1000): url = 'http://music.163.com/weapi/v3/playlist/detail?csrf_token=' csrf = '' params = {'id': playlist_id, 'offset': 0, 'total': True, 'limit': limit, 'n': 1000, 'csrf_token': csrf} result = self.post_request(url, par...
Get all songs of a playlist. :param playlist_id: playlist id. :param limit: length of result returned by weapi. :return: a list of Song objects.
def check_or(state, *tests): success = False first_feedback = None for test in iter_tests(tests): try: multi(state, test) success = True except TestFail as e: if not first_feedback: first_feedback = e.feedback if success: ...
Test whether at least one SCT passes. If all of the tests fail, the feedback of the first test will be presented to the student. Args: state: State instance describing student and solution code, can be omitted if used with Ex() tests: one or more sub-SCTs to run :Example: The SCT ...
def publish(idx=None):
    """Publish packaged distributions to a PyPI index.

    :param idx: optional index-server name from ~/.pypirc; None uses the default.
    """
    repo_flag = '' if idx is None else '-r ' + idx
    run('python setup.py register {}'.format(repo_flag))
    run('twine upload {} dist/*.whl dist/*.egg dist/*.tar.gz'.format(repo_flag))
Publish packaged distributions to pypi index
def is_on_curve(self, point):
    """Check whether *point* satisfies the curve equation.

    The curve is y^2 = x^3 + a*x + b over the prime field mod P.

    Args:
        point (AffinePoint): point to be checked.

    Returns:
        bool: True if the point lies on the curve, False otherwise.
    """
    x, y = point.X, point.Y
    residue = (pow(y, 2, self.P) - pow(x, 3, self.P) - self.a * x - self.b) % self.P
    return residue == 0
Checks whether a point is on the curve. Args: point (AffinePoint): Point to be checked. Returns: bool: True if point is on the curve, False otherwise.
def picard_fixmate(picard, align_bam): base, ext = os.path.splitext(align_bam) out_file = "%s-sort%s" % (base, ext) if not file_exists(out_file): with tx_tmpdir(picard._config) as tmp_dir: with file_transaction(picard._config, out_file) as tx_out_file: opts = [("INPUT", a...
Run Picard's FixMateInformation generating an aligned output file.
def parse(self): self.cmd = None self.comments = [] self.entrypoint = None self.environ = [] self.files = [] self.install = [] self.labels = [] self.ports = [] self.test = None self.volumes = [] if self.recipe: self.line...
parse is the base function for parsing the recipe, whether it be a Dockerfile or Singularity recipe. The recipe is read in as lines, and saved to a list if needed for the future. If the client has it, the recipe type specific _parse function is called. Instructions for makin...
def install(module): ret = { 'old': None, 'new': None, } old_info = show(module) cmd = 'cpan -i {0}'.format(module) out = __salt__['cmd.run'](cmd) if "don't know what it is" in out: ret['error'] = 'CPAN cannot identify this package' return ret new_info = show(...
Install a Perl module from CPAN CLI Example: .. code-block:: bash salt '*' cpan.install Template::Alloy
def persistant_error(request, message, extra_tags='', fail_silently=False, *args, **kwargs):
    """Add a persistent message with the ``ERROR`` level.

    (Function name spelling kept as-is for backward compatibility.)
    """
    add_message(request, ERROR_PERSISTENT, message, extra_tags=extra_tags,
                fail_silently=fail_silently, *args, **kwargs)
Adds a persistent message with the ``ERROR`` level.
def get_port_binding_level(filters):
    """Return PortBindingLevel rows matching *filters*, ordered by level."""
    session = db.get_reader_session()
    with session.begin():
        query = session.query(ml2_models.PortBindingLevel)
        query = query.filter_by(**filters)
        query = query.order_by(ml2_models.PortBindingLevel.level)
        return query.all()
Returns entries from PortBindingLevel based on the specified filters.
def list(self): import IPython data = [{'name': version['name'].split()[-1], 'deploymentUri': version['deploymentUri'], 'createTime': version['createTime']} for version in self.get_iterator()] IPython.display.display( datalab.utils.commands.render_dictionary(data, ['name', '...
List versions under the current model in a table view. Raises: Exception if it is called in a non-IPython environment.
def wgs84_to_pixel(lng, lat, transform, utm_epsg=None, truncate=True):
    """Convert WGS84 (lng, lat) to pixel (row, column) via a UTM projection.

    :param lng: longitude of the point
    :param lat: latitude of the point
    :param transform: georeferencing transform of the image
    :param utm_epsg: UTM EPSG code; if None the zone is derived automatically
    :param truncate: whether to truncate the resulting pixel coordinates
    :return: (row, column) tuple
    """
    easting, northing = wgs84_to_utm(lng, lat, utm_epsg)
    return utm_to_pixel(easting, northing, transform, truncate=truncate)
Convert WGS84 coordinates to pixel image coordinates given transform and UTM CRS. If no CRS is given it will be calculated it automatically. :param lng: longitude of point :type lng: float :param lat: latitude of point :type lat: float :param transform: georeferencing transform of the image, e....
def _get_broadcast_shape(shape1, shape2): if shape1 == shape2: return shape1 length1 = len(shape1) length2 = len(shape2) if length1 > length2: shape = list(shape1) else: shape = list(shape2) i = max(length1, length2) - 1 for a, b in zip(shape1[::-1], shape2[::-1]): ...
Given two shapes that are not identical, find the shape that both input shapes can broadcast to.
def _calculate(self, field): base_offset = 0 if self.base_field is not None: base_offset = self.base_field.offset target_offset = self._field.offset if (target_offset is None) or (base_offset is None): return 0 return target_offset - base_offset
If the offset is unknown, return 0
def list(self, mask=None):
    """List existing placement groups.

    Calls SoftLayer_Account::getPlacementGroups.

    :param mask: optional object mask; a sensible default is used when None.
    """
    if mask is None:
        # Default mask: basic group info plus its backend router.
        mask = ("mask[id, name, createDate, rule, guestCount, "
                "backendRouter[id, hostname]]")
    return self.client.call('Account', 'getPlacementGroups', mask=mask, iter=True)
List existing placement groups Calls SoftLayer_Account::getPlacementGroups
def run(self, code: str) -> Output:
    """Run *code* in the managed Spark session.

    :param code: the code to run.
    :return: the execution Output; echoed and/or status-checked per settings.
    """
    result = self._execute(code)
    # Echo any textual output when the session is configured to do so.
    if self.echo and result.text:
        print(result.text)
    if self.check:
        result.raise_for_status()
    return result
Run some code in the managed Spark session. :param code: The code to run.
def _decompress_into_buffer(self, out_buffer): zresult = lib.ZSTD_decompressStream(self._decompressor._dctx, out_buffer, self._in_buffer) if self._in_buffer.pos == self._in_buffer.size: self._in_buffer.src = ffi.NULL self._in_buffer.pos...
Decompress available input into an output buffer. Returns True if data in output buffer should be emitted.
def click_exists(self, timeout=0):
    """Wait for the element and click it if found.

    Args:
        timeout (float): how long to wait for the element.

    Returns:
        bool: True if the element appeared and was clicked, False otherwise.
    """
    element = self.get(timeout=timeout, raise_error=False)
    if element is None:
        return False
    element.click()
    return True
Wait element and perform click Args: timeout (float): timeout for wait Returns: bool: if successfully clicked
def string(s): @Parser def string_parser(text, index=0): slen, tlen = len(s), len(text) if text[index:index + slen] == s: return Value.success(index + slen, s) else: matched = 0 while matched < slen and index + matched < tlen and text[index + matched] ...
Parse a string.
def tags(self): result = [] a = javabridge.call(self.jobject, "getTags", "()Lweka/core/Tag;]") length = javabridge.get_env().get_array_length(a) wrapped = javabridge.get_env().get_object_array_elements(a) for i in range(length): result.append(Tag(javabridge.get_env()....
Returns the associated tags. :return: the list of Tag objects :rtype: list
def handle_template(self, template, subdir):
    """Use yacms's project template by default.

    The default-directory selection mirrors Django's TemplateCommand.
    """
    if template is not None:
        return super(Command, self).handle_template(template, subdir)
    return six.text_type(os.path.join(yacms.__path__[0], subdir))
Use yacms's project template by default. The method of picking the default directory is copied from Django's TemplateCommand.
def import_gwf_library(library, package=__package__): try: return importlib.import_module('.%s' % library, package=package) except ImportError as exc: exc.args = ('Cannot import %s frame API: %s' % (library, str(exc)),) raise
Utility method to import the relevant timeseries.io.gwf frame API This is just a wrapper around :meth:`importlib.import_module` with a slightly nicer error message
def teetext(table, source=None, encoding=None, errors='strict', template=None, prologue=None, epilogue=None): assert template is not None, 'template is required' return TeeTextView(table, source=source, encoding=encoding, errors=errors, template=template, prologue=prologue, ep...
Return a table that writes rows to a text file as they are iterated over.
def log_error(self, message, *args, **kwargs):
    """Log a server error through the underlying service logger."""
    self._service.log(logging.ERROR, message, *args, **kwargs)
Log server error
def init_with_instance(self, instance):
    """Initialize this wrapper from a content instance."""
    self._uid = api.get_uid(instance)
    self._catalog = self.get_catalog_for(instance)
    self._instance = instance
    # No brain yet; it is resolved lazily when needed.
    self._brain = None
Initialize with an instance object
def string_to_identity(identity_str):
    """Parse *identity_str* into an Identity dictionary (empty fields dropped)."""
    parsed = _identity_regexp.match(identity_str).groupdict()
    log.debug('parsed identity: %s', parsed)
    return {key: value for key, value in parsed.items() if value}
Parse string into Identity dictionary.
def object_properties_count(self, o): o_type = type(o) if isinstance(o, (dict, list, tuple, set)): return len(o) elif isinstance(o, (type(None), bool, float, str, int, bytes, types.ModuleType, type...
returns the number of user browsable properties of an object.
def dir_list(directory):
    """Return the list of all entries in *directory*.

    On failure (missing directory, permission error, ...) the error is
    printed and None is returned, preserving the best-effort contract.
    """
    try:
        return listdir(directory)
    except OSError as err:
        # Bug fix: the original caught WindowsError, which is undefined on
        # non-Windows platforms (NameError at handling time). OSError is its
        # portable base class (and its alias on Windows).
        print("Directory error: " + str(err))
Returns the list of all files in the directory.
def find_field(item_list, cond, comparator, target_field):
    """Find the value of a field in the first matching dict.

    Args:
        item_list: a list of dict objects.
        cond: a param that defines the condition.
        comparator: a function that checks whether a dict satisfies the condition.
        target_field: name of the field whose value to return.

    Returns:
        The field value from the first satisfying dict, or None if no match.
    """
    matches = (item[target_field] for item in item_list
               if comparator(item, cond) and target_field in item)
    return next(matches, None)
Finds the value of a field in a dict object that satisfies certain conditions. Args: item_list: A list of dict objects. cond: A param that defines the condition. comparator: A function that checks if an dict satisfies the condition. target_field: Name of the field whose value to...
def create_insert_func(self, wb_url, wb_prefix, host_prefix, top_url, env, is_framed, coll='', include_ts=True, ...
Create the function used to render the header insert template for the current request. :param rewrite.wburl.WbUrl wb_url: The WbUrl for the request this template is being rendered for :param str wb_prefix: The URL prefix pywb is serving the content using (e.g. http://localhost:8080/live/) :para...
def _finish(self): if self._process.returncode is None: self._process.stdin.flush() self._process.stdin.close() self._process.wait() self.closed = True
Closes and waits for subprocess to exit.
def collect_single_file(self, file_path): lines = FileToList.to_list(file_path) file_anchors = {} file_duplicates = [] for i in range(len(lines)): self._try_switches(lines, i) if self._no_switches_on(): for s in self._strategies: ...
Takes in a list of strings, usually the lines in a text file, and collects the AnchorHub tags and auto-generated anchors for the file according to the Collector's converter, strategies, and switches :param file_path: string file path of file to examine :return: A dictionary mapping Anc...
def _can_for_object(self, func_name, object_id, method_name): can_for_session = self._can(func_name) if (can_for_session or self._object_catalog_session is None or self._override_lookup_session is None): return can_for_session override_auths = self._ov...
Checks if agent can perform function for object
def bootstrap_methods(self) -> BootstrapMethod:
    """Return the bootstrap methods table from the BootstrapMethods attribute.

    If the attribute does not exist yet, it is created first.

    :returns: Table of `BootstrapMethod` objects.
    """
    attribute = self.attributes.find_one(name='BootstrapMethods')
    if attribute is None:
        attribute = self.attributes.create(ATTRIBUTE_CLASSES['BootstrapMethods'])
    return attribute.table
Returns the bootstrap methods table from the BootstrapMethods attribute, if one exists. If it does not, one will be created. :returns: Table of `BootstrapMethod` objects.
def uniquify_list(L):
    """Return a new list with duplicates removed, preserving first-seen order.

    Hashable elements take an O(n) fast path; unhashable elements fall back
    to the original equality-based O(n^2) scan, so behavior is unchanged.
    """
    try:
        # dict preserves insertion order and keeps the first of equal keys,
        # matching the index-based semantics below.
        return list(dict.fromkeys(L))
    except TypeError:
        # Unhashable elements (e.g. lists): equality-based dedupe.
        return [e for i, e in enumerate(L) if L.index(e) == i]
Same-order unique list using only a list comprehension.
def drop(self, ex): "helper for apply_sql in DropX case" if ex.name not in self: if ex.ifexists: return raise KeyError(ex.name) table_ = self[ex.name] parent = table_.parent_table if table_.child_tables: if not ex.cascade: raise table.IntegrityError('delete_parent_...
helper for apply_sql in DropX case
def on_trial_complete(self, trial_id, result=None, error=False, early_terminated=False): skopt_trial_info = self._live_trial_mapping.pop(trial_id) if result: self._skopt_opt.tell(skopt_tri...
Passes the result to skopt unless early terminated or errored. The result is internally negated when interacting with Skopt so that Skopt Optimizers can "maximize" this value, as it minimizes on default.
def kill(self) -> None:
    """Kill the ffmpeg job and reap the process off the event loop."""
    self._proc.kill()
    # communicate() in an executor so reaping does not block the loop.
    self._loop.run_in_executor(None, self._proc.communicate)
Kill ffmpeg job.
def set_result(self, result):
    """Complete every traversed task, then this future itself if still pending."""
    for fut in self.traverse():
        fut.set_result(result)
    if not self.done():
        super().set_result(result)
Complete all tasks.
def ecdsa_sign_compact(msg32, seckey): output64 = ffi.new("unsigned char[65]") recid = ffi.new("int *") lib.secp256k1_ecdsa_recoverable_signature_serialize_compact( ctx, output64, recid, _ecdsa_sign_recoverable(msg32, seckey) ) r = ffi.buffer(output64)[:64] + struct.p...
Takes the same message and seckey as _ecdsa_sign_recoverable Returns an unsigned char array of length 65 containing the signed message
def nz(value, none_value, strict=True): if not DEBUG: debug = False else: debug = False if debug: print("START nz frameworkutilities.py ----------------------\n") if value is None and strict: return_val = none_value elif strict and value is not None: return_val = valu...
This function is named after an old VBA function. It returns a default value if the passed in value is None. If strict is False it will treat an empty string as None as well. example: x = None nz(x,"hello") --> "hello" nz(x,"") --> "" y = "" ...
def contrail_error_handler(f): @wraps(f) def wrapper(*args, **kwargs): try: return f(*args, **kwargs) except HttpError as e: if e.details: e.message, e.details = e.details, e.message e.args = ("%s (HTTP %s)" % (e.message, e.http_status),) ...
Handle HTTP errors returned by the API server
def load_eidos_curation_table(): url = 'https://raw.githubusercontent.com/clulab/eidos/master/' + \ 'src/main/resources/org/clulab/wm/eidos/english/confidence/' + \ 'rule_summary.tsv' res = StringIO(requests.get(url).text) table = pandas.read_table(res, sep='\t') table = table.drop(table...
Return a pandas table of Eidos curation data.
def _hexencode(bytestring, insert_spaces = False): _checkString(bytestring, description='byte string') separator = '' if not insert_spaces else ' ' byte_representions = [] for c in bytestring: byte_representions.append( '{0:02X}'.format(ord(c)) ) return separator.join(byte_representions).str...
Convert a byte string to a hex encoded string. For example 'J' will return '4A', and ``'\\x04'`` will return '04'. Args: bytestring (str): Can be for example ``'A\\x01B\\x45'``. insert_spaces (bool): Insert space characters between pair of characters to increase readability. Returns: ...
def get_param_arg(param, idx, klass, arg, attr='id'): if isinstance(arg, klass): return getattr(arg, attr) elif isinstance(arg, (int, str)): return arg else: raise TypeError( "%s[%d] must be int, str, or %s, not %s" % ( param, idx, klass.__name__, type(arg...
Return the correct value for a fabric from `arg`.
def unpackb(packed, **kwargs):
    """Unpack a single object from *packed* bytes.

    Raises `UnpackValueError` when the data is truncated and `ExtraData`
    when *packed* contains trailing bytes. See :class:`Unpacker` for options.
    """
    unpacker = Unpacker(None, **kwargs)
    unpacker.feed(packed)
    try:
        result = unpacker._unpack()
    except OutOfData:
        raise UnpackValueError("Data is not enough.")
    if unpacker._got_extradata():
        raise ExtraData(result, unpacker._get_extradata())
    return result
Unpack an object from `packed`. Raises `ExtraData` when `packed` contains extra bytes. See :class:`Unpacker` for options.
def checksum(digits):
    """Calculate the checksum of an Estonian personal identity code.

    Uses the "Modulo 11" method: first the level-I scale, then the level-II
    scale if the first remainder is 10; if both give 10, the checksum is 0.
    """
    for scale in (Provider.scale1, Provider.scale2):
        remainder = sum(map(operator.mul, digits, scale)) % 11
        if remainder < 10:
            return remainder
    # Both scales produced remainder 10 -> checksum is defined to be 0.
    return 0
Calculate checksum of Estonian personal identity code. Checksum is calculated with "Modulo 11" method using level I or II scale: Level I scale: 1 2 3 4 5 6 7 8 9 1 Level II scale: 3 4 5 6 7 8 9 1 2 3 The digits of the personal code are multiplied by level I scale and summed; if remainder of modulo...
def get_temperature_from_pressure(self):
    """Return the temperature in Celsius reported by the pressure sensor.

    Returns 0 when the sensor reading is flagged invalid.
    """
    self._init_pressure()
    data = self._pressure.pressureRead()
    # data[2] is the validity flag; data[3] carries the temperature value.
    return data[3] if data[2] else 0
Returns the temperature in Celsius from the pressure sensor
def compute_actor_handle_id_non_forked(actor_handle_id, current_task_id): assert isinstance(actor_handle_id, ActorHandleID) assert isinstance(current_task_id, TaskID) handle_id_hash = hashlib.sha1() handle_id_hash.update(actor_handle_id.binary()) handle_id_hash.update(current_task_id.binary()) h...
Deterministically compute an actor handle ID in the non-forked case. This code path is used whenever an actor handle is pickled and unpickled (for example, if a remote function closes over an actor handle). Then, whenever the actor handle is used, a new actor handle ID will be generated on the fly as a...
def list_vmss_sub(access_token, subscription_id): endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/providers/Microsoft.Compute/virtualMachineScaleSets', '?api-version=', COMP_API]) return do_get_next(endpoint, ac...
List VM Scale Sets in a subscription. Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. Returns: HTTP response. JSON body of VM scale sets.
def unpack_bytes(self, obj_bytes, encoding=None): assert self.bytes_to_dict or self.string_to_dict encoding = encoding or self.default_encoding LOGGER.debug('%r decoding %d bytes with encoding of %s', self, len(obj_bytes), encoding) if self.bytes_to_dict: ...
Unpack a byte stream into a dictionary.
def load(fnames, tag=None, sat_id=None, fake_daily_files_from_monthly=False, flatten_twod=True): import pysatCDF if len(fnames) <= 0 : return pysat.DataFrame(None), None else: if fake_daily_files_from_monthly: fname = fnames[0][0:-11] date = pysat.d...
Load NASA CDAWeb CDF files. This routine is intended to be used by pysat instrument modules supporting a particular NASA CDAWeb dataset. Parameters ------------ fnames : (pandas.Series) Series of filenames tag : (str or NoneType) tag or None (default=None) sat_id : (str...
def main(dbfile, pidfile, mode):
    """Main analyzer routine: reuse any existing snapshot, then snapshot *mode*."""
    inspector = Inspector(dbfile, pidfile)
    inspector.reuse_snapshot().snapshot(mode)
Main analyzer routine.
def schedule(ident, cron=None, minute='*', hour='*', day_of_week='*', day_of_month='*', month_of_year='*'): source = get_source(ident) if cron: minute, hour, day_of_month, month_of_year, day_of_week = cron.split() crontab = PeriodicTask.Crontab( minute=str(minute), hour=...
Schedule a harvesting on a source given a crontab
def stop(self):
    """Stop the device's internal color-pattern playback.

    Returns the device write result, or '' if no device is open.
    """
    # Idiom fix: identity comparison with None (was `== None`); also drops a
    # stray trailing semicolon.
    if self.dev is None:
        return ''
    # 'p' is the device's "stop pattern playing" command.
    buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
    return self.write(buf)
Stop internal color pattern playing
def _get_column_ends(self): ends = collections.Counter() for line in self.text.splitlines(): for matchobj in re.finditer('\s{2,}', line.lstrip()): ends[matchobj.end()] += 1 return ends
Guess where the ends of the columns lie.
def _print_routes(api_provider, host, port): grouped_api_configs = {} for api in api_provider.get_all(): key = "{}-{}".format(api.function_name, api.path) config = grouped_api_configs.get(key, {}) config.setdefault("methods", []) config["function_name"] = ...
Helper method to print the APIs that will be mounted. This method is purely for printing purposes. This method takes in a list of Route Configurations and prints out the Routes grouped by path. Grouping routes by Function Name + Path is the bulk of the logic. Example output: Mountin...
def get_driver(secret_key=config.DEFAULT_SECRET_KEY, userid=config.DEFAULT_USERID, provider=config.DEFAULT_PROVIDER): if hasattr(config, 'get_driver'): logger.debug('get_driver %s' % config.get_driver) return config.get_driver() else: logger.debug('get_driver {0}@{1}'.form...
A driver represents successful authentication. They become stale, so obtain them as late as possible, and don't cache them.
def _sanitize_url_components(comp_list, field): if not comp_list: return '' elif comp_list[0].startswith('{0}='.format(field)): ret = '{0}=XXXXXXXXXX&'.format(field) comp_list.remove(comp_list[0]) return ret + _sanitize_url_components(comp_list, field) else: ret = '{0...
Recursive function to sanitize each component of the url.
def register(self, src, trg, trg_mask=None, src_mask=None): ccreg = registration.CrossCorr() model = ccreg.fit(src, reference=trg) translation = [-x for x in model.toarray().tolist()[0]] warp_matrix = np.eye(2, 3) warp_matrix[0, 2] = translation[1] warp_matrix[1, 2] = tra...
Implementation of pair-wise registration using thunder-registration For more information on the model estimation, refer to https://github.com/thunder-project/thunder-registration This function takes two 2D single channel images and estimates a 2D translation that best aligns the pair. The estim...
def update_ports(self, ports, id_or_uri):
    """Update the switch ports (available for API version 300 or later).

    Only ports under OneView management and unlinked ports are supported.

    Args:
        ports: list of switch port dicts.
        id_or_uri: switch ID or URI.
    """
    ports = merge_default_values(ports, {'type': 'port'})
    endpoint = self._client.build_uri(id_or_uri) + "/update-ports"
    return self._client.update(uri=endpoint, resource=ports)
Updates the switch ports. Only the ports under the management of OneView and those that are unlinked are supported for update. Note: This method is available for API version 300 or later. Args: ports: List of Switch Ports. id_or_uri: Can be either the switch...
def interpolate_xml_array(data, low_res_coords, shape, chunks):
    """Interpolate an arbitrarily sized dataset onto the full image grid."""
    x_coords, y_coords = low_res_coords
    return interpolate_xarray_linear(x_coords, y_coords, data, shape, chunks=chunks)
Interpolate arbitrary size dataset to a full sized grid.
def stop(self): self._flush() filesize = self.file.tell() super(BLFWriter, self).stop() header = [b"LOGG", FILE_HEADER_SIZE, APPLICATION_ID, 0, 0, 0, 2, 6, 8, 1] header.extend([filesize, self.uncompressed_size, self.count_of_objects, 0]) ...
Stops logging and closes the file.
def error_leader(self, infile=None, lineno=None):
    """Emit a C-compiler-like, Emacs-friendly error-message leader."""
    infile = self.infile if infile is None else infile
    lineno = self.lineno if lineno is None else lineno
    return "\"%s\", line %d: " % (infile, lineno)
Emit a C-compiler-like, Emacs-friendly error-message leader.
def run(self): while self.should_run: try: self.logger.debug('Sending heartbeat, seq ' + last_sequence) self.ws.send(json.dumps({ 'op': 1, 'd': last_sequence })) except Exception as e: ...
Runs the thread This method handles sending the heartbeat to the Discord websocket server, so the connection can remain open and the bot remain online for those commands that require it to be. Args: None
def drawing_end(self): from MAVProxy.modules.mavproxy_map import mp_slipmap if self.draw_callback is None: return self.draw_callback(self.draw_line) self.draw_callback = None self.map.add_object(mp_slipmap.SlipDefaultPopup(self.default_popup, combine=True)) se...
end line drawing
def readrows(self): num_rows = 0 while True: for row in self.log_reader.readrows(): yield self.replace_timestamp(row) time.sleep(next(self.eps_timer)) num_rows += 1 if self.max_rows and (num_rows >= self.max_rows): ...
Using the BroLogReader this method yields each row of the log file replacing timestamps, looping and emitting rows based on EPS rate
def plot_f(self, plot_limits=None, fixed_inputs=None, resolution=None, apply_link=False, which_data_ycols='all', which_data_rows='all', visible_dims=None, levels=20, samples=0, lower=2.5, upper=97.5, plot_density=False, pl...
Convinience function for plotting the fit of a GP. This is the same as plot, except it plots the latent function fit of the GP! If you want fine graned control use the specific plotting functions supplied in the model. You can deactivate the legend for this one plot by supplying None to label. Give t...
def get(self, key, default):
    """Return a copy of the list stored at *key*, or *default* if unset."""
    with self._lock:
        try:
            entry = self._dict[key]
        except KeyError:
            return default
        # Copy so callers cannot mutate shared state outside the lock.
        return entry.copy()
If the key is set, return a copy of the list stored at key. Otherwise return default.
def _parent_tile(tiles):
    """Find the common parent tile for a sequence of tiles (None if empty)."""
    parent = None
    for tile in tiles:
        parent = tile if parent is None else common_parent(parent, tile)
    return parent
Find the common parent tile for a sequence of tiles.
def get_outcome_group(self, group): from canvasapi.outcome import OutcomeGroup outcome_group_id = obj_or_id(group, "group", (OutcomeGroup,)) response = self.__requester.request( 'GET', 'global/outcome_groups/{}'.format(outcome_group_id) ) return OutcomeGro...
Returns the details of the Outcome Group with the given id. :calls: `GET /api/v1/global/outcome_groups/:id \ <https://canvas.instructure.com/doc/api/outcome_groups.html#method.outcome_groups_api.show>`_ :param group: The outcome group object or ID to return. :type group: :class:`ca...
def compile_msg_payload(self, invite): self.l.info("Compiling the outbound message payload") update_invite = False if "to_addr" in invite.invite: to_addr = invite.invite["to_addr"] else: update_invite = True to_addr = get_identity_address(invite.identi...
Determine recipient, message content, return it as a dict that can be Posted to the message sender
def star_expr_check(self, original, loc, tokens):
    """Check for Python 3.5 star unpacking."""
    return self.check_py(
        "35",
        "star unpacking (add 'match' to front to produce universal code)",
        original,
        loc,
        tokens,
    )
Check for Python 3.5 star unpacking.
def convex_conj(self): r if self.operator is None: tmp = IndicatorZero(space=self.domain, constant=-self.constant) if self.vector is None: return tmp else: return tmp.translated(self.vector) if self.vector is None: r...
r"""The convex conjugate functional of the quadratic form. Notes ----- The convex conjugate of the quadratic form :math:`<x, Ax> + <b, x> + c` is given by .. math:: (<x, Ax> + <b, x> + c)^* (x) = <(x - b), A^-1 (x - b)> - c = <x , A^-1 x> - <...
def _get_app_path(url): app_path = urlparse(url).path.rstrip("/") if not app_path.startswith("/"): app_path = "/" + app_path return app_path
Extract the app path from a Bokeh server URL Args: url (str) : Returns: str
def sysinit(systype, conf, project):
    """Output configuration for the system-initialization subsystem."""
    rendered = get_config(
        systype,
        conf=ConfModule(conf).configurations[0],
        conf_path=conf,
        project_name=project,
    )
    click.secho(rendered)
Outputs configuration for system initialization subsystem.
def _RunInTransaction(self, function, readonly=False): start_query = "START TRANSACTION;" if readonly: start_query = "START TRANSACTION WITH CONSISTENT SNAPSHOT, READ ONLY;" for retry_count in range(_MAX_RETRY_COUNT): with contextlib.closing(self.pool.get()) as connection: try: ...
Runs function within a transaction. Allocates a connection, begins a transaction on it and passes the connection to function. If function finishes without raising, the transaction is committed. If function raises, the transaction will be rolled back, if a retryable database error is raised, the o...
def set_dry_run(xml_root, value=True): value_str = str(value).lower() assert value_str in ("true", "false") if xml_root.tag == "testsuites": _set_property(xml_root, "polarion-dry-run", value_str) elif xml_root.tag in ("testcases", "requirements"): _set_property(xml_root, "dry-run", value...
Sets dry-run so records are not updated, only log file is produced.
def crit_met(self):
    """Determine whether the stopping criterion has been met.

    Returns
    -------
    bool
        False outright if any arm has fewer than 3 pulls; otherwise the
        configured criterion evaluated against the stop value.
    """
    # self.pulls < 3 yields an elementwise boolean mask (array-like).
    if True in (self.pulls < 3):
        return False
    return self.criteria[self.criterion](self.stop_value)
Determine if stopping criterion has been met. Returns ------- bool
def _bfs_from_cluster_tree(tree, bfs_root): result = [] to_process = [bfs_root] while to_process: result.extend(to_process) to_process = tree['child'][np.in1d(tree['parent'], to_process)].tolist() return result
Perform a breadth first search on a tree in condensed tree format
def read_inquiry_mode(sock): old_filter = sock.getsockopt( bluez.SOL_HCI, bluez.HCI_FILTER, 14) flt = bluez.hci_filter_new() opcode = bluez.cmd_opcode_pack(bluez.OGF_HOST_CTL, bluez.OCF_READ_INQUIRY_MODE) bluez.hci_filter_set_ptype(flt, bluez.HCI_EVENT_PKT) bluez.hci_filter_set_event(fl...
returns the current mode, or -1 on failure
def wait_while_exceptions( predicate, timeout_seconds=120, sleep_seconds=1, noisy=False): start_time = time_module.time() timeout = Deadline.create_deadline(timeout_seconds) while True: try: result = predicate() return result except Exc...
waits for a predicate, ignoring exceptions, returning the result. Predicate is a function. Exceptions will trigger the sleep and retry; any non-exception result will be returned. A timeout will throw a TimeoutExpired Exception.
def _validate_type_scalar(self, value):
    """Validate that *value* is a scalar (not a list or a dict).

    Returns True for ints, strings, floats, dates, datetimes and bools;
    returns None (undecided) otherwise.
    """
    scalar_types = _int_types + (_str_type, float, date, datetime, bool)
    if isinstance(value, scalar_types):
        return True
Is not a list or a dict
def _new_temp_file(self, hint='warcrecsess'):
    """Create and return a new temporary file in the session temp directory."""
    return wpull.body.new_temp_file(directory=self._temp_dir, hint=hint)
Return new temp file.
def cmd(self, cmd_name):
    """Build the fully-qualified tarantool queue command name for this tube."""
    return f"{self.queue.lua_queue_name}.tube.{self.name}:{cmd_name}"
Returns tarantool queue command name for current tube.
def edges_to_path(edges):
    """Connect *edges* into a graph and return a topologically sorted path.

    Returns None when no edges are given.
    """
    if not edges:
        return None
    graph = edges_to_graph(edges)
    return nx.topological_sort(graph)
Connect edges and return a path.
def zoomset_cb(self, setting, value, chviewer, info):
    """Callback invoked when a channel window is zoomed; delegates to zoomset()."""
    return self.zoomset(chviewer, info.chinfo)
This callback is called when a channel window is zoomed.
def go_to(self, x, y, z, yaw, duration_s, relative=False, group_mask=ALL_GROUPS): self._send_packet(struct.pack('<BBBfffff', self.COMMAND_GO_TO, group_mask, relative, ...
Go to an absolute or relative position :param x: x (m) :param y: y (m) :param z: z (m) :param yaw: yaw (radians) :param duration_s: time it should take to reach the position (s) :param relative: True if x, y, z is relative to the current position :param group_mas...
def create_db_instance(self, params): if not self.connect_to_aws_rds(): return False try: database = self.rdsc.create_dbinstance( id=params['id'], allocated_storage=params['size'], instance_class='db.t1.micro', engin...
Create db instance
def inject_closure_values(func, **kwargs): wrapped_by = None if isinstance(func, property): fget, fset, fdel = func.fget, func.fset, func.fdel if fget: fget = fix_func(fget, **kwargs) if fset: fset = fix_func(fset, **kwargs) if fdel: fdel = fix_func(fdel, **kwargs) wrappe...
Returns a new function identical to the previous one except that it acts as though global variables named in `kwargs` have been closed over with the values specified in the `kwargs` dictionary. Works on properties, class/static methods and functions. This can be useful for mocking and other nefarious ...
def iterateBlocksBackFrom(block):
    """Generator yielding QTextBlocks from *block* back toward the document start.

    Yields at most MAX_SEARCH_OFFSET_LINES blocks.
    """
    yielded = 0
    while block.isValid() and yielded < MAX_SEARCH_OFFSET_LINES:
        yield block
        block = block.previous()
        yielded += 1
Generator, which iterates QTextBlocks from block until the Start of a document But, yields not more than MAX_SEARCH_OFFSET_LINES
def _cron_profile(): from os import path cronpath = path.expanduser("~/.cron_profile") if not path.isfile(cronpath): from os import getenv xmlpath = getenv("PYCI_XML") contents = ['source /usr/local/bin/virtualenvwrapper.sh', 'export PYCI_XML="{}"'.format(xmlp...
Sets up the .cron_profile file if it does not already exist.
def save(self, filename, strip_prefix=''): arg_dict = {} for param in self.values(): weight = param._reduce() if not param.name.startswith(strip_prefix): raise ValueError( "Prefix '%s' is to be striped before saving, but Parameter's " ...
Save parameters to file. Parameters ---------- filename : str Path to parameter file. strip_prefix : str, default '' Strip prefix from parameter names before saving.
def models_of_config(config):
    """Return the list of models from all resources in *config*.

    A resource wrapping a model (has ``model`` but no ``__table__``)
    contributes its model; any other resource is assumed to be a model itself.
    """
    models = []
    for resource in resources_of_config(config):
        is_wrapper = not hasattr(resource, '__table__') and hasattr(resource, 'model')
        models.append(resource.model if is_wrapper else resource)
    return models
Return list of models from all resources in config.
def _deduplicate_items(cls, items): "Deduplicates assigned paths by incrementing numbering" counter = Counter([path[:i] for path, _ in items for i in range(1, len(path)+1)]) if sum(counter.values()) == len(counter): return items new_items = [] counts = defaultdict(lam...
Deduplicates assigned paths by incrementing numbering
def get_day(self): year = super(BuildableDayArchiveView, self).get_year() month = super(BuildableDayArchiveView, self).get_month() day = super(BuildableDayArchiveView, self).get_day() fmt = self.get_day_format() dt = date(int(year), int(month), int(day)) return dt.strftim...
Return the day from the database in the format expected by the URL.
def path_fraction_id_offset(points, fraction, relative_offset=False): if not (0. <= fraction <= 1.0): raise ValueError("Invalid fraction: %.3f" % fraction) pts = np.array(points)[:, COLS.XYZ] lengths = np.linalg.norm(np.diff(pts, axis=0), axis=1) cum_lengths = np.cumsum(lengths) offset = cum...
Find the segment which corresponds to the fraction of the path length along the piecewise linear curve which is constructed from the set of points. Args: points: an iterable of indexable objects with indices 0, 1, 2 correspoding to 3D cartesian coordinates fraction: path length frac...
def ginput(self, data_set=0, **kwargs):
    """Pop up the figure for the specified data set and return pylab.ginput().

    kwargs are forwarded to pylab.ginput().
    """
    import warnings
    import matplotlib.cbook
    # mplDeprecation warnings triggered by ginput are noise here.
    warnings.filterwarnings("ignore", category=matplotlib.cbook.mplDeprecation)
    _s.tweaks.raise_figure_window(data_set + self['first_figure'])
    return _p.ginput(**kwargs)
Pops up the figure for the specified data set. Returns value from pylab.ginput(). kwargs are sent to pylab.ginput()
def read_count(self, space, start, end):
    """Return the normalized count of reads in the region [start, end) of *space*."""
    raw_count = sum(1 for _ in self._bam.fetch(space, start, end))
    return self._normalize(raw_count, self._total)
Retrieve the normalized read count in the provided region.