code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def check_in_out_dates(self):
    """Validate the record's check-in and check-out dates.

    Raises:
        ValidationError: when check-in precedes the order date, or
            check-out precedes check-in.
    """
    if self.checkout and self.checkin:
        # NOTE(review): the message says "current date" but the comparison
        # is against self.date_order — confirm the two are meant to coincide.
        if self.checkin < self.date_order:
            raise ValidationError(_('Check-in date should be greater than \ the current date.'))
        if self.checkout < self.checkin:
            raise ValidationError(_('Check-out date should be greater \ than Check-in date.'))
Constraint: the check-in date must not be earlier than the order date, and the check-out date must be later than the check-in date.
def get_sdb_by_id(self, sdb_id): sdb_resp = get_with_retry(self.cerberus_url + '/v2/safe-deposit-box/' + sdb_id, headers=self.HEADERS) throw_if_bad_response(sdb_resp) return sdb_resp.json()
Return the details for the given safe deposit box id Keyword arguments: sdb_id -- this is the id of the safe deposit box, not the path.
def flatten(self, df, column_name):
    """Explode a list-valued column into one row per element, keyed by md5.

    :param df: dataframe with an 'md5' column and a list-valued column
    :param column_name: name of the list-valued column to flatten
    :return: new DataFrame with columns ['md5', column_name]
    """
    rows = []
    for md5, value_list in zip(df['md5'], df[column_name]):
        for item in value_list:
            rows.append([md5, item])
    return pd.DataFrame(rows, columns=['md5', column_name])
Flatten a column in the dataframe that contains lists
def interact_GxG(pheno, snps1, snps2=None, K=None, covs=None):
    """Epistasis test between two sets of SNPs.

    Args:
        pheno: [N x 1] SP.array of 1 phenotype for N individuals
        snps1: [N x S1] SP.array of S1 SNPs for N individuals
        snps2: [N x S2] SP.array of S2 SNPs for N individuals
        K:     [N x N] SP.array of LMM-covariance/kinship coefficients
               (optional). If not provided, linear regression analysis
               is performed.
        covs:  [N x D] SP.array of D covariates for N individuals

    Returns:
        pv: [S2 x S1] SP.array of P values for epistasis tests between
            all SNPs in snps1 and snps2
    """
    # Bug fix: N must be computed *before* it is used to build the default
    # kinship matrix (the original read N before assigning it, raising
    # NameError whenever K was None).
    N = snps1.shape[0]
    if K is None:
        K = SP.eye(N)
    if snps2 is None:
        snps2 = snps1
    return interact_GxE(snps=snps1, pheno=pheno, env=snps2, covs=covs, K=K)
Epistasis test between two sets of SNPs Args: pheno: [N x 1] SP.array of 1 phenotype for N individuals snps1: [N x S1] SP.array of S1 SNPs for N individuals snps2: [N x S2] SP.array of S2 SNPs for N individuals K: [N x N] SP.array of LMM-covariance/kinship coefficients (optional) If not provided, then linear regression analysis is performed covs: [N x D] SP.array of D covariates for N individuals Returns: pv: [S2 x S1] SP.array of P values for epistasis tests between all SNPs in snps1 and snps2
def add_device_net(self, name, destname=None):
    """Add (move) a host network device into the running container.

    :param name: host-side interface name
    :param destname: optional name the interface should get inside the
        container; defaults to ``name``
    :return: True on success, False otherwise
    """
    if not self.running:
        return False
    # Wireless interfaces expose a phy80211 device and must be moved with
    # `iw phy ... set netns`; plain `ip link set ... netns` is only used
    # for ordinary interfaces below.
    if os.path.exists("/sys/class/net/%s/phy80211/name" % name):
        with open("/sys/class/net/%s/phy80211/name" % name) as fd:
            phy = fd.read().strip()
        if subprocess.call(['iw', 'phy', phy, 'set', 'netns', str(self.init_pid)]) != 0:
            return False
        if destname:
            # `iw` cannot rename while moving, so rename from inside the
            # container's network namespace after the move.
            def rename_interface(args):
                old, new = args
                return subprocess.call(['ip', 'link', 'set', 'dev', old, 'name', new])
            return self.attach_wait(rename_interface, (name, destname), namespaces=(CLONE_NEWNET)) == 0
        return True
    if not destname:
        destname = name
    if not os.path.exists("/sys/class/net/%s/" % name):
        return False
    # Move and rename in one `ip link` invocation for wired interfaces.
    return subprocess.call(['ip', 'link', 'set', 'dev', name, 'netns', str(self.init_pid), 'name', destname]) == 0
Add network device to running container.
def operations_happening_at_same_time_as( self, scheduled_operation: ScheduledOperation ) -> List[ScheduledOperation]: overlaps = self.query( time=scheduled_operation.time, duration=scheduled_operation.duration) return [e for e in overlaps if e != scheduled_operation]
Finds operations happening at the same time as the given operation. Args: scheduled_operation: The operation specifying the time to query. Returns: Scheduled operations that overlap with the given operation.
def resolve_widget(self, field):
    """Return the widget instance attached to a Field or BoundField.

    Arguments:
        field (Field or BoundField): A field instance.

    Returns:
        django.forms.widgets.Widget: widget retrieved from the field.
    """
    # A BoundField exposes the underlying form field under ``.field``.
    target = field.field if hasattr(field, 'field') else field
    return target.widget
Given a Field or BoundField, return widget instance. Todo: Raise an exception if given field object does not have a widget. Arguments: field (Field or BoundField): A field instance. Returns: django.forms.widgets.Widget: Retrieved widget from given field.
def jdn_to_gdate(jdn):
    """Convert a Julian day number to a Gregorian ``datetime.date``.

    Algorithm from 'Julian and Gregorian Day Numbers' by Peter Meyer.
    """
    acc = jdn + 68569
    quad_cent = (4 * acc) // 146097
    acc -= (146097 * quad_cent + 3) // 4
    year_est = (4000 * (acc + 1)) // 1461001
    acc = acc - (1461 * year_est) // 4 + 31
    month_est = (80 * acc) // 2447
    day = acc - (2447 * month_est) // 80
    acc = month_est // 11
    month = month_est + 2 - 12 * acc
    year = 100 * (quad_cent - 49) + year_est + acc
    return datetime.date(year, month, day)
Convert from the Julian day to the Gregorian day. Algorithm from 'Julian and Gregorian Day Numbers' by Peter Meyer. Return: day, month, year
def order(self, order): given = self.given surname = self.surname if order in (ORDER_MAIDEN_GIVEN, ORDER_GIVEN_MAIDEN): surname = self.maiden or self.surname given = ("1" + given) if given else "2" surname = ("1" + surname) if surname else "2" if order in (ORDER_SURNAME_GIVEN, ORDER_MAIDEN_GIVEN): return (surname, given) elif order in (ORDER_GIVEN_SURNAME, ORDER_GIVEN_MAIDEN): return (given, surname) else: raise ValueError("unexpected order: {}".format(order))
Returns name order key. Returns tuple with two strings that can be compared to other such tuple obtained from different name. Note that if you want locale-dependent ordering then you need to compare strings using locale-aware method (e.g. ``locale.strxfrm``). :param order: One of the ORDER_* constants. :returns: tuple of two strings
def to_json(self):
    """Serialize the wind condition to a plain dictionary."""
    keys = ('wind_speed', 'wind_direction', 'rain', 'snow_on_ground')
    return {key: getattr(self, key) for key in keys}
Convert the Wind Condition to a dictionary.
def get_patient_vcf(job, patient_dict): temp = job.fileStore.readGlobalFile(patient_dict['mutation_vcf'], os.path.join(os.getcwd(), 'temp.gz')) if is_gzipfile(temp): outfile = job.fileStore.writeGlobalFile(gunzip(temp)) job.fileStore.deleteGlobalFile(patient_dict['mutation_vcf']) else: outfile = patient_dict['mutation_vcf'] return outfile
Convenience function to get the vcf from the patient dict :param dict patient_dict: dict of patient info :return: The vcf :rtype: toil.fileStore.FileID
def datetime_to_time(date, time): if (255 in date) or (255 in time): raise RuntimeError("specific date and time required") time_tuple = ( date[0]+1900, date[1], date[2], time[0], time[1], time[2], 0, 0, -1, ) return _mktime(time_tuple)
Take the date and time 4-tuples and return the time in seconds since the epoch as a floating point number.
def add_service_spec(self, service_spec): assert service_spec is not None if service_spec.name in self.service_specs: raise ThriftCompilerError( 'Cannot define service "%s". That name is already taken.' % service_spec.name ) self.service_specs[service_spec.name] = service_spec
Registers the given ``ServiceSpec`` into the scope. Raises ``ThriftCompilerError`` if the name has already been used.
def process_documentline(line, nanopubs_metadata):
    """Process a SET DOCUMENT line in a BEL script.

    :param line: a line like ``SET DOCUMENT Name = "value"``
    :param nanopubs_metadata: dict to record the key/value pair into
    :return: the updated ``nanopubs_metadata`` dict
    """
    # Raw string fixes the invalid escape sequences (\s, \w) that are
    # deprecated in non-raw pattern strings.
    matches = re.match(r'SET DOCUMENT\s+(\w+)\s+=\s+"?(.*?)"?$', line)
    key = matches.group(1)
    val = matches.group(2)
    nanopubs_metadata[key] = val
    return nanopubs_metadata
Process SET DOCUMENT line in BEL script
def delete_role_policy(role_name, policy_name, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) _policy = get_role_policy(role_name, policy_name, region, key, keyid, profile) if not _policy: return True try: conn.delete_role_policy(role_name, policy_name) log.info('Successfully deleted policy %s for IAM role %s.', policy_name, role_name) return True except boto.exception.BotoServerError as e: log.debug(e) log.error('Failed to delete policy %s for IAM role %s.', policy_name, role_name) return False
Delete a role policy. CLI Example: .. code-block:: bash salt myminion boto_iam.delete_role_policy myirole mypolicy
def expand_uri(self, **kwargs): kwargs = dict([(k, v if v != 0 else '0') for k, v in kwargs.items()]) return uritemplate.expand(self.link.uri, kwargs)
Returns the template uri expanded with the current arguments
def from_key(api_key, **kwargs): h = Heroku(**kwargs) h.authenticate(api_key) return h
Returns an authenticated Heroku instance, via API Key.
def addConcept(self, conceptUri, weight, label=None, conceptType=None):
    """Add a relevant concept to the topic page.

    @param conceptUri: uri of the concept to be added
    @param weight: importance of the provided concept (typically in range 1 - 50)
    @param label: optional human-readable label for the concept
    @param conceptType: optional type of the concept
    """
    # Fixed: `is not None` instead of `!= None` (PEP 8), and the assert
    # message now matches the accepted types (the check allows floats,
    # not only integers).
    assert isinstance(weight, (float, int)), "weight value has to be a number (int or float)"
    concept = {"uri": conceptUri, "wgt": weight}
    if label is not None:
        concept["label"] = label
    if conceptType is not None:
        concept["type"] = conceptType
    self.topicPage["concepts"].append(concept)
add a relevant concept to the topic page @param conceptUri: uri of the concept to be added @param weight: importance of the provided concept (typically in range 1 - 50)
def get_deposit_address(self, currency): self._validate_currency(currency) self._log('get deposit address for {}'.format(currency)) coin_name = self.major_currencies[currency] return self._rest_client.post( endpoint='/{}_deposit_address'.format(coin_name) )
Return the deposit address for the given major currency. :param currency: Major currency name in lowercase (e.g. "btc", "eth"). :type currency: str | unicode :return: Deposit address. :rtype: str | unicode
def _json_body_(cls): json = [] for series_name, data in six.iteritems(cls._datapoints): for point in data: json_point = { "measurement": series_name, "fields": {}, "tags": {}, "time": getattr(point, "time") } for field in cls._fields: value = getattr(point, field) if value is not None: json_point['fields'][field] = value for tag in cls._tags: json_point['tags'][tag] = getattr(point, tag) json.append(json_point) return json
Return the JSON body of given datapoints. :return: JSON body of these datapoints.
def session_commit(self, session): if not hasattr(session, 'meepo_unique_id'): self.logger.debug("skipped - session_commit") return self.logger.debug("%s - session_commit" % session.meepo_unique_id) self._session_pub(session) signal("session_commit").send(session) self._session_del(session)
Send session_commit signal in sqlalchemy ``before_commit``. This marks the success of session so the session may enter commit state.
def register_admin(app, admin): category = 'Knowledge' admin.category_icon_classes[category] = "fa fa-mortar-board" admin.add_view( KnowledgeAdmin(app, KnwKB, db.session, name='Knowledge Base', category=category, endpoint="kb") ) admin.add_view( KnwKBRVALAdmin(app, KnwKBRVAL, db.session, name="Knowledge Mappings", category=category, endpoint="kbrval") )
Called on app initialization to register administration interface.
def convert(self, imtls, nsites, idx=0): curves = numpy.zeros(nsites, imtls.dt) for imt in curves.dtype.names: curves_by_imt = curves[imt] for sid in self: curves_by_imt[sid] = self[sid].array[imtls(imt), idx] return curves
Convert a probability map into a composite array of length `nsites` and dtype `imtls.dt`. :param imtls: DictArray instance :param nsites: the total number of sites :param idx: index on the z-axis (default 0)
def change_profile(self, profile_name): self._server_side_completer = self._create_server_side_completer( session=botocore.session.Session(profile=profile_name))
Change the profile used for server side completions.
def sortBy(self, val=None): if val is not None: if _(val).isString(): return self._wrap(sorted(self.obj, key=lambda x, *args: x.get(val))) else: return self._wrap(sorted(self.obj, key=val)) else: return self._wrap(sorted(self.obj))
Sort the object's values by a criterion produced by an iterator.
def compose(*funcs):
    """Compose functions right-to-left: ``compose(f, g)(x) == f(g(x))``."""
    def composed(value):
        for func in reversed(funcs):
            value = func(value)
        return value
    return composed
compose a list of functions
def info(args): " Show information about site. " site = find_site(args.PATH) print_header("%s -- install information" % site.get_name()) LOGGER.debug(site.get_info(full=True)) return True
Show information about site.
def register(self, intent):
    """Register a new intent with the API and return it with its ID set.

    Falls back to updating an existing intent when the API reports a
    409 name conflict.
    """
    response = self.api.post_intent(intent.serialize)
    # NOTE(review): bare print() calls look like leftover debug output;
    # consider replacing them with proper logging.
    print(response)
    print()
    if response['status']['code'] == 200:
        intent.id = response['id']
    elif response['status']['code'] == 409:
        # Name already taken: adopt the existing intent's id and update it.
        intent.id = next(i.id for i in self.api.agent_intents if i.name == intent.name)
        self.update(intent)
    return intent
Registers a new intent and returns the Intent object with an ID
def set_cell(self, index, value): if self._sort: exists, i = sorted_exists(self._index, index) if not exists: self._insert_row(i, index) else: try: i = self._index.index(index) except ValueError: i = len(self._index) self._add_row(index) self._data[i] = value
Sets the value of a single cell. If the index is not in the current index then a new index will be created. :param index: index value :param value: value to set :return: nothing
def build(self, shutit): if shutit.build['delivery'] in ('docker','dockerfile'): if shutit.get_current_shutit_pexpect_session_environment().install_type == 'apt': shutit.add_to_bashrc('export DEBIAN_FRONTEND=noninteractive') if not shutit.command_available('lsb_release'): shutit.install('lsb-release') shutit.lsb_release() elif shutit.get_current_shutit_pexpect_session_environment().install_type == 'yum': shutit.send('yum update -y', timeout=9999, exit_values=['0', '1']) shutit.pause_point('Anything you want to do to the target host ' + 'before the build starts?', level=2) return True
Initializes target ready for build and updating package management if in container.
def url(self, **kwargs):
    """Return the asset file URL with kwargs serialized as query params.

    Usage:
        >>> my_asset.url()
        "//images.contentful.com/spaces/foobar/..."
        >>> my_asset.url(w=120, h=160)
        "//images.contentful.com/spaces/foobar/...?w=120&h=160"
    """
    base = self.fields(self._locale()).get('file', {}).get('url', '')
    params = '&'.join('{0}={1}'.format(key, value) for key, value in kwargs.items())
    return '{0}?{1}'.format(base, params) if params else base
Returns a formatted URL for the asset's File with serialized parameters. Usage: >>> my_asset.url() "//images.contentful.com/spaces/foobar/..." >>> my_asset.url(w=120, h=160) "//images.contentful.com/spaces/foobar/...?w=120&h=160"
def _validate_integer(name, val, min_val=0): msg = "'{name:s}' must be an integer >={min_val:d}".format(name=name, min_val=min_val) if val is not None: if is_float(val): if int(val) != val: raise ValueError(msg) val = int(val) elif not (is_integer(val) and val >= min_val): raise ValueError(msg) return val
Checks whether the 'name' parameter for parsing is either an integer OR float that can SAFELY be cast to an integer without losing accuracy. Raises a ValueError if that is not the case. Parameters ---------- name : string Parameter name (used for error reporting) val : int or float The value to check min_val : int Minimum allowed value (val < min_val will result in a ValueError)
def multiple_sequence_alignment(seqs_fp, threads=1): logger = logging.getLogger(__name__) logger.info('multiple_sequence_alignment seqs file %s' % seqs_fp) if threads == 0: threads = -1 if stat(seqs_fp).st_size == 0: logger.warning('msa failed. file %s has no reads' % seqs_fp) return None msa_fp = seqs_fp + '.msa' params = ['mafft', '--quiet', '--preservecase', '--parttree', '--auto', '--thread', str(threads), seqs_fp] sout, serr, res = _system_call(params, stdoutfilename=msa_fp) if not res == 0: logger.info('msa failed for file %s (maybe only 1 read?)' % seqs_fp) logger.debug('stderr : %s' % serr) return None return msa_fp
Perform multiple sequence alignment on FASTA file using MAFFT. Parameters ---------- seqs_fp: string filepath to FASTA file for multiple sequence alignment threads: integer, optional number of threads to use. 0 to use all threads Returns ------- msa_fp : str name of output alignment file or None if error encountered
def delete_user(name, runas=None): if runas is None and not salt.utils.platform.is_windows(): runas = salt.utils.user.get_user() res = __salt__['cmd.run_all']( [RABBITMQCTL, 'delete_user', name], reset_system_locale=False, python_shell=False, runas=runas) msg = 'Deleted' return _format_response(res, msg)
Deletes a user via rabbitmqctl delete_user. CLI Example: .. code-block:: bash salt '*' rabbitmq.delete_user rabbit_user
def md5(self): target = '{}{}'.format( util.md5_object(bytes().join(e._bytes() for e in self.entities)), self.vertices.md5()) return target
An MD5 hash of the current vertices and entities. Returns ------------ md5: str, two appended MD5 hashes
def load_from_file(cls, file_path: str):
    """Read a JSON file and reconstruct an instance via ``cls.decode``."""
    with open(file_path, "r") as source:
        payload = json.load(source)
    return cls.decode(data=payload)
Read and reconstruct the data from a JSON file.
def returnPorts(self):
    """Deallocate the ports previously obtained on the current machine."""
    if self._gotPorts:
        # Bug fix: `map()` returns a lazy iterator in Python 3, so the
        # original never actually released any port.  Iterate explicitly.
        for port in self.ports:
            portpicker.return_port(port)
        self._gotPorts = False
        self.ports = []
deallocate specific ports on the current machine
def _format_executable(lines, element, spacer=""): rlines = [] rlines.append(element.signature) _format_summary(rlines, element) rlines.append("") rlines.append("PARAMETERS") for p in element.ordered_parameters: _format_value_element(rlines, p) rlines.append("") _format_generic(rlines, element, ["summary"]) if len(element.types) > 0: rlines.append("\nEMBEDDED TYPES") for key, value in list(element.types.items()): _format_type(rlines, value, " ") if len(element.executables) > 0: rlines.append("\nEMBEDDED EXECUTABLES") for key, value in list(element.executables.items()): _format_executable(rlines, value, " ") lines.extend([spacer + l for l in rlines])
Performs formatting specific to a Subroutine or Function code element for relevant docstrings.
def run(self): self.tap = Quartz.CGEventTapCreate( Quartz.kCGSessionEventTap, Quartz.kCGHeadInsertEventTap, Quartz.kCGEventTapOptionDefault, Quartz.CGEventMaskBit(Quartz.kCGEventKeyDown) | Quartz.CGEventMaskBit(Quartz.kCGEventKeyUp) | Quartz.CGEventMaskBit(Quartz.kCGEventFlagsChanged), self.handler, None) loopsource = Quartz.CFMachPortCreateRunLoopSource(None, self.tap, 0) loop = Quartz.CFRunLoopGetCurrent() Quartz.CFRunLoopAddSource(loop, loopsource, Quartz.kCFRunLoopDefaultMode) Quartz.CGEventTapEnable(self.tap, True) while self.listening: Quartz.CFRunLoopRunInMode(Quartz.kCFRunLoopDefaultMode, 5, False)
Creates a listener and loops while waiting for an event. Intended to run as a background thread.
def get_node_instances(nodelist, instances): context = _get_main_context(nodelist) if TemplateAdapter is not None and isinstance(nodelist, TemplateAdapter): nodelist = nodelist.template return _scan_nodes(nodelist, context, instances)
Find the nodes of a given instance. In contrast to the standard ``template.nodelist.get_nodes_by_type()`` method, this also looks into ``{% extends %}`` and ``{% include .. %}`` nodes to find all possible nodes of the given type. :param instances: A class Type, or tuple of types to find. :param nodelist: The Template object, or nodelist to scan. :returns: A list of Node objects which are instances of the given `instances` types. :rtype: list
def _convert_nonstring_categoricals(self, param_dict): return {name: (self.categorical_mappings_[name][val] if name in self.categorical_mappings_ else val) for (name, val) in param_dict.items()}
Apply the self.categorical_mappings_ mappings where necessary.
def beta_pdf(x, a, b):
    """Beta distribution probability density function."""
    normalizer = 1 / beta(a, b)
    return normalizer * x ** (a - 1) * (1 - x) ** (b - 1)
Beta distribution probability density function.
def event_return(events):
    """Return events to the PostgreSQL server.

    Requires that configuration be enabled via the 'event_return' option
    in the master config.

    :param events: iterable of event dicts with 'tag' and 'data' keys
    """
    with _get_serv(events, commit=True) as cur:
        for event in events:
            tag = event.get('tag', '')
            data = event.get('data', '')
            # Bug fix: the original executed `cur.execute(sql, ...)` with
            # `sql` never defined (NameError) and discarded the result into
            # `sql`.  Define the INSERT statement explicitly.
            sql = ('INSERT INTO salt_events (tag, data, master_id, alter_time) '
                   'VALUES (%s, %s, %s, to_timestamp(%s))')
            cur.execute(sql, (tag, psycopg2.extras.Json(data),
                              __opts__['id'], time.time()))
Return event to Pg server Requires that configuration be enabled via 'event_return' option in master config.
def draw(self, gdefs, theme):
    """Attach the plot theme to each guide definition, then draw them.

    Parameters
    ----------
    gdefs : list of guide_legend|guide_colorbar
        guide definitions
    theme : theme
        Plot theme

    Returns
    -------
    out : list of matplotlib.offsetbox.Offsetbox
        A drawing of each legend
    """
    for gdef in gdefs:
        gdef.theme = theme
        gdef._set_defaults()
    return [gdef.draw() for gdef in gdefs]
Draw out each guide definition Parameters ---------- gdefs : list of guide_legend|guide_colorbar guide definitions theme : theme Plot theme Returns ------- out : list of matplotlib.offsetbox.Offsetbox A drawing of each legend
def get_free_port(ports=None): if ports is None: with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as _socket: _socket.bind(('', 0)) _, port = _socket.getsockname() return port for port in ports: with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as _socket: try: _socket.bind(('', port)) return port except socket.error as ex: if ex.errno not in (48, 98): raise raise RuntimeError("could not find a free port")
Get a free port. Parameters ---------- ports : iterable ports to check (obtain a random port by default) Returns ------- port : int a free port
def run( paths, output=_I_STILL_HATE_EVERYTHING, recurse=core.flat, sort_by=None, ls=core.ls, stdout=stdout, ): if output is _I_STILL_HATE_EVERYTHING: output = core.columnized if stdout.isatty() else core.one_per_line if sort_by is None: if output == core.as_tree: def sort_by(thing): return ( thing.parent(), thing.basename().lstrip(string.punctuation).lower(), ) else: def sort_by(thing): return thing def _sort_by(thing): return not getattr(thing, "_always_sorts_first", False), sort_by(thing) contents = [ path_and_children for path in paths or (project.from_path(FilePath(".")),) for path_and_children in recurse(path=path, ls=ls) ] for line in output(contents, sort_by=_sort_by): stdout.write(line) stdout.write("\n")
Project-oriented directory and file information lister.
def _find_files(self): files = [] for ext in self.extensions: ext_files = util.find_files(self.root, "*" + ext) log.debug("found {} '*{}' files in '{}'".format( len(ext_files), ext, self.root) ) files.extend(ext_files) return files
Find files recursively in the root path using provided extensions. :return: list of absolute file paths
def _function_add_return_edge(self, return_from_addr, return_to_addr, function_addr): return_to_node = self._nodes.get(return_to_addr, None) if return_to_node is None: return_to_snippet = self._to_snippet(addr=return_to_addr, base_state=self._base_state) to_outside = False else: return_to_snippet = self._to_snippet(cfg_node=return_to_node) to_outside = return_to_node.function_address != function_addr self.kb.functions._add_return_from_call(function_addr, return_from_addr, return_to_snippet, to_outside=to_outside)
Generate CodeNodes for return_to_addr, add this node for function to function manager generating new edge :param int return_from_addr: target address :param int return_to_addr: target address :param int function_addr: address of function :return: None
def commit(self):
    """Commit the active batch, dispatching handlers in priority order.

    Requires an active batch (see ``start()``); the batch is cleared
    after all handlers have been invoked.
    """
    assert self.batch is not None, "No active batch, call start() first"
    logger.debug("Comitting batch from %d sources...", len(self.batch))
    # Order sources by configured priority (lower value processed first).
    by_priority = []
    for name in self.batch.keys():
        priority = self.priorities.get(name, self.default_priority)
        by_priority.append((priority, name))
    for priority, name in sorted(by_priority, key=lambda key: key[0]):
        logger.debug("Processing items from '%s' (priority=%d)...", name, priority)
        items = self.batch[name]
        for handlers in items.values():
            for agg, handler in handlers:
                try:
                    # A None aggregate means the handler takes no argument.
                    if agg is None:
                        handler()
                    else:
                        handler(agg)
                except Exception as error:
                    # Best-effort: one failing handler must not abort the batch.
                    logger.exception("Error while invoking handler.")
    self.batch = None
    logger.debug("Batch committed.")
Commit a batch.
def get_file_hash(file_path, block_size=1024, hasher=None):
    """Generate a hash for the given file.

    :param file_path: Path to file
    :type file_path: str
    :param block_size: Size of block to be read at once (default: 1024)
    :type block_size: int
    :param hasher: Use specific hasher, defaults to md5 (default: None)
    :type hasher: _hashlib.HASH
    :return: Hex digest of the file contents
    :rtype: str
    """
    if hasher is None:
        hasher = hashlib.md5()
    with open(file_path, 'rb') as f:
        # iter(callable, sentinel) replaces the manual while/break loop;
        # iteration stops on the first empty read (EOF).
        for chunk in iter(lambda: f.read(block_size), b''):
            hasher.update(chunk)
    return hasher.hexdigest()
Generate hash for given file :param file_path: Path to file :type file_path: str :param block_size: Size of block to be read at once (default: 1024) :type block_size: int :param hasher: Use specific hasher, defaults to md5 (default: None) :type hasher: _hashlib.HASH :return: Hash of file :rtype: str
def is_ini_file(filename, show_warnings=False):
    """Check whether the configuration file is in INI format.

    :param filename: path of the file to probe
    :param show_warnings: unused here; kept for interface compatibility
    :return: True if the file parses as a non-empty INI config, else False
    """
    try:
        config_dict = load_config(filename, file_type="ini")
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed SystemExit
        # and KeyboardInterrupt); any parse failure means "not INI".
        return False
    return config_dict != {}
Check whether the configuration file type is INI. Return a boolean indicating whether the file is in INI format or not.
def _is_finished(self, as_of): if self.is_one_off(): last_billing_cycle = self.get_billing_cycles()[self.total_billing_cycles - 1] return last_billing_cycle.date_range.upper <= as_of else: return False
Have the specified number of billing cycles been completed? If so, we should not be enacting this RecurringCost.
def claim_keys(self, key_request, timeout=None):
    """Claim one-time keys for use in pre-key messages.

    Args:
        key_request (dict): The keys to be claimed. Format should be
            <user_id>: { <device_id>: <algorithm> }.
        timeout (int): Optional. The time (in milliseconds) to wait when
            downloading keys from remote servers.
    """
    payload = dict(one_time_keys=key_request)
    if timeout:
        payload["timeout"] = timeout
    return self._send("POST", "/keys/claim", content=payload)
Claims one-time keys for use in pre-key messages. Args: key_request (dict): The keys to be claimed. Format should be <user_id>: { <device_id>: <algorithm> }. timeout (int): Optional. The time (in milliseconds) to wait when downloading keys from remote servers.
def save(self, entity): assert isinstance(entity, Entity), " entity must have an instance of Entity" return self.__collection.save(entity.as_dict())
Maps entity to dict and returns future
def get_column(column_name, node, context): column = try_get_column(column_name, node, context) if column is None: selectable = get_node_selectable(node, context) raise AssertionError( u'Column "{}" not found in selectable "{}". Columns present are {}. ' u'Context is {}.'.format(column_name, selectable.original, [col.name for col in selectable.c], context)) return column
Get a column by name from the selectable. Args: column_name: str, name of the column to retrieve. node: SqlNode, the node the column is being retrieved for. context: CompilationContext, compilation specific metadata. Returns: column, the SQLAlchemy column if found. Raises an AssertionError otherwise.
def remove(env, securitygroup_id, rule_id): mgr = SoftLayer.NetworkManager(env.client) ret = mgr.remove_securitygroup_rule(securitygroup_id, rule_id) if not ret: raise exceptions.CLIAbort("Failed to remove security group rule") table = formatting.Table(REQUEST_BOOL_COLUMNS) table.add_row([ret['requestId']]) env.fout(table)
Remove a rule from a security group.
def clear(self, username, project): method = 'DELETE' url = ('/project/{username}/{project}/build-cache?' 'circle-token={token}'.format(username=username, project=project, token=self.client.api_token)) json_data = self.client.request(method, url) return json_data
Clear the cache for given project.
def render_pdf_file_to_image_files__ghostscript_png(pdf_file_name, root_output_file_path,
                                                    res_x=150, res_y=150):
    """Use Ghostscript to render a PDF file to .png images.

    The root_output_file_path is prepended to all the output files, which
    have numbers and extensions added.  Return the command output.

    :param res_x: horizontal resolution in dpi
    :param res_y: vertical resolution in dpi
    """
    if not gs_executable:
        init_and_test_gs_executable(exit_on_fail=True)
    # Bug fix: res_x/res_y default to ints, so the original
    # `"-r"+res_x+"x"+res_y` raised TypeError; coerce with str().
    command = [gs_executable, "-dBATCH", "-dNOPAUSE", "-sDEVICE=pnggray",
               "-r" + str(res_x) + "x" + str(res_y),
               "-sOutputFile=" + root_output_file_path + "-%06d.png",
               pdf_file_name]
    comm_output = get_external_subprocess_output(command, env=gs_environment)
    return comm_output
Use Ghostscript to render a PDF file to .png images. The root_output_file_path is prepended to all the output files, which have numbers and extensions added. Return the command output.
def lspcn(body, et, abcorr): body = stypes.stringToCharP(body) et = ctypes.c_double(et) abcorr = stypes.stringToCharP(abcorr) return libspice.lspcn_c(body, et, abcorr)
Compute L_s, the planetocentric longitude of the sun, as seen from a specified body. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lspcn_c.html :param body: Name of central body. :type body: str :param et: Epoch in seconds past J2000 TDB. :type et: float :param abcorr: Aberration correction. :type abcorr: str :return: planetocentric longitude of the sun :rtype: float
def get(self, s3_path, destination_local_path): (bucket, key) = self._path_to_bucket_and_key(s3_path) self.s3.meta.client.download_file(bucket, key, destination_local_path)
Get an object stored in S3 and write it to a local path.
def get_new_ip(self):
    """Obtain a new usable Tor exit IP, retrying up to ``new_ip_max_attempts``.

    :returns: the new usable IP address
    :raises TorIpError: when no usable IP could be obtained
    """
    attempts = 0
    while True:
        if attempts == self.new_ip_max_attempts:
            raise TorIpError("Failed to obtain a new usable Tor IP")
        attempts += 1
        try:
            current_ip = self.get_current_ip()
        except (RequestException, TorIpError):
            # Could not even read the current IP; request a new circuit
            # and try again.
            self._obtain_new_ip()
            continue
        if not self._ip_is_usable(current_ip):
            self._obtain_new_ip()
            continue
        # Record the IP so it is not immediately reused.
        self._manage_used_ips(current_ip)
        break
    return current_ip
Try to obtain new a usable TOR IP. :returns bool :raises TorIpError
def edit(community): form = EditCommunityForm(formdata=request.values, obj=community) deleteform = DeleteCommunityForm() ctx = mycommunities_ctx() ctx.update({ 'form': form, 'is_new': False, 'community': community, 'deleteform': deleteform, }) if form.validate_on_submit(): for field, val in form.data.items(): setattr(community, field, val) file = request.files.get('logo', None) if file: if not community.save_logo(file.stream, file.filename): form.logo.errors.append(_( 'Cannot add this file as a logo. Supported formats: ' 'PNG, JPG and SVG. Max file size: 1.5 MB.')) if not form.logo.errors: db.session.commit() flash("Community successfully edited.", category='success') return redirect(url_for('.edit', community_id=community.id)) return render_template( current_app.config['COMMUNITIES_EDIT_TEMPLATE'], **ctx )
Create or edit a community.
def getcomments(self):
    """Return the list of comment dictionaries for this bug from Bugzilla."""
    response = self.bugzilla.get_comments([self.bug_id])
    return response['bugs'][str(self.bug_id)]['comments']
Returns an array of comment dictionaries for this bug
def worklogs(self, issue): r_json = self._get_json('issue/' + str(issue) + '/worklog') worklogs = [Worklog(self._options, self._session, raw_worklog_json) for raw_worklog_json in r_json['worklogs']] return worklogs
Get a list of worklog Resources from the server for an issue. :param issue: ID or key of the issue to get worklogs from :rtype: List[Worklog]
def delete(self, key_id=None): url = self.bitbucket.url('DELETE_SSH_KEY', key_id=key_id) return self.bitbucket.dispatch('DELETE', url, auth=self.bitbucket.auth)
Delete one of the ssh keys associated with your account. Please use with caution as there is NO confirmation and NO undo.
def is_answer_valid(self, ans):
    """Return True if *ans* is a 1-based choice index given as a string."""
    valid = {str(index) for index in range(1, len(self.choices) + 1)}
    return ans in valid
Validate user's answer against available choices.
def mode(self, mode):
    """Set the thermostat's operation mode."""
    _LOGGER.debug("Setting new mode: %s", mode)
    # Leaving Boost clears the boost flag before applying the new mode.
    if self.mode == Mode.Boost and mode != Mode.Boost:
        self.boost = False
    if mode == Mode.Boost:
        self.boost = True
        return
    elif mode == Mode.Away:
        end = datetime.now() + self._away_duration
        return self.set_away(end, self._away_temp)
    elif mode == Mode.Closed:
        # 0x40 selects manual mode; temperatures are encoded in half-degrees.
        return self.set_mode(0x40 | int(EQ3BT_OFF_TEMP * 2))
    elif mode == Mode.Open:
        return self.set_mode(0x40 | int(EQ3BT_ON_TEMP * 2))
    if mode == Mode.Manual:
        # Clamp the stored target temperature into the valid range.
        temperature = max(min(self._target_temperature, self.max_temp), self.min_temp)
        return self.set_mode(0x40 | int(temperature * 2))
    else:
        # Any other value falls back to auto mode (0).
        return self.set_mode(0)
Set the operation mode.
def corr(self): cov = self.cov() N = cov.shape[0] corr = ndarray((N,N)) for r in range(N): for c in range(r): corr[r,c] = corr[c,r] = cov[r,c]/sqrt(cov[r,r]*cov[c,c]) corr[r,r] = 1. return corr
The correlation matrix
def _find_git_info(self, gitdir): res = {'remotes': None, 'tag': None, 'commit': None, 'dirty': None} try: logger.debug('opening %s as git.Repo', gitdir) repo = Repo(path=gitdir, search_parent_directories=False) res['commit'] = repo.head.commit.hexsha res['dirty'] = repo.is_dirty(untracked_files=True) res['remotes'] = {} for rmt in repo.remotes: urls = [u for u in rmt.urls] if len(urls) > 0: res['remotes'][rmt.name] = urls[0] for tag in repo.tags: if tag.commit.hexsha == res['commit']: res['tag'] = tag.name except Exception: logger.debug('Exception getting git information', exc_info=True) return res
Find information about the git repository, if this file is in a clone. :param gitdir: path to the git repo's .git directory :type gitdir: str :returns: information about the git clone :rtype: dict
def widget_status(self):
    """Return ``[name, status]`` pairs for every widget in the widget list."""
    return [[widget.name, widget.status] for widget in self.widgetlist]
This method will return the status of all of the widgets in the widget list
def blast_seqs(seqs, blast_constructor, blast_db=None, blast_mat_root=None, params={}, add_seq_names=True, out_filename=None, WorkingDir=None, SuppressStderr=None, SuppressStdout=None, input_handler=None, HALT_EXEC=False ): if blast_db: params["-d"] = blast_db if out_filename: params["-o"] = out_filename ih = input_handler or guess_input_handler(seqs, add_seq_names) blast_app = blast_constructor( params=params, blast_mat_root=blast_mat_root, InputHandler=ih, WorkingDir=WorkingDir, SuppressStderr=SuppressStderr, SuppressStdout=SuppressStdout, HALT_EXEC=HALT_EXEC) return blast_app(seqs)
Blast list of sequences. seqs: either file name or list of sequence objects or list of strings or single multiline string containing sequences. WARNING: DECISION RULES FOR INPUT HANDLING HAVE CHANGED. Decision rules for data are as follows. If it's s list, treat as lines, unless add_seq_names is true (in which case treat as list of seqs). If it's a string, test whether it has newlines. If it doesn't have newlines, assume it's a filename. If it does have newlines, it can't be a filename, so assume it's a multiline string containing sequences. If you want to skip the detection and force a specific type of input handler, use input_handler='your_favorite_handler'. add_seq_names: boolean. if True, sequence names are inserted in the list of sequences. if False, it assumes seqs is a list of lines of some proper format that the program can handle
def url_to_attrs_dict(url, url_attr):
    """Sanitize url dict as used in django-bootstrap3 settings.

    ``url`` may be a plain URL string, or a dict with a mandatory
    "url" key and optional "crossorigin"/"integrity" keys.

    :param url: URL string or configuration dict
    :param url_attr: key under which the URL is stored in the result
    :returns: dict mapping ``url_attr`` to the URL, plus any
        crossorigin/integrity attributes that were set
    :raises BootstrapError: if ``url`` is neither a string nor a dict
        with a "url" key
    """
    result = dict()
    if isinstance(url, six.string_types):
        # Plain string: no extra attributes to carry over.  The previous
        # implementation fell through and called url.get() on the
        # string, which raised AttributeError.
        result[url_attr] = url
        return result
    try:
        url_value = url["url"]
    except TypeError:
        raise BootstrapError(
            'Function "url_to_attrs_dict" expects a string or a dict with key "url".'
        )
    crossorigin = url.get("crossorigin", None)
    integrity = url.get("integrity", None)
    if crossorigin:
        result["crossorigin"] = crossorigin
    if integrity:
        result["integrity"] = integrity
    result[url_attr] = url_value
    return result
Sanitize url dict as used in django-bootstrap3 settings.
def init_backend(self, *args, **kwargs):
    """Initialize the backend.

    Honors the keyword options ``use_memory_cache`` (default True)
    and ``use_disk_cache`` (default False), then loads the model and
    marks the backend handle itself as unpicklable.
    """
    self.model.attrs = {}
    use_memory = kwargs.get('use_memory_cache', True)
    self.use_memory_cache = use_memory
    if use_memory:
        self.init_memory_cache()
    use_disk = kwargs.get('use_disk_cache', False)
    self.use_disk_cache = use_disk
    if use_disk:
        self.init_disk_cache()
    self.load_model()
    # keep the backend handle out of pickled state
    self.model.unpicklable += ['_backend']
Initialize the backend.
def draw(self, renderer):
    """Draw the children.

    Rebuilds the dpi scaling transform for the current renderer, then
    delegates drawing to each child artist and clears the stale flag.
    """
    scale = renderer.points_to_pixels(1.)
    self.dpi_transform.clear()
    self.dpi_transform.scale(scale, scale)
    for child in self._children:
        child.draw(renderer)
    self.stale = False
Draw the children
def get_category_aliases_under(parent_alias=None):
    """Return aliases of categories directly under the given parent.

    Could be useful to pass to
    `ModelWithCategory.enable_category_lists_editor` in the
    `additional_parents_aliases` parameter.

    :param str|None parent_alias: Parent alias, or None for categories under root
    :rtype: list
    :return: a list of category aliases
    """
    children = get_cache().get_children_for(parent_alias, only_with_aliases=True)
    return [child.alias for child in children]
Returns a list of category aliases under the given parent. Could be useful to pass to `ModelWithCategory.enable_category_lists_editor` in `additional_parents_aliases` parameter. :param str|None parent_alias: Parent alias or None to categories under root :rtype: list :return: a list of category aliases
def unregister(self, observer):
    """Detach an observer so it stops receiving change notifications.

    :param UpdatesObserver observer: observer to remove from this
        manager's observer list; its ``manager`` reference is cleared
    """
    observers = self.observer_manager.observers
    observers.remove(observer)
    observer.manager = None
Remove the observer from the observers list. It will no longer receive notifications when changes occur. :param UpdatesObserver observer: Observer that will no longer receive notifications when changes occur.
def _find_metadata_vars(self, ds, refresh=False): if self._metadata_vars.get(ds, None) and refresh is False: return self._metadata_vars[ds] self._metadata_vars[ds] = [] for name, var in ds.variables.items(): if name in self._find_ancillary_vars(ds) or name in self._find_coord_vars(ds): continue if name in ('platform_name', 'station_name', 'instrument_name', 'station_id', 'platform_id', 'surface_altitude'): self._metadata_vars[ds].append(name) elif getattr(var, 'cf_role', '') != '': self._metadata_vars[ds].append(name) elif getattr(var, 'standard_name', None) is None and len(var.dimensions) == 0: self._metadata_vars[ds].append(name) return self._metadata_vars[ds]
Returns a list of netCDF variable instances for those that are likely metadata variables :param netCDF4.Dataset ds: An open netCDF dataset :param bool refresh: if refresh is set to True, the cache is invalidated. :rtype: list :return: List of variable names (str) that are likely metadata variable candidates.
def speak(self):
    """Announce this helper by name, honoring quiet mode.

    When ``self.quiet`` is False the helper name is logged via
    ``bot.info`` and the client-specific ``_speak`` hook is invoked;
    implement ``_speak`` in your client to announce additional things.
    """
    if self.quiet is not False:
        # quiet mode (any value other than the literal False)
        # suppresses the announcement entirely
        return
    bot.info('[helper|%s]' % (self.name))
    self._speak()
A function for the helper to announce itself, depending on the level specified. If you want your client to announce additional things here, implement the `_speak` method for your client.
def get_form(self, form, name):
    """Instantiate the composite form registered under ``name``.

    Bound data and files are forwarded from the parent form only when
    the parent form is itself bound.
    """
    kwargs = self.get_kwargs(form, name)
    form_class = self.get_form_class(form, name)
    if form.is_bound:
        data, files = form.data, form.files
    else:
        data, files = None, None
    return form_class(data=data, files=files, **kwargs)
Get an instance of the form.
def data_gen(n_ops=100):
    """Yield data forever, optionally burning compute cycles.

    Parameters
    ----------
    n_ops : int, default=100
        Number of operations to run between yielding data.

    Yields
    ------
    dict
        Looks like a machine learning sample: 'X' is the result of
        ``costly_function`` on a random 64x64 array and 'y' is a
        single random integer target in [0, 10).
    """
    while True:
        features = np.random.uniform(size=(64, 64))
        yield {'X': costly_function(features, n_ops),
               'y': np.random.randint(10, size=(1,))}
Yield data, while optionally burning compute cycles. Parameters ---------- n_ops : int, default=100 Number of operations to run between yielding data. Returns ------- data : dict A object which looks like it might come from some machine learning problem, with X as features, and y as targets.
def _build_relations_config(self, yamlconfig): config = {} for element in yamlconfig: if isinstance(element, str): config[element] = {'relation_name': element, 'schemas': []} elif isinstance(element, dict): if 'relation_name' not in element or 'schemas' not in element: self.log.warning("Unknown element format for relation element %s", element) continue if not isinstance(element['schemas'], list): self.log.warning("Expected a list of schemas for %s", element) continue name = element['relation_name'] config[name] = {'relation_name': name, 'schemas': element['schemas']} else: self.log.warning('Unhandled relations config type: {}'.format(element)) return config
Builds a dictionary from relations configuration while maintaining compatibility
def next_lookup(self, symbol):
    """Return the terminal symbols that may follow ``symbol`` (FOLLOW set).

    For every production whose right side contains ``symbol``, the
    FIRST sets of the symbols after it are collected; when every
    trailing symbol is nullable, the FOLLOW set of the production's
    left side is included as well.  The grammar's initial symbol is
    always followed by EndSymbol.

    NOTE(review): only the first occurrence of ``symbol`` in a right
    side is considered — confirm this matches the grammar's needs.

    :param symbol: grammar symbol to look up
    :return: list of Choice/EndSymbol entries that may follow ``symbol``
    """
    result = []
    if symbol == self.initialsymbol:
        result.append(EndSymbol())
    for production in self.productions:
        if symbol in production.rightside:
            nextindex = production.rightside.index(symbol) + 1
            while nextindex < len(production.rightside):
                nextsymbol = production.rightside[nextindex]
                firstlist = self.first_lookup(nextsymbol)
                cleanfirstlist = Choice([x for x in firstlist if x != NullSymbol()])
                result.append(cleanfirstlist)
                if NullSymbol() not in firstlist:
                    break
                # the next symbol is nullable: keep scanning the rest
                # of the right side.  Without this increment the loop
                # never advanced and spun forever on nullable symbols.
                nextindex += 1
            else:
                # ran off the end without a break: everything after
                # ``symbol`` is nullable, so FOLLOW(leftside) applies
                result += self.next_lookup(production.leftside[0])
    return result
Returns the next TerminalSymbols produced by the input symbol within this grammar definition
def update_savings_goal_data(self) -> None:
    """Refresh the cached savings goals for this account.

    Fetches the current savings-goal list from the API, creating
    SavingsGoal wrappers for unseen uids, updating existing ones,
    and dropping goals the API no longer returns.
    """
    response = get(
        _url(
            "/account/{0}/savings-goals".format(self._account_uid),
            self._sandbox
        ),
        headers=self._auth_headers
    )
    response.raise_for_status()
    payload = response.json()

    seen_uids = []
    for goal_data in payload.get('savingsGoalList', {}):
        uid = goal_data.get('savingsGoalUid')
        seen_uids.append(uid)
        if uid not in self.savings_goals:
            self.savings_goals[uid] = SavingsGoal(
                self._auth_headers,
                self._sandbox,
                self._account_uid
            )
        self.savings_goals[uid].update(goal_data)

    # prune goals that disappeared from the API response
    for uid in list(self.savings_goals):
        if uid not in seen_uids:
            self.savings_goals.pop(uid)
Get the latest savings goal information for the account.
def saved_search(self, sid, **kw):
    """Execute a saved search by search id.

    :param sid string: The id of the search
    :returns: :py:class:`planet.api.models.Items`
    :raises planet.api.exceptions.APIException: On API error.

    :Options:

    * page_size (int): Size of response pages
    * sort (string): Sorting order in the form `field (asc|desc)`
    """
    url = self._url('data/v1/searches/%s/results' % sid)
    response = self._get(url, body_type=models.Items, params=self._params(kw))
    return response.get_body()
Execute a saved search by search id. :param sid string: The id of the search :returns: :py:class:`planet.api.models.Items` :raises planet.api.exceptions.APIException: On API error. :Options: * page_size (int): Size of response pages * sort (string): Sorting order in the form `field (asc|desc)`
def disable_scanners_by_group(self, group):
    """Disable every scanner belonging to ``group``.

    The special group 'all' disables every scanner; otherwise the
    group must exist in ``self.scanner_group_map``.

    :raises ZAPError: if ``group`` is not a known scanner group
    """
    if group == 'all':
        self.logger.debug('Disabling all scanners')
        return self.zap.ascan.disable_all_scanners()

    if group not in self.scanner_group_map:
        raise ZAPError(
            'Invalid group "{0}" provided. Valid groups are: {1}'.format(
                group, ', '.join(self.scanner_groups)
            )
        )
    scanner_list = self.scanner_group_map[group]

    self.logger.debug('Disabling scanner group {0}'.format(group))
    return self.disable_scanners_by_ids(scanner_list)
Disables the scanners in the group if it matches one in the scanner_group_map.
def get_per_object_threshold(method, image, threshold, mask=None, labels=None,
                             threshold_range_min=None, threshold_range_max=None,
                             **kwargs):
    """Return a matrix giving a threshold per pixel, computed per object.

    method - thresholding method forwarded to get_global_threshold
    image - image to be thresholded
    threshold - the global threshold
    mask - mask out "don't care" pixels (None for no mask)
    labels - label matrix indicating object boundaries; defaults to a
        single object covering the whole image
    """
    if labels is None:
        labels = np.ones(image.shape, int)
    if mask is not None:
        # masked-out pixels belong to no object
        labels[np.logical_not(mask)] = 0

    local_threshold = np.ones(image.shape, image.dtype)
    extents = scipy.ndimage.find_objects(labels, np.max(labels))
    for index, extent in enumerate(extents, start=1):
        object_mask = labels[extent] == index
        if mask is not None:
            object_mask = np.logical_and(mask[extent], object_mask)
        object_threshold = get_global_threshold(
            method, image[extent], mask=object_mask, **kwargs)
        # write the per-object threshold back into the object's pixels
        local_threshold[extent][object_mask] = object_threshold
    return local_threshold
Return a matrix giving threshold per pixel calculated per-object image - image to be thresholded mask - mask out "don't care" pixels labels - a label mask indicating object boundaries threshold - the global threshold
def register_model(self, model_id, properties, parameters, outputs, connector):
    """Register a new model with the engine.

    Validates the RabbitMQ connection information, then delegates to
    the model registry.

    Parameters
    ----------
    model_id : string
        Unique model identifier
    properties : Dictionary
        Dictionary of model specific properties.
    parameters : list(scodata.attribute.AttributeDefinition)
        List of attribute definitions for model run parameters
    outputs : ModelOutputs
        Description of model outputs
    connector : dict
        Connection information to communicate with model workers.
        Expected to contain at least the connector name 'connector'.

    Returns
    -------
    ModelHandle

    Raises
    ------
    ValueError
        If the given model identifier is not unique
    """
    self.validate_connector(connector)
    try:
        handle = self.registry.register_model(
            model_id, properties, parameters, outputs, connector
        )
    except DuplicateKeyError as ex:
        # surface registry duplicates as a plain ValueError to callers
        raise ValueError(str(ex))
    return handle
Register a new model with the engine. Expects connection information for RabbitMQ to submit model run requests to workers. Raises ValueError if the given model identifier is not unique. Parameters ---------- model_id : string Unique model identifier properties : Dictionary Dictionary of model specific properties. parameters : list(scodata.attribute.AttributeDefinition) List of attribute definitions for model run parameters outputs : ModelOutputs Description of model outputs connector : dict Connection information to communicate with model workers. Expected to contain at least the connector name 'connector'. Returns ------- ModelHandle
def _unkown_type(self, uridecodebin, decodebin, caps): streaminfo = caps.to_string() if not streaminfo.startswith('audio/'): return self.read_exc = UnknownTypeError(streaminfo) self.ready_sem.release()
The callback for decodebin's "unknown-type" signal.
def _make_definition(self, definition):
    """Normalize ``definition`` into an EndpointDefinition.

    Accepts an existing EndpointDefinition (returned unchanged), a
    falsy value (yields an empty definition), or a sequence of 1-4
    elements interpreted positionally as ``(func[, request_schema][,
    response_schema][, header_func])`` — note the 2-element form is
    ``(func, response_schema)``.

    :raises ValueError: if the sequence has more than 4 elements
        (previously this case silently fell through and returned None)
    """
    if not definition:
        return EndpointDefinition()
    if isinstance(definition, EndpointDefinition):
        return definition
    if len(definition) == 1:
        return EndpointDefinition(
            func=definition[0],
        )
    if len(definition) == 2:
        return EndpointDefinition(
            func=definition[0],
            response_schema=definition[1],
        )
    if len(definition) == 3:
        return EndpointDefinition(
            func=definition[0],
            request_schema=definition[1],
            response_schema=definition[2],
        )
    if len(definition) == 4:
        return EndpointDefinition(
            func=definition[0],
            request_schema=definition[1],
            response_schema=definition[2],
            header_func=definition[3],
        )
    raise ValueError(
        'Endpoint definition must have between 1 and 4 elements, '
        'got {0}'.format(len(definition))
    )
Generate a definition. The input might already be a `EndpointDefinition` or it might be a tuple.
def get_unused_code(self, min_confidence=0, sort_by_size=False):
    """Return an ordered list of unused Item objects.

    :param min_confidence: keep only items whose confidence is at
        least this value (must be within 0-100)
    :param sort_by_size: sort by item size (then name) instead of by
        filename and line number
    :raises ValueError: if min_confidence is outside [0, 100]
    """
    if not 0 <= min_confidence <= 100:
        raise ValueError('min_confidence must be between 0 and 100.')

    def by_name(item):
        return (item.filename.lower(), item.first_lineno)

    def by_size(item):
        return (item.size,) + by_name(item)

    candidates = (self.unused_attrs + self.unused_classes +
                  self.unused_funcs + self.unused_imports +
                  self.unused_props + self.unused_vars +
                  self.unreachable_code)
    kept = (item for item in candidates if item.confidence >= min_confidence)
    return sorted(kept, key=by_size if sort_by_size else by_name)
Return ordered list of unused Item objects.
def generate_property_deprecation_message(to_be_removed_in_version, old_name,
                                          new_name, new_attribute,
                                          module_name='Client'):
    """Generate a message warning about use of a deprecated property.

    :param to_be_removed_in_version: Version of this module the deprecated
        property will be removed in.
    :type to_be_removed_in_version: str
    :param old_name: Deprecated property name.
    :type old_name: str
    :param new_name: Name of the new property to use.
    :type new_name: str
    :param new_attribute: The new attribute where the new property can be found.
    :type new_attribute: str
    :param module_name: Name of the module containing the new attribute.
    :type module_name: str
    :return: Full deprecation warning message for the indicated property.
    :rtype: str
    """
    return (
        "Call to deprecated property '{old}'. This property will be removed "
        "in version '{version}' Please use the '{new}' property on the "
        "'{module}.{attr}' attribute moving forward.".format(
            old=old_name,
            version=to_be_removed_in_version,
            new=new_name,
            module=module_name,
            attr=new_attribute,
        )
    )
Generate a message to be used when warning about the use of deprecated properties. :param to_be_removed_in_version: Version of this module the deprecated property will be removed in. :type to_be_removed_in_version: str :param old_name: Deprecated property name. :type old_name: str :param new_name: Name of the new property name to use. :type new_name: str :param new_attribute: The new attribute where the new property can be found. :type new_attribute: str :param module_name: Name of the module containing the new method to use. :type module_name: str :return: Full deprecation warning message for the indicated property. :rtype: str
def denoise_z15():
    """Replace tokens instead of masking.

    Starts from the dense xmoe2 baseline and switches the decoder to
    denoising with 15% random-zipfian noise, evaluating on noised
    inputs for a quarter of training steps.
    """
    hparams = xmoe2_dense_0()
    hparams.decoder_type = "denoising"
    hparams.noising_use_eval_during_train = 0.25
    hparams.noising_spec_train = {"type": "random_zipfian", "prob": 0.15}
    return hparams
Replace tokens instead of masking.
def add_group_role(request, role, group, domain=None, project=None):
    """Grant a role to a group on a domain or project.

    :param request: HTTP request carrying the auth context
    :param role: role to grant
    :param group: group receiving the role
    :param domain: optional domain scope
    :param project: optional project scope
    """
    client = keystoneclient(request, admin=True)
    return client.roles.grant(role=role, group=group,
                              domain=domain, project=project)
Adds a role for a group on a domain or project.
def register_arguments(cls, parser):
    """Register command line options.

    Adds an on/off flag for this plugin plus one option per
    constructor argument; arguments that have defaults get those
    defaults pre-filled.  Classes may opt out entirely by defining
    ``_dont_register_arguments``.

    Implement this method for normal options behavior with protection
    from OptionConflictErrors.  If you override this method and want
    the default --$name option(s) to be registered, be sure to call
    super().
    """
    if hasattr(cls, "_dont_register_arguments"):
        return
    prefix = cls.configuration_key_prefix()
    cfgkey = cls.configuration_key
    # boolean switch that enables this plugin: --<prefix>-<cfgkey>
    parser.add_argument("--%s-%s" % (prefix, cfgkey),
                        action="store_true",
                        dest="%s_%s" % (prefix, cfgkey),
                        default=False,
                        help="%s: %s" % (cls.__name__, cls.help()))
    args = cls.init_argnames()
    defaults = cls._init_argdefaults()
    # init args without defaults come first; like in Python signatures,
    # ``defaults`` aligns with the TAIL of the argument name list
    for arg in args[0:len(args) - len(defaults)]:
        parser.add_argument("--%s-%s-%s" % (prefix, cfgkey, arg),
                            dest="%s_%s_%s" % (prefix, cfgkey, arg),
                            help="")
    # remaining args have defaults; enumerate index ``i`` pairs each
    # one with the corresponding entry in ``defaults``
    for i, arg in enumerate(args[len(args) - len(defaults):]):
        parser.add_argument("--%s-%s-%s" % (prefix, cfgkey, arg),
                            dest="%s_%s_%s" % (prefix, cfgkey, arg),
                            default=defaults[i],
                            help="default: %(default)s")
Register command line options. Implement this method for normal options behavior with protection from OptionConflictErrors. If you override this method and want the default --$name option(s) to be registered, be sure to call super().
def get_outcome_results(self, course_id, include=None, outcome_ids=None, user_ids=None):
    """Get outcome results.

    Gets the outcome results for users and outcomes in the specified
    course context.  Optional filters are passed as query parameters
    only when supplied.
    """
    path = {"course_id": course_id}
    data = {}
    params = {}
    if user_ids is not None:
        params["user_ids"] = user_ids
    if outcome_ids is not None:
        params["outcome_ids"] = outcome_ids
    if include is not None:
        params["include"] = include

    self.logger.debug(
        "GET /api/v1/courses/{course_id}/outcome_results with query params: {params} and form data: {data}".format(
            params=params, data=data, **path))
    return self.generic_request(
        "GET",
        "/api/v1/courses/{course_id}/outcome_results".format(**path),
        data=data, params=params, no_data=True)
Get outcome results. Gets the outcome results for users and outcomes in the specified context.
def read_instance(self, cls, sdmxobj, offset=None, first_only=True):
    """Evaluate the path registered for ``cls`` and wrap the matches.

    If ``offset`` is given (truthy), the search is rooted at the first
    element matched by the offset path, returning None when that path
    matches nothing; otherwise it is rooted at ``sdmxobj``'s element.
    With ``first_only`` True only the first match is wrapped in
    ``cls``; otherwise a list of ``cls`` instances is returned.
    Returns None when nothing matches.
    """
    base = sdmxobj._elem
    if offset:
        try:
            base = self._paths[offset](sdmxobj._elem)[0]
        except IndexError:
            return None
    matches = self._paths[cls](base)
    if not matches:
        return None
    if first_only:
        return cls(self, matches[0])
    return [cls(self, elem) for elem in matches]
If cls is in _paths and its path matches, return an instance of cls wrapping the first matching XML element, or, if first_only is False, a list of cls instances for all elements found. If no matches were found, return None.
def search(self, pattern):
    """Find the first sub-graph matching ``pattern``.

    Tries the pattern against every node in the graph and returns the
    (match, following) pair from the first node that matches; when no
    node matches, returns ([], None).
    """
    for node in self.nodes.values():
        matched, following = pattern.match(self, node)
        if matched:
            return matched, following
    return [], None
Searches the graph for a sub-graph that matches the given pattern and returns the first match it finds.
def _imm_dir(self):
    """List this immutable object's attributes plus its un-cached lazy values.

    Combines the class's dir() contents, the instance __dict__ keys,
    and the keys of the immutable value data, sorted alphabetically.
    """
    names = set(dir(self.__class__))
    names.update(self.__dict__.keys())
    names.update(six.iterkeys(_imm_value_data(self)))
    return sorted(names)
An immutable object's dir function should list not only its attributes, but also its un-cached lazy values.
def pack(self, out: IO):
    """Write the Field to the file-like object `out`.

    .. note::

        Advanced usage only. You will typically never need to call
        this method as it will be called for you when saving a
        ClassFile.

    :param out: Any file-like object providing `write()`
    """
    out.write(self.access_flags.pack())
    # name and descriptor are written as two big-endian u2
    # constant-pool indices via struct's '>HH' format
    out.write(pack('>HH', self._name_index, self._descriptor_index))
    self.attributes.pack(out)
Write the Field to the file-like object `out`. .. note:: Advanced usage only. You will typically never need to call this method as it will be called for you when saving a ClassFile. :param out: Any file-like object providing `write()`
def value(self, value):
    """Setter for value.

    If the value is not among the options, it either replaces the last
    option (when every option already has a label) or is appended as a
    new option; the current value is updated in any case.

    :param value: The value.
    :type value: object
    """
    known = value in self.options
    if not known:
        fully_labeled = len(self.labels) == len(self.options)
        if fully_labeled:
            # all options are labeled: swap the last option out
            self.options[-1] = value
        else:
            self.options.append(value)
    self._value = value
Setter for value. :param value: The value. :type value: object