code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def add_user( self, username, first_name, last_name, email, role, password="", hashed_password="", ): try: user = self.user_model() user.first_name = first_name user.last_name = last_name user.use...
Generic function to create user
def get_template_vars(self, slides): try: head_title = slides[0]['title'] except (IndexError, TypeError): head_title = "Untitled Presentation" for slide_index, slide_vars in enumerate(slides): if not slide_vars: continue self.num_sl...
Computes template vars from slides html source code.
def delete_lbaas_port(self, lb_id): lb_id = lb_id.replace('-', '') req = dict(instance_id=lb_id) instances = self.get_vms_for_this_req(**req) for vm in instances: LOG.info("deleting lbaas vm %s " % vm.name) self.delete_vm_function(vm.port_id, vm)
send vm down event and delete db. :param lb_id: vip id for v1 and lbaas_id for v2
def send_registered_email(self, user, user_email, request_email_confirmation): if not self.user_manager.USER_ENABLE_EMAIL: return if not self.user_manager.USER_SEND_REGISTERED_EMAIL: return email = user_email.email if user_email else user.email if request_email_confirmation: ...
Send the 'user has registered' notification email.
def _prep_cnv_file(in_file, work_dir, somatic_info): out_file = os.path.join(work_dir, "%s-prep%s" % utils.splitext_plus(os.path.basename(in_file))) if not utils.file_uptodate(out_file, in_file): with file_transaction(somatic_info.tumor_data, out_file) as tx_out_file: with open(in_file) as i...
Prepare Battenberg CNV file for ingest by PhyloWGS. The PhyloWGS preparation script does not handle 'chr' prefixed chromosomes (hg19 style) correctly. This converts them over to GRCh37 (no 'chr') style to match preparation work in _prep_vrn_file.
def get_sorted_attachments(self): inf = float("inf") order = self.get_attachments_order() attachments = self.get_attachments() def att_cmp(att1, att2): _n1 = att1.get('UID') _n2 = att2.get('UID') _i1 = _n1 in order and order.index(_n1) + 1 or inf ...
Returns a sorted list of analysis info dictionaries
def is_native_xmon_op(op: ops.Operation) -> bool: return (isinstance(op, ops.GateOperation) and is_native_xmon_gate(op.gate))
Check if the gate corresponding to an operation is a native xmon gate. Args: op: Input operation. Returns: True if the operation is native to the xmon, false otherwise.
def convert_unicode_2_utf8(input): if isinstance(input, dict): try: return dict((convert_unicode_2_utf8(key), convert_unicode_2_utf8(value)) for key, value in input.iteritems()) except AttributeError: return eval( ) elif isi...
Return a copy of `input` with every str component encoded from unicode to utf-8.
def RaiseIfLastError(result, func = None, arguments = ()): code = GetLastError() if code != ERROR_SUCCESS: raise ctypes.WinError(code) return result
Error checking for Win32 API calls with no error-specific return value. Regardless of the return value, the function calls GetLastError(). If the code is not C{ERROR_SUCCESS} then a C{WindowsError} exception is raised. For this to work, the user MUST call SetLastError(ERROR_SUCCESS) prior to calling t...
def install(): tmp_weboob_dir = '/tmp/weboob' while (os.path.exists(tmp_weboob_dir)): tmp_weboob_dir += '1' print 'Fetching sources in temporary dir {}'.format(tmp_weboob_dir) result = cmd_exec('git clone {} {}'.format(WEBOOB_REPO, tmp_weboob_dir)) if (result['error']): print result[...
Install weboob system-wide
def to_dict(self): return { "gates": [km.to_dict() for km in self.gates], "assignment_probs": {str(qid): a.tolist() for qid, a in self.assignment_probs.items()}, }
Create a JSON serializable representation of the noise model. For example:: { "gates": [ # list of embedded dictionary representations of KrausModels here [...] ] "assignment_probs": { "0": [[.8, .1], ...
def get(s, delimiter='', format="diacritical"): return delimiter.join(_pinyin_generator(u(s), format=format))
Return pinyin of string, the string must be unicode
def withNamedValues(cls, **values): enums = set(cls.namedValues.items()) enums.update(values.items()) class X(cls): namedValues = namedval.NamedValues(*enums) subtypeSpec = cls.subtypeSpec + constraint.SingleValueConstraint( *values.values()) X.__n...
Create a subclass with discreet named values constraint. Reduce fully duplicate enumerations along the way.
def receive_data_chunk(self, raw_data, start): self.file.write(raw_data) eventlet.sleep(0)
Over-ridden method to circumvent the worker timeouts on large uploads.
def get_app_settings_from_arguments(args): config_filepath = os.path.abspath(args.config_uri) return get_appsettings(config_filepath, name=args.config_name)
Parse ``argparse`` style arguments into app settings. Given an ``argparse`` set of arguments as ``args`` parse the arguments to return the application settings. This assumes the parser was created using ``create_parser``.
def get_context_from_gdoc(self): try: start = int(time.time()) if not self.data or start > self.expires: self.data = self._get_context_from_gdoc(self.project.SPREADSHEET_KEY) end = int(time.time()) ttl = getattr(self.project, 'SPREADSHEET_C...
Wrap getting context from Google sheets in a simple caching mechanism.
def addFollowOnFn(self, fn, *args, **kwargs): if PromisedRequirement.convertPromises(kwargs): return self.addFollowOn(PromisedRequirementFunctionWrappingJob.create(fn, *args, **kwargs)) else: return self.addFollowOn(FunctionWrappingJob(fn, *args, **kwargs))
Adds a function as a follow-on job. :param fn: Function to be run as a follow-on job with ``*args`` and ``**kwargs`` as \ arguments to this function. See toil.job.FunctionWrappingJob for reserved \ keyword arguments used to specify resource requirements. :return: The new follow-on job t...
def set_prob_type(cls, problem_type, classification_type, eval_type): assert problem_type in problem_type_list, 'Need to set Problem Type' if problem_type == 'classification': assert classification_type in classification_type_list,\ 'Need to set Cl...
Set problem type
def _simplify_block(self, ail_block, stack_pointer_tracker=None): simp = self.project.analyses.AILBlockSimplifier(ail_block, stack_pointer_tracker=stack_pointer_tracker) return simp.result_block
Simplify a single AIL block. :param ailment.Block ail_block: The AIL block to simplify. :param stack_pointer_tracker: The RegisterDeltaTracker analysis instance. :return: A simplified AIL block.
def get_unicode_str(obj): if isinstance(obj, six.text_type): return obj if isinstance(obj, six.binary_type): return obj.decode("utf-8", errors="ignore") return six.text_type(obj)
Makes sure obj is a unicode string.
def _setweights(self): for name_w in self.weights: raw_w = getattr(self.module, name_w + '_raw') w = torch.nn.functional.dropout(raw_w, p=self.dropout, training=self.training) if hasattr(self.module, name_w): delattr(self.module, name_w) setattr(se...
Uses pytorch's built-in dropout function to apply dropout to the parameters of the wrapped module. Args: None Returns: None
def run(self): cmd = list(self.vasp_cmd) if self.auto_gamma: vi = VaspInput.from_directory(".") kpts = vi["KPOINTS"] if kpts.style == Kpoints.supported_modes.Gamma \ and tuple(kpts.kpts[0]) == (1, 1, 1): if self.gamma_vasp_cmd is no...
Perform the actual VASP run. Returns: (subprocess.Popen) Used for monitoring.
def total_items(self, request): n_total = 0 for item in self.get_queryset(request): n_total += item.quantity return Response(data={"quantity": n_total}, status=status.HTTP_200_OK)
Get total number of items in the basket
def get_finder(sources=None, pip_command=None, pip_options=None): if not pip_command: pip_command = get_pip_command() if not sources: sources = [ {"url": "https://pypi.org/simple", "name": "pypi", "verify_ssl": True} ] if not pip_options: pip_options = get_pip_opt...
Get a package finder for looking up candidates to install :param sources: A list of pipfile-formatted sources, defaults to None :param sources: list[dict], optional :param pip_command: A pip command instance, defaults to None :type pip_command: :class:`~pip._internal.cli.base_command.Command` :para...
def ActionEnum(ctx): return Enum( ctx, interact=0, stop=1, ai_interact=2, move=3, add_attribute=5, give_attribute=6, ai_move=10, resign=11, spec=15, waypoint=16, stance=18, guard=19, follow=20, pa...
Action Enumeration.
def allow_unconfirmed_email(view_function): @wraps(view_function) def decorator(*args, **kwargs): g._flask_user_allow_unconfirmed_email = True try: user_manager = current_app.user_manager allowed = _is_logged_in_with_confirmed_email(user_manager) if not allowe...
This decorator ensures that the user is logged in, but allows users with or without a confirmed email addresses to access this particular view. It works in tandem with the ``USER_ALLOW_LOGIN_WITHOUT_CONFIRMED_EMAIL=True`` setting. .. caution:: | Use ``USER_ALLOW_LOGIN_WITHOUT_CONFIRMED_EMAIL=...
def update_dataset(self, dataset, fields, retry=DEFAULT_RETRY): partial = dataset._build_resource(fields) if dataset.etag is not None: headers = {"If-Match": dataset.etag} else: headers = None api_response = self._call_api( retry, method="PATCH", path=...
Change some fields of a dataset. Use ``fields`` to specify which fields to update. At least one field must be provided. If a field is listed in ``fields`` and is ``None`` in ``dataset``, it will be deleted. If ``dataset.etag`` is not ``None``, the update will only succeed if th...
def get_predicate_indices(tags: List[str]) -> List[int]: return [ind for ind, tag in enumerate(tags) if 'V' in tag]
Return the word indices of a predicate in BIO tags.
def remove(self, key, cas=0, quiet=None, persist_to=0, replicate_to=0): return _Base.remove(self, key, cas=cas, quiet=quiet, persist_to=persist_to, replicate_to=replicate_to)
Remove the key-value entry for a given key in Couchbase. :param key: A string which is the key to remove. The format and type of the key follows the same conventions as in :meth:`upsert` :type key: string, dict, or tuple/list :param int cas: The CAS to use for the remov...
def GMailer(recipients, username, password, subject='Log message from lggr.py'): import smtplib srvr = smtplib.SMTP('smtp.gmail.com', 587) srvr.ehlo() srvr.starttls() srvr.ehlo() srvr.login(username, password) if not (isinstance(recipients, list) or isinstance(recipients, tuple)): re...
Sends messages as emails to the given list of recipients, from a GMail account.
def disable_napp(mgr): if mgr.is_enabled(): LOG.info(' Disabling...') mgr.disable() LOG.info(' Disabled.') else: LOG.error(" NApp isn't enabled.")
Disable a NApp.
def service_running(service_name, **kwargs): if init_is_systemd(): return service('is-active', service_name) else: if os.path.exists(_UPSTART_CONF.format(service_name)): try: cmd = ['status', service_name] for key, value in six.iteritems(kwargs): ...
Determine whether a system service is running. :param service_name: the name of the service :param **kwargs: additional args to pass to the service command. This is used to pass additional key=value arguments to the service command line for managing specific instance ...
def find_ignored_languages(source): for (index, line) in enumerate(source.splitlines()): match = RSTCHECK_COMMENT_RE.match(line) if match: key_and_value = line[match.end():].strip().split('=') if len(key_and_value) != 2: raise Error('Expected "key=value" synta...
Yield ignored languages. Languages are ignored via comment. For example, to ignore C++, JSON, and Python: >>> list(find_ignored_languages(''' ... Example ... ======= ... ... .. rstcheck: ignore-language=cpp,json ... ... .. rstcheck: ignore-language=python ... ''')) ['cpp',...
def to_web_include( project: 'projects.Project', file_path: str ) -> WEB_INCLUDE: if not file_path.endswith('.css') and not file_path.endswith('.js'): return None slug = file_path[len(project.source_directory):] url = '/{}' \ .format(slug) \ .replace('\\', '/') \ ...
Converts the given file_path into a WEB_INCLUDE instance that represents the deployed version of this file to be loaded into the results project page :param project: Project in which the file_path resides :param file_path: Absolute path to the source file for which the WEB_INCLUDE insta...
def register_on_snapshot_deleted(self, callback): event_type = library.VBoxEventType.on_snapshot_deleted return self.event_source.register_callback(callback, event_type)
Set the callback function to consume on snapshot deleted events. Callback receives a ISnapshotDeletedEvent object. Returns the callback_id
def to_binary(value, encoding='utf-8'): if not value: return b'' if isinstance(value, six.binary_type): return value if isinstance(value, six.text_type): return value.encode(encoding) return to_text(value).encode(encoding)
Convert value to binary string, default encoding is utf-8 :param value: Value to be converted :param encoding: Desired encoding
def check_aggregate(df, variable, components=None, exclude_on_fail=False, multiplier=1, **kwargs): fdf = df.filter(**kwargs) if len(fdf.data) > 0: vdf = fdf.check_aggregate(variable=variable, components=components, exclude_on_fail=exclude_on_fail, ...
Check whether the timeseries values match the aggregation of sub-categories Parameters ---------- df: IamDataFrame instance args: see IamDataFrame.check_aggregate() for details kwargs: passed to `df.filter()`
def spin_sz(self): return conversions.secondary_spin(self.mass1, self.mass2, self.spin1z, self.spin2z)
Returns the z-component of the spin of the secondary mass.
def validate(method): @wraps(method) def mod_run(self, rinput): self.validate_input(rinput) result = method(self, rinput) self.validate_result(result) return result return mod_run
Decorate run method, inputs and outputs are validated
def process_commission(self, commission): asset = commission['asset'] cost = commission['cost'] self.position_tracker.handle_commission(asset, cost) self._cash_flow(-cost)
Process the commission. Parameters ---------- commission : zp.Event The commission being paid.
def error(self, msgid, error): self.requests[msgid].errback(error) del self.requests[msgid]
Handle a error message.
def fuse_wheels(to_wheel, from_wheel, out_wheel): to_wheel, from_wheel, out_wheel = [ abspath(w) for w in (to_wheel, from_wheel, out_wheel)] with InTemporaryDirectory(): zip2dir(to_wheel, 'to_wheel') zip2dir(from_wheel, 'from_wheel') fuse_trees('to_wheel', 'from_wheel') r...
Fuse `from_wheel` into `to_wheel`, write to `out_wheel` Parameters --------- to_wheel : str filename of wheel to fuse into from_wheel : str filename of wheel to fuse from out_wheel : str filename of new wheel from fusion of `to_wheel` and `from_wheel`
def get_server_url(self): server_host = self.driver_wrapper.config.get('Server', 'host') server_port = self.driver_wrapper.config.get('Server', 'port') server_username = self.driver_wrapper.config.get_optional('Server', 'username') server_password = self.driver_wrapper.config.get_optiona...
Return the configured server url :returns: server url
def duplicates(inlist): dups = [] for i in range(len(inlist)): if inlist[i] in inlist[i+1:]: dups.append(inlist[i]) return dups
Returns duplicate items in the FIRST dimension of the passed list. Usage: duplicates (inlist)
def loads(string): d = _loads(string) for k, v in d.items(): FILTERS[dr.get_component(k) or k] = set(v)
Loads the filters dictionary given a string.
def _export_module_attachments(meta_graph): added_attachments = tf_v1.get_collection(_ATTACHMENT_COLLECTION_INTERNAL) if not added_attachments: return unique_attachments = collections.OrderedDict( (attachment.key, attachment) for attachment in added_attachments) meta_graph.collection_def[ATTACHMENT_...
Exports ModuleAttachments from the current tf.Graph into `meta_graph`.
def js_adaptor(buffer): buffer = re.sub('true', 'True', buffer) buffer = re.sub('false', 'False', buffer) buffer = re.sub('none', 'None', buffer) buffer = re.sub('NaN', '"NaN"', buffer) return buffer
convert javascript objects like true, none, NaN etc. to quoted word. Arguments: buffer: string to be converted Returns: string after conversion
def is_article(self, response, url): site = self.__sites_object[url] heuristics = self.__get_enabled_heuristics(url) self.log.info("Checking site: %s", response.url) statement = self.__get_condition(url) self.log.debug("Condition (original): %s", statement) for heuristic,...
Tests if the given response is an article by calling and checking the heuristics set in config.cfg and sitelist.json :param obj response: The response of the site. :param str url: The base_url (needed to get the site-specific config from the JSON-file) :return bo...
def select(*signals: Signal, **kwargs) -> List[Signal]: class CleanUp(Interrupt): pass timeout = kwargs.get("timeout", None) if not isinstance(timeout, (float, int, type(None))): raise ValueError("The timeout keyword parameter can be either None or a number.") def wait_one(signal: Signal...
Allows the current process to wait for multiple concurrent signals. Waits until one of the signals turns on, at which point this signal is returned. :param timeout: If this parameter is not ``None``, it is taken as a delay at the end of which the process times out, and stops waiting on the set ...
def _update(self, dataFile, handle): self._cache.remove((dataFile, handle)) self._add(dataFile, handle)
Update the priority of the file handle. The element is first removed and then added to the left of the deque.
def create_divisao_dc(self): return DivisaoDc( self.networkapi_url, self.user, self.password, self.user_ldap)
Get an instance of divisao_dc services facade.
def global_state_code(self): self._generate_func_code() if not self._compile_regexps: return '\n'.join( [ 'from fastjsonschema import JsonSchemaException', '', '', ] ) regexs = ['"...
Returns global variables for generating function from ``func_code`` as code. Includes compiled regular expressions and imports.
def first(self, values, axis=0): values = np.asarray(values) return self.unique, np.take(values, self.index.sorter[self.index.start], axis)
return values at first occurance of its associated key Parameters ---------- values : array_like, [keys, ...] values to pick the first value of per group axis : int, optional alternative reduction axis for values Returns ------- unique: n...
def sunset_utc(self, date, latitude, longitude, observer_elevation=0): try: return self._calc_time(90 + 0.833, SUN_SETTING, date, latitude, longitude, observer_elevation) except ValueError as exc: if exc.args[0] == "math domain error": raise AstralError( ...
Calculate sunset time in the UTC timezone. :param date: Date to calculate for. :type date: :class:`datetime.date` :param latitude: Latitude - Northern latitudes should be positive :type latitude: float :param longitude: Longitude - Eastern longitudes should be...
def restart(self, force=False, wait_for_available=True, operation_timeout=None): body = {'force': force} self.manager.session.post(self.uri + '/operations/restart', body=body) if wait_for_available: time.sleep(10) self.manager.client.wait_for_available( ...
Restart the HMC represented by this Console object. Once the HMC is online again, this Console object, as well as any other resource objects accessed through this HMC, can continue to be used. An automatic re-logon will be performed under the covers, because the HMC restart invalidates ...
def show_agent(self, agent, **_params): return self.get(self.agent_path % (agent), params=_params)
Fetches information of a certain agent.
def __iterate_value(self, value): if hasattr(value, '__dict__') or isinstance(value, dict): return self.__find_object_children(value) elif isinstance(value, (list, tuple, set)): return self.__construct_list(value) return self.safe_values(value)
Return value for JSON serialization
def add_corpus(self, customization_id, corpus_name, corpus_file, allow_overwrite=None, **kwargs): if customization_id is None: raise ValueError('customization_id must be provided') if corpus_name i...
Add a corpus. Adds a single corpus text file of new training data to a custom language model. Use multiple requests to submit multiple corpus text files. You must use credentials for the instance of the service that owns a model to add a corpus to it. Adding a corpus does not affect the...
def get_cache_buster(src_path, method='importtime'): try: fn = _BUST_METHODS[method] except KeyError: raise KeyError('Unsupported busting method value: %s' % method) return fn(src_path)
Return a string that can be used as a parameter for cache-busting URLs for this asset. :param src_path: Filesystem path to the file we're generating a cache-busting value for. :param method: Method for cache-busting. Supported values: importtime, mtime, md5 The default is 'importti...
def validate(self,value): if self.validator is not None: try: valid = self.validator(value) except Exception as e: import pdb; pdb.set_trace() if isinstance(valid, tuple) and len(valid) == 2: valid, errormsg = valid ...
Validate the parameter
def event_exists(self, client, check): return self.api_request( 'get', 'events/{}/{}'.format(client, check) ).status_code == 200
Query Sensu API for event.
def bwrite(stream, obj): handle = None if not hasattr(stream, "write"): stream = handle = open(stream, "wb") try: stream.write(bencode(obj)) finally: if handle: handle.close()
Encode a given object to a file or stream.
def create(self, to, from_, parameters=values.unset): data = values.of({'To': to, 'From': from_, 'Parameters': serialize.object(parameters), }) payload = self._version.create( 'POST', self._uri, data=data, ) return ExecutionInstance(self._version, payl...
Create a new ExecutionInstance :param unicode to: The Contact phone number to start a Studio Flow Execution. :param unicode from_: The Twilio phone number to send messages or initiate calls from during the Flow Execution. :param dict parameters: JSON data that will be added to your flow's conte...
def predictions_iter(self): for fname in self.forecast_names: yield self.predictions.get(col_names=fname)
property decorated prediction iterator Returns ------- iterator : iterator iterator on prediction sensitivity vectors (matrix)
def determine_version(self, request, api_version=None): if api_version is False: api_version = None for version in self.versions: if version and "v{0}".format(version) in request.path: api_version = version break request_ver...
Determines the appropriate version given the set api_version, the request header, and URL query params
def enable(self, trigger_ids=[]): trigger_ids = ','.join(trigger_ids) url = self._service_url(['triggers', 'enabled'], params={'triggerIds': trigger_ids, 'enabled': 'true'}) self._put(url, data=None, parse_json=False)
Enable triggers. :param trigger_ids: List of trigger definition ids to enable
def register_vcs_handler(vcs, method): def decorate(f): if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate
Decorator to mark a method as the handler for a particular VCS.
def calc_checksum(sentence): if sentence.startswith('$'): sentence = sentence[1:] sentence = sentence.split('*')[0] return reduce(xor, map(ord, sentence))
Calculate a NMEA 0183 checksum for the given sentence. NMEA checksums are a simple XOR of all the characters in the sentence between the leading "$" symbol, and the "*" checksum separator. Args: sentence (str): NMEA 0183 formatted sentence
def convert(source, ext=COMPLETE, fmt=HTML, dname=None): if dname and not ext & COMPATIBILITY: if os.path.isfile(dname): dname = os.path.abspath(os.path.dirname(dname)) source, _ = _expand_source(source, dname, fmt) _MMD_LIB.markdown_to_string.argtypes = [ctypes.c_char_p, ctypes.c_ul...
Converts a string of MultiMarkdown text to the requested format. Transclusion is performed if the COMPATIBILITY extension is not set, and dname is set to a valid directory Keyword arguments: source -- string containing MultiMarkdown text ext -- extension bitfield to pass to conversion process f...
def load(self, name): name = ctypes.util.find_library(name) return ctypes.cdll.LoadLibrary(name)
Loads and returns foreign library.
def _check_load_parameters(self, **kwargs): rset = self._meta_data['required_load_parameters'] check = _missing_required_parameters(rset, **kwargs) if check: check.sort() error_message = 'Missing required params: %s' % check raise MissingRequiredReadParameter(...
Params given to load should at least satisfy required params. :params: kwargs :raises: MissingRequiredReadParameter
def region_by_identifier(self, identifier): if identifier < 0: raise(ValueError("Identifier must be a positive integer.")) if not np.equal(np.mod(identifier, 1), 0): raise(ValueError("Identifier must be a positive integer.")) if identifier == 0: raise(ValueErr...
Return region of interest corresponding to the supplied identifier. :param identifier: integer corresponding to the segment of interest :returns: `jicbioimage.core.region.Region`
def open(self): if not self.handle: try: path = self.system_dir except AttributeError: path = '' self.__handle = lvm_init(path) if not bool(self.__handle): raise HandleError("Failed to initialize LVM handle.")
Obtains the lvm handle. Usually you would never need to use this method unless you are trying to do operations using the ctypes function wrappers in conversion.py *Raises:* * HandleError
def filter_by_analysis_period(self, analysis_period): self._check_analysis_period(analysis_period) _filtered_data = self.filter_by_moys(analysis_period.moys) _filtered_data.header._analysis_period = analysis_period return _filtered_data
Filter a Data Collection based on an analysis period. Args: analysis period: A Ladybug analysis period Return: A new Data Collection with filtered data
def page_should_not_contain_text(self, text, loglevel='INFO'): if self._is_text_present(text): self.log_source(loglevel) raise AssertionError("Page should not have contained text '%s'" % text) self._info("Current page does not contains text '%s'." % text)
Verifies that current page not contains `text`. If this keyword fails, it automatically logs the page source using the log level specified with the optional `loglevel` argument. Giving `NONE` as level disables logging.
def upsample(self, factor): self.checkforpilimage() if type(factor) != type(0): raise RuntimeError, "Upsample factor must be an integer !" if self.verbose: print "Upsampling by a factor of %i" % factor self.pilimage = self.pilimage.resize((self.pilimage.size[0] * ...
The inverse operation of rebin, applied on the PIL image. Do this before writing text or drawing on the image ! The coordinates will be automatically converted for you
def set_proto_message_event( pb_message_event, span_data_message_event): pb_message_event.type = span_data_message_event.type pb_message_event.id = span_data_message_event.id pb_message_event.uncompressed_size = \ span_data_message_event.uncompressed_size_bytes pb_message_event.c...
Sets properties on the protobuf message event. :type pb_message_event: :class: `~opencensus.proto.trace.Span.TimeEvent.MessageEvent` :param pb_message_event: protobuf message event :type span_data_message_event: :class: `~opencensus.trace.time_event.MessageEvent` :param span_data_messa...
def to_dqflags(self, bits=None, minlen=1, dtype=float, round=False): from ..segments import DataQualityDict out = DataQualityDict() bitseries = self.get_bit_series(bits=bits) for bit, sts in bitseries.items(): out[bit] = sts.to_dqflag(name=bit, minlen=minlen, round=round, ...
Convert this `StateVector` into a `~gwpy.segments.DataQualityDict` The `StateTimeSeries` for each bit is converted into a `~gwpy.segments.DataQualityFlag` with the bits combined into a dict. Parameters ---------- minlen : `int`, optional, default: 1 minimum number of...
def get_categorical_feature_names(example): features = get_example_features(example) return sorted([ feature_name for feature_name in features if features[feature_name].WhichOneof('kind') == 'bytes_list' ])
Returns a list of feature names for byte type features. Args: example: An example. Returns: A list of categorical feature names (e.g. ['education', 'marital_status'] )
def _check_and_handle_includes(self, from_file): logger.debug("Check/handle includes from %s", from_file) try: paths = self._parser.get("INCLUDE", "paths") except (config_parser.NoSectionError, config_parser.NoOptionError) as exc: logger.debug("_check_and_...
Look for an optional INCLUDE section in the given file path. If the parser set `paths`, it is cleared so that they do not keep showing up when additional files are parsed.
def _print_results(file, status): file_color = c.Fore.GREEN status_color = c.Fore.RED if status == 'Success': status_color = c.Fore.GREEN elif status == 'Skipped': status_color = c.Fore.YELLOW print( '{}{!s:<13}{}{!s:<35}{}{!s:<8}{}{}'.format( ...
Print the download results. Args: file (str): The filename. status (str): The file download status.
def rebalance_replicas( self, max_movement_count=None, max_movement_size=None, ): movement_count = 0 movement_size = 0 for partition in six.itervalues(self.cluster_topology.partitions): count, size = self._rebalance_partition_replicas( ...
Balance replicas across replication-groups. :param max_movement_count: The maximum number of partitions to move. :param max_movement_size: The maximum total size of the partitions to move. :returns: A 2-tuple whose first element is the number of partitions moved and whose second el...
def _send_script(self, device_info, control_info, script, progress_callback): for i in range(0, len(script), 20): chunk = script[i:i+20] self._send_rpc(device_info, control_info, 8, 0x2101, chunk, 0.001, 1.0) if progress_callback is not None: progress_callback...
Send a script by repeatedly sending it as a bunch of RPCs. This function doesn't do anything special, it just sends a bunch of RPCs with each chunk of the script until it's finished.
def filter_dependencies(self): dependencies = self.event['check'].get('dependencies', None) if dependencies is None or not isinstance(dependencies, list): return for dependency in self.event['check']['dependencies']: if not str(dependency): continue ...
Determine whether a check has dependencies.
def get_parameters(self, params, graph=None): g = graph if graph is not None else self.tf_graph with g.as_default(): with tf.Session() as self.tf_session: self.tf_saver.restore(self.tf_session, self.model_path) out = {} for par in params: ...
Get the parameters of the model. :param params: dictionary of keys (str names) and values (tensors). :return: evaluated tensors in params
def txn_getAssociation(self, server_url, handle=None): if handle is not None: self.db_get_assoc(server_url, handle) else: self.db_get_assocs(server_url) rows = self.cur.fetchall() if len(rows) == 0: return None else: associations = ...
Get the most recent association that has been set for this server URL and handle. str -> NoneType or Association
def pysal_Moran(self, **kwargs): if self.weights is None: self.raster_weights(**kwargs) rasterf = self.raster.flatten() rasterf = rasterf[rasterf.mask==False] self.Moran = pysal.Moran(rasterf, self.weights, **kwargs)
Compute Moran's I measure of global spatial autocorrelation for GeoRaster Usage: geo.pysal_Moran(permutations = 1000, rook=True) arguments passed to raster_weights() and pysal.Moran See help(gr.raster_weights), help(pysal.Moran) for options
async def _connect_sentinel(self, address, timeout, pools): try: with async_timeout(timeout, loop=self._loop): pool = await create_pool( address, minsize=1, maxsize=2, parser=self._parser_class, loop=self._loop) ...
Try to connect to specified Sentinel returning either connections pool or exception.
def calculate_size(self, modules_per_line, number_of_lines, dpi=300): width = 2 * self.quiet_zone + modules_per_line * self.module_width height = 2.0 + self.module_height * number_of_lines if self.font_size and self.text: height += pt2mm(self.font_size) / 2 + self.text_distance ...
Calculates the size of the barcode in pixel. :parameters: modules_per_line : Integer Number of modules in one line. number_of_lines : Integer Number of lines of the barcode. dpi : Integer DPI to calculate. :returns: Wi...
def menu_text(self, request=None):
    """Return a string to use as link text when this page appears in menus."""
    # The site can point menus at an alternative page field via settings.
    field_name = settings.PAGE_FIELD_FOR_MENU_ITEM_TEXT
    use_custom_field = (
        field_name != 'menu_text' and hasattr(self, field_name)
    )
    if use_custom_field:
        return getattr(self, field_name)
    # Fall back to the page title when no custom field is configured.
    return self.title
def discover(package, cls_match_func): matched_classes = set() for _, module_name, _ in pkgutil.walk_packages( package.__path__, prefix=package.__name__ + '.', ): module = __import__(module_name, fromlist=[str('__trash')], level=0) for _, imported_class in inspect.get...
Returns a set of classes in the directory matched by cls_match_func Args: path - A Python package cls_match_func - Function taking a class and returning true if the class is to be included in the output.
def addif(self, iname):
    """Add an interface to the bridge."""
    command = [brctlexe, 'addif', self.name, iname]
    failure_msg = "Could not add interface %s to %s." % (iname, self.name)
    _runshell(command, failure_msg)
def repeats(seq, size):
    """Count times that a sequence of a certain size is repeated.

    :param seq: Input sequence.
    :type seq: coral.DNA or coral.RNA
    :param size: Size of the repeat to count.
    :type size: int
    :returns: Occurrences of repeats and how many
    :rtype: list of (matched sequence, occurrence count) tuples

    """
    seq = str(seq)
    # Slide a window of `size` over the sequence; empty when size > len(seq).
    n_mers = [seq[i:i + size] for i in range(len(seq) - size + 1)]
    counted = Counter(n_mers)
    # Fixed: Counter.iteritems() is Python 2 only and raises
    # AttributeError on Python 3; .items() works on both code paths here.
    found_repeats = [(key, value) for key, value in counted.items()
                     if value > 1]
    return found_repeats
def make(target="all", dir=".", **kwargs): if not fs.isfile(fs.path(dir, "Makefile")): raise NoMakefileError("No makefile in '{}'".format(fs.abspath(dir))) fs.cd(dir) if "timeout" not in kwargs: kwargs["timeout"] = 300 ret, out, err = system.run(["make", target], **kwargs) fs.cdpop() if ...
Run make. Arguments: target (str, optional): Name of the target to build. Defaults to "all". dir (str, optional): Path to directory containing Makefile. **kwargs (optional): Any additional arguments to be passed to system.run(). Returns: (int, str, str): T...
def certify_bool(value, required=True): if certify_required( value=value, required=required, ): return if not isinstance(value, bool): raise CertifierTypeError( message="expected bool, but value is of type {cls!r}".format( cls=value.__class__.__nam...
Certifier for boolean values. :param value: The value to be certified. :param bool required: Whether the value can be `None`. Defaults to True. :raises CertifierTypeError: The type is invalid
def batch_iter(iterator, batch_size, return_func=None, padding=None):
    """Break an iterable into batches of size batch_size

    Note that `padding` should be set to something (anything) which is
    NOT a valid member of the iterator.  For example, None works for
    [0,1,2,...10], but not for ['a', None, 'c', 'd'].

    Parameters
    ----------
    iterator : iterable
        A python object that can be iterated over.
    batch_size : int
        Number of items per batch; the last batch may be shorter.
    return_func : callable, optional
        Applied to each batch's generator before yielding (e.g. ``list``).
    padding : object, optional
        Sentinel used to fill the final batch; stripped before yielding.
    """
    source = iter(iterator)
    # Passing the same iterator N times to zip_longest pulls N
    # consecutive items per output tuple, padding the final one.
    for chunk in zip_longest(*([source] * batch_size), fillvalue=padding):
        batch = (item for item in chunk if item is not padding)
        if return_func is not None:
            yield return_func(batch)
        else:
            yield batch
def parse_peddy_sexcheck(handle: TextIO): data = {} samples = csv.DictReader(handle) for sample in samples: data[sample['sample_id']] = { 'predicted_sex': sample['predicted_sex'], 'het_ratio': float(sample['het_ratio']), 'error': True if sample['error'] == 'True' ...
Parse Peddy sexcheck output.
def get_string_camel_patterns(cls, name, min_length=0): patterns = [] abbreviations = list(set(cls._get_abbreviations(name, output_length=min_length))) abbreviations.sort(key=len, reverse=True) for abbr in abbreviations: casing_permutations = list(set(cls._get_casing_permutat...
Finds all permutations of possible camel casing of the given name :param name: str, the name we need to get all possible permutations and abbreviations for :param min_length: int, minimum length we want for abbreviations :return: list(list(str)), list casing permutations of list of abbreviation...
def ensure_dim(core, dim, dim_): if dim is None: dim = dim_ if not dim: return core, 1 if dim_ == dim: return core, int(dim) if dim > dim_: key_convert = lambda vari: vari[:dim_] else: key_convert = lambda vari: vari + (0,)*(dim-dim_) new_core = {} for...
Ensure that dim is correct.
def extern_call(self, context_handle, func, args_ptr, args_len):
    """Given a callable, call it."""
    # Recover the Python-side context object from its opaque handle.
    context = self._ffi.from_handle(context_handle)
    runnable = context.from_value(func[0])
    # Unpack the raw argument buffer, then convert each entry back
    # into a Python value via the context.
    raw_args = self._ffi.unpack(args_ptr, args_len)
    args = tuple(context.from_value(raw[0]) for raw in raw_args)
    return self.call(context, runnable, args)