code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def tokenizer(self):
    """Return the tokenizer of the default domain (domain 0).

    The default domain is registered on first access if it does not
    exist yet.  Kept for backwards compatibility; avoid when working
    with multiple domains.
    """
    default_domain = 0
    if default_domain not in self.domains:
        self.register_domain(domain=default_domain)
    return self.domains[default_domain].tokenizer
A property to link into IntentEngine's tokenizer. Warning: this is only for backwards compatibility and should not be used if you intend on using domains. Return: the domain's tokenizer from its IntentEngine
def update_redirect(self): if self.last_child: self._resolved_pid.redirect(self.last_child) elif any(map(lambda pid: pid.status not in [PIDStatus.DELETED, PIDStatus.REGISTERED, PIDStatus.R...
Update the parent redirect to the current last child. This method should be called on the parent PID node. Use this method when the status of a PID changed (ex: draft changed from RESERVED to REGISTERED)
def ReadItems(self, collection_link, feed_options=None):
    """Read all documents in a collection.

    :param str collection_link: link to the document collection.
    :param dict feed_options: optional feed options.
    :return: query iterable of documents.
    """
    options = {} if feed_options is None else feed_options
    return self.QueryItems(collection_link, None, options)
Reads all documents in a collection. :param str collection_link: The link to the document collection. :param dict feed_options: :return: Query Iterable of Documents. :rtype: query_iterable.QueryIterable
def object(self, key):
    """Return a Storage Object for ``key`` in this bucket.

    The object need not exist.

    Args:
      key: the key of the object within the bucket.
    Returns:
      An Object instance representing the specified key.
    """
    bucket_name = self._name
    return _object.Object(bucket_name, key, context=self._context)
Retrieves a Storage Object for the specified key in this bucket. The object need not exist. Args: key: the key of the object within the bucket. Returns: An Object instance representing the specified key.
def _get_distance_term(self, C, rrup, backarc): distance_scale = -np.log10(np.sqrt(rrup ** 2 + 3600.0)) distance_scale[backarc] += (C["c2"] * rrup[backarc]) idx = np.logical_not(backarc) distance_scale[idx] += (C["c1"] * rrup[idx]) return distance_scale
Returns the distance scaling term, which varies depending on whether the site is in the forearc or the backarc
def appendContour(self, contour, offset=None):
    """Append a contour with the same data as ``contour`` to this glyph.

    ``offset`` is an (x, y) shift applied to the appended contour;
    it defaults to (0, 0).  Returns the newly created contour object.
    """
    normalized = normalizers.normalizeContour(contour)
    shift = (0, 0) if offset is None else offset
    shift = normalizers.normalizeTransformationOffset(shift)
    return self._appendContour(normalized, shift)
Append a contour containing the same data as ``contour`` to this glyph. >>> contour = glyph.appendContour(contour) This will return a :class:`BaseContour` object representing the new contour in the glyph. ``offset`` indicates the x and y shift values that should be applied ...
def delete_dataset(self, dataset):
    """Delete a single dataset, including all features it contains.

    :param str dataset: the dataset id.
    :return: the HTTP response of the DELETE request.
    """
    template = URITemplate(self.baseuri + '/{owner}/{id}')
    uri = template.expand(owner=self.username, id=dataset)
    return self.session.delete(uri)
Deletes a single dataset, including all of the features that it contains. Parameters ---------- dataset : str The dataset id. Returns ------- HTTP status code.
def is_equal(self, another, limit=0.8): if another is None: raise Exception("Parameter another is null") if isinstance(another, int): distance = self.hamming_distance(another) elif isinstance(another, Simhash): assert self.hash_bit_number == another.hash_bit_n...
Determine whether two simhashes are similar. :param another: another simhash. :param limit: the similarity threshold. :return: true if the similarity is greater than the limit, false otherwise.
def exit(self):
    """Close the GPU API, then delegate to the parent exit."""
    if self.nvml_ready:
        try:
            pynvml.nvmlShutdown()
        except Exception as exc:
            logger.debug("pynvml failed to shutdown correctly ({})".format(exc))
    super(Plugin, self).exit()
Overwrite the exit method to close the GPU API.
def _translate_limit(self, len_, start, num): if start > len_ or num <= 0: return 0, 0 return min(start, len_), num
Translate limit to valid bounds.
def get_remote_executors(hub_ip, port=4444):
    """Get remote executor URLs from a Selenium Grid Hub console page.

    :param hub_ip: IP of the Selenium Grid hub.
    :param port: port of the Selenium Grid hub (default 4444).
    :return: list of "<remoteHost>/wd/hub" executor URLs; empty when the
        console page is not reachable (non-200 response).
    """
    resp = requests.get("http://%s:%s/grid/console" % (hub_ip, port))
    remote_hosts = []
    if resp.status_code == 200:
        # Raw string: "\w"/"\." in a plain literal are invalid escape
        # sequences (SyntaxWarning today, an error in future Pythons).
        remote_hosts = re.findall(r"remoteHost: ([\w/\.:]+)", resp.text)
    return [host + "/wd/hub" for host in remote_hosts]
Get remote hosts from Selenium Grid Hub Console @param hub_ip: hub ip of selenium grid hub @param port: hub port of selenium grid hub
def set_quality_index(self): window_start = self.parent.value('window_start') window_length = self.parent.value('window_length') qual = self.annot.get_stage_for_epoch(window_start, window_length, attr='quality') if qual is None: s...
Set the current signal quality in combobox.
def send_slack_message(channel, text):
    """Send ``text`` to the given Slack ``channel`` via an HTTP POST."""
    payload = json.dumps({'channel': channel, 'text': text})
    return httplib2.Http().request(SLACK_MESSAGE_URL, 'POST', body=payload)
Send a message to Slack
def make_datastore_api(client): parse_result = six.moves.urllib_parse.urlparse(client._base_url) host = parse_result.netloc if parse_result.scheme == "https": channel = make_secure_channel(client._credentials, DEFAULT_USER_AGENT, host) else: channel = insecure_channel(host) return da...
Create an instance of the GAPIC Datastore API. :type client: :class:`~google.cloud.datastore.client.Client` :param client: The client that holds configuration details. :rtype: :class:`.datastore.v1.datastore_client.DatastoreClient` :returns: A datastore API instance with the proper credentials.
def get_all_keys(self):
    """Return every key stored in the index.

    :return: All keys
    :rtype: list(str)
    """
    return [key for keys in self._index.values() for key in keys]
Get all keys indexed. :return: All keys :rtype: list(str)
def get_service_uid_from(self, analysis):
    """Return the UID of the analysis service of ``analysis``."""
    obj = api.get_object(analysis)
    service = obj.getAnalysisService()
    return api.get_uid(service)
Return the service from the analysis
def table(self, name=DEFAULT_TABLE, **options): if name in self._table_cache: return self._table_cache[name] table_class = options.pop('table_class', self._cls_table) table = table_class(self._cls_storage_proxy(self._storage, name), name, **options) self._table_cache[name] = ...
Get access to a specific table. Creates a new table, if it hasn't been created before, otherwise it returns the cached :class:`~tinydb.Table` object. :param name: The name of the table. :type name: str :param cache_size: How many query results to cache. :param table_cla...
def search(self, title=None, libtype=None, **kwargs): args = {} if title: args['title'] = title if libtype: args['type'] = utils.searchType(libtype) for attr, value in kwargs.items(): args[attr] = value key = '/library/all%s' % utils.joinArgs(a...
Searching within a library section is much more powerful. It seems certain attributes on the media objects can be targeted to filter this search down a bit, but I havent found the documentation for it. Example: "studio=Comedy%20Central" or "year=1999" "title=Kung Fu" all work. Other...
def get_all_netting_channel_events( chain: BlockChainService, token_network_address: TokenNetworkAddress, netting_channel_identifier: ChannelID, contract_manager: ContractManager, from_block: BlockSpecification = GENESIS_BLOCK_NUMBER, to_block: BlockSpecification = 'lates...
Helper to get all events of a NettingChannelContract.
def _and_join(self, close_group=False): if not self.initialized: raise ValueError("You must add a search term before adding an operator.") else: self._operator("AND", close_group=close_group) return self
Combine terms with AND. There must be a term added before using this method. Arguments: close_group (bool): If ``True``, will end the current group and start a new one. If ``False``, will continue current group. Example:: If ...
def copy_model_instance(obj):
    """Copy a Django model instance into a dictionary.

    Automatically created fields (auto PK, auto-created many-to-one
    reverse relations) are excluded.

    :param obj: Django model object
    :return: copy of model instance as dictionary
    """
    fields = obj._meta.get_fields(include_parents=False)
    return {
        field.name: getattr(obj, field.name)
        for field in fields
        if not field.auto_created
    }
Copy Django model instance as a dictionary excluding automatically created fields like an auto-generated sequence as a primary key or an auto-created many-to-one reverse relation. :param obj: Django model object :return: copy of model instance as dictionary
def listen(manifest, config, model_mock=False):
    """Run the IRC listening process until interrupted."""
    config['manifest'] = manifest
    config['model_mock'] = model_mock
    bot = IrcBot(config)
    try:
        bot.start()
    except KeyboardInterrupt:
        pass
IRC listening process.
def _fix_syscall_ip(state):
    """Point ``state.ip`` at the syscall SimProcedure for this state.

    Resolves the syscall for ``state`` and, on success, sets the state's
    IP to the stub's address.  Does nothing if resolution fails.

    :param SimState state: the program state.
    :return: None
    """
    try:
        # Honour the BYPASS_UNSUPPORTED_SYSCALL option when resolving.
        bypass = o.BYPASS_UNSUPPORTED_SYSCALL in state.options
        stub = state.project.simos.syscall(state, allow_unsupported=bypass)
        if stub:  # resolution succeeded; retarget the state
            state.ip = stub.addr
    except AngrUnsupportedSyscallError:
        # Resolution failed and bypassing was not allowed; leave IP as-is.
        pass
Resolve syscall information from the state, get the IP address of the syscall SimProcedure, and set the IP of the state accordingly. Don't do anything if the resolution fails. :param SimState state: the program state. :return: None
def rolling_count(self, window_start, window_end):
    """Count non-NULL values over a sliding window of this SArray.

    The window for each element is the inclusive range
    [position + window_start, position + window_end] relative to that
    element's position in the SArray.
    """
    agg_op = '__builtin__nonnull__count__'
    proxy = self.__proxy__.builtin_rolling_apply(
        agg_op, window_start, window_end, 0)
    return SArray(_proxy=proxy)
Count the number of non-NULL values of different subsets over this SArray. The subset that the count is executed on is defined as an inclusive range relative to the position to each value in the SArray, using `window_start` and `window_end`. For a better understanding of this, s...
async def _get(self, key: Text) -> Dict[Text, Any]:
    """Return the stored value for ``key``, deserialized from JSON.

    Falls back to an empty dict when the key is missing (``r.get``
    returns None, which the JSON parser rejects with TypeError) or
    when the stored value is invalid JSON (ValueError).
    """
    try:
        # Acquire a connection from the pool for the duration of the call.
        with await self.pool as r:
            return ujson.loads(await r.get(self.register_key(key)))
    except (ValueError, TypeError):
        return {}
Get the value for the key. It is automatically deserialized from JSON and returns an empty dictionary by default.
def require_int(self, key: str) -> int:
    """Return the configuration value for ``key`` as an int.

    :param str key: The requested configuration key.
    :return: The configuration key's value.
    :rtype: int
    :raises ConfigMissingError: if the key has no legal int value.
    """
    value = self.get_int(key)
    if value is None:
        raise ConfigMissingError(self.full_key(key))
    return value
Returns a configuration value, as an int, by its given key. If it doesn't exist, or the configuration value is not a legal int, an error is thrown. :param str key: The requested configuration key. :return: The configuration key's value. :rtype: int :raises ConfigMissingError: T...
def create(self, port, value, timestamp=None): session = self._session datapoint_class = self._datapoint_class attributes = { 'port': port, 'value': value, } if timestamp is not None: attributes['timestamp'] = to_iso_date(timestamp) att...
Post a new reading to a timeseries. A reading is comprised of a `port`, a `value` and a timestamp. A port is like a tag for the given reading and gives an indication of the meaning of the value. The value of the reading can be any valid json value. The timestamp is considered...
def rebuildtable(cls): cls._closure_model.objects.all().delete() cls._closure_model.objects.bulk_create([cls._closure_model( parent_id=x['pk'], child_id=x['pk'], depth=0 ) for x in cls.objects.values("pk")]) for node in cls.objects.all(): n...
Regenerate the entire closuretree.
def _create_column(data, col, value): with suppress(AttributeError): if not value.index.equals(data.index): if len(value) == len(data): value.index = data.index else: value.reset_index(drop=True, inplace=True) if data.index.empty: try: ...
Create column in dataframe Helper method meant to deal with problematic column values. e.g When the series index does not match that of the data. Parameters ---------- data : pandas.DataFrame dataframe in which to insert value col : column label Column name value : obje...
def read_sources_from_numpy_file(npfile):
    """Read all sources from a numpy pickle file into an ROI table.

    Parameters
    ----------
    npfile : str
        The input numpy pickle file name.

    Returns
    -------
    tab : `~astropy.table.Table`
    """
    sources = np.load(npfile).flat[0]['sources']
    roi = ROIModel()
    roi.load_sources(sources.values())
    return roi.create_table()
Open a numpy pickle file and read all the new sources into a dictionary Parameters ---------- npfile : file name The input numpy pickle file Returns ------- tab : `~astropy.table.Table`
def windowed_run_count_ufunc(x, window): return xr.apply_ufunc(windowed_run_count_1d, x, input_core_dims=[['time'], ], vectorize=True, dask='parallelized', output_dtypes=[np.int, ], ...
Dask-parallel version of windowed_run_count_1d, ie the number of consecutive true values in array for runs at least as long as given duration. Parameters ---------- x : bool array Input array window : int Minimum duration of consecutive run to accumulate values. Returns -------...
def reset_tag(self, name):
    """Reset the tag and return the new tag identifier.

    :param name: The tag
    :type name: str
    :rtype: str
    """
    # uuid4().hex is the dash-less hex form of the UUID.
    new_id = uuid.uuid4().hex
    self._store.forever(self.tag_key(name), new_id)
    return new_id
Reset the tag and return the new tag identifier. :param name: The tag :type name: str :rtype: str
def remove_boards_gui(hwpack=''): if not hwpack: if len(hwpack_names()) > 1: hwpack = psidialogs.choice(hwpack_names(), 'select hardware package to select board from!', title='select') else: hwpack ...
remove boards by GUI.
def shutdown(self):
    """Close the socket immediately and mark the connection as down."""
    if not self.sock:
        return
    self.sock.close()
    self.sock = None
    self.connected = False
close socket, immediately.
def _get_zoom_mat(sw:float, sh:float, c:float, r:float)->AffineMatrix: "`sw`,`sh` scale width,height - `c`,`r` focus col,row." return [[sw, 0, c], [0, sh, r], [0, 0, 1.]]
`sw`,`sh` scale width,height - `c`,`r` focus col,row.
def create_logger(name, formatter=None, handler=None, level=None): logger = logging.getLogger(name) logger.handlers = [] if handler is None: handler = logging.StreamHandler(sys.stdout) if formatter is not None: handler.setFormatter(formatter) if level is None: level = logging...
Returns a new logger for the specified name.
def _handle_exc(exception):
    """Record ``exception`` with stack trace to FogBugz via BugzScout.

    Submission is asynchronous; nothing reaches FogBugz until a celery
    worker processes the task.

    :param exception: uncaught exception thrown in app
    :return: a list containing one empty string (WSGI-style empty body).
    """
    _exc_type, _exc_value, exc_tb = sys.exc_info()
    bugzscout.ext.celery_app.submit_error.delay(
        'http://fogbugz/scoutSubmit.asp',
        'error-user',
        'MyAppProject',
        'Errors',
        # str(exception) instead of the Python-2-only exception.message.
        'An error occurred in MyApp: {0}'.format(exception),
        # extract_tb() takes only the traceback object; unpacking the full
        # exc_info() triple (as the original did) raises TypeError.
        extra=traceback.extract_tb(exc_tb))
    return ['']
Record exception with stack trace to FogBugz via BugzScout, asynchronously. Returns an empty string. Note that this will not be reported to FogBugz until a celery worker processes this task. :param exception: uncaught exception thrown in app
def format_label(self, field, counter):
    """Format the HTML label for ``field``.

    :param field: bound form field (``field.field.required`` and
        ``field.label`` are read).
    :param counter: index used to build the ``for`` attribute.
    :return: the ``<label>`` HTML string.
    """
    # The original used `required and 'class="required"'`, which inserts
    # the literal string "False" into the HTML for optional fields.
    required_attr = 'class="required"' if field.field.required else ''
    return '<label for="id_formfield_%s" %s>%s</label>' % (
        counter, required_attr, field.label)
Format the label for each field
def range(self, count):
    """Create a list of colors evenly spaced along this scale's domain.

    :param int count: The number of colors to return (must be > 1).
    :rtype: list
    :returns: A list of spectra.Color objects.
    """
    if count <= 1:
        raise ValueError("Range size must be greater than 1.")
    dom = self._domain
    distance = dom[-1] - dom[0]
    colors = []
    for step in range(count):
        colors.append(self(dom[0] + distance * float(step) / (count - 1)))
    return colors
Create a list of colors evenly spaced along this scale's domain. :param int count: The number of colors to return. :rtype: list :returns: A list of spectra.Color objects.
def add(self, value):
    """Add ``value`` to the set, setting the TTL on the first member(s)."""
    added = self.redis.sadd(self.key, value)
    # Only (re)set the expiry while the set is still small — presumably
    # so the TTL starts counting from the first insertion.
    if self.redis.scard(self.key) < 2:
        self.redis.expire(self.key, self.expire)
    return added
Add value to set.
def get_grade_system_ids_by_gradebooks(self, gradebook_ids):
    """Get the ``GradeSystem Ids`` for a list of ``Gradebooks``.

    arg:    gradebook_ids (osid.id.IdList): list of gradebook ``Ids``
    return: (osid.id.IdList) - list of grade system ``Ids``
    """
    systems = self.get_grade_systems_by_gradebooks(gradebook_ids)
    return IdList([system.get_id() for system in systems])
Gets the list of ``GradeSystem Ids`` corresponding to a list of ``Gradebooks``. arg: gradebook_ids (osid.id.IdList): list of gradebook ``Ids`` return: (osid.id.IdList) - list of grade systems ``Ids`` raise: NullArgument - ``gradebook_ids`` is ``null`` raise: Operati...
def nsuriLogic(self):
    """Return code assigning "ns" to this item's targetNamespace schema.

    Used for namespacing local elements.
    """
    owner = self.parentClass if self.parentClass else self.getNSAlias()
    return 'ns = %s.%s.schema' % (owner, self.getClassName())
set a variable "ns" that represents the targetNamespace in which this item is defined. Used for namespacing local elements.
def flip(self, reactions):
    """Toggle each given reaction's membership in the flipped set."""
    for reaction in reactions:
        if reaction in self._flipped:
            self._flipped.discard(reaction)
        else:
            self._flipped.add(reaction)
Flip the specified reactions.
def setup_fields_processors(config, model_cls, schema): properties = schema.get('properties', {}) for field_name, props in properties.items(): if not props: continue processors = props.get('_processors') backref_processors = props.get('_backref_processors') if process...
Set up model fields' processors. :param config: Pyramid Configurator instance. :param model_cls: Model class for field of which processors should be set up. :param schema: Dict of model JSON schema.
def usernames(self):
    """Return the list of unique usernames of the tweets stored in self.

    On failure (e.g. malformed stored tweets) the error is logged and
    None is returned, matching the original behaviour.
    """
    try:
        return list({tweet.username for tweet in self})
    except Exception:
        # Narrowed from a bare except: a bare clause also swallows
        # KeyboardInterrupt/SystemExit, which should propagate.
        log.error("error -- possibly a problem with tweets stored")
This function returns the list of unique usernames corresponding to the tweets stored in self.
def _ProcessGrepSource(self, source): attributes = source.base_source.attributes paths = artifact_utils.InterpolateListKbAttributes( attributes["paths"], self.knowledge_base, self.ignore_interpolation_errors) regex = utils.RegexListDisjunction(attributes["content_regex_list"]) condition ...
Find files fulfilling regex conditions.
def get_consumers(self, _Consumer, channel):
    """ConsumerMixin requirement: build the consumer list.

    :returns: All the consumers.
    :rtype: list.
    """
    consumer = _Consumer(
        queues=[self.queue(channel)],
        callbacks=[self.main_callback],
        prefetch_count=self.prefetch_count,
    )
    return [consumer]
| ConsumerMixin requirement. | Get the consumers list. :returns: All the consumers. :rtype: list.
def list_labels(self, bucket):
    """Yield the labels stored in ``bucket``.

    Walks the entire zip archive (zip entries have arbitrary order),
    so this is an expensive operation.

    :param bucket: bucket to list labels for.
    :return: iterator over the labels in the specified bucket.
    """
    for entry in self.z.namelist():
        container, label = self._nf(entry.encode("utf-8"))
        if container != bucket or label == MD_FILE:
            continue
        yield label
List labels for the given bucket. Due to zipfiles inherent arbitrary ordering, this is an expensive operation, as it walks the entire archive searching for individual 'buckets' :param bucket: bucket to list labels for. :return: iterator for the labels in the specified bucket.
def advice_dcv_method(cls, csr, package, altnames, dcv_method, cert_id=None): params = {'csr': csr, 'package': package, 'dcv_method': dcv_method} if cert_id: params['cert_id'] = cert_id result = cls.call('cert.get_dcv_params', params) if dcv_method =...
Display dcv_method information.
def auth(username, password): django_auth_path = __opts__['django_auth_path'] if django_auth_path not in sys.path: sys.path.append(django_auth_path) os.environ.setdefault('DJANGO_SETTINGS_MODULE', __opts__['django_auth_settings']) __django_auth_setup() if not is_connection_usable(): ...
Simple Django auth
def _handle_tag_scriptlimits(self):
    """Handle the ScriptLimits SWF tag.

    Reads two unsigned 16-bit fields from the source stream, in tag
    order: MaxRecursionDepth then ScriptTimeoutSeconds.
    """
    obj = _make_object("ScriptLimits")
    obj.MaxRecursionDepth = unpack_ui16(self._src)
    obj.ScriptTimeoutSeconds = unpack_ui16(self._src)
    return obj
Handle the ScriptLimits tag.
def _make_bridge_request_msg(self, channel, netfn, command): head = bytearray((constants.IPMI_BMC_ADDRESS, constants.netfn_codes['application'] << 2)) check_sum = _checksum(*head) boday = bytearray((0x81, self.seqlun, constants.IPMI_SEND_MESSAGE_CMD, ...
This function generate message for bridge request. It is a part of ipmi payload.
def _route(self, attr, args, kwargs, **fkwargs): return self.cluster.hosts.keys()
Perform routing and return db_nums
def find_cached_job(jid): serial = salt.payload.Serial(__opts__) proc_dir = os.path.join(__opts__['cachedir'], 'minion_jobs') job_dir = os.path.join(proc_dir, six.text_type(jid)) if not os.path.isdir(job_dir): if not __opts__.get('cache_jobs'): return ('Local jobs cache directory not...
Return the data for a specific cached job id. Note this only works if cache_jobs has previously been set to True on the minion. CLI Example: .. code-block:: bash salt '*' saltutil.find_cached_job <job id>
def apply_ants_transform(transform, data, data_type="point", reference=None, **kwargs):
    """Apply an ANTsTransform to data (wraps ``transform.apply``).

    ANTsR function: `applyAntsrTransform`

    Arguments
    ---------
    transform : ANTsTransform
        transform to apply to image
    data : ndarray/list/tuple
        data to which transform will be applied
    data_type : string
        type of data
    """
    result = transform.apply(data, data_type, reference, **kwargs)
    return result
Apply ANTsTransform to data ANTsR function: `applyAntsrTransform` Arguments --------- transform : ANTsTransform transform to apply to image data : ndarray/list/tuple data to which transform will be applied data_type : string type of data Options : ...
def dirichlet_like(x, theta):
    R"""Dirichlet log-likelihood (multivariate continuous distribution).

    :Parameters:
      x : (n, k-1) array of observations.
      theta : concentration parameters, last dimension of size k.
    """
    x = np.atleast_2d(x)
    theta = np.atleast_2d(theta)
    k_minus_1 = np.shape(x)[-1]
    if k_minus_1 + 1 != np.shape(theta)[-1]:
        raise ValueError('The dimension of x in dirichlet_like must be k-1.')
    return flib.dirichlet(x, theta)
R""" Dirichlet log-likelihood. This is a multivariate continuous distribution. .. math:: f(\mathbf{x}) = \frac{\Gamma(\sum_{i=1}^k \theta_i)}{\prod \Gamma(\theta_i)}\prod_{i=1}^{k-1} x_i^{\theta_i - 1} \cdot\left(1-\sum_{i=1}^{k-1}x_i\right)^\theta_k :Parameters: x : (n, k-1) ar...
def SetValue(self, row, col, value, refresh=True): value = "".join(value.split("\n")) key = row, col, self.grid.current_table old_code = self.grid.code_array(key) if old_code is None: old_code = "" if value != old_code: self.grid.actions.set_code(key, valu...
Set the value of a cell, merge line breaks
def dictionary_merge(a, b):
    """Recursively merge dictionary ``b`` into ``a``.

    Like dict.update, but nested dicts present in both are merged in
    place instead of replaced; other values from ``b`` overwrite those
    in ``a``.  Returns ``a``.
    """
    for key, value in b.items():
        both_dicts = (
            key in a and isinstance(a[key], dict) and isinstance(value, dict)
        )
        if both_dicts:
            dictionary_merge(a[key], value)
        else:
            a[key] = value
    return a
merges dictionary b into a Like dict.update, but recursive
def add(self, word):
    """Add a word to the dictionary; blank or empty words are ignored."""
    if not word or not word.strip():
        return
    self.words[word] = word
Add a word to the dictionary
def saveVarsInMat(filename, varNamesStr, outOf=None, **opts): from mlabwrap import mlab filename, varnames, outOf = __saveVarsHelper( filename, varNamesStr, outOf, '.mat', **opts) try: for varname in varnames: mlab._set(varname, outOf[varname]) mlab._do("save('%s','%s')" ...
Hacky convenience function to dump a couple of python variables in a .mat file. See `awmstools.saveVars`.
def set_disk_timeout(timeout, power='ac', scheme=None):
    """Set the disk timeout in minutes for the given power scheme.

    Args:
        timeout (int): minutes before the disk will timeout.
        power (str): 'ac' or 'dc'.  Default is ``ac``.
        scheme (str): the power scheme to modify; current one if None.
    """
    return _set_powercfg_value(
        scheme=scheme,
        sub_group='SUB_DISK',
        setting_guid='DISKIDLE',
        power=power,
        value=timeout,
    )
Set the disk timeout in minutes for the given power scheme Args: timeout (int): The amount of time in minutes before the disk will timeout power (str): Set the value for AC or DC power. Default is ``ac``. Valid options are: - ``ac`` (AC Power) ...
def __setRouterSelectionJitter(self, iRouterJitter): print 'call _setRouterSelectionJitter' try: cmd = 'routerselectionjitter %s' % str(iRouterJitter) print cmd return self.__sendCommand(cmd) == 'Done' except Exception, e: ModuleHelper.WriteIntoDeb...
set ROUTER_SELECTION_JITTER parameter for REED to upgrade to Router Args: iRouterJitter: a random period prior to request Router ID for REED Returns: True: successful to set the ROUTER_SELECTION_JITTER False: fail to set ROUTER_SELECTION_JITTER
def GetNumberOfRows(self):
    """Retrieve the number of rows of the table (cached after first read).

    Returns:
        int: number of rows.

    Raises:
        IOError: if the file-like object has not been opened.
    """
    if not self._database_object:
        raise IOError('Not opened.')
    if self._number_of_rows is None:
        count = self._database_object.GetNumberOfRows(self._table_name)
        self._number_of_rows = count
    return self._number_of_rows
Retrieves the number of rows of the table. Returns: int: number of rows. Raises: IOError: if the file-like object has not been opened. OSError: if the file-like object has not been opened.
def parse_statements(self, end_tokens, drop_needle=False): self.stream.skip_if('colon') self.stream.expect('block_end') result = self.subparse(end_tokens) if self.stream.current.type == 'eof': self.fail_eof(end_tokens) if drop_needle: next(self.stream) ...
Parse multiple statements into a list until one of the end tokens is reached. This is used to parse the body of statements as it also parses template data if appropriate. The parser checks first if the current token is a colon and skips it if there is one. Then it checks for the block...
def _translate_str(self, oprnd1, oprnd2, oprnd3): assert oprnd1.size and oprnd3.size op1_var = self._translate_src_oprnd(oprnd1) op3_var, op3_var_constrs = self._translate_dst_oprnd(oprnd3) if oprnd3.size > oprnd1.size: result = smtfunction.zero_extend(op1_var, op3_var.size) ...
Return a formula representation of a STR instruction.
def write_error(self, status_code, **kwargs): message = default_message = httplib.responses.get(status_code, '') if 'exc_info' in kwargs: (_, exc, _) = kwargs['exc_info'] if hasattr(exc, 'log_message'): message = str(exc.log_message) or default_message sel...
Log halt_reason in service log and output error page
def issue(self, issuance_spec, metadata, fees): inputs, total_amount = self._collect_uncolored_outputs( issuance_spec.unspent_outputs, 2 * self._dust_amount + fees) return bitcoin.core.CTransaction( vin=[bitcoin.core.CTxIn(item.out_point, item.output.script) for item in inputs], ...
Creates a transaction for issuing an asset. :param TransferParameters issuance_spec: The parameters of the issuance. :param bytes metadata: The metadata to be embedded in the transaction. :param int fees: The fees to include in the transaction. :return: An unsigned transaction for issui...
def _event_to_pb(event): if isinstance(event, (TaskData, Task)): key, klass = 'task', clearly_pb2.TaskMessage elif isinstance(event, (WorkerData, Worker)): key, klass = 'worker', clearly_pb2.WorkerMessage else: raise ValueError('unknown event') keys = ...
Supports converting internal TaskData and WorkerData, as well as celery Task and Worker to proto buffers messages. Args: event (Union[TaskData|Task|WorkerData|Worker]): Returns: ProtoBuf object
def get_queryset(self):
    """Return only the approved topics or posts."""
    return super().get_queryset().filter(approved=True)
Returns all the approved topics or posts.
def chunks(data, chunk_size):
    """Yield successive ``chunk_size``-sized chunks from ``data``.

    The final chunk may be shorter when len(data) is not a multiple of
    ``chunk_size``.
    """
    # range() instead of the Python-2-only xrange(): NameError on Python 3.
    for start in range(0, len(data), chunk_size):
        yield data[start:start + chunk_size]
Yield chunk_size chunks from data.
def add_header(self, name, value):
    """Add an additional response header, not removing duplicates."""
    values = self._headers.setdefault(_hkey(name), [])
    values.append(_hval(value))
Add an additional response header, not removing duplicates.
def _put_attributes_using_post(self, domain_or_name, item_name, attributes, replace=True, expected_value=None): domain, domain_name = self.get_domain_and_name(domain_or_name) params = {'DomainName': domain_name, 'ItemName': item_name} self._build_name_value_list(...
Monkey-patched version of SDBConnection.put_attributes that uses POST instead of GET The GET version is subject to the URL length limit which kicks in before the 256 x 1024 limit for attribute values. Using POST prevents that. https://github.com/BD2KGenomics/toil/issues/502
def request_exception(sender, request, **kwargs): if not isinstance(request, WSGIRequest): logger = logging.getLogger(__name__) level = CRITICAL if request.status_code <= 500 else WARNING logger.log(level, '%s exception occured (%s)', request.status_code, request.reason_ph...
Automated request exception logging. The function can also return an WSGIRequest exception, which does not supply either status_code or reason_phrase.
def do_gh(self, arg):
    """gh - go with exception handled.

    Takes no arguments and allows no command prefix; resumes execution
    with the last debug event's exception marked as handled.
    """
    if self.cmdprefix:
        raise CmdError("prefix not allowed")
    if arg:
        raise CmdError("too many arguments")
    if self.lastEvent:
        # Mark the pending exception as handled before resuming.
        self.lastEvent.continueStatus = win32.DBG_EXCEPTION_HANDLED
    return self.do_go(arg)
gh - go with exception handled
def set_params(self, arg_params, aux_params, allow_extra=False):
    """Assign (copy) parameters to all the executors.

    Parameters
    ----------
    arg_params : dict
        A dictionary of name to `NDArray` parameter mapping.
    aux_params : dict
        A dictionary of name to `NDArray` auxiliary variable mapping.
    allow_extra : boolean, optional
        Forwarded to each executor as ``allow_extra_params``.
    """
    for executor in self.execs:
        executor.copy_params_from(
            arg_params, aux_params, allow_extra_params=allow_extra)
Assign, i.e. copy parameters to all the executors. Parameters ---------- arg_params : dict A dictionary of name to `NDArray` parameter mapping. aux_params : dict A dictionary of name to `NDArray` auxiliary variable mapping. allow_extra : boolean, optional...
def from_file(cls, file_path, compressed=False, encoded=False): file_id = '.'.join(path.basename(file_path).split('.')[:-1]) file_format = file_path.split('.')[-1] content = cls(file_id, file_format, compressed, encoded) content.file_exists = True content._location = path.dirname...
Create a content object from a file path.
def write_ply(self, output_file): points = np.hstack([self.coordinates, self.colors]) with open(output_file, 'w') as outfile: outfile.write(self.ply_header.format( vertex_count=len(self.coordinates))) np.savetxt(outfile, points, '%f %f ...
Export ``PointCloud`` to PLY file for viewing in MeshLab.
def render(self, doc):
    """Render all elements sequentially into the given document.

    @param doc: the writable document to render to.
    @type doc: document.IWritableDocument
    @return: a deferred fired with the document when rendering is done.
    @rtype: defer.Deferred
    """
    deferred = defer.succeed(doc)
    for element in self._elements:
        deferred.addCallback(element.render)
    return deferred
Render all elements using specified document. @param doc: the writable document to render to. @type doc: document.IWritableDocument @return: a deferred fired with the specified document when the rendering is done. @rtype: defer.Deferred
def __cancel_timer(self): if self.__timer is not None: self.__timer.cancel() self.__unbind_call(True) self.__timer_args = None self.__timer = None
Cancels the timer, and calls its target method immediately
def post_change_receiver(self, instance: Model, action: Action, **kwargs): try: old_group_names = instance.__instance_groups.observers[self] except (ValueError, KeyError): old_group_names = set() if action == Action.DELETE: new_group_names = set() else...
Triggers the old_binding to possibly send to its group.
def dependencies(self, kwargs=None, expand_only=False): if not kwargs: kwargs = {} self.proper_kwargs('dependencies', kwargs) sections = self._get_dependency_sections_to_use(kwargs) deps = [] for sect in sections: if expand_only: deps.exten...
Returns all dependencies of this assistant with regards to specified kwargs. If expand_only == False, this method returns list of mappings of dependency types to actual dependencies (keeps order, types can repeat), e.g. Example: [{'rpm', ['rubygems']}, {'gem', ['mygem']}, {'rpm', ['spam...
def get_output_error(cmd): if not isinstance(cmd, list): cmd = [cmd] logging.debug("Running: %s", ' '.join(map(quote, cmd))) try: result = Popen(cmd, stdout=PIPE, stderr=PIPE) except IOError as e: return -1, u(''), u('Failed to run %r: %r' % (cmd, e)) so, se = result.communic...
Return the exit status, stdout, stderr of a command
def spop(self, name, count=None):
    "Remove and return a random member of set ``name``"
    if count is not None:
        return self.execute_command('SPOP', name, count)
    return self.execute_command('SPOP', name)
Remove and return a random member of set ``name``
def register(self):
    """Send the initial handshake packets to the server."""
    enqueue = self._queue.put
    enqueue(hello_packet(socket.gethostname(), mac(), __version__))
    for message in (MSG_SERVER_SETTINGS, MSG_SAMPLE_FORMAT, MSG_HEADER):
        enqueue(request_packet(message))
Transact with server.
def close(self):
    """Close the AVR device connection and don't try to reconnect."""
    self.log.warning('Closing connection to AVR')
    self._closing = True
    transport = self.protocol.transport
    if transport:
        transport.close()
Close the AVR device connection and don't try to reconnect.
def __insert_action(self, revision):
    """Handle the ``insert`` action type.

    Stages creation of a new document in this collection: the patch
    document gets the revision's ``master_id`` as its ``_id`` and is
    inserted.

    :param dict revision: The revision dictionary
    :raises DocumentRevisionInsertFailed: when the driver does not
        return a string id for the insert.
    """
    revision["patch"]["_id"] = ObjectId(revision.get("master_id"))
    # Coroutine-style (yield) insert; presumably a tornado/motor driver.
    insert_response = yield self.collection.insert(revision.get("patch"))
    if not isinstance(insert_response, str):
        raise DocumentRevisionInsertFailed()
Handle the insert action type. Creates new document to be created in this collection. This allows you to stage a creation of an object :param dict revision: The revision dictionary
def update_payment_request(self, tid, currency=None, amount=None, action=None, ledger=None, callback_uri=None, display_message_uri=None, capture_id=None, additional_amount=None, text=None, refund_id=None, ...
Update payment request, reauthorize, capture, release or abort It is possible to update ledger and the callback URIs for a payment request. Changes are always appended to the open report of a ledger, and notifications are sent to the callback registered at the time of notification. ...
def fromML(vec):
    """Convert a vector from the new mllib-local representation.

    This does NOT copy the data; it copies references.

    :param vec: a :py:class:`pyspark.ml.linalg.Vector`
    :return: a :py:class:`pyspark.mllib.linalg.Vector`
    """
    if isinstance(vec, newlinalg.DenseVector):
        result = DenseVector(vec.array)
    elif isinstance(vec, newlinalg.SparseVector):
        result = SparseVector(vec.size, vec.indices, vec.values)
    else:
        raise TypeError("Unsupported vector type %s" % type(vec))
    return result
Convert a vector from the new mllib-local representation. This does NOT copy the data; it copies references. :param vec: a :py:class:`pyspark.ml.linalg.Vector` :return: a :py:class:`pyspark.mllib.linalg.Vector` .. versionadded:: 2.0.0
def AsRegEx(self):
    """Return the current glob as a simple, case-insensitive regex.

    Note: No interpolation is performed.

    Returns:
      A RegularExpression() object.
    """
    pieces = self.__class__.REGEX_SPLIT_PATTERN.split(self._value)
    pattern = u"".join(self._ReplaceRegExPart(piece) for piece in pieces)
    return rdf_standard.RegularExpression(u"(?i)\\A%s\\Z" % pattern)
Return the current glob as a simple regex. Note: No interpolation is performed. Returns: A RegularExpression() object.
def get_form_kwargs(self):
    """Expose the 'initial' payload as form 'data'.

    This lets the form validate data supplied on a GET request, where
    Django would normally leave the form unbound.
    """
    kwargs = super(ApiFormView, self).get_form_kwargs()
    kwargs['data'] = kwargs.get('initial')
    return kwargs
Add the 'data' to the form args so you can validate the form data on a get request.
def is_pk_descriptor(descriptor, include_alt=False):
    """Return true if ``descriptor`` is a primary key.

    A primary key marker is ``True`` or an int position.  With
    ``include_alt``, the alternate primary key is also checked.
    """
    def _is_pk_marker(marker):
        # Exact type() check (not isinstance) so that False is never
        # treated as an int marker, matching the original semantics.
        return marker is True or type(marker) is int

    if _is_pk_marker(descriptor.pk):
        return True
    return _is_pk_marker(descriptor.alt_pk) if include_alt else False
Return true if `descriptor` is a primary key.
def create_parameter_group(name, db_parameter_group_family, description, tags=None, region=None, key=None, keyid=None, profile=None): res = __salt__['boto_rds.parameter_group_exists'](name, tags, region, key, ...
Create an RDS parameter group CLI example to create an RDS parameter group:: salt myminion boto_rds.create_parameter_group my-param-group mysql5.6 \ "group description"
def set_nest_transactions_with_savepoints(self, nest_transactions_with_savepoints): if self._transaction_nesting_level > 0: raise DBALConnectionError.may_not_alter_nested_transaction_with_savepoints_in_transaction() if not self._platform.is_savepoints_supported(): raise DBALConne...
Sets if nested transactions should use savepoints. :param nest_transactions_with_savepoints: `True` or `False`
def _build_fluent_table(self): self.fluent_table = collections.OrderedDict() for name, size in zip(self.domain.non_fluent_ordering, self.non_fluent_size): non_fluent = self.domain.non_fluents[name] self.fluent_table[name] = (non_fluent, size) for name, size in zip(self.do...
Builds the fluent table for each RDDL pvariable.
def connection_lost(self, exc: Optional[Exception]) -> None: logger.debug("%s - event = connection_lost(%s)", self.side, exc) self.state = State.CLOSED logger.debug("%s - state = CLOSED", self.side) if not hasattr(self, "close_code"): self.close_code = 1006 if not has...
7.1.4. The WebSocket Connection is Closed.
def allocate_stack(size=DEFAULT_STACK_SIZE): base = libc.mmap( None, size + GUARD_PAGE_SIZE, libc.PROT_READ | libc.PROT_WRITE, libc.MAP_PRIVATE | libc.MAP_ANONYMOUS | libc.MAP_GROWSDOWN | libc.MAP_STACK, -1, 0) try: libc.mprotect(base, GUARD_PAGE_SIZE, libc.PROT_N...
Allocate some memory that can be used as a stack. @return: a ctypes void pointer to the *top* of the stack.
def predict_from_variants( self, variants, transcript_expression_dict=None, gene_expression_dict=None): variants = apply_variant_expression_filters( variants, transcript_expression_dict=transcript_expression_dict, transcript_exp...
Predict epitopes from a Variant collection, filtering options, and optional gene and transcript expression data. Parameters ---------- variants : varcode.VariantCollection transcript_expression_dict : dict Maps from Ensembl transcript IDs to FPKM expression values. ...
def ParseMessage(descriptor, byte_str):
    """Generate a new Message instance from a Descriptor and bytes.

    Args:
      descriptor: Protobuf Descriptor object
      byte_str: Serialized protocol buffer byte string

    Returns:
      Newly created protobuf Message object.
    """
    message_class = MakeClass(descriptor)
    message = message_class()
    message.ParseFromString(byte_str)
    return message
Generate a new Message instance from this Descriptor and a byte string. Args: descriptor: Protobuf Descriptor object byte_str: Serialized protocol buffer byte string Returns: Newly created protobuf Message object.
def get(self, block=True, timeout=None):
    """Pop a node id off the inner priority queue and return its node.

    Blocks by default.  The lock is held only while marking the node as
    in progress.

    :param bool block: If True, block until the inner queue has data.
    :param timeout: If set, seconds to block waiting for data.
    """
    _priority, node_id = self.inner.get(block=block, timeout=timeout)
    with self.lock:
        self._mark_in_progress(node_id)
    return self.get_node(node_id)
Get a node off the inner priority queue. By default, this blocks. This takes the lock, but only for part of it. :param bool block: If True, block until the inner queue has data :param Optional[float] timeout: If set, block for timeout seconds waiting for data. :return Parse...
def find_all(self, prefix): prefix = ip_network(prefix) if not self.prefix.overlaps(prefix) \ or self.prefix[0] > prefix[0] \ or self.prefix[-1] < prefix[-1]: raise NotAuthoritativeError('This node is not authoritative for %r' % prefix)...
Find everything in the given prefix