code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def api_retrieve(self, api_key=None):
    """Call the Stripe API's retrieve operation for this model.

    :param api_key: The api key to use for this request. Defaults to
        ``self.default_api_key`` (settings.STRIPE_SECRET_KEY).
    :type api_key: string
    """
    key = api_key if api_key else self.default_api_key
    return self.stripe_class.retrieve(
        id=self.id, api_key=key, expand=self.expand_fields
    )
Call the stripe API's retrieve operation for this model. :param api_key: The api key to use for this request. Defaults to settings.STRIPE_SECRET_KEY. :type api_key: string
def next(self):
    """Provide the next element of the list, raising StopIteration when
    the page list is exhausted."""
    position = self.idx
    if position >= len(self.page_list):
        raise StopIteration()
    self.idx = position + 1
    return self.page_list[position]
Provide the next element of the list.
def debugging(): print("In debugging") json_file = r"C:\Scripting\Processing\Cell" \ r"data\outdata\SiBEC\cellpy_batch_bec_exp02.json" b = init(default_log_level="DEBUG") b.load_info_df(json_file) print(b.info_df.head()) b.export_raw = False b.export_cycles = False b.expo...
This one I use for debugging...
def get_member_class(resource): reg = get_current_registry() if IInterface in provided_by(resource): member_class = reg.getUtility(resource, name='member-class') else: member_class = reg.getAdapter(resource, IMemberResource, name='member-class') retu...
Returns the registered member class for the given resource. :param resource: registered resource :type resource: class implementing or instance providing or subclass of a registered resource interface.
def username_or(user, attr):
    """Return the user's username for display, or the given alternate
    attribute when ``ACCOUNTS_NO_USERNAME`` is set to ``True``.

    Callable attribute values are invoked before being returned.
    """
    name = attr if settings.ACCOUNTS_NO_USERNAME else "username"
    result = getattr(user, name)
    return result() if callable(result) else result
Returns the user's username for display, or an alternate attribute if ``ACCOUNTS_NO_USERNAME`` is set to ``True``.
def image_from_simplestreams(server, alias, remote_addr=None, cert=None, key=None, verify_cert=True, aliases=None, pu...
Create an image from simplestreams server : Simplestreams server URI alias : The alias of the image to retrieve remote_addr : An URL to a remote Server, you also have to give cert and key if you provide remote_addr and its a TCP Address! ...
def _has_perm(self, user, permission): if user.is_superuser: return True if user.is_active: perms = [perm.split('.')[1] for perm in user.get_all_permissions()] return permission in perms return False
Check whether the user has the given permission @return True if user is granted with access, False if not.
def nla_parse(tb, maxtype, head, len_, policy): rem = c_int() for nla in nla_for_each_attr(head, len_, rem): type_ = nla_type(nla) if type_ > maxtype: continue if policy: err = validate_nla(nla, maxtype, policy) if err < 0: return err ...
Create attribute index based on a stream of attributes. https://github.com/thom311/libnl/blob/libnl3_2_25/lib/attr.c#L242 Iterates over the stream of attributes and stores a pointer to each attribute in the index array using the attribute type as index to the array. Attribute with a type greater than the ...
def append_string(self, field, header=False): bits = field.split('=', 1) if len(bits) != 2: raise ValueError("Field missing '=' separator.") try: tag_int = int(bits[0]) except ValueError: raise ValueError("Tag value must be an integer") self.ap...
Append a tag=value pair in string format. :param field: String "tag=value" to be appended to this message. :param header: Append to header if True; default to body. The string is split at the first '=' character, and the resulting tag and value strings are appended to the message.
def send_message( self, title=None, body=None, icon=None, data=None, sound=None, badge=None, api_key=None, **kwargs): if self: from .fcm import fcm_send_bulk_message registration_i...
Send notification for all active devices in queryset and deactivate if DELETE_INACTIVE_DEVICES setting is set to True.
def calculate_median(given_list): median = None if not given_list: return median given_list = sorted(given_list) list_length = len(given_list) if list_length % 2: median = given_list[int(list_length / 2)] else: median = (given_list[int(...
Returns the median of values in the given list.
def get_cli_argument(self, command, name): parts = command.split() result = CLIArgumentType() for index in range(0, len(parts) + 1): probe = ' '.join(parts[0:index]) override = self.arguments.get(probe, {}).get(name, None) if override: result.u...
Get the argument for the command after applying the scope hierarchy :param command: The command that we want the argument for :type command: str :param name: The name of the argument :type name: str :return: The CLI command after all overrides in the scope hierarchy have been ap...
def run(configobj=None):
    """TEAL interface for the `acssum` function.

    Pulls input/output paths and option flags out of ``configobj`` and
    forwards them to ``acssum``.
    """
    cfg = configobj
    acssum(cfg['input'], cfg['output'],
           exec_path=cfg['exec_path'],
           time_stamps=cfg['time_stamps'],
           verbose=cfg['verbose'],
           quiet=cfg['quiet'])
TEAL interface for the `acssum` function.
def report_score_vs_rmsd_funnels(designs, path): from matplotlib.backends.backend_pdf import PdfPages import matplotlib.pyplot as plt print "Reporting score vs RMSD funnels..." pdf = PdfPages(path) designs = sorted(designs, key=lambda x: x.fancy_path) for index, design in enumerate(designs): ...
Create a PDF showing the score vs. RMSD funnels for all the reasonable designs. This method was copied from an old version of this script, and does not currently work.
def partition(molList, options): status_field = options.status_field active_label = options.active_label decoy_label = options.decoy_label activeList = [] decoyList = [] for mol in molList: if mol.GetProp(status_field) == active_label: activeList.append(mol) elif mol....
Partition molList into activeList and decoyList
def aligner_from_header(in_bam):
    """Identify the aligner from the BAM header; handles pre-aligned inputs.

    Scans the @PG records' program-name (PN) field for a known tool name
    and returns the first match; returns None implicitly when no known
    aligner is found.
    """
    from bcbio.pipeline.alignment import TOOLS
    with pysam.Samfile(in_bam, "rb") as bamfile:
        for pg in bamfile.header.get("PG", []):
            program_name = pg.get("PN", "").lower()
            # iterate the dict directly (no .keys()) and use substring
            # membership instead of .find(x) >= 0
            for tool in TOOLS:
                if tool in program_name:
                    return tool
Identify aligner from the BAM header; handling pre-aligned inputs.
def trace_start(self):
    """Start collecting trace data.

    Returns:
      ``None``.

    Raises:
      JLinkException: when the DLL reports failure (result code 1).
    """
    command = enums.JLinkTraceCommand.START
    result = self._dll.JLINKARM_TRACE_Control(command, 0)
    if result == 1:
        raise errors.JLinkException('Failed to start trace.')
    return None
Starts collecting trace data. Args: self (JLink): the ``JLink`` instance. Returns: ``None``
def attach_files(self, files: Sequence[AttachedFile]):
    """Attach a list of files represented as AttachedFile.

    Content must not have been set yet; switches the content type to
    multipart/form-data.
    """
    assert not self._content, 'content must be empty to attach files.'
    self._attached_files = files
    self.content_type = 'multipart/form-data'
Attach a list of files represented as AttachedFile.
def validate_categories(categories):
    """Take an iterable of source categories and raise ValueError listing
    any that are not in ``Source.categories``."""
    unknown = set(categories) - Source.categories
    if unknown:
        raise ValueError('Invalid categories: %s' % list(unknown))
Take an iterable of source categories and raise ValueError if some of them are invalid.
def crps(self):
    """Calculate the continuous ranked probability score from the stored
    error columns, normalized by thresholds x forecasts."""
    e = self.errors
    total = np.sum(e["F_2"].values - 2.0 * e["F_O"].values + e["O_2"].values)
    return total / (self.thresholds.size * self.num_forecasts)
Calculates the continuous ranked probability score.
def if_not_exists(self):
    """Return a deep copy of this query with the IF NOT EXISTS flag set.

    If the insertion isn't applied at execution time, a LWTException is
    raised. Not usable on tables containing counter columns.
    """
    if self.model._has_counter:
        raise IfNotExistsWithCounterColumn('if_not_exists cannot be used with tables containing counter columns')
    duplicate = copy.deepcopy(self)
    duplicate._if_not_exists = True
    return duplicate
Check the existence of an object before insertion. If the insertion isn't applied, a LWTException is raised.
def blend_palette(colors, n_colors=6, as_cmap=False):
    """Make a palette that blends between a list of colors.

    Parameters
    ----------
    colors : sequence of matplotlib colors
        hex, rgb-tuple, or html color name
    n_colors : int, optional
        number of colors in the palette
    as_cmap : bool, optional
        if True, return a matplotlib colormap instead of sampled colors
    """
    cmap = mpl.colors.LinearSegmentedColormap.from_list(
        "-".join(map(str, colors)), colors)
    if as_cmap:
        return cmap
    return cmap(np.linspace(0, 1, n_colors))
Make a palette that blends between a list of colors. Parameters ---------- colors : sequence of matplotlib colors hex, rgb-tuple, or html color name n_colors : int, optional number of colors in the palette as_cmap : bool, optional if True, return as a matplotlib colormap ins...
def refresh_metrics(self): metrics = self.get_metrics() dbmetrics = ( db.session.query(DruidMetric) .filter(DruidMetric.datasource_id == self.datasource_id) .filter(DruidMetric.metric_name.in_(metrics.keys())) ) dbmetrics = {metric.metric_name: metric ...
Refresh metrics based on the column metadata
def set_monitoring(module): def monitoring(is_monitoring, track_data=None, track_grad=None, track_update=None, track_update_ratio=None): module.is_monitoring = is_monitoring module.track_data = track_data if track_data is no...
Defines the monitoring method on the module.
def load_cufflinks(self, filter_ok=True):
    """Load Cufflinks gene expression data for the cohort as a single
    concatenated DataFrame.

    Parameters
    ----------
    filter_ok : bool, optional
        If true, filter Cufflinks data to rows with FPKM_status == "OK".
    """
    frames = [self._load_single_patient_cufflinks(patient, filter_ok)
              for patient in self]
    return pd.concat(frames, copy=False)
Load a Cufflinks gene expression data for a cohort Parameters ---------- filter_ok : bool, optional If true, filter Cufflinks data to row with FPKM_status == "OK" Returns ------- cufflinks_data : Pandas dataframe Pandas dataframe with Cufflinks d...
def token_urlsafe(nbytes=32):
    """Return a random URL-safe text string, in Base64 encoding.

    Taken and slightly modified from the Python 3.6 stdlib. The string has
    *nbytes* random bytes. If *nbytes* is ``None`` or not supplied, a
    reasonable default (32) is used.

    >>> token_urlsafe(16)  #doctest:+SKIP
    'Drmhze6EPcv0fN_81Bj-nA'
    """
    if nbytes is None:
        # honor the documented contract: None means "use the default"
        # (os.urandom(None) would raise TypeError otherwise)
        nbytes = 32
    tok = os.urandom(nbytes)
    return base64.urlsafe_b64encode(tok).rstrip(b'=').decode('ascii')
Return a random URL-safe text string, in Base64 encoding. This is taken and slightly modified from the Python 3.6 stdlib. The string has *nbytes* random bytes. If *nbytes* is ``None`` or not supplied, a reasonable default is used. >>> token_urlsafe(16) #doctest:+SKIP 'Drmhze6EPcv0fN_81Bj-nA'
def save(self, filename, format=None, **kwargs):
    """Save the object to file given by filename.

    :param filename: destination path.
    :param format: explicit format; inferred from the file extension
        when ``None``.
    """
    if format is None:
        format = format_from_extension(filename)
    # ``file(...)`` was the Python 2 builtin, removed in Python 3;
    # open() is the portable spelling and a context manager either way.
    with open(filename, 'wb') as fp:
        self.save_to_file_object(fp, format, **kwargs)
Save the object to file given by filename.
def load_ipython_extension(ip):
    """Register the custom magics; can be called from a notebook."""
    ip.register_magics(CustomMagics)
    patch = ("IPython.config.cell_magic_highlight['clrmagic'] = "
             "{'reg':[/^%%CS/]};")
    # NOTE(review): this Javascript object is built but never displayed or
    # returned — presumably some IPython hook consumes it; confirm.
    js = display.Javascript(
        data=patch,
        lib=["https://github.com/codemirror/CodeMirror/blob/master/mode/clike/clike.js"])
register magics function, can be called from a notebook
def exam_reliability_by_datetime( datetime_axis, datetime_new_axis, reliable_distance): numeric_datetime_axis = [ totimestamp(a_datetime) for a_datetime in datetime_axis ] numeric_datetime_new_axis = [ totimestamp(a_datetime) for a_datetime in datetime_new_axis ] return exam_...
A datetime-version that takes datetime object list as x_axis reliable_distance equals to the time difference in seconds.
def is_lagging(self, wal_position):
    """Return True when this node's replication lag relative to the last
    known leader operation exceeds ``maximum_lag_on_failover`` and it
    should consider itself unhealthy for promotion.

    :param wal_position: Current wal position.
    """
    last_leader = self.cluster.last_leader_operation or 0
    max_lag = self.patroni.config.get('maximum_lag_on_failover', 0)
    return (last_leader - wal_position) > max_lag
Returns if instance with an wal should consider itself unhealthy to be promoted due to replication lag. :param wal_position: Current wal position. :returns True when node is lagging
def _contains_yieldpoint(children): if isinstance(children, dict): return any(isinstance(i, YieldPoint) for i in children.values()) if isinstance(children, list): return any(isinstance(i, YieldPoint) for i in children) return False
Returns True if ``children`` contains any YieldPoints. ``children`` may be a dict or a list, as used by `MultiYieldPoint` and `multi_future`.
def clear_session(self, response):
    """Reset an invalid session.

    Clears the Flask session and, when Flask-Login is loaded, also
    expires its remember-me cookie. Subclasses can override this method
    to implement a custom session reset.
    """
    session.clear()
    if 'flask_login' not in sys.modules:
        return
    cookie_name = current_app.config.get('REMEMBER_COOKIE', 'remember_token')
    response.set_cookie(cookie_name, '', expires=0, max_age=0)
Clear the session. This method is invoked when the session is found to be invalid. Subclasses can override this method to implement a custom session reset.
async def handle_agent_job_started(self, agent_addr, message: AgentJobStarted):
    """Handle an AgentJobStarted message. Send the data back to the client.

    :param agent_addr: address of the agent that started the job.
    :param message: the AgentJobStarted message carrying the job id.
    """
    # job_id[0] addresses the client; job_id[1] identifies the job there
    self._logger.debug("Job %s %s started on agent %s",
                       message.job_id[0], message.job_id[1], agent_addr)
    await ZMQUtils.send_with_addr(self._client_socket, message.job_id[0],
                                  BackendJobStarted(message.job_id[1]))
Handle an AgentJobStarted message. Send the data back to the client
def get(self, key, failobj=None, exact=0):
    """Return the value for ``key``; ``failobj`` if the key is not found
    or is ambiguous.

    When ``exact`` is falsy, the key is first expanded via ``getfullkey``;
    a KeyError from that expansion (ambiguous/unknown key) yields
    ``failobj``.
    """
    lookup = key
    if not exact:
        try:
            lookup = self.getfullkey(key)
        except KeyError:
            return failobj
    return self.data.get(lookup, failobj)
Returns failobj if key is not found or is ambiguous
def load_tiff(filename, crs=None, apply_transform=False, nan_nodata=False, **kwargs): try: import xarray as xr except: raise ImportError('Loading tiffs requires xarray to be installed') with warnings.catch_warnings(): warnings.filterwarnings('ignore') da = xr.open_rasterio(fi...
Returns an RGB or Image element loaded from a geotiff file. The data is loaded using xarray and rasterio. If a crs attribute is present on the loaded data it will attempt to decode it into a cartopy projection otherwise it will default to a non-geographic HoloViews element. Parameters --------...
def trainModel(model, loader, optimizer, device, criterion=F.nll_loss, batches_in_epoch=sys.maxsize, batch_callback=None, progress_bar=None): model.train() if progress_bar is not None: loader = tqdm(loader, **progress_bar) if batches_in_epoch < len(loader): loader.total =...
Train the given model by iterating through mini batches. An epoch ends after one pass through the training set, or if the number of mini batches exceeds the parameter "batches_in_epoch". :param model: pytorch model to be trained :type model: torch.nn.Module :param loader: train dataset loader :type loader:...
def _get_resource_list(self, rsrc_dict): if 'collections' in rsrc_dict: return rsrc_dict['collections'] if 'experiments' in rsrc_dict: return rsrc_dict['experiments'] if 'channels' in rsrc_dict: return rsrc_dict['channels'] if 'coords' in rsrc_dict: ...
Extracts list of resources from the HTTP response. Args: rsrc_dict (dict): HTTP response encoded in a dictionary. Returns: (list[string]): List of a type of resource (collections, experiments, etc). Raises: (RuntimeError): If rsrc_dict does not contain any ...
def dedent(string, indent_str=' ', max_levels=None): if len(indent_str) == 0: return string lines = string.splitlines() def num_indents(line): max_num = int(np.ceil(len(line) / len(indent_str))) for i in range(max_num): if line.startswith(indent_str): li...
Revert the effect of indentation. Examples -------- Remove a simple one-level indentation: >>> text = '''<->This is line 1. ... <->Next line. ... <->And another one.''' >>> print(text) <->This is line 1. <->Next line. <->And another one. >>> print(dedent(text, '<->')) T...
def get_request_token(self, method='GET', decoder=parse_utf8_qsl, key_token='oauth_token', key_token_secret='oauth_token_secret', **kwargs): r = self.get_raw_request_token(method=met...
Return a request token pair. :param method: A string representation of the HTTP method to be used, defaults to `GET`. :type method: str :param decoder: A function used to parse the Response content. Should return a dictionary. :type decoder: func :param k...
def create(self, resource):
    """Create a new config.

    :param resource: :class:`configs.Config <configs.Config>` object
    :return: :class:`configs.Config <configs.Config>` object
    :rtype: configs.Config
    """
    payload = self.service.encode(self.CREATE_SCHEMA, resource)
    response = self.service.create(self.base, payload)
    return self.service.decode(self.GET_SCHEMA, response)
Create a new config. :param resource: :class:`configs.Config <configs.Config>` object :return: :class:`configs.Config <configs.Config>` object :rtype: configs.Config
def delete_grade(self, grade_id): from dlkit.abstract_osid.id.primitives import Id as ABCId from .objects import Grade collection = JSONClientValidated('grading', collection='GradeSystem', runtime=self._runtime) ...
Deletes a ``Grade``. arg: grade_id (osid.id.Id): the ``Id`` of the ``Grade`` to remove raise: NotFound - ``grade_id`` not found raise: NullArgument - ``grade_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - aut...
def io_surface(timestep, time, fid, fld):
    """Output one surface-file record: "<timestep> <time>" followed by the
    field values in "%10.2e" format, then a newline."""
    fid.write("{} {}".format(timestep, time))
    fid.writelines("%10.2e" % value for value in fld[:])
    fid.writelines(["\n"])
Output for surface files
async def explain(self, *args, analyze=False): query = 'EXPLAIN (FORMAT JSON, VERBOSE' if analyze: query += ', ANALYZE) ' else: query += ') ' query += self._state.query if analyze: tr = self._connection.transaction() await tr.start(...
Return the execution plan of the statement. :param args: Query arguments. :param analyze: If ``True``, the statement will be executed and the run time statitics added to the return value. :return: An object representing the execution plan. This value i...
def put_readme(self, content):
    """Store the readme descriptive metadata under the readme key."""
    logger.debug("Putting readme")
    self.put_text(self.get_readme_key(), content)
Store the readme descriptive metadata.
def request_sid_cookie(self, username, password):
    """Request a cookie carrying a permanent session token.

    NOTE(review): credentials are sent as plain, unescaped query
    parameters — confirm the upstream service's expectations before
    changing this.
    """
    log.debug("Requesting SID cookie")
    target_url = (self._login_url
                  + '?usr={0}&pwd={1}&persist=y'.format(username, password))
    return urlopen(target_url).read()
Request cookie for permanent session token.
def _retrieve_value(self, entity, default=None): return entity._values.get(self._name, default)
Internal helper to retrieve the value for this Property from an entity. This returns None if no value is set, or the default argument if given. For a repeated Property this returns a list if a value is set, otherwise None. No additional transformations are applied.
def transform_rest_response(self, response_body):
    """Translate an apiserving REST response so it's ready to return.

    Only the indentation is fixed (indent=1, sorted keys) so the output
    is consistent with what the live app returns.

    Args:
      response_body: A string containing the backend response.
    Returns:
      A reformatted version of the response JSON.
    """
    parsed = json.loads(response_body)
    return json.dumps(parsed, indent=1, sort_keys=True)
Translates an apiserving REST response so it's ready to return. Currently, the only thing that needs to be fixed here is indentation, so it's consistent with what the live app will return. Args: response_body: A string containing the backend response. Returns: A reformatted version of the...
def override(self, key, value): keys = key.split('.') if len(keys) > 1: if keys[0] != "plugins": raise AttributeError("no such setting: %r" % key) self.plugins.override(keys[1:], value) else: self.overrides[key] = value self._uncach...
Set a setting to the given value. Note that `key` can be in dotted form, eg 'plugins.release_hook.emailer.sender'.
def _serialize_attributes(attributes):
    """Serialize HTML element attributes in a ' name="value"' pair form.

    Attributes with falsy values (None, '', 0) are skipped entirely,
    matching the original behavior.
    """
    parts = []
    for name, value in attributes.items():
        if not value:
            continue
        parts.append(' ' + _unmangle_attribute_name(name)
                     + '="' + escape(value, True) + '"')
    # single join instead of repeated string += (quadratic growth)
    return ''.join(parts)
Serializes HTML element attributes in a name="value" pair form.
def get_series_as_of_date(self, series_id, as_of_date):
    """Get the latest data for a Fred series id as known on a particular
    date.

    Includes any revision on or before ``as_of_date`` and ignores
    revisions after it.

    Parameters
    ----------
    series_id : str
        Fred series id such as 'GDP'
    as_of_date : datetime-like
        cutoff date (coerced with ``pd.to_datetime``)
    """
    cutoff = pd.to_datetime(as_of_date)
    releases = self.get_series_all_releases(series_id)
    return releases[releases['realtime_start'] <= cutoff]
Get latest data for a Fred series id as known on a particular date. This includes any revision to the data series before or on as_of_date, but ignores any revision on dates after as_of_date. Parameters ---------- series_id : str Fred series id such as 'GDP' as_of_dat...
def set_prompt(self, prompt):
    """Set the command-line prompt, prefixing the vehicle name when one
    is configured."""
    if prompt and self.settings.vehicle_name:
        prompt = '{}:{}'.format(self.settings.vehicle_name, prompt)
    self.mpstate.rl.set_prompt(prompt)
set prompt for command line
def _spawn(self, distribution, executor=None, *args, **kwargs): actual_executor = executor or SubprocessExecutor(distribution) return distribution.execute_java_async(*args, executor=actual_executor, **kwargs)
Returns a processhandler to a process executing java. :param Executor executor: the java subprocess executor to use. If not specified, construct using the distribution. :param Distribution distribution: The JDK or JRE installed. :rtype: ProcessHandler
def get_authorization_ids_by_vault(self, vault_id):
    """Get the list of ``Authorization`` ``Ids`` associated with a
    ``Vault``.

    arg: vault_id (osid.id.Id): ``Id`` of a ``Vault``
    return: (osid.id.IdList) - list of related authorization ``Ids``
    """
    ids = [authorization.get_id()
           for authorization in self.get_authorizations_by_vault(vault_id)]
    return IdList(ids)
Gets the list of ``Authorization`` ``Ids`` associated with a ``Vault``. arg: vault_id (osid.id.Id): ``Id`` of a ``Vault`` return: (osid.id.IdList) - list of related authorization ``Ids`` raise: NotFound - ``vault_id`` is not found raise: NullArgument - ``vault_id`` is ``null`` ...
def _populate_route_attributes(self): route_schema = self._validate_stone_cfg() self.api.add_route_schema(route_schema) for namespace in self.api.namespaces.values(): env = self._get_or_create_env(namespace.name) for route in namespace.routes: self._popula...
Converts all routes from forward references to complete definitions.
def remove_server(self, name): for i in self._server_list: if i['key'] == name: try: self._server_list.remove(i) logger.debug("Remove server %s from the list" % name) logger.debug("Updated servers list (%s servers): %s" % ( ...
Remove a server from the dict.
def pad_decr(ids):
    """Strip trailing zero IDs and decrement the remaining ids by 1.

    An empty input is returned as an (empty) list; an all-zero input
    yields [].
    """
    if len(ids) < 1:
        return list(ids)
    if not any(ids):
        return []
    last = len(ids) - 1
    while not ids[last]:
        last -= 1
    return [value - 1 for value in ids[:last + 1]]
Strip ID 0 and decrement ids by 1.
def cublasZtrsm(handle, side, uplo, transa, diag, m, n, alpha, A, lda, B, ldb): status = _libcublas.cublasZtrsm_v2(handle, _CUBLAS_SIDE_MODE[side], _CUBLAS_FILL_MODE[uplo], _CUBLAS_OP[trans], ...
Solve complex triangular system with multiple right-hand sides.
def transfer_state_data(cls, source_entity, target_entity):
    """Transfer instance state data from the given source entity to the
    given target entity."""
    cls.set_state_data(target_entity, cls.get_state_data(source_entity))
Transfers instance state data from the given source entity to the given target entity.
async def helo( self, hostname: str = None, timeout: DefaultNumType = _default ) -> SMTPResponse: if hostname is None: hostname = self.source_address async with self._command_lock: response = await self.execute_command( b"HELO", hostname.encode("ascii"...
Send the SMTP HELO command. Hostname to send for this command defaults to the FQDN of the local host. :raises SMTPHeloError: on unexpected server response code
def get_modified_time(self) -> Optional[datetime.datetime]:
    """Return the time that ``self.absolute_path`` was last modified as a
    naive UTC `~datetime.datetime` (whole-second precision).

    May be overridden in subclasses.

    .. versionadded:: 3.1
    """
    mtime = int(self._stat().st_mtime)
    return datetime.datetime.utcfromtimestamp(mtime)
Returns the time that ``self.absolute_path`` was last modified. May be overridden in subclasses. Should return a `~datetime.datetime` object or None. .. versionadded:: 3.1
def print_dependencies(_run): print('Dependencies:') for dep in _run.experiment_info['dependencies']: pack, _, version = dep.partition('==') print(' {:<20} == {}'.format(pack, version)) print('\nSources:') for source, digest in _run.experiment_info['sources']: print(' {:<43} {...
Print the detected source-files and dependencies.
def file_or_stdin() -> Callable:
    """Return a parser yielding a file descriptor from stdin (for None or
    "-") or from opening the given path."""
    def parse(path):
        if path is None or path == "-":
            return sys.stdin
        return data_io.smart_open(path)
    return parse
Returns a file descriptor from stdin or opening a file from a given path.
async def eap_options(request: web.Request) -> web.Response:
    """GET: return the available configuration options for WPA-EAP.

    Returns a json object (EAP_CONFIG_SHAPE) describing the structure of
    arguments and options for the eap_config arg to /wifi/configure.
    """
    return web.json_response(EAP_CONFIG_SHAPE, status=200)
Get request returns the available configuration options for WPA-EAP. Because the options for connecting to WPA-EAP secured networks are quite complex, to avoid duplicating logic this endpoint returns a json object describing the structure of arguments and options for the eap_config arg to /wifi/configu...
def make_c_args(arg_pairs):
    """Build a C argument-list string from (number, type, name) tuples,
    sorted by number; a pair with an empty name contributes only its
    type."""
    logging.debug(arg_pairs)
    pieces = []
    for _num, arg_type, arg_name in sorted(arg_pairs):
        pieces.append('{} {}'.format(arg_type, arg_name) if arg_name
                      else arg_type)
    return ', '.join(pieces)
Build a C argument list from return type and arguments pairs.
def getcal(self):
    """Retrieve the SDS calibration coefficients.

    Returns a 5-element tuple: (cal, cal_error, offset, offset_err,
    data_type). Raises via ``_checkErr`` when there is no calibration
    record.
    """
    status, cal, cal_error, offset, offset_err, data_type = \
        _C.SDgetcal(self._id)
    _checkErr('getcal', status, 'no calibration record')
    return cal, cal_error, offset, offset_err, data_type
Retrieve the SDS calibration coefficients. Args:: no argument Returns:: 5-element tuple holding: - cal: calibration factor (attribute 'scale_factor') - cal_error : calibration factor error (attribute 'scale_factor_err') - off...
def build_rdn(self): bits = [] for field in self._meta.fields: if field.db_column and field.primary_key: bits.append("%s=%s" % (field.db_column, getattr(self, field.name))) if not len(bits): raise Exception("Could not...
Build the Relative Distinguished Name for this entry.
def send_message(self, message, sign=True):
    """Send the given message to the connection.

    @type message: OmapiMessage
    @param sign: whether the message needs to be signed
    @raises OmapiError:
    @raises socket.error:
    """
    if sign:
        # sign with the default authenticator before serializing
        message.sign(self.authenticators[self.defauth])
    logger.debug("sending %s", LazyStr(message.dump_oneline))
    self.transport.write(message.as_string())
Send the given message to the connection. @type message: OmapiMessage @param sign: whether the message needs to be signed @raises OmapiError: @raises socket.error:
def count(self) -> int:
    """Return the total number of connections across all host pools."""
    return sum(pool.count() for pool in self._host_pools.values())
Return number of connections.
def set_size(self, data_size):
    """Set the data slice size, rejecting sizes whose decimal width
    exceeds the message size-field width (``self.first``)."""
    width = len(str(data_size))
    if width > self.first:
        raise ValueError(
            'Send size is too large for message size-field width!')
    self.data_size = data_size
Set the data slice size.
def deserialize(self, value, **kwargs):
    """Deserialize ``value`` by running every validator against it.

    :return: The (unchanged) deserialized value.
    :raises: :class:`halogen.exception.ValidationError` if value is not
        valid.
    """
    for checker in self.validators:
        checker.validate(value, **kwargs)
    return value
Deserialization of value. :return: Deserialized value. :raises: :class:`halogen.exception.ValidationError` exception if value is not valid.
def libvlc_media_player_has_vout(p_mi):
    """How many video outputs does this media player have?

    @param p_mi: the media player.
    @return: the number of video outputs.
    """
    # resolve the ctypes binding from the cache, creating it on first use
    f = _Cfunctions.get('libvlc_media_player_has_vout', None) or \
        _Cfunction('libvlc_media_player_has_vout', ((1,),), None,
                   ctypes.c_uint, MediaPlayer)
    return f(p_mi)
How many video outputs does this media player have? @param p_mi: the media player. @return: the number of video outputs.
def _FormatDescription(self, event): date_time_string = timelib.Timestamp.CopyToIsoFormat( event.timestamp, timezone=self._output_mediator.timezone) timestamp_description = event.timestamp_desc or 'UNKNOWN' message, _ = self._output_mediator.GetFormattedMessages(event) if message is None: ...
Formats the description. Args: event (EventObject): event. Returns: str: formatted description field.
async def discover_nupnp(websession):
    """Discover bridges via NUPNP.

    Queries URL_NUPNP and returns a Bridge object for each reported
    internal IP address.
    """
    async with websession.get(URL_NUPNP) as res:
        return [Bridge(item['internalipaddress'], websession=websession)
                for item in (await res.json())]
Discover bridges via NUPNP.
def get_file_hash(storage, path): contents = storage.open(path).read() file_hash = hashlib.md5(contents).hexdigest() content_type = mimetypes.guess_type(path)[0] or 'application/octet-stream' if settings.is_gzipped and content_type in settings.gzip_content_types: cache_key = get_cache_key('gzip_...
Create md5 hash from file contents.
def define_code_breakpoint(self, dwProcessId, address, condition = True, action = None): process = self.system.get_process(dwProcessId) bp = CodeBreakpoint(address, condition, action) key = (dwProcessId, bp.get_address()) ...
Creates a disabled code breakpoint at the given address. @see: L{has_code_breakpoint}, L{get_code_breakpoint}, L{enable_code_breakpoint}, L{enable_one_shot_code_breakpoint}, L{disable_code_breakpoint}, L{erase_code_breakpoint} @ty...
def main(cls, args=None): if args is None: args = sys.argv[1:] try: o = cls() o.parseOptions(args) except usage.UsageError as e: print(o.getSynopsis()) print(o.getUsage()) print(str(e)) return 1 except CL...
Fill in command-line arguments from argv
def write_bit(self, b, pack=Struct('B').pack):
    """Write a single bit (packed as one byte) to the output buffer.

    Convenience method for single bit args; returns self for chaining.
    The pre-bound ``pack`` default avoids re-creating the Struct per call.
    """
    self._output_buffer.append(pack(bool(b)))
    return self
Write a single bit. Convenience method for single bit args.
def cumulative_sum(self):
    """Return an SArray holding the running (cumulative) sum of this
    numeric SArray's elements: element i is the sum of elements 0..i.
    """
    # NOTE(review): ``extensions`` is imported but not referenced —
    # presumably needed for registration side effects; confirm before
    # removing.
    from .. import extensions
    # delegate to the builtin cumulative aggregator by operator name
    agg_op = "__builtin__cum_sum__"
    return SArray(_proxy = self.__proxy__.builtin_cumulative_aggregate(agg_op))
Return the cumulative sum of the elements in the SArray. Returns an SArray where each element in the output corresponds to the sum of all the elements preceding and including it. The SArray is expected to be of numeric type (int, float), or a numeric vector type. Returns ------...
def is_uniform(self): pages = self.pages page = pages[0] if page.is_scanimage or page.is_nih: return True try: useframes = pages.useframes pages.useframes = False h = page.hash for i in (1, 7, -1): if pages[i].as...
Return if file contains a uniform series of pages.
def crypto_pwhash_scryptsalsa208sha256_ll(passwd, salt, n, r, p, dklen=64, maxmem=SCRYPT_MAX_MEM): ensure(isinstance(n, integer_types), raising=TypeError) ensure(isinstance(r, integer_types), raising=TypeError) ensure(isinstance(p, integer_type...
Derive a cryptographic key using the ``passwd`` and ``salt`` given as input. The work factor can be tuned by by picking different values for the parameters :param bytes passwd: :param bytes salt: :param bytes salt: *must* be *exactly* :py:const:`.SALTBYTES` long :param int dklen: :para...
def _maybe_extract(compressed_filename, directory, extension=None): logger.info('Extracting {}'.format(compressed_filename)) if extension is None: basename = os.path.basename(compressed_filename) extension = basename.split('.', 1)[1] if 'zip' in extension: with zipfile.ZipFile(compre...
Extract a compressed file to ``directory``. Args: compressed_filename (str): Compressed file. directory (str): Extract to directory. extension (str, optional): Extension of the file; Otherwise, attempts to extract extension from the filename.
def fetch(self): params = values.of({}) payload = self._version.fetch( 'GET', self._uri, params=params, ) return InstalledAddOnExtensionInstance( self._version, payload, installed_add_on_sid=self._solution['installed...
Fetch a InstalledAddOnExtensionInstance :returns: Fetched InstalledAddOnExtensionInstance :rtype: twilio.rest.preview.marketplace.installed_add_on.installed_add_on_extension.InstalledAddOnExtensionInstance
def promote(self, name):
    """Promote to a PartitionName by combining with a bundle Name.

    Entries from ``self.dict`` take precedence over those from
    ``name.dict``.
    """
    combined = dict(list(name.dict.items()) + list(self.dict.items()))
    return PartitionName(**combined)
Promote to a PartitionName by combining with a bundle Name.
def blockgen(blocks, shape):
    """Generate slice tuples (regions of an N-dimensional image) to be
    used by combine.

    :param blocks: a tuple of block sizes
    :param shape: the shape of the n-dimensional array
    :return: an iterator over tuples of slices
    """
    per_axis = [blockgen1d(block, size)
                for block, size in zip(blocks, shape)]
    return product(*per_axis)
Generate a list of slice tuples to be used by combine. The tuples represent regions in an N-dimensional image. :param blocks: a tuple of block sizes :param shape: the shape of the n-dimensional array :return: an iterator to the list of tuples of slices Example: >>> blocks = (500, 512) ...
def insert_query_m(data, table, conn, columns=None, db_type='mysql'): if len(data) > 10000: _chunk_query(data, 10000, columns, conn, table, db_type) else: if db_type == 'sqlite': type_sign = '?' else: type_sign = '%s' type_com = type_sign + ", " ty...
Insert python list of tuples into SQL table Args: data (list): List of tuples table (str): Name of database table conn (connection object): database connection object columns (str): String of column names to use if not assigned then all columns are presumed to be used [Optional] ...
async def exchange_declare(self):
    """Declare the exchange on the channel using the configured name,
    type and flags. Override this method to change how an exchange is
    declared."""
    await self.channel.exchange_declare(
        self.exchange,
        self.exchange_type,
        durable=self.durable,
        auto_delete=self.auto_delete,
        no_wait=self.no_wait,
    )
Override this method to change how a exchange is declared
def _closure_createlink(self): linkparents = self._closure_model.objects.filter( child__pk=self._closure_parent_pk ).values("parent", "depth") linkchildren = self._closure_model.objects.filter( parent__pk=self.pk ).values("child", "depth") newlinks = [self...
Create a link in the closure tree.
def get_path_list(args, nni_config, trial_content, temp_nni_path): path_list, host_list = parse_log_path(args, trial_content) platform = nni_config.get_config('experimentConfig').get('trainingServicePlatform') if platform == 'local': print_normal('Log path: %s' % ' '.join(path_list)) return ...
get path list according to different platform
def or_where_pivot(self, column, operator=None, value=None):
    """Set an OR where clause for a pivot table column by delegating to
    ``where_pivot`` with the "or" boolean.

    :param column: The column of the where clause (str or Builder)
    :param operator: The operator of the where clause
    :param value: The value of the where clause
    """
    return self.where_pivot(column, operator, value, "or")
Set an or where clause for a pivot table column. :param column: The column of the where clause, can also be a QueryBuilder instance for sub where :type column: str|Builder :param operator: The operator of the where clause :type operator: str :param value: The value of the wher...
def getbr(self, name):
    """Return the bridge object with the given name.

    Raises BridgeException when no such bridge exists.
    """
    for bridge in self.showall():
        if bridge.name == name:
            return bridge
    raise BridgeException("Bridge does not exist.")
Return a bridge object.
def delete_series(self, database=None, measurement=None, tags=None): database = database or self._database query_str = 'DROP SERIES' if measurement: query_str += ' FROM {0}'.format(quote_ident(measurement)) if tags: tag_eq_list = ["{0}={1}".format(quote_ident(k), ...
Delete series from a database. Series must be filtered by either measurement and tags. This method cannot be used to delete all series, use `drop_database` instead. :param database: the database from which the series should be deleted, defaults to client's current database ...
def get_exception_message(instance):
    """Try to get the exception message, falling back to the class name
    when the exception carries no args."""
    if getattr(instance, 'args', None):
        return str(instance)
    try:
        return type(instance).__name__
    except AttributeError:
        return str(instance)
Try to get the exception message or the class name.
def create_section( aggregation_summary, analysis_layer, postprocessor_fields, section_header, use_aggregation=True, units_label=None, use_rounding=True, extra_component_args=None): if use_aggregation: return create_section_with_aggregation( aggreg...
Create demographic section context. :param aggregation_summary: Aggregation summary :type aggregation_summary: qgis.core.QgsVectorlayer :param analysis_layer: Analysis layer :type analysis_layer: qgis.core.QgsVectorLayer :param postprocessor_fields: Postprocessor fields to extract :type postp...
def _check_image(self, image_nD): self.input_image = load_image_from_disk(image_nD) if len(self.input_image.shape) < 3: raise ValueError('Input image must be atleast 3D') if np.count_nonzero(self.input_image) == 0: raise ValueError('Input image is completely filled with z...
Sanity checks on the image data
def setPoint(self, targetTemp):
    """Initialize the setpoint of the PID, resetting the integrator and
    derivator state."""
    self.targetTemp = targetTemp
    self.Derivator = 0
    self.Integrator = 0
Initilize the setpoint of PID.
def make_retry_state(previous_attempt_number, delay_since_first_attempt, last_result=None): required_parameter_unset = (previous_attempt_number is _unset or delay_since_first_attempt is _unset) if required_parameter_unset: raise _make_unset_exception(...
Construct RetryCallState for given attempt number & delay. Only used in testing and thus is extra careful about timestamp arithmetics.
def metarate(self, func, name='values'):
    """Attach ``self.values`` to ``func`` under attribute ``name`` and
    return the function object."""
    setattr(func, name, self.values)
    return func
Set the values object to the function object's namespace
def tree2array(tree, branches=None, selection=None, object_selection=None, start=None, stop=None, step=None, include_weight=False, weight_name='weight', cache_size=-1): import ROOT ...
Convert a tree into a numpy structured array. Convert branches of strings and basic types such as bool, int, float, double, etc. as well as variable-length and fixed-length multidimensional arrays and 1D or 2D vectors of basic types and strings. ``tree2array`` can also create columns in the output arra...
def load_report(identifier=None):
    """Return the ProfilerSession referred to by ``identifier`` (loaded
    from '<report_dir>/<identifier>.pyireport')."""
    filename = identifier + '.pyireport'
    return ProfilerSession.load(os.path.join(report_dir(), filename))
Returns the session referred to by identifier
def getLibraryFiles(self, engineRoot, delimiter=' '):
    """Return this library's file list resolved against ``engineRoot``
    and joined using the specified delimiter."""
    resolved = self.resolveRoot(self.libs, engineRoot)
    return delimiter.join(resolved)
Returns the list of library files for this library, joined using the specified delimiter