code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def resources_gc_prefix(options, policy_config, policy_collection): policy_regions = {} for p in policy_collection: if p.execution_mode == 'poll': continue policy_regions.setdefault(p.options.region, []).append(p) regions = get_gc_regions(options.regions) for r in regions: ...
Garbage collect old custodian policies based on prefix. We attempt to introspect to find the event sources for a policy but without the old configuration this is implicit.
def sample_grid(self, count=None, step=None): if (count is not None and step is not None): raise ValueError('only step OR count can be specified!') bounds = np.array([-self.primitive.extents, self.primitive.extents]) * .5 if step is not None...
Return a 3D grid which is contained by the box. Samples are either 'step' distance apart, or there are 'count' samples per box side. Parameters ----------- count : int or (3,) int If specified samples are spaced with np.linspace step : float or (3,) float ...
def setup(self, interval): self.trace_counter = 0 self._halt = False self.interval = interval
Prepares the tests for execution; the interval is given in milliseconds.
def format_energy_results(energy): if not energy: return {} result = {} cpuenergy = Decimal(0) for pkg, domains in energy.items(): for domain, value in domains.items(): if domain == DOMAIN_PACKAGE: cpuenergy += value result['cpuenergy-pkg{}'.fo...
Take the result of an energy measurement and return a flat dictionary that contains all values.
def get_user_info(self):
    """Return an ``Info`` object describing the logged-in user.

    Raises an HTTP error (via ``raise_for_status``) on a failed request.
    """
    url = urljoin(self.base_url, '/api/mobile/v0.5/my_user_info')
    response = self.requester.get(url)
    response.raise_for_status()
    return Info(response.json())
Returns a UserInfo object for the logged in user. Returns: UserInfo: object representing the student current grades
def matches_querytime(instance, querytime): if not querytime.active: return True if not querytime.time: return instance.version_end_date is None return (instance.version_start_date <= querytime.time and (instance.version_end_date is None or ...
Checks whether the given instance satisfies the given QueryTime object. :param instance: an instance of Versionable :param querytime: QueryTime value to check against
def resource(resource_id):
    """Show a resource, or stream the raw file when ``?raw`` is present."""
    obj = app.db.resource(resource_id)
    if 'raw' not in request.args:
        return render_template('resource.html', resource=obj)
    # os.path.split gives (dirname, basename) in one call.
    directory, filename = os.path.split(obj.path)
    return send_from_directory(directory, filename)
Show a resource.
def query( self, query, job_config=None, job_id=None, job_id_prefix=None, location=None, project=None, retry=DEFAULT_RETRY, ): job_id = _make_job_id(job_id, job_id_prefix) if project is None: project = self.project i...
Run a SQL query. See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query Arguments: query (str): SQL query to be executed. Defaults to the standard SQL dialect. Use the ``job_config`` parameter to change dialects. ...
def check_input_sample(input_sample, num_params, num_samples): assert type(input_sample) == np.ndarray, \ "Input sample is not an numpy array" assert input_sample.shape[0] == (num_params + 1) * num_samples, \ "Input sample does not match number of parameters or groups" as...
Check the `input_sample` is valid Checks input sample is: - the correct size - values between 0 and 1 Arguments --------- input_sample : numpy.ndarray num_params : int num_samples : int
async def do_upload(context, files): status = 0 try: await upload_artifacts(context, files) except ScriptWorkerException as e: status = worst_level(status, e.exit_code) log.error("Hit ScriptWorkerException: {}".format(e)) except aiohttp.ClientError as e: status = worst_le...
Upload artifacts and return status. Returns the integer status of the upload. args: context (scriptworker.context.Context): the scriptworker context. files (list of str): list of files to be uploaded as artifacts Raises: Exception: on unexpected exception. Returns: in...
def edit(args): tap = DbTap.find(args.id) options = {} if not args.name is None: options["db_name"]=args.name if args.host is not None: options["db_host"]=args.host if args.user is not None: options["db_user"]=args.user if args.password...
Carefully setup a dict
def simple_profile(self, sex=None): SEX = ["F", "M"] if sex not in SEX: sex = self.random_element(SEX) if sex == 'F': name = self.generator.name_female() elif sex == 'M': name = self.generator.name_male() return { "username": self.g...
Generates a basic profile with personal information
def search(self):
    """Extend the parent search with a should-match filter on this topic's tags."""
    s = super(TopicSearchMixin, self).search()
    # One `term` clause per tag; `should` means any tag match qualifies.
    s = s.filter('bool', should=[
        Q('term', tags=tag) for tag in self.topic.tags
    ])
    return s
Override search to match on topic tags
def is_color_supported(): "Find out if your terminal environment supports color." if not hasattr(sys.stdout, 'isatty'): return False if not sys.stdout.isatty() and 'TERMINAL-COLOR' not in os.environ: return False if sys.platform == 'win32': try: import colorama ...
Find out if your terminal environment supports color.
def _call(self, method, url, params, uploads): try: data = self._request(method, url, params, uploads) except Exception, e: self._failed_cb(e) else: self._completed_cb(data)
Initiate a request to the server and handle the outcome.
def _build_query_params(self, headers_only=False, page_size=None): params = {"name": self._project_path, "filter_": self.filter} params["interval"] = types.TimeInterval() params["interval"].end_time.FromDatetime(self._end_time) if self._start_time: params["interval"].start_ti...
Return key-value pairs for the list_time_series API call. :type headers_only: bool :param headers_only: Whether to omit the point data from the :class:`~google.cloud.monitoring_v3.types.TimeSeries` objects. :type page_size: int :param page_size: (O...
def login_server(self):
    """Open an interactive SSH session to the configured host."""
    command = 'ssh -i {0} {1}@{2}'.format(
        env.key_filename, env.user, env.host_string)
    local(command)
Login to server
def update_object(self, form, obj): field_name = form.cleaned_data['name'] value = form.cleaned_data['value'] setattr(obj, field_name, value) save_kwargs = {} if CAN_UPDATE_FIELDS: save_kwargs['update_fields'] = [field_name] obj.save(**save_kwargs) dat...
Saves the new value to the target object.
def _find_solo_consonant(self, letters: List[str]) -> List[int]: solos = [] for idx, letter in enumerate(letters): if len(letter) == 1 and self._contains_consonants(letter): solos.append(idx) return solos
Find the positions of any solo consonants that are not yet paired with a vowel.
def settings_view_decorator(function): dec = settings.CLOUD_BROWSER_VIEW_DECORATOR if isinstance(dec, str): mod_str, _, dec_str = dec.rpartition('.') if not (mod_str and dec_str): raise ImportError("Unable to import module: %s" % mod_str) mod = import_module(mod_str) ...
Insert decorator from settings, if any. .. note:: Decorator in ``CLOUD_BROWSER_VIEW_DECORATOR`` can be either a callable or a fully-qualified string path (the latter, which we'll lazy import).
def write_ensemble(ensemble, options):
    """Write the ensemble composition at the current size to a CSV file.

    The file is named ``<outname>_<size>_queries.csv`` and written to the
    current working directory.

    :param ensemble: iterable of query names making up the ensemble
    :param options: namespace providing ``outname``
    """
    size = len(ensemble)
    filename = '%s_%s_queries.csv' % (options.outname, size)
    filepath = os.path.join(os.getcwd(), filename)
    # `with` guarantees the handle is closed even if the write fails
    # (the original's bare open/close leaked on error).
    with open(filepath, 'w') as f:
        f.write(', '.join(ensemble))
Prints out the ensemble composition at each size
def disassociate_public_ip(self, public_ip_id): floating_ip = self.client.floating_ips.get(public_ip_id) floating_ip = floating_ip.to_dict() instance_id = floating_ip.get('instance_id') address = floating_ip.get('ip') self.client.servers.remove_floating_ip(instance_id, address) ...
Disassociate an external IP
def split_into_batches(input_list, batch_size, batch_storage_dir, checkpoint=False): if checkpoint and not os.path.exists(batch_storage_dir): os.mkdir(batch_storage_dir) batches = [ { 'index': batch_index, 'data': input_list[start_index:start_index + batch_size], ...
Break the input data into smaller batches, optionally saving each one to disk. Args: input_list: An input object that has a list-like interface (indexing and slicing). batch_size: The maximum number of input items in each batch. batch_storage_dir: The directory to save the checkpoints to. ...
def decode_wireformat_uuid(rawguid): if isinstance(rawguid, list): rawguid = bytearray(rawguid) lebytes = struct.unpack_from('<IHH', buffer(rawguid[:8])) bebytes = struct.unpack_from('>HHI', buffer(rawguid[8:])) return '{0:08X}-{1:04X}-{2:04X}-{3:04X}-{4:04X}{5:08X}'.format( lebytes[0], ...
Decode a wire format UUID It handles the rather particular scheme where half is little endian and half is big endian. It returns a string like dmidecode would output.
def as_list(self):
    """Return ``[name, value, children]`` with each child recursively listed.

    Bug fix: the original wrote ``[x.as_list for x in self.children]``,
    collecting bound *method objects* instead of the children's nested
    lists — the method must be called.
    """
    return [self.name, self.value, [x.as_list() for x in self.children]]
Return all child objects in nested lists of strings.
def add_camera_make_model(self, make, model):
    """Record the camera make and model in the 0th EXIF IFD."""
    zeroth_ifd = self._ef['0th']
    zeroth_ifd[piexif.ImageIFD.Make] = make
    zeroth_ifd[piexif.ImageIFD.Model] = model
Add camera make and model.
def todo(self):
    """Scan the migration directory and return pending migration names, sorted.

    Creates the directory if it does not exist yet (yielding an empty scan).
    Each matching filename has its '.py' suffix stripped.
    """
    if not os.path.exists(self.migrate_dir):
        # `Logger.warn` is a deprecated alias for `warning`.
        self.logger.warning('Migration directory: %s does not exist.',
                            self.migrate_dir)
        os.makedirs(self.migrate_dir)
    return sorted(f[:-3] for f in os.listdir(self.migrate_dir)
                  if self.filemask.match(f))
Scan migrations in file system.
def searchAccount(searchTerm, book):
    """Print every account whose full name contains ``searchTerm`` (case-insensitive)."""
    print("Search results:\n")
    needle = searchTerm.lower()
    matches = [a.fullname for a in book.accounts
               if needle in a.fullname.lower()]
    for fullname in matches:
        print(fullname)
    if not matches:
        print("Search term not found in account names.")
Searches through account names
def handle_sketch_name(msg):
    """Store an internal sketch-name message on its sensor and alert the gateway."""
    gateway = msg.gateway
    if gateway.is_sensor(msg.node_id):
        gateway.sensors[msg.node_id].sketch_name = msg.payload
        gateway.alert(msg)
    return None
Process an internal sketch name message.
def do_GET(self): if self.path.startswith(self.serve_url): from_key = self.path[len(self.serve_url):] val_res = self.decrypt_yubikey_otp(from_key) self.send_response(200) self.send_header('Content-type', 'text/html') self.end_headers() self...
Handle a HTTP GET request.
def _assert_all_loadable_terms_specialized_to(self, domain):
    """Assert every LoadableTerm in the graph has been specialized to ``domain``."""
    loadable_terms = (t for t in self.graph.node if isinstance(t, LoadableTerm))
    for term in loadable_terms:
        assert term.domain is domain
Make sure that we've specialized all loadable terms in the graph.
def api_call(self, opts, args=None, body=None, **kwargs):
    """Build the versioned API path from ``opts`` and issue the request."""
    endpoint = opts['name'] % args if args else opts['name']
    return self._request(
        opts['method'],
        path='/api/v1%s' % endpoint,
        payload=body,
        **kwargs)
Setup the request
def issuperset(self, other):
    """Report whether this set contains every element of ``other``."""
    if len(other) > len(self):
        # A smaller collection can never contain the larger one.
        return False
    for item in other:
        if item not in self:
            return False
    return True
Report whether this set contains another set. Example: >>> OrderedSet([1, 2]).issuperset([1, 2, 3]) False >>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3}) True >>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3}) False
def _format_issue_url(self): query = urlencode({ 'title': self._format_issue_title(), 'body': self._format_issue_body(), }) return self.REPO_URL + self.ISSUE_SUFFIX + '?' + query
Format full issue URL.
def convert_units(values, source_measure_or_unit_abbreviation, target_measure_or_unit_abbreviation,**kwargs): if numpy.isscalar(values): values = [values] float_values = [float(value) for value in values] values_to_return = convert(float_values, source_measure_or_unit_abbreviation, target_measure_or...
Convert a value from one unit to another one. Example:: >>> cli = PluginLib.connect() >>> cli.service.convert_units(20.0, 'm', 'km') 0.02 Parameters: values: single measure or an array of measures source_measure_or_unit_abbreviation: A measur...
def recompute(self, quiet=False, **kwargs): if not self.computed: if not (hasattr(self, "_x") and hasattr(self, "_yerr2")): raise RuntimeError("You need to compute the model first") try: self.compute(self._x, np.sqrt(self._yerr2), **kwargs) exc...
Re-compute a previously computed model. You might want to do this if the kernel parameters change and the kernel is labeled as ``dirty``. :param quiet: (optional) If ``True``, return false when the computation fails. Otherwise, throw an error if something goes wrong. (default: `...
def redirect(location=None, internal=False, code=None, headers={}, add_slash=False, request=None): request = request or state.request if add_slash: if location is None: split_url = list(urlparse.urlsplit(request.url)) new_proto = request.environ.get( ...
Perform a redirect, either internal or external. An internal redirect performs the redirect server-side, while the external redirect utilizes an HTTP 302 status code. :param location: The HTTP location to redirect to. :param internal: A boolean indicating whether the redirect should be ...
def notifications(self):
    """Return the Notifications endpoint wrapper for the current user."""
    params = {"f": "json"}  # unused in the original; kept for behavior parity
    return Notifications(url="%s/notifications" % self.root,
                         securityHandler=self._securityHandler,
                         proxy_url=self._proxy_url,
                         proxy_port=self._proxy_port)
The notifications that are available for the given user. Notifications are events that need the user's attention-application for joining a group administered by the user, acceptance of a group membership application, and so on. A notification is initially marked as new. The user can mark...
def insert_bytes(fobj, size, offset, BUFFER_SIZE=2 ** 16): if size < 0 or offset < 0: raise ValueError fobj.seek(0, 2) filesize = fobj.tell() movesize = filesize - offset if movesize < 0: raise ValueError resize_file(fobj, size, BUFFER_SIZE) if mmap is not None: try: ...
Insert size bytes of empty space starting at offset. fobj must be an open file object, open rb+ or equivalent. Mutagen tries to use mmap to resize the file, but falls back to a significantly slower method if mmap fails. Args: fobj (fileobj) size (int): The amount of space to insert ...
def _setFlags(self): self.atEnd = not self.gametree.variations and (self.index + 1 == len(self.gametree)) self.atStart = not self.stack and (self.index == 0)
Sets up the flags 'self.atEnd' and 'self.atStart'.
def add_tree(self, tree, parent=None): if tree.path in self.path_db: self.remove_tree_by_path(tree.path) for index in tree.indexes: if not getattr(tree, index): continue self._add_to( getattr(self, index + "_db"), getatt...
Add `tree` into database. Args: tree (obj): :class:`.Tree` instance. parent (ref, default None): Reference to parent tree. This is used for all sub-trees in recursive call.
def update(self, num): num = float(num) self.count += 1 self.low = min(self.low, num) self.high = max(self.high, num) delta = num - self.mean self.mean = self.mean + delta / self.count delta2 = num - self.mean self._rolling_variance = self._rolling_varianc...
Update metrics with the new number.
def _get_deps(self, tree, include_punct, representation, universal): if universal: converter = self.universal_converter if self.universal_converter == self.converter: import warnings warnings.warn("This jar doesn't support universal " ...
Get a list of dependencies from a Stanford Tree for a specific Stanford Dependencies representation.
def _on_closed(self):
    """Invoked when the Redis connection is closed."""
    LOGGER.error('Redis connection closed')
    self.connected = False
    self._on_close()
    # Drop the stream reference so it can be garbage collected.
    self._stream = None
Invoked when the connection is closed
def reset(self):
    """Reset this Layout and every Widget it contains."""
    self.update_widgets()
    for widgets in self._columns:
        for widget in widgets:
            widget.reset()
            widget.blur()
    # No widget has focus until we search forward for the next focusable one.
    self._live_widget = -1
    self._find_next_widget(1)
Reset this Layout and the Widgets it contains.
def on_nick(self, connection, event): old_nickname = self.get_nickname(event) old_color = self.nicknames.pop(old_nickname) new_nickname = event.target() message = "is now known as %s" % new_nickname self.namespace.emit("message", old_nickname, message, old_color) new_colo...
Someone changed their nickname - send the nicknames list to the WebSocket.
def iters(cls, batch_size=32, device=0, root='.data', vectors=None, **kwargs): TEXT = data.Field() LABEL = data.Field(sequential=False) train, val, test = cls.splits(TEXT, LABEL, root=root, **kwargs) TEXT.build_vocab(train, vectors=vectors) LABEL.build_vocab(train) return...
Create iterator objects for splits of the SST dataset. Arguments: batch_size: Batch_size device: Device to create batches on. Use - 1 for CPU and None for the currently active GPU device. root: The root directory that the dataset's zip archive will be ...
def a2bits_list(chars: str, encoding: str = "UTF-8") -> List[str]:
    """Convert each character of ``chars`` to a zero-padded binary string.

    The pad width is taken from the ENCODINGS table for ``encoding``.
    """
    width = ENCODINGS[encoding]
    return [format(ord(ch), "b").zfill(width) for ch in chars]
Convert a string to its bits representation as a list of 0's and 1's. >>> a2bits_list("Hello World!") ['01001000', '01100101', '01101100', '01101100', '01101111', '00100000', '01010111', '01101111', '01110010', '01101100', '01100100', '00100001'] >>> "".join(a2b...
def tar_extract(cls, tar_comp_file_path): try: with contextlib.closing(tarfile.open(tar_comp_file_path)) as tar: tar.extractall() except tarfile.ReadError as e: message_format = ( 'Extract failed: ' 'tar_comp_file_path: {0}, reason:...
Extract tar.gz or tar bz2 file. It behaves like - tar xzf tar_gz_file_path - tar xjf tar_bz2_file_path It raises tarfile.ReadError if the file is broken.
def name(self, name):
    """Set the member name; raise if IDA rejects it (e.g. the name is in use)."""
    if not idaapi.set_enum_member_name(self.cid, name):
        raise exceptions.CantRenameEnumMember(
            "Failed renaming {!r} to {!r}. Does the name exist somewhere else?".format(self.name, name))
Set the member name. Note that a member name cannot appear in other enums, or generally anywhere else in the IDB.
def profile_cancel(self, query_id, timeout=10):
    """Cancel the query identified by ``query_id``.

    :param query_id: UUID Drill assigned to the query.
    :param timeout: request timeout in seconds.
    :return: pydrill.client.Result
    """
    request = {
        'method': 'GET',
        'url': '/profiles/cancel/{0}'.format(query_id),
        'params': {'request_timeout': timeout},
    }
    return Result(*self.perform_request(**request))
Cancel the query that has the given queryid. :param query_id: The UUID of the query in standard UUID format that Drill assigns to each query. :param timeout: int :return: pydrill.client.Result
def statuses(self):
    """Fetch all status Resources from the server.

    :rtype: List[Status]
    """
    return [Status(self._options, self._session, raw)
            for raw in self._get_json('status')]
Get a list of status Resources from the server. :rtype: List[Status]
def _factory(slice_, axis, weighted):
    """Return the PairwiseSignificance subclass matching the slice's row dimension."""
    mr_rows = slice_.dim_types[0] == DT.MR_SUBVAR
    cls = _MrXCatPairwiseSignificance if mr_rows else _CatXCatPairwiseSignificance
    return cls(slice_, axis, weighted)
return subclass for PairwiseSignificance, based on slice dimension types.
def load_from_file(cls, filename_prefix):
    """Build an instance from the subword list stored in the backing file."""
    lines, _ = cls._read_lines_from_file(cls._filename(filename_prefix))
    # Strip one delimiter character from each end of every stored line
    # (format defined by the corresponding writer).
    return cls(vocab_list=[line[1:-1] for line in lines])
Extracts list of subwords from file.
def extract_bzip2 (archive, compression, cmd, verbosity, interactive, outdir): targetname = util.get_single_outfile(outdir, archive) try: with bz2.BZ2File(archive) as bz2file: with open(targetname, 'wb') as targetfile: data = bz2file.read(READ_SIZE_BYTES) whil...
Extract a BZIP2 archive with the bz2 Python module.
def info(self, name, description, labelnames=None, labelvalues=None, **labels): if labels and labelnames: raise ValueError( 'Cannot have labels defined as `dict` ' 'and collections of names and values' ) if labelnames is None and labels: ...
Report any information as a Prometheus metric. This will create a `Gauge` with the initial value of 1. The easiest way to use it is: metrics = PrometheusMetrics(app) metrics.info( 'app_info', 'Application info', version='1.0', major=1, minor=0 ...
def create_system(self, **system_options): if self.master is None: raise ValueError('Handler {} is not able to create systems.'.format(self)) if isinstance(self.master, ForceField): system = self.master.createSystem(self.topology, **system_options) elif isinstance(self.ma...
Create an OpenMM system for every supported topology file with given system options
def delete_events( self, project_name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): if "delete_events" not in self._inner_api_calls: self._inner_api_calls[ "delete_events"...
Deletes all error events of a given project. Example: >>> from google.cloud import errorreporting_v1beta1 >>> >>> client = errorreporting_v1beta1.ErrorStatsServiceClient() >>> >>> project_name = client.project_path('[PROJECT]') >>> ...
def restrict_to_dtype(dtype, message_template): def processor(term_method, _, term_instance): term_dtype = term_instance.dtype if term_dtype != dtype: raise TypeError( message_template.format( method_name=term_method.__name__, expec...
A factory for decorators that restrict Term methods to only be callable on Terms with a specific dtype. This is conceptually similar to zipline.utils.input_validation.expect_dtypes, but provides more flexibility for providing error messages that are specifically targeting Term methods. Parameters ...
def getRequest(self): ars = self.getLinkedRequests() if len(ars) > 1: ar_ids = ", ".join(map(api.get_id, ars)) logger.info("Attachment assigned to more than one AR: [{}]. " "The first AR will be returned".format(ar_ids)) if len(ars) >= 1: ...
Return the primary AR this attachment is linked to
def get_by_id(self, id_networkv6):
    """Fetch an IPv6 network by its ID.

    :param id_networkv6: ID of the NetworkIPv6
    :return: IPv6 network data

    NOTE(review): the path segment says 'networkv4' even though this is the
    IPv6 client — looks like a copy/paste slip; confirm against the API
    routes before changing, since callers may depend on current behavior.
    """
    uri = 'api/networkv4/%s/' % id_networkv6
    return super(ApiNetworkIPv6, self).get(uri)
Get IPv6 network :param id_networkv4: ID for NetworkIPv6 :return: IPv6 Network
def keep_tc_pos(func): @functools.wraps(func) def wrapper(editor, *args, **kwds): sb = editor.verticalScrollBar() spos = sb.sliderPosition() pos = editor.textCursor().position() retval = func(editor, *args, **kwds) text_cursor = editor.textCursor() text_cursor.set...
Cache text cursor position and restore it when the wrapped function exits. This decorator can only be used on modes or panels. :param func: wrapped function
def optimize(exp_rets, covs): _cov_inv = np.linalg.inv(covs) _u = np.ones((len(exp_rets))) _u_cov_inv = _u.dot(_cov_inv) _rets_cov_inv = exp_rets.dot(_cov_inv) _m = np.empty((2, 2)) _m[0, 0] = _rets_cov_inv.dot(exp_rets) _m[0, 1] = _u_cov_inv.dot(exp_rets) _m[1, 0] = _rets_cov_in...
Return parameters for portfolio optimization. Parameters ---------- exp_rets : ndarray Vector of expected returns for each investment.. covs : ndarray Covariance matrix for the given investments. Returns --------- a : ndarray The first vector (to be combined with ta...
def _serialize(self, uri, node): meta = self._decode_meta(node['meta'], is_published=bool(node['is_published'])) return { 'uri': uri.clone(ext=node['plugin'], version=node['version']), 'content': node['content'], 'meta': meta }
Serialize node result as dict
def cache_key(self, request, method=None):
    """Build the cache key from the absolute URI and the request method."""
    effective_method = method if method is not None else request.method
    return "bettercache_page:%s:%s" % (request.build_absolute_uri(),
                                       effective_method)
the cache key is the absolute uri and the request method
def image_server_response(self, api_version=None): headers = dict(self.headers) if (api_version < '1.1'): headers['Content-Type'] = 'text/xml' response = self.as_xml() else: headers['Content-Type'] = 'text/plain' response = self.as_txt() re...
Response, code and headers for image server error response. api_version selects the format (XML of 1.0). The return value is a tuple of response - body of HTTP response status - the HTTP status code headers - a dict of HTTP headers which will include the Content-Type ...
def get_plugins(modules, classobj):
    """Yield every plugin class of type ``classobj`` found in ``modules``."""
    for module in modules:
        yield from get_module_plugins(module, classobj)
Find all class objects in all modules. @param modules: the modules to search @ptype modules: iterator of modules @return: found classes @rtype: iterator of class objects
def _get_partial(name, partials_dict, partials_path, partials_ext): try: return partials_dict[name] except KeyError: try: path_ext = ('.' + partials_ext if partials_ext else '') path = partials_path + '/' + name + path_ext with io.open(path, 'r', encoding='utf...
Load a partial
def start(vm_name, call=None): if call != 'action': raise SaltCloudSystemExit( 'The start action must be called with -a or --action.' ) conn = get_conn() __utils__['cloud.fire_event']( 'event', 'start instance', 'salt/cloud/{0}/starting'.format(vm_name), ...
Call GCE 'start on the instance. .. versionadded:: 2017.7.0 CLI Example: .. code-block:: bash salt-cloud -a start myinstance
def to_arrow_schema(schema):
    """Convert a Spark schema to the equivalent Arrow schema."""
    import pyarrow as pa
    arrow_fields = []
    for field in schema:
        arrow_fields.append(pa.field(field.name,
                                     to_arrow_type(field.dataType),
                                     nullable=field.nullable))
    return pa.schema(arrow_fields)
Convert a schema from Spark to Arrow
def detect_django_settings(): matches = [] for root, dirnames, filenames in os.walk(os.getcwd()): for filename in fnmatch.filter(filenames, '*settings.py'): full = os.path.join(root, filename) if 'site-packages' in full: continue full = os.path.join(ro...
Automatically try to discover Django settings files, return them as relative module paths.
def passive_aggressive_train(self): self._clf = PassiveAggressiveClassifier(n_iter=50, C=0.2, n_jobs=-1, random_state=0) self._clf.fit(self._term_doc_matrix._X, self._term_doc_matrix._y) y_dist = self._clf.decision_function(self._term_doc_matrix._X) pos_ecdf = ECDF(y_dist[y_dist >= 0]) neg_ecdf = ECDF(y_dist[...
Trains passive aggressive classifier
def compile_less(input_file, output_file): from .modules import less if not isinstance(input_file, str): raise RuntimeError('LESS compiler takes only a single input file.') return { 'dependencies_fn': less.less_dependencies, 'compiler_fn': less.less_compile, 'input': input_fi...
Compile a LESS source file. Minifies the output in release mode.
def CMN(self, params): Ra, Rb = self.get_two_parameters(self.TWO_PARAMETER_COMMA_SEPARATED, params) self.check_arguments(low_registers=(Ra, Rb)) def CMN_func(): self.set_NZCV_flags(self.register[Ra], self.register[Rb], self.register[Ra] + self.register...
CMN Ra, Rb Add the two registers and set the NZCV flags The result is discarded Ra and Rb must be low registers
def create_book(self, name): name = name.strip() if not len(name): self.error("Cannot have a blank book name") if name.find(",") >= 0: self.error("Cannot have a ',' in a book name") existing = self.list_books() nexisting = len(existing) if name in ...
Create a new book
def Decorate(cls, class_name, member, parent_member): if isinstance(member, property): fget = cls.DecorateMethod(class_name, member.fget, parent_member) fset = None if member.fset: fset = cls.DecorateMethod(class_name, member.fset, parent_member) fdel = None if member.fdel: ...
Decorates a member with @typecheck. Inherit checks from parent member.
def get_score_system_id(self):
    """Return the grade-system ``Id`` for the score.

    :raise IllegalState: when no score system is set (``is_scored()`` false).
    """
    raw_id = self._my_map['scoreSystemId']
    if not bool(raw_id):
        raise errors.IllegalState('this AssessmentOffered has no score_system')
    return Id(raw_id)
Gets the grade system ``Id`` for the score. return: (osid.id.Id) - the grade system ``Id`` raise: IllegalState - ``is_scored()`` is ``false`` *compliance: mandatory -- This method must be implemented.*
def read_only_s3_bucket_policy_statements(buckets, folder="*"): list_buckets = [s3_arn(b) for b in buckets] object_buckets = [s3_objects_arn(b, folder) for b in buckets] bucket_resources = list_buckets + object_buckets return [ Statement( Effect=Allow, Resource=[s3_arn("*...
Read only policy an s3 bucket.
def run_callbacks(obj, log=None):
    """Invoke every callback found on ``obj`` via ``walk_callbacks``."""
    return walk_callbacks(obj, lambda callback, args: callback(*args), log)
Run callbacks.
def bulk(self, actions, stats_only=False, **kwargs):
    """Run elasticsearch.helpers.bulk over ``actions`` and log the outcome."""
    success, failed = es_helpers.bulk(self.client, actions, stats_only, **kwargs)
    logger.info('Bulk is done success %s failed %s actions: \n %s'
                % (success, failed, actions))
Executes bulk api by elasticsearch.helpers.bulk. :param actions: iterator containing the actions :param stats_only:if `True` only report number of successful/failed operations instead of just number of successful and a list of error responses Any additional keyword arguments will be pas...
def iter_following(username, number=-1, etag=None):
    """List the people ``username`` follows; empty list when no username given."""
    if not username:
        return []
    return gh.iter_following(username, number, etag)
List the people ``username`` follows. :param str username: (required), login of the user :param int number: (optional), number of users being followed by username to return. Default: -1, return all of them :param str etag: (optional), ETag from a previous request to the same endpoint :r...
def get_records(self, ids):
    """Return the records matching the given identifiers."""
    id_values = [str(record_id) for record_id in ids]
    return self.query(Ids(values=id_values))
Return records by their identifiers. :param ids: A list of record identifier. :returns: A list of records.
def convert_to_cgs(self, equivalence=None, **kwargs):
    """Convert the array in place to the equivalent cgs units."""
    cgs_units = self.units.get_cgs_equivalent()
    self.convert_to_units(cgs_units, equivalence=equivalence, **kwargs)
Convert the array and in-place to the equivalent cgs units. Optionally, an equivalence can be specified to convert to an equivalent quantity which is not in the same dimensions. Parameters ---------- equivalence : string, optional The equivalence you wish to use. To...
def slaveof(master_host=None, master_port=None, host=None, port=None, db=None,
            password=None):
    """Make this server a slave of ``master_host``; with no master, promote it."""
    if master_host and not master_port:
        master_port = 6379  # default Redis port
    server = _connect(host, port, db, password)
    return server.slaveof(master_host, master_port)
Make the server a slave of another instance, or promote it as master CLI Example: .. code-block:: bash # Become slave of redis-n01.example.com:6379 salt '*' redis.slaveof redis-n01.example.com 6379 salt '*' redis.slaveof redis-n01.example.com # Become master salt '*' r...
def subdomains_init(blockstack_opts, working_dir, atlas_state): if not is_subdomains_enabled(blockstack_opts): return None subdomain_state = SubdomainIndex(blockstack_opts['subdomaindb_path'], blockstack_opts=blockstack_opts) atlas_node_add_callback(atlas_state, 'store_zonefile', subdomain_state.enq...
Set up subdomain state Returns a SubdomainIndex object that has been successfully connected to Atlas
def createHorizonPolygons(self): vertsTop = [[-1,0],[-1,1],[1,1],[1,0],[-1,0]] self.topPolygon = Polygon(vertsTop,facecolor='dodgerblue',edgecolor='none') self.axes.add_patch(self.topPolygon) vertsBot = [[-1,0],[-1,-1],[1,-1],[1,0],[-1,0]] self.botPolygon = Polygon(vertsBot,facec...
Creates the two polygons to show the sky and ground.
def prox_yline(y, step):
    """Project a y value onto the line, clamping anything above -0.75 down to it.

    ``step`` is part of the proximal-operator signature but unused here.
    """
    if not np.isscalar(y):
        y = y[0]
    return np.array([-0.75]) if y > -0.75 else np.array([y])
Projection onto line in y
def get_user_permissions(self, user_id):
    """Return the user's permissions sorted by ``keyName``."""
    permissions = self.user_service.getPermissions(id=user_id)
    return sorted(permissions, key=lambda perm: perm['keyName'])
Returns a sorted list of a users permissions
def execute(self): if self.direct: if self.file_type == 'pdf': raise IOError(u"Direct output mode is not available for PDF " "export") else: print(self.render().encode(self.encoding)) else: self.write_and_...
Execute this generator regarding its current configuration.
def is_cidr_in_cidr(small_cidr, big_cidr):
    """Return True if ``small_cidr`` is contained in ``big_cidr``.

    "0.0.0.0/0" is treated as contained only in itself.
    """
    if small_cidr == "0.0.0.0/0":
        return big_cidr == "0.0.0.0/0"
    if big_cidr == "0.0.0.0/0":
        return False
    # Fix: `unicode()` is Python 2 only, yet `IPv4Network.subnet_of` requires
    # Python >= 3.7 — the original could not run on any interpreter.
    # `ipaddress` accepts str directly on Python 3.
    small = ipaddress.IPv4Network(small_cidr)
    big = ipaddress.IPv4Network(big_cidr)
    return small.subnet_of(big)
Return True if the small CIDR is contained in the big CIDR.
def fit_lsq(self, df):
    """Return the fixed METE SAR parameters (S0, N0) from an empirical result.

    Reads 'n_spp' and 'n_individs' from the row whose 'div' value is '1,1'.

    :param df: result DataFrame from empirical SAR analysis
    :return: tuple ``(n_spp, n_individs)``
    """
    tdf = df.set_index('div')
    # `DataFrame.ix` was removed from pandas; `.loc` is the label-based
    # equivalent for this string index.
    row = tdf.loc['1,1']
    return row['n_spp'], row['n_individs']
Parameterize generic SAR curve from empirical data set Parameters ---------- df : DataFrame Result data frame from empirical SAR analysis Notes ----- Simply returns S0 and N0 from empirical SAR output, which are two fixed parameters of METE SAR and E...
def get_peer_id(peer, add_mark=True): if isinstance(peer, int): return peer if add_mark else resolve_id(peer)[0] if isinstance(peer, types.InputPeerSelf): _raise_cast_fail(peer, 'int (you might want to use client.get_peer_id)') try: peer = get_peer(peer) except TypeError: ...
Finds the ID of the given peer, and converts it to the "bot api" format so it the peer can be identified back. User ID is left unmodified, chat ID is negated, and channel ID is prefixed with -100. The original ID and the peer type class can be returned with a call to :meth:`resolve_id(marked_id)`.
def _eval_model(self):
    """Evaluate the model on the cached grid with the current parameter values."""
    kwargs = self._x_grid.copy()
    for param in self.model.params:
        kwargs[param] = param.value
    return self.model(**key2str(kwargs))
Convenience method for evaluating the model with the current parameters :return: named tuple with results
def clean_time(self, time):
    """Coerce ``time`` to a datetime: int → epoch UTC, str → parsed; else pass through."""
    if isinstance(time, int):
        return datetime.utcfromtimestamp(time)
    if isinstance(time, str):
        return parser.parse(time)
    return time
Transform time field to datetime object if there is any.
def _ToString(x): if x is None: return 'null' if isinstance(x, six.string_types): return x return pprint.pformat(x)
The default default formatter!.
def gather_commands(self, ingredient):
    """Yield (full dotted command name, captured function) pairs for ``ingredient``."""
    for cmd_name, cmd in ingredient.commands.items():
        full_name = join_paths(ingredient.path, cmd_name)
        yield full_name, cmd
Collect all commands from this ingredient and its sub-ingredients. Yields ------ cmd_name: str The full (dotted) name of the command. cmd: function The corresponding captured function.
def run_snr(self): if self.ecc: required_kwargs = {'dist_type': self.dist_type, 'initial_cond_type': self.initial_cond_type, 'ecc': True} input_args = [self.m1, self.m2, self.z_or_dist, self.initial_point, ...
Run the snr calculation. Takes results from ``self.set_parameters`` and other inputs and inputs these into the snr calculator.
def rebalance_brokers(self):
    """Rebalance partition count across brokers within each replication group."""
    # `six.itervalues(d)` is equivalent to `d.values()` — six isn't needed.
    for replication_group in self.cluster_topology.rgs.values():
        replication_group.rebalance_brokers()
Rebalance partition-count across brokers within each replication-group.
def log_setup(debug_bool): level = logging.DEBUG if debug_bool else logging.INFO logging.config.dictConfig( { "version": 1, "disable_existing_loggers": False, "formatters": { "verbose": { "format": "%(asctime)s %(levelname)-8s %(nam...
Set up logging. We output only to stdout. Instead of also writing to a log file, redirect stdout to a log file when the script is executed from cron.
def login(self, username, password, limit=10, sync=True, device_id=None): response = self.api.login( "m.login.password", user=username, password=password, device_id=device_id ) self.user_id = response["user_id"] self.token = response["access_token"] self.hs = response...
Login to the homeserver. Args: username (str): Account username password (str): Account password limit (int): Deprecated. How many messages to return when syncing. This will be replaced by a filter API in a later release. sync (bool): Optional. Wh...