code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def _register_dependencies(self):
    """Register inter-tree dependencies declared in the timetable context.

    For each tree named in ``context.timetable_context``, wires up its
    ``dependent_on`` trees via ``MultiLevelTree.register_dependent_on``.
    NOTE(review): reads the module-level ``context`` object — confirm it
    is initialized before this runs.
    """
    for tree_name, context_entry in context.timetable_context.items():
        tree = self.trees[tree_name]
        assert isinstance(tree, MultiLevelTree)
        for dependent_on in context_entry.dependent_on:
            dependent_on_tree = self.trees[dependent_on]
            assert isinstance(dependent_on_tree, MultiLevelTree)
            tree.register_dependent_on(dependent_on_tree)
register dependencies between trees
def raise_for_api_error(headers: MutableMapping, data: MutableMapping) -> None:
    """Check a Slack API response for an error.

    Args:
        headers: Response headers.
        data: Decoded response body.

    Raises:
        exceptions.SlackAPIError: If the response's ``ok`` field is falsy.
    """
    if not data["ok"]:
        # Fix: the fallback error code was misspelled "unknow_error".
        raise exceptions.SlackAPIError(data.get("error", "unknown_error"),
                                       headers, data)

    if "warning" in data:
        LOG.warning("Slack API WARNING: %s", data["warning"])
Check request response for Slack API error Args: headers: Response headers data: Response data Raises: :class:`slack.exceptions.SlackAPIError`
def _get_memmap(self):
    """Return a read-only ``np.memmap`` over the data records.

    The map starts right after the native header and exposes one record
    per scan line (``self.mda['number_of_lines']`` records total).

    NOTE(review): the file is opened in text mode and the handle's
    lifetime is left to the memmap/GC — confirm binary mode is not
    needed on this platform.
    """
    with open(self.filename) as fp:
        data_dtype = self._get_data_dtype()
        hdr_size = native_header.itemsize
        return np.memmap(fp, dtype=data_dtype,
                         shape=(self.mda['number_of_lines'],),
                         offset=hdr_size, mode="r")
Get the memory map for the SEVIRI data
def hist2d(self, da, **kwargs):
    """Make the two dimensional histogram of a DataArray vs. its coordinate.

    Parameters
    ----------
    da: xarray.DataArray
        The data source; the coordinate of its first dimension is used
        as the x axis, its values as the y axis.
    **kwargs
        Forwarded to ``np.histogram2d``.

    Returns
    -------
    counts, xedges, yedges
        As from ``np.histogram2d``; when ``self.value`` is 'counts' the
        counts are rescaled to sum to 1.
    """
    # Let histogram2d normalize unless raw (or unset) counts were requested.
    if self.value is None or self.value == 'counts':
        normed = False
    else:
        normed = True
    y = da.values
    x = da.coords[da.dims[0]].values
    counts, xedges, yedges = np.histogram2d(
        x, y, normed=normed, **kwargs)
    if self.value == 'counts':
        # Convert absolute counts to relative frequencies.
        counts = counts / counts.sum().astype(float)
    return counts, xedges, yedges
Make the two dimensional histogram Parameters ---------- da: xarray.DataArray The data source
def trigger_replication_schedule(self, schedule_id, dry_run=False):
    """Trigger the replication schedule to run immediately.

    Start/end dates on the schedule are ignored by the server.

    :param schedule_id: Id of the schedule to trigger.
    :param dry_run: Whether to execute a dry run.
    :return: The command corresponding to the replication job.
    """
    endpoint = "replications/%s/run" % schedule_id
    run_params = dict(dryRun=dry_run)
    return self._post(endpoint, ApiCommand, params=run_params, api_version=3)
Trigger replication immediately. Start and end dates on the schedule will be ignored. @param schedule_id: The id of the schedule to trigger. @param dry_run: Whether to execute a dry run. @return: The command corresponding to the replication job. @since: API v3
def _return_tag_task(self, task):
    """Run a single tag-style task (e.g. SFW or Tags) via the CDN.

    :param task: the task name to run
    :return: decoded JSON response from the CDN
    :raises Exception: if no security object is configured — tag tasks
        require a secured handle
    """
    if self.security is None:
        raise Exception('Tags require security')
    tasks = [task]
    transform_url = get_transform_url(
        tasks, handle=self.handle, security=self.security, apikey=self.apikey
    )
    response = make_call(
        CDN_URL, 'get', handle=self.handle, security=self.security,
        transform_url=transform_url
    )
    return response.json()
Runs both SFW and Tags tasks
def git_handler(unused_build_context, target, fetch, package_dir, tar):
    """Handle a remote Git repository URI for a build target.

    Clones ``fetch.uri`` under the private builder workspace unless a
    repository already exists there, then adds the tree to the package
    tar (``gitfilter`` strips git internals).

    TODO(itamar): Support branches / tags / specific commit hashes
    TODO(itamar): Support updating a cloned repository
    TODO(itamar): Handle submodules? Handle force pulls?
    """
    target_name = split_name(target.name)
    # Optional per-fetch sub-directory inside the package dir.
    repo_dir = join(package_dir, fetch.name) if fetch.name else package_dir
    try:
        # Reuse an existing clone when the directory is already a valid repo.
        repo = git.Repo(repo_dir)
    except (InvalidGitRepositoryError, NoSuchPathError):
        repo = git.Repo.clone_from(fetch.uri, repo_dir)
    assert repo.working_tree_dir == repo_dir
    tar.add(package_dir, arcname=target_name, filter=gitfilter)
Handle remote Git repository URI. Clone the repository under the private builder workspace (unless already cloned), and add it to the package tar (filtering out git internals). TODO(itamar): Support branches / tags / specific commit hashes TODO(itamar): Support updating a cloned repository TODO(itamar): Handle submodules? TODO(itamar): Handle force pulls?
def update(self, *args, **kwargs):
    """Update the last section record, adding one if none exists yet.

    Falls back to ``self.add`` when there is no current record; otherwise
    copies keyword values onto the record (skipping identity/linkage
    columns), merges it into the session and commits.

    :return: id of the updated (or newly added) record
    """
    self.augment_args(args, kwargs)
    kwargs['log_action'] = kwargs.get('log_action', 'update')
    if not self.rec:
        return self.add(**kwargs)
    else:
        for k, v in kwargs.items():
            # Identity/linkage columns must not be overwritten on update.
            if k not in ('source', 's_vid', 'table', 't_vid', 'partition', 'p_vid'):
                setattr(self.rec, k, v)
        self._session.merge(self.rec)
        if self._logger:
            self._logger.info(self.rec.log_str)
        self._session.commit()
        self._ai_rec_id = None
        return self.rec.id
Update the last section record
def newPanelTab(self):
    """Duplicate the current view into a new tab on the current panel.

    Does nothing when the panel has no current view.
    """
    panel = self._currentPanel
    current = panel.currentView()
    if not current:
        return
    duplicated = current.duplicate(panel)
    panel.addTab(duplicated, duplicated.windowTitle())
Creates a new panel with a copy of the current widget.
def all(self, fields=None, include_fields=True, page=None, per_page=None,
        extra_params=None):
    """Retrieve the list of all applications.

    Args:
        fields (list of str, optional): Fields to include or exclude
            (per ``include_fields``); empty retrieves all fields.
        include_fields (bool, optional): True to include the listed
            fields, False to exclude them.
        page (int): Zero-based page number.
        per_page (int, optional): Entries per page.
        extra_params (dict, optional): Extra request parameters; the
            explicit arguments above take precedence.
    """
    params = extra_params or {}
    joined_fields = ','.join(fields) if fields else ''
    params['fields'] = joined_fields or None
    params['include_fields'] = str(include_fields).lower()
    params['page'] = page
    params['per_page'] = per_page
    return self.client.get(self._url(), params=params)
Retrieves a list of all the applications. Important: The client_secret and encryption_key attributes can only be retrieved with the read:client_keys scope. Args: fields (list of str, optional): A list of fields to include or exclude from the result (depending on include_fields). Empty to retrieve all fields. include_fields (bool, optional): True if the fields specified are to be included in the result, False otherwise. page (int): The result's page number (zero based). per_page (int, optional): The amount of entries per page. extra_params (dictionary, optional): The extra parameters to add to the request. The fields, include_fields, page and per_page values specified as parameters take precedence over the ones defined here. See: https://auth0.com/docs/api/management/v2#!/Clients/get_clients
async def get_real_ext_ip(self):
    """Return the real external IP address.

    Queries the configured IP-echo hosts (popped in random order) until
    one returns a valid IP string; timeouts are silently skipped.

    :raises RuntimeError: if every host fails to yield a valid IP.
    """
    while self._ip_hosts:
        try:
            timeout = aiohttp.ClientTimeout(total=self._timeout)
            async with aiohttp.ClientSession(
                timeout=timeout, loop=self._loop
            ) as session, session.get(self._pop_random_ip_host()) as resp:
                ip = await resp.text()
        except asyncio.TimeoutError:
            pass
        else:
            ip = ip.strip()
            if self.host_is_ip(ip):
                log.debug('Real external IP: %s', ip)
                break
    else:
        # while/else: loop exhausted every host without a `break`.
        raise RuntimeError('Could not get the external IP')
    return ip
Return real external IP address.
def _generate_standard_transitions(cls):
    """Build the translated transition table used for generated methods.

    Translates each configured source state and destination (unless it
    is already an Enum member) through the class translator, then makes
    sure every state in ``states_enum`` has a (possibly empty)
    transition set.
    """
    allowed_transitions = cls.context.get_config('transitions', {})
    for key, transitions in allowed_transitions.items():
        key = cls.context.new_meta['translator'].translate(key)
        new_transitions = set()
        for trans in transitions:
            if not isinstance(trans, Enum):
                trans = cls.context.new_meta['translator'].translate(trans)
            new_transitions.add(trans)
        cls.context.new_transitions[key] = new_transitions
    # Every state must appear in the table, even without outgoing edges.
    for state in cls.context.states_enum:
        if state not in cls.context.new_transitions:
            cls.context.new_transitions[state] = set()
Generate methods used for transitions.
def create_calcs(self):
    """Generate a Calc object for each requested parameter combination.

    Invalid time reductions are pruned from each spec before the Calc
    objects are constructed.
    """
    specs = self._combine_core_aux_specs()
    for spec in specs:
        spec['dtype_out_time'] = _prune_invalid_time_reductions(spec)
    return [Calc(**sp) for sp in specs]
Generate a Calc object for each requested parameter combination.
def make_similar_sized_bins(x, n):
    """Create ``n`` bins over the range of ``x`` with roughly equal occupancy.

    Parameters
    ----------
    x : array_like
        The values to be binned.
    n : int
        The number of bins to create (>= 1).

    Returns
    -------
    bins : ndarray
        Monotonically increasing bin edges. Fewer than ``n + 1`` edges
        may be returned when a single value repeats more than
        ``len(x) // n`` times.
    """
    y = np.array(x).flatten()
    y.sort()
    # Fix: guard against a zero step when n > len(y); range() previously
    # raised "arg 3 must not be zero" in that case.
    step = max(1, len(y) // n)
    bins = [y[0]]
    for i in range(step, len(y), step):
        v = y[i]
        # Only keep strictly increasing edges (skips runs of duplicates).
        if v > bins[-1]:
            bins.append(v)
    # Force the last edge to the maximum so every value falls in a bin.
    bins[-1] = y[-1]
    return np.array(bins)
Utility function to create a set of bins over the range of values in `x` such that each bin contains roughly the same number of values. Parameters ---------- x : array_like The values to be binned. n : int The number of bins to create. Returns ------- bins : ndarray An array of bin edges. Notes ----- The actual number of bins returned may be less than `n` if `x` contains integer values and any single value is represented more than len(x)//n times.
def generate_table(self, rows):
    """Generate a PrettyTable from a list of rows.

    Each row is a ``(cells, is_header)`` pair: ``cells`` is padded with
    ``"-"`` up to ``self.max_row_width``; header rows become the
    (uniquified) field names, other rows are added as data.

    :param rows: list of ``(cells, is_header)`` pairs.
    :return: the populated PrettyTable.
    """
    table = PrettyTable(**self.kwargs)
    # Fixes: iterate the ``rows`` argument (previously ``self.rows`` was
    # read, silently ignoring the parameter) and pad to the full width
    # (``range(1, appends)`` previously left the row one cell short).
    for row in rows:
        if len(row[0]) < self.max_row_width:
            for _ in range(self.max_row_width - len(row[0])):
                row[0].append("-")
        if row[1] is True:
            self.make_fields_unique(row[0])
            table.field_names = row[0]
        else:
            table.add_row(row[0])
    return table
Generates from a list of rows a PrettyTable object.
def getidfobjectlist(idf):
    """Return a flat list of every idfobject in ``idf``, in dtls order."""
    objects = idf.idfobjects
    grouped = (objects[key] for key in idf.model.dtls if objects[key])
    return list(itertools.chain.from_iterable(grouped))
return a list of all idfobjects in idf
def get_urls(self):
    """Add our dashboard view to the admin urlconf, replacing the default index.

    NOTE(review): ``django.conf.urls.patterns`` and the bare ``views``
    import are Django <= 1.7 era APIs — confirm the project's Django
    version before touching this.
    """
    from django.conf.urls import patterns, url
    from views import DashboardWelcomeView

    urls = super(AdminMixin, self).get_urls()
    # Drop the stock admin index view; ours is prepended below.
    del urls[0]
    custom_url = patterns(
        '', url(r'^$', self.admin_view(DashboardWelcomeView.as_view()),
                name="index")
    )
    return custom_url + urls
Add our dashboard view to the admin urlconf. Deleted the default index.
def create_done_path(done_path, uid=-1, gid=-1):
    """Create an empty "done" marker file and set its ownership.

    ``uid``/``gid`` of -1 leave the respective owner unchanged
    (the ``os.chown`` convention).
    """
    marker = open(done_path, 'wb')
    marker.close()
    os.chown(done_path, uid, gid)
create a done file to avoid re-doing the mon deployment
def _ConvertAnnotations(self, annotations): flags = 0 if annotations: for annotation in annotations: flags |= self._mapFlags.get(annotation.name, 0) return flags
Convert annotations to pyVmomi flags
def jump_server(self, msg="Changing servers"):
    """Advance to the next server in the rotation and reconnect.

    Disconnects first (quitting with ``msg``) when currently connected;
    each call moves the bot one step through the server iterator.
    """
    connection = self.connection
    if connection.is_connected():
        connection.disconnect(msg)
    next(self.servers)
    self._connect()
Connect to a new server, possibly disconnecting from the current. The bot will skip to next server in the server_list each time jump_server is called.
def reset_generation(self):
    """Reset the generation and memberId after falling out of the group.

    Under the coordinator lock: clears the generation, requests a
    rejoin, and marks this member UNJOINED.
    """
    with self._lock:
        self._generation = Generation.NO_GENERATION
        self.rejoin_needed = True
        self.state = MemberState.UNJOINED
Reset the generation and memberId because we have fallen out of the group.
def authenticate(self, auth_token, auth_info, service_name):
    """Authenticate the current auth token and return the caller's identity.

    Args:
        auth_token: the JWT auth token.
        auth_info: auth configuration of the API method being called.
        service_name: the name of this service.

    Returns:
        A UserInfo object built from the token's claims.

    Raises:
        UnauthenticatedException: when the token cannot be decoded, the
            issuer/provider is not allowed, or no acceptable audience is
            present.
    """
    try:
        jwt_claims = self.get_jwt_claims(auth_token)
    except Exception as error:
        raise suppliers.UnauthenticatedException(u"Cannot decode the auth token",
                                                 error)
    _check_jwt_claims(jwt_claims)

    user_info = UserInfo(jwt_claims)

    issuer = user_info.issuer
    if issuer not in self._issuers_to_provider_ids:
        raise suppliers.UnauthenticatedException(u"Unknown issuer: " + issuer)
    provider_id = self._issuers_to_provider_ids[issuer]

    if not auth_info.is_provider_allowed(provider_id):
        raise suppliers.UnauthenticatedException(u"The requested method does not "
                                                 u"allow provider id: " + provider_id)

    # The token is acceptable if it names this service OR any audience
    # explicitly allowed for this provider.
    audiences = user_info.audiences
    has_service_name = service_name in audiences
    allowed_audiences = auth_info.get_allowed_audiences(provider_id)
    intersected_audiences = set(allowed_audiences).intersection(audiences)
    if not has_service_name and not intersected_audiences:
        raise suppliers.UnauthenticatedException(u"Audiences not allowed")

    return user_info
Authenticates the current auth token. Args: auth_token: the auth token. auth_info: the auth configurations of the API method being called. service_name: the name of this service. Returns: A constructed UserInfo object representing the identity of the caller. Raises: UnauthenticatedException: When * the issuer is not allowed; * the audiences are not allowed; * the auth token has already expired.
def _recurse_replace(obj, key, new_key, sub, remove): if isinstance(obj, list): return [_recurse_replace(x, key, new_key, sub, remove) for x in obj] if isinstance(obj, dict): for k, v in list(obj.items()): if k == key and v in sub: obj[new_key] = sub[v] if remove: del obj[key] else: obj[k] = _recurse_replace(v, key, new_key, sub, remove) return obj
Recursive helper for `replace_by_key`
def init0(self, dae):
    """Set bus Va and Vm initial values for power flow.

    Uses the stored angle/voltage unless flat start is configured; a
    tiny (1e-10) random perturbation is added to the angles to avoid a
    degenerate starting point.
    """
    if not self.system.pflow.config.flatstart:
        dae.y[self.a] = self.angle + 1e-10 * uniform(self.n)
        dae.y[self.v] = self.voltage
    else:
        # Flat start: zero angles, unity voltage magnitudes.
        dae.y[self.a] = matrix(0.0, (self.n, 1), 'd') + 1e-10 * uniform(self.n)
        dae.y[self.v] = matrix(1.0, (self.n, 1), 'd')
Set bus Va and Vm initial values
def direct_messages(self, delegate, params=None, extra_args=None):
    """Get direct messages for the authenticating user.

    Results are delivered one at a time to ``delegate`` as DirectMessage
    objects.

    :param delegate: callback invoked with each parsed DirectMessage.
    :param params: optional dict of query parameters for the API call.
    :param extra_args: opaque value passed through to the delegate.
    """
    # Fix: a mutable default argument ({}) was shared across calls.
    if params is None:
        params = {}
    return self.__get('/direct_messages.xml', delegate, params,
                      txml.Direct, extra_args=extra_args)
Get direct messages for the authenticating user. Search results are returned one message at a time as DirectMessage objects
def median(self):
    """Median of the log-normal distribution built from the stats data.

    Computed as ``exp(mean)``; returns ``inf`` when that is NaN.
    """
    median_value = math.exp(self.mean())
    if math.isnan(median_value):
        median_value = float("inf")
    return median_value
Computes the median of a log-normal distribution built with the stats data.
def attention_mask_ignore_padding(inputs, dtype=tf.float32):
    """Bias for encoder-decoder attention that masks padding positions.

    Args:
        inputs: a mtf.Tensor with shape [..., length_dim]
        dtype: a tf.dtype

    Returns:
        a mtf.Tensor with shape [..., memory_length_dim]: -1e9 where the
        input id is 0 (padding), 0 elsewhere.
    """
    inputs = rename_length_to_memory_length(inputs)
    return mtf.cast(mtf.equal(inputs, 0), dtype) * -1e9
Bias for encoder-decoder attention. Args: inputs: a mtf.Tensor with shape [..., length_dim] dtype: a tf.dtype Returns: a mtf.Tensor with shape [..., memory_length_dim]
def fMeasure(self, label, beta=None):
    """Return the f-measure for ``label`` (F1 unless ``beta`` is given)."""
    if beta is None:
        return self.call("fMeasure", label)
    return self.call("fMeasure", label, beta)
Returns f-measure.
def reboot_adb_server():
    """Build a callable that starts the adb server via ``adb devices``.

    Returns the inner ``_reboot`` function without calling it. Each call
    runs ``adb devices``; after ``_max_retry`` invocations it raises.

    NOTE(review): despite the retry counter, nothing here retries — the
    closure raises on the second call. Confirm whether the caller is
    expected to drive the retries.
    """
    _reboot_count = 0
    _max_retry = 1

    def _reboot():
        # Attempt count is shared across calls via the enclosing scope.
        nonlocal _reboot_count
        if _reboot_count >= _max_retry:
            raise RuntimeError('fail after retry {} times'.format(_max_retry))
        _reboot_count += 1
        return_code = subprocess.call(['adb', 'devices'],
                                      stdout=subprocess.DEVNULL)
        if bool(return_code):
            warnings.warn('return not zero, execute "adb version" failed')
            raise EnvironmentError('adb did not work :(')

    return _reboot
execute 'adb devices' to start adb server
def setup_logging(logfile, print_log_location=True, debug=False):
    """Set up file logging plus an attached stream handler.

    Arguments:
        logfile (str): Path to the log file; its parent directory is
            created if missing.
        print_log_location (bool): If True, write the log path to the
            log on initialization. Default True.
        debug (bool): Use logging.DEBUG instead of logging.INFO.
    """
    log_dir = os.path.dirname(logfile)
    make_dir(log_dir)
    fmt = '[%(levelname)s] %(name)s %(asctime)s %(message)s'
    if debug:
        logging.basicConfig(filename=logfile, filemode='w', format=fmt,
                            level=logging.DEBUG)
    else:
        logging.basicConfig(filename=logfile, filemode='w', format=fmt,
                            level=logging.INFO)
    logger = logging.getLogger('log')
    # Mirror log records to the console as well as the file.
    logger = add_stream_handler(logger)
    if print_log_location:
        logger.info('LOG LOCATION: {}'.format(logfile))
Set up logging using the built-in ``logging`` package. A stream handler is added to all logs, so that logs at or above ``logging.INFO`` level are printed to screen as well as written to the log file. Arguments: logfile (str): Path to the log file. If the parent directory does not exist, it will be created. Required. print_log_location (bool): If ``True``, the log path will be written to the log upon initialization. Default is ``True``. debug (bool): If true, the log level will be set to ``logging.DEBUG``. If ``False``, the log level will be set to ``logging.INFO``. Default is ``False``.
async def append(self, reply: Reply) -> None:
    """Add a Reply to this store's responses and processed transactions.

    Records the transaction (when new) and puts the reply on the
    requesting identifier's asyncio queue, creating the queue on first
    use.
    """
    result = reply.result
    identifier = result.get(f.IDENTIFIER.nm)
    txnId = result.get(TXN_ID)
    logger.debug("Reply being sent {}".format(reply))
    if self._isNewTxn(identifier, reply, txnId):
        self.addToProcessedTxns(identifier, txnId, reply)
    if identifier not in self.responses:
        self.responses[identifier] = asyncio.Queue()
    await self.responses[identifier].put(reply)
Add the given Reply to this transaction store's list of responses. Also add to processedRequests if not added previously.
def write_tree(self):
    """Write this index as a Tree into the repository's object database.

    :return: Tree object representing this index
    :note: The tree is written even if objects it refers to do not yet
        exist in the object database (possible when Entries were added
        directly).
    :raise ValueError: if there are no entries in the cache
    :raise UnmergedEntriesError:
    """
    # Build into a temporary in-memory database, then stream the new
    # objects into the repository's object database.
    mdb = MemoryDB()
    entries = self._entries_sorted()
    binsha, tree_items = write_tree_from_cache(entries, mdb,
                                               slice(0, len(entries)))
    mdb.stream_copy(mdb.sha_iter(), self.repo.odb)
    root_tree = Tree(self.repo, binsha, path='')
    root_tree._cache = tree_items
    return root_tree
Writes this index to a corresponding Tree object into the repository's object database and return it. :return: Tree object representing this index :note: The tree will be written even if one or more objects the tree refers to does not yet exist in the object database. This could happen if you added Entries to the index directly. :raise ValueError: if there are no entries in the cache :raise UnmergedEntriesError:
def sky2ang(sky):
    """Convert (ra, dec) coordinates to (theta, phi) coordinates.

    phi = ra, theta = pi/2 - dec. Accepts anything convertible to an
    (N, 2) numpy array; ndarray input is copied, not modified.
    """
    try:
        theta_phi = sky.copy()
    except AttributeError:
        theta_phi = np.array(sky)
    # Swap columns: (ra, dec) -> (dec, ra).
    theta_phi[:, [1, 0]] = theta_phi[:, [0, 1]]
    # Declination -> colatitude.
    theta_phi[:, 0] = np.pi / 2 - theta_phi[:, 0]
    return theta_phi
Convert ra,dec coordinates to theta,phi coordinates ra -> phi dec -> theta Parameters ---------- sky : numpy.array Array of (ra,dec) coordinates. See :func:`AegeanTools.regions.Region.radec2sky` Returns ------- theta_phi : numpy.array Array of (theta,phi) coordinates.
def list_function_versions(FunctionName, region=None, key=None, keyid=None,
                           profile=None):
    """List the versions available for the given Lambda function.

    Returns ``{'Versions': [...]}`` on success, or ``{'error': ...}`` on
    a botocore ClientError.
    """
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        vers = []
        # list_versions_by_function is paginated; accumulate all pages.
        for ret in __utils__['boto3.paged_call'](conn.list_versions_by_function,
                                                 FunctionName=FunctionName):
            vers.extend(ret['Versions'])
        if not bool(vers):
            log.warning('No versions found')
        return {'Versions': vers}
    except ClientError as e:
        return {'error': __utils__['boto3.get_error'](e)}
List the versions available for the given function. Returns list of function versions CLI Example: .. code-block:: yaml versions: - {...} - {...}
def get_otp(hsm, args):
    """Get an admin YubiKey OTP, or None when skipped/not required.

    Reads from stdin when ``--stdin`` was given, otherwise prompts.
    A valid OTP is exactly 44 characters; any other non-empty input
    prints an error to stderr.

    NOTE(review): ``raw_input`` is Python 2 only — confirm this module
    is not expected to run under Python 3.
    """
    if args.no_otp:
        return None
    if hsm.version.have_unlock():
        if args.stdin:
            otp = sys.stdin.readline()
            # Strip trailing newline(s) without touching other chars.
            while otp and otp[-1] == '\n':
                otp = otp[:-1]
        else:
            otp = raw_input('Enter admin YubiKey OTP (press enter to skip) : ')
        if len(otp) == 44:
            return otp
        if otp:
            sys.stderr.write("ERROR: Invalid YubiKey OTP\n")
    return None
Get OTP from YubiKey.
def sbosrcarsh(self, prgnam, sbo_link, src_link):
    """Build alternative (sbosrcarch) mirror URLs for a package's sources."""
    name = "-".join(prgnam.split("-")[:-1])
    category = "{0}/{1}/".format(sbo_link.split("/")[-2], name)
    base = self.meta.sbosrcarch_link
    return ["{0}{1}{2}".format(base, category, link.split("/")[-1])
            for link in src_link]
Alternative repository for sbo sources
def __collect_fields(self):
    """Collect the configured username/password fields from the request.

    Field names and error messages come from config.json; the parsed
    values are stored on ``self.username`` and ``self.password``.
    """
    form = FormData()
    for field_name, error_message in (
            (self.__username_field, self.__username_error),
            (self.__password_field, self.__password_error)):
        form.add_field(field_name, required=True, error=error_message)
    form.parse()
    self.username = form.values[self.__username_field]
    self.password = form.values[self.__password_field]
Use field values from config.json and collect from request
def get_below_threshold(umi_quals, quality_encoding, quality_filter_threshold):
    """Per-base flags: True where the UMI base quality is below threshold."""
    offset = RANGES[quality_encoding][0]
    return [ord(ch) - offset < quality_filter_threshold for ch in umi_quals]
test whether the umi_quals are below the threshold
def result_to_dict(raw_result):
    """Parse the raw fetcher result into a readable dict keyed by channel id.

    Args:
        raw_result: iterable of ``(id, name, movies)`` channel tuples,
            where each movie is ``(?, title, start_ts, duration)``.

    Returns:
        dict: channel id -> {'id', 'name', 'movies'}; each movie gets
        parsed ``start_time``/``end_time`` datetimes and an ``inf`` flag
        that is True for a non-zero duration.
    """
    result = {}
    # Note: the previous enumerate() index was unused and has been dropped.
    for channel in raw_result:
        channel_id, channel_name = channel[0], channel[1]
        movies = []
        for movie in channel[2]:
            start_ts, duration = movie[2], movie[3]
            movies.append({
                'title': movie[1],
                'start_time': datetime.fromtimestamp(start_ts),
                'end_time': datetime.fromtimestamp(start_ts + duration),
                # Idiom fix: bool(x) instead of "True if x else False".
                'inf': bool(duration),
            })
        result[channel_id] = {
            'id': channel_id,
            'name': channel_name,
            'movies': movies,
        }
    return result
Parse raw result from fetcher into readable dictionary Args: raw_result (dict) - raw data from `fetcher` Returns: dict - readable dictionary
def create_h5py_with_large_cache(filename, cache_size_mb):
    """Create an HDF5 file opened with an enlarged chunk cache.

    Uses the low-level h5py API because the high-level one does not
    expose cache settings.

    :param filename: path of the file to create (must not already exist
        — the file is created with ACC_EXCL)
    :param cache_size_mb: raw-data chunk cache size in megabytes
    :return: an open h5py.File
    """
    propfaid = h5py.h5p.create(h5py.h5p.FILE_ACCESS)
    settings = list(propfaid.get_cache())
    # settings[2] is the raw-data chunk cache size in bytes.
    settings[2] = 1024 * 1024 * cache_size_mb
    propfaid.set_cache(*settings)
    fid = h5py.h5f.create(filename, flags=h5py.h5f.ACC_EXCL, fapl=propfaid)
    fin = h5py.File(fid)
    return fin
Allows to open the hdf5 file with specified cache size
def save_trajs(trajs, fn, meta, key_to_path=None):
    """Save trajectory-like data as one .npy file per trajectory.

    Existing data named ``fn`` is backed up first; files are laid out
    under ``fn`` according to ``key_to_path(key)``.

    Parameters
    ----------
    trajs : dict of (key, np.ndarray)
        Trajectory-like ndarrays keyed on ``meta.index`` values.
    fn : str
        Output directory (one file per trajectory).
    meta : pd.DataFrame
        Metadata whose index enumerates the trajectories to save.
    key_to_path : callable, optional
        Maps a key to a relative file path; defaults to
        ``default_key_to_path``.
    """
    if key_to_path is None:
        key_to_path = default_key_to_path
    validate_keys(meta.index, key_to_path)
    backup(fn)
    os.mkdir(fn)
    for k in meta.index:
        v = trajs[k]
        npy_fn = os.path.join(fn, key_to_path(k))
        os.makedirs(os.path.dirname(npy_fn), exist_ok=True)
        np.save(npy_fn, v)
Save trajectory-like data Data is stored in individual numpy binary files in the directory given by ``fn``. This method will automatically back up existing files named ``fn``. Parameters ---------- trajs : dict of (key, np.ndarray) Dictionary of trajectory-like ndarray's keyed on ``meta.index`` values. fn : str Where to save the data. This will be a directory containing one file per trajectory meta : pd.DataFrame The DataFrame of metadata
def xgroup_create(self, stream, group_name, latest_id='$', mkstream=False):
    """Create a consumer group for ``stream`` (XGROUP CREATE).

    :param latest_id: ID from which the group starts reading
        ('$' = only new entries).
    :param mkstream: also create the stream if it does not exist.
    """
    command = [b'CREATE', stream, group_name, latest_id]
    if mkstream:
        command += [b'MKSTREAM']
    return wait_ok(self.execute(b'XGROUP', *command))
Create a consumer group
def print_exception(*args, file=None, **kwargs):
    """Print the formatted output of an exception object via vtmlprint."""
    formatted = format_exception(*args, **kwargs)
    for formatted_line in formatted:
        vtml.vtmlprint(formatted_line, file=file)
Print the formatted output of an exception object.
def load_config(name, base='conf'):
    """Load ``<base>/<name>.json`` as a dict; {} if missing or unparsable."""
    fname = pjoin(base, name + '.json')
    if not os.path.exists(fname):
        return {}
    try:
        with open(fname) as f:
            return json.load(f)
    except Exception as e:
        warn("Couldn't load %s: %s" % (fname, e))
        return {}
Load config dict from JSON
def rename_datastore(datastore_name, new_datastore_name, service_instance=None):
    """Rename a datastore. The datastore needs to be visible to the proxy.

    datastore_name
        Current datastore name.

    new_datastore_name
        New datastore name.

    service_instance
        Service instance (vim.ServiceInstance) of the vCenter/ESXi host.
        Default is None.

    Raises VMwareObjectRetrievalError when the datastore is not found;
    returns True on success.
    """
    log.trace('Renaming datastore %s to %s', datastore_name,
              new_datastore_name)
    target = _get_proxy_target(service_instance)
    datastores = salt.utils.vmware.get_datastores(
        service_instance, target, datastore_names=[datastore_name])
    if not datastores:
        raise VMwareObjectRetrievalError('Datastore \'{0}\' was not found'
                                         ''.format(datastore_name))
    ds = datastores[0]
    salt.utils.vmware.rename_datastore(ds, new_datastore_name)
    return True
Renames a datastore. The datastore needs to be visible to the proxy. datastore_name Current datastore name. new_datastore_name New datastore name. service_instance Service instance (vim.ServiceInstance) of the vCenter/ESXi host. Default is None. .. code-block:: bash salt '*' vsphere.rename_datastore old_name new_name
def canonical(request, uploaded_at, file_id):
    """Redirect to the current URL of a public file.

    404s when the file is missing, not public, has no underlying file,
    or when the ``uploaded_at`` timestamp in the URL does not match
    (protects against stale/forged canonical links).
    """
    filer_file = get_object_or_404(File, pk=file_id, is_public=True)
    if (uploaded_at != filer_file.uploaded_at.strftime('%s') or
            not filer_file.file):
        raise Http404('No %s matches the given query.' % File._meta.object_name)
    return redirect(filer_file.url)
Redirect to the current url of a public file
def create(cls, scheduled_analysis, tags=None, json_report_objects=None,
           raw_report_objects=None, additional_metadata=None,
           analysis_date=None):
    """Create a new report.

    For convenience, ``ScheduledAnalysis.create_report`` can be used
    instead.

    :param scheduled_analysis: The ScheduledAnalysis this report was
        created for.
    :param tags: A list of strings.
    :param json_report_objects: dict of JSON reports keyed by object name.
    :param raw_report_objects: dict of binary file reports keyed by file
        name.
    :param additional_metadata: dict of extra metadata (defaults to {}).
    :param analysis_date: datetime the report was generated; defaults to
        the current time.
    :return: The newly created report object.
    """
    # Normalize None defaults (avoids the mutable-default pitfall).
    if tags is None:
        tags = []
    if additional_metadata is None:
        additional_metadata = {}
    if analysis_date is None:
        analysis_date = datetime.datetime.now()
    url = cls._creation_point.format(scheduled_analysis=scheduled_analysis.id)
    return cls._create(url=url, analysis_date=analysis_date,
                       additional_json_files=json_report_objects,
                       additional_binary_files=raw_report_objects,
                       tags=tags, additional_metadata=additional_metadata,
                       force_multipart=True)
Create a new report. For convenience :func:`~mass_api_client.resources.scheduled_analysis.ScheduledAnalysis.create_report` of class :class:`.ScheduledAnalysis` can be used instead. :param scheduled_analysis: The :class:`.ScheduledAnalysis` this report was created for :param tags: A list of strings :param json_report_objects: A dictionary of JSON reports, where the key is the object name. :param raw_report_objects: A dictionary of binary file reports, where the key is the file name. :param analysis_date: A datetime object of the time the report was generated. Defaults to current time. :return: The newly created report object
def Proxy(self, status, headers, exc_info=None):
    """start_response stand-in: record the args, return the body writer.

    Stores status/headers/exc_info in ``call_context`` so the real
    start_response can be deferred until the body has been parsed.

    Args:
        status: HTTP status to be sent with this response.
        headers: HTTP headers to be sent with this response.
        exc_info: Exception info to be displayed for this response.

    Returns:
        A callable that accepts body content to write.
    """
    context = self.call_context
    context['status'] = status
    context['headers'] = headers
    context['exc_info'] = exc_info
    return self.body_buffer.write
Save args, defer start_response until response body is parsed. Create output buffer for body to be written into. Note: this is not quite WSGI compliant: The body should come back as an iterator returned from calling service_app() but instead, StartResponse returns a writer that will be later called to output the body. See google/appengine/ext/webapp/__init__.py::Response.wsgi_write() write = start_response('%d %s' % self.__status, self.__wsgi_headers) write(body) Args: status: Http status to be sent with this response headers: Http headers to be sent with this response exc_info: Exception info to be displayed for this response Returns: callable that takes as an argument the body content
def add_filter(self, filter_or_string, *args, **kwargs):
    """Adds a filter to the query builder's filters.

    Delegates to the root filter; returns self to allow chaining.

    :return: :class:`~es_fluent.builder.QueryBuilder`
    """
    self.root_filter.add_filter(filter_or_string, *args, **kwargs)
    return self
Adds a filter to the query builder's filters. :return: :class:`~es_fluent.builder.QueryBuilder`
def change_disk_usage(self, usage_change, file_path, st_dev):
    """Change the used disk space by the given amount.

    Args:
        usage_change: Number of bytes added to the used space; negative
            to decrease it.
        file_path: The path of the object needing the disk space (used
            in the error message).
        st_dev: The device ID for the respective file system.

    Raises:
        IOError: ENOSPC if usage_change exceeds the free space.
    """
    mount_point = self._mount_point_for_device(st_dev)
    if mount_point:
        total_size = mount_point['total_size']
        # A total_size of None models an unlimited filesystem.
        if total_size is not None:
            if total_size - mount_point['used_size'] < usage_change:
                self.raise_io_error(errno.ENOSPC, file_path)
        mount_point['used_size'] += usage_change
Change the used disk space by the given amount. Args: usage_change: Number of bytes added to the used space. If negative, the used space will be decreased. file_path: The path of the object needing the disk space. st_dev: The device ID for the respective file system. Raises: IOError: if usage_change exceeds the free file system space
def buffer_to_value(self, obj, buffer, offset,
                    default_endianness=DEFAULT_ENDIANNESS):
    """Convert the bytes in ``buffer`` at ``offset`` to a native value.

    :param obj: The parent PebblePacket of this field.
    :param buffer: The buffer from which to extract a value.
    :param offset: The offset in the buffer to start at.
    :param default_endianness: Endianness used when the field did not
        specify one.
    :return: (value, length) — the decoded value (mapped through
        ``self._enum`` when set) and the number of bytes consumed.
    :raises PacketDecodeError: on a struct error or unknown enum value.
    """
    try:
        value, length = struct.unpack_from(
            str(self.endianness or default_endianness) + self.struct_format,
            buffer, offset)[0], struct.calcsize(self.struct_format)
        if self._enum is not None:
            try:
                return self._enum(value), length
            except ValueError as e:
                # Raw value is not a member of the enum.
                raise PacketDecodeError("{}: {}".format(self.type, e))
        else:
            return value, length
    except struct.error as e:
        raise PacketDecodeError("{}: {}".format(self.type, e))
Converts the bytes in ``buffer`` at ``offset`` to a native Python value. Returns that value and the number of bytes consumed to create it. :param obj: The parent :class:`.PebblePacket` of this field :type obj: .PebblePacket :param buffer: The buffer from which to extract a value. :type buffer: bytes :param offset: The offset in the buffer to start at. :type offset: int :param default_endianness: The default endianness of the value. Used if ``endianness`` was not passed to the :class:`Field` constructor. :type default_endianness: str :return: (value, length) :rtype: (:class:`object`, :any:`int`)
def define_noisy_gate(self, name, qubit_indices, kraus_ops):
    """Overload a static ideal gate with a noisy Kraus-map version.

    :param str name: The name of the gate.
    :param tuple|list qubit_indices: The qubits it acts on.
    :param tuple|list kraus_ops: The Kraus operators; validated for the
        right dimension/completeness before emission.
    :return: The Program instance
    :rtype: Program
    """
    kraus_ops = [np.asarray(k, dtype=np.complex128) for k in kraus_ops]
    _check_kraus_ops(len(qubit_indices), kraus_ops)
    return self.inst(_create_kraus_pragmas(name, tuple(qubit_indices),
                                           kraus_ops))
Overload a static ideal gate with a noisy one defined in terms of a Kraus map. .. note:: The matrix elements along each axis are ordered by bitstring. For two qubits the order is ``00, 01, 10, 11``, where the the bits **are ordered in reverse** by the qubit index, i.e., for qubits 0 and 1 the bitstring ``01`` indicates that qubit 0 is in the state 1. See also :ref:`the related documentation section in the QVM Overview <basis-ordering>`. :param str name: The name of the gate. :param tuple|list qubit_indices: The qubits it acts on. :param tuple|list kraus_ops: The Kraus operators. :return: The Program instance :rtype: Program
def validate_exported_interfaces(object_class, exported_intfs):
    """Validate that every exported interface is provided by the service.

    :param object_class: The specifications of a service.
    :param exported_intfs: The exported specifications (a non-empty list).
    :return: True if ``exported_intfs`` is a non-empty list fully
        contained in ``object_class``, else False.
    """
    # Fix: the original condition tested "not exported_intfs" twice.
    if not exported_intfs or not isinstance(exported_intfs, list):
        return False
    return all(exintf in object_class for exintf in exported_intfs)
Validates that the exported interfaces are all provided by the service :param object_class: The specifications of a service :param exported_intfs: The exported specifications :return: True if the exported specifications are all provided by the service
def _get_edge_dict(self): edge_dict = collections.defaultdict(lambda: []) if len(self._edges) > 0: for e in self._edges: data = e['data'] key = tuple([data['i'], data['source'], data['target'], data['polarity']]) edge_dict[key] = data['id'] return edge_dict
Return a dict of edges. Keyed tuples of (i, source, target, polarity) with lists of edge ids [id1, id2, ...]
def add_sparql_line_nums(sparql):
    """Return the SPARQL query with 1-based line numbers prepended."""
    numbered = []
    for line_no, line in enumerate(sparql.split("\n"), start=1):
        numbered.append("%s %s" % (line_no, line))
    return "\n".join(numbered)
Returns a sparql query with line numbers prepended
def _update_id(record, new_id): old_id = record.id record.id = new_id record.description = re.sub('^' + re.escape(old_id), new_id, record.description) return record
Update a record id to new_id, also modifying the ID in record.description
def set_i(self, i, data, field, side):
    """Assign the edge's slice of ``data`` to ``field`` on tile i's ``side`` edge."""
    target_edge = self.get_i(i, side)
    setattr(target_edge, field, data[target_edge.slice])
Assigns data on the i'th tile to the data 'field' of the 'side' edge of that tile
def as_lwp_str(self, ignore_discard=True, ignore_expires=True):
    """Return cookies as newline-separated "Set-Cookie3" headers.

    ignore_discard and ignore_expires: see docstring for
    FileCookieJar.save
    """
    now = time.time()
    lines = []
    for cookie in self:
        if not ignore_discard and cookie.discard:
            continue
        if not ignore_expires and cookie.is_expired(now):
            continue
        lines.append("Set-Cookie3: %s" % lwp_cookie_str(cookie))
    # Trailing newline, as in the original "\n".join(r + [""]).
    lines.append("")
    return "\n".join(lines)
Return cookies as a string of "\\n"-separated "Set-Cookie3" headers. ignore_discard and ignore_expires: see docstring for FileCookieJar.save
def register_tc_plugins(self, plugin_name, plugin_class):
    """Register a testcase plugin and attach its parts to the right areas.

    Initializes the plugin with the bench, then registers its bench
    API, parsers, and external services with the matching registrars.

    :param plugin_name: Name of the plugin.
    :param plugin_class: PluginBase.
    :raises PluginException: if the name is already registered.
    :return: Nothing
    """
    if plugin_name in self.registered_plugins:
        raise PluginException("Plugin {} already registered! Duplicate "
                              "plugins?".format(plugin_name))
    self.logger.debug("Registering plugin %s", plugin_name)
    plugin_class.init(bench=self.bench)
    if plugin_class.get_bench_api() is not None:
        register_func = self.plugin_types[PluginTypes.BENCH]
        register_func(plugin_name, plugin_class)
    if plugin_class.get_parsers() is not None:
        register_func = self.plugin_types[PluginTypes.PARSER]
        register_func(plugin_name, plugin_class)
    if plugin_class.get_external_services() is not None:
        register_func = self.plugin_types[PluginTypes.EXTSERVICE]
        register_func(plugin_name, plugin_class)
    self.registered_plugins.append(plugin_name)
Loads a plugin as a dictionary and attaches needed parts to correct areas for testing parts. :param plugin_name: Name of the plugins :param plugin_class: PluginBase :return: Nothing
def nodes_geometry(self):
    """The nodes in the scene graph with geometry attached.

    Returns
    ------------
    nodes_geometry: (m,) array, of node names
    """
    with_geometry = [name for name in self.transforms.nodes()
                     if 'geometry' in self.transforms.node[name]]
    return np.array(with_geometry)
The nodes in the scene graph with geometry attached. Returns ------------ nodes_geometry: (m,) array, of node names
def dump_begin(self, selector_id):
    """Start dumping a stream.

    Args:
        selector_id (int): The encoded selector of the buffered stream
            we want to dump.

    Returns:
        (int, int, int): Error code, second error code, number of
        available readings.
    """
    # Only one dump may be in progress; tear down any previous walker.
    if self.dump_walker is not None:
        self.storage.destroy_walker(self.dump_walker)
    selector = DataStreamSelector.FromEncoded(selector_id)
    self.dump_walker = self.storage.create_walker(selector, skip_all=False)
    return Error.NO_ERROR, Error.NO_ERROR, self.dump_walker.count()
Start dumping a stream. Args: selector_id (int): The buffered stream we want to dump. Returns: (int, int, int): Error code, second error code, number of available readings
def TeXLaTeXStrFunction(target=None, source=None, env=None):
    """SCons strfunction for the TeX/LaTeX builder.

    Scans the source file to decide the "flavor" (plain TeX vs LaTeX)
    and returns the matching command string; returns '' when actually
    executing, so the spawned command's own output is shown instead.
    """
    if env.GetOption("no_exec"):
        # Only determine the flavor for dry-run display.
        basedir = os.path.split(str(source[0]))[0]
        abspath = os.path.abspath(basedir)
        if is_LaTeX(source, env, abspath):
            result = env.subst('$LATEXCOM', 0, target, source) + " ..."
        else:
            result = env.subst("$TEXCOM", 0, target, source) + " ..."
    else:
        result = ''
    return result
A strfunction for TeX and LaTeX that scans the source file to decide the "flavor" of the source and then returns the appropriate command string.
def print_gateway():
    """Print gateway info as JSON."""
    print("Printing information about the Gateway")
    raw_info = api(gateway.get_gateway_info()).raw
    print(jsonify(raw_info))
Print gateway info as JSON
def _read_single(parser, filepath): from os import path global packages if path.isfile(filepath): parser.readfp(open(filepath))
Reads a single config file into the parser, silently failing if the file does not exist. Args: parser (ConfigParser): parser to read the file into. filepath (str): full path to the config file.
def wait_for(self, timeout):
    """Decorator factory that runs the wrapped function in the reactor
    thread.

    The call blocks for its result or exception (Deferreds handled
    transparently) and raises crochet.TimeoutError after ``timeout``
    seconds (a float), cancelling the Deferred being waited on.
    """
    def decorator(function):
        @wrapt.decorator
        def wrapper(function, _, args, kwargs):
            @self.run_in_reactor
            def run():
                return function(*args, **kwargs)
            eventual_result = run()
            try:
                return eventual_result.wait(timeout)
            except TimeoutError:
                # Don't leave the Deferred running after giving up.
                eventual_result.cancel()
                raise
        result = wrapper(function)
        try:
            # Expose the original function for introspection/testing.
            result.wrapped_function = function
        except AttributeError:
            pass
        return result
    return decorator
A decorator factory that ensures the wrapped function runs in the reactor thread. When the wrapped function is called, its result is returned or its exception raised. Deferreds are handled transparently. Calls will timeout after the given number of seconds (a float), raising a crochet.TimeoutError, and cancelling the Deferred being waited on.
def auth_list():
    """List all available authorizations from /etc/security/auth_attr.

    CLI Example:

    .. code-block:: bash

        salt '*' rbac.auth_list
    """
    auths = {}
    with salt.utils.files.fopen('/etc/security/auth_attr', 'r') as auth_attr:
        for auth in auth_attr:
            auth = salt.utils.stringutils.to_unicode(auth)
            auth = auth.split(':')
            # auth_attr lines have exactly 6 colon-separated fields.
            if len(auth) != 6:
                continue
            # NOTE(review): entries ending in '.' are exposed as globs —
            # presumably prefix/heading entries; confirm against
            # auth_attr(4).
            if auth[0][-1:] == '.':
                auth[0] = '{0}*'.format(auth[0])
            auths[auth[0]] = auth[3]
    return auths
List all available authorization CLI Example: .. code-block:: bash salt '*' rbac.auth_list
def __assert_less(expected, returned): result = "Pass" try: assert (expected < returned), "{0} not False".format(returned) except AssertionError as err: result = "Fail: " + six.text_type(err) return result
Test if a value is less than the returned value
def get_host(name=None, ipv4addr=None, mac=None, return_fields=None, **api_opts):
    """Look up host information in Infoblox by name, IPv4 address or MAC.

    CLI Examples:

    .. code-block:: bash

        salt-call infoblox.get_host hostname.domain.ca
        salt-call infoblox.get_host ipv4addr=123.123.122.12
        salt-call infoblox.get_host mac=00:50:56:84:6e:ae
    """
    connection = _get_infoblox(**api_opts)
    return connection.get_host(
        name=name, mac=mac, ipv4addr=ipv4addr, return_fields=return_fields)
Get host information CLI Examples: .. code-block:: bash salt-call infoblox.get_host hostname.domain.ca salt-call infoblox.get_host ipv4addr=123.123.122.12 salt-call infoblox.get_host mac=00:50:56:84:6e:ae
def iter_finds(regex_obj, s):
    """Yield every non-overlapping match of ``regex_obj`` in ``s`` as a string.

    ``regex_obj`` may be either a pattern string or a compiled regex object.
    """
    pattern = re.compile(regex_obj) if isinstance(regex_obj, str) else regex_obj
    for match in pattern.finditer(s):
        yield match.group()
Generate all matches found within a string for a regex and yield each match as a string
def _show(self):
    """Show the tray icon, creating the status icon on first use."""
    if not self._icon:
        # Lazily create the status icon the first time we are shown.
        self._icon = self._create_statusicon()
    widget = self._icon
    widget.set_visible(True)
    # Remember the connection ids so the handlers can be disconnected later.
    self._conn_left = widget.connect("activate", self._activate)
    self._conn_right = widget.connect("popup-menu", self._popup_menu)
Show the tray icon.
def log_level(level):
    """Convert the given argument into a numeric log level.

    Accepts an int (returned unchanged), a string holding an int, or the
    name of a level defined in the ``logging`` module (case-insensitive).

    Raises:
        ValueError: if ``level`` cannot be interpreted as a log level.
    """
    # On Python 3 `six.string_types` is just (str,); drop the third-party
    # dependency and test against str directly.
    if isinstance(level, int):
        return level
    if isinstance(level, str):
        # First try a plain numeric string, e.g. "10".
        try:
            return int(level)
        except ValueError:
            pass
        # Then try a symbolic name, e.g. "debug" -> logging.DEBUG.
        try:
            return getattr(logging, level.upper())
        except AttributeError:
            pass
    raise ValueError("cannot convert '{}' into a log level".format(level))
Attempt to convert the given argument into a log level. Log levels are represented as integers, where higher values are more severe. If the given level is already an integer, it is simply returned. If the given level is a string that can be converted into an integer, it is converted and that value is returned. Finally, if the given level is a string naming one of the levels defined in the logging module, return that level. If none of those conditions are met, raise a ValueError.
def post(self, endpoint: str, **kwargs) -> dict:
    """HTTP POST to the given API endpoint; returns the decoded response."""
    return self._request('POST', endpoint, **kwargs)
HTTP POST operation to API endpoint.
def get_all_access_keys(user_name, marker=None, max_items=None,
                        region=None, key=None, keyid=None, profile=None):
    """Get all access keys from a user.

    .. versionadded:: 2015.8.0

    CLI Example:

    .. code-block:: bash

        salt myminion boto_iam.get_all_access_keys myuser
    """
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    try:
        return conn.get_all_access_keys(user_name, marker, max_items)
    except boto.exception.BotoServerError as exc:
        # On failure, log the error and return its message to the caller.
        log.debug(exc)
        log.error('Failed to get access keys for IAM user %s.', user_name)
        return six.text_type(exc)
Get all access keys from a user. .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt myminion boto_iam.get_all_access_keys myuser
def multi_replace(str_, search_list, repl_list):
    r"""Apply ``str.replace`` once per (search, replacement) pair.

    Args:
        str_ (str): string to search
        search_list (list): list of search strings
        repl_list (list or str): one replacement per search string, or a
            single string used for every search string.

    Returns:
        str: the transformed string

    Raises:
        ValueError: if the two lists have different lengths.

    Example:
        >>> multi_replace('foo. bar: baz', ['.', ':'], '@')
        'foo@ bar@ baz'
    """
    if isinstance(repl_list, str):
        # A single replacement string is broadcast to every search string.
        repl_list_ = [repl_list] * len(search_list)
    else:
        repl_list_ = repl_list
    # Raise instead of `assert`: asserts are stripped under `python -O`.
    if len(search_list) != len(repl_list_):
        raise ValueError('search_list and repl_list must have the same length')
    newstr = str_
    for search, repl in zip(search_list, repl_list_):
        newstr = newstr.replace(search, repl)
    return newstr
r""" Performs multiple replace functions foreach item in search_list and repl_list. Args: str_ (str): string to search search_list (list): list of search strings repl_list (list or str): one or multiple replace strings Returns: str: str_ CommandLine: python -m utool.util_str --exec-multi_replace Example: >>> # ENABLE_DOCTEST >>> from utool.util_str import * # NOQA >>> str_ = 'foo. bar: baz; spam-eggs --- eggs+spam' >>> search_list = ['.', ':', '---'] >>> repl_list = '@' >>> str_ = multi_replace(str_, search_list, repl_list) >>> result = ('str_ = %s' % (str(str_),)) >>> print(result) str_ = foo@ bar@ baz; spam-eggs @ eggs+spam
def decode_for_output(output, target_stream=None, translation_map=None):
    """Re-encode ``output`` so it can be safely printed to a terminal.

    :param str output: A string to print to a terminal
    :param target_stream: A stream to write to; if given, its ``encoding``
        attribute is preferred when choosing the output encoding.
    :param dict translation_map: A mapping of unicode character ordinals
        to replacement strings.
    :return: A re-encoded string using the preferred encoding
    :rtype: str
    """
    if not isinstance(output, six.string_types):
        # Non-string values pass through untouched.
        return output
    encoding = None
    if target_stream is not None:
        encoding = getattr(target_stream, "encoding", None)
    encoding = get_output_encoding(encoding)
    try:
        output = _encode(output, encoding=encoding, translation_map=translation_map)
    except (UnicodeDecodeError, UnicodeEncodeError):
        # Fall back to the native string form and replace anything that
        # still cannot be encoded.
        output = to_native_string(output)
        output = _encode(
            output, encoding=encoding, errors="replace", translation_map=translation_map
        )
    return to_text(output, encoding=encoding, errors="replace")
Given a string, decode it for output to a terminal :param str output: A string to print to a terminal :param target_stream: A stream to write to, we will encode to target this stream if possible. :param dict translation_map: A mapping of unicode character ordinals to replacement strings. :return: A re-encoded string using the preferred encoding :rtype: str
def working2analysis(self, r):
    """Transform working-space inputs to the analysis color space.

    Converts ``r`` from the working space to the analysis space, then
    applies the analysis space's polar/HSV channel-order swap.
    """
    a = self.colorspace.convert(self.working_space, self.analysis_space, r)
    return self.swap_polar_HSVorder[self.analysis_space](a)
Transform working space inputs to the analysis color space.
def setBlockValue(self, block, value):
    """Store ``value`` into this margin's bit slice of the block user state
    without damaging the other margins' bits.

    Raises:
        Exception: if the margin allocated no bits, ``value`` is negative,
            or ``value`` does not fit into the allocated bit range.
    """
    if self._bit_count == 0:
        raise Exception(
            "The margin '" + self._name + "' did not allocate any bits for the values")
    if value < 0:
        raise Exception(
            "The margin '" + self._name + "' must be a positive integer"
        )
    if value >= 2 ** self._bit_count:
        raise Exception(
            "The margin '" + self._name + "' value exceeds the allocated bit range"
        )
    # Shift the value into this margin's slice of the state word.
    newMarginValue = value << self._bitRange[ 0 ]
    currentUserState = block.userState()
    if currentUserState in [ 0, -1 ]:
        # No state stored yet (Qt uses -1 for "unset"): just set ours.
        block.setUserState(newMarginValue)
    else:
        # NOTE(review): marginMask is not shifted by self._bitRange[0];
        # if this margin's slice does not start at bit 0, the wrong bits
        # are cleared here — confirm against how _bitRange is allocated.
        marginMask = 2 ** self._bit_count - 1
        otherMarginsValue = currentUserState & ~marginMask
        block.setUserState(newMarginValue | otherMarginsValue)
Sets the required value to the block without damaging the other bits
def find_a_system_python(line):
    """Find a Python installation from a given line.

    * Empty line: return the first Python found on the system (or None).
    * ``py`` / ``py.exe`` call on Windows: keep only the version selector
      (e.g. ``py -3.7`` -> ``3.7``) and look that up.
    * Anything else: hand the hint to ``find_python`` for PATH search.
    """
    from .vendor.pythonfinder import Finder
    finder = Finder(system=False, global_search=True)
    if not line:
        # No hint: fall back to the first Python version discovered.
        return next(iter(finder.find_all_python_versions()), None)
    if (line.startswith("py ") or line.startswith("py.exe ")) and os.name == "nt":
        # Windows "py" launcher invocation: strip the launcher and the
        # leading dash from its version argument.
        line = line.split(" ", 1)[1].lstrip("-")
    python_entry = find_python(finder, line)
    return python_entry
Find a Python installation from a given line. This tries to parse the line in various of ways: * Looks like an absolute path? Use it directly. * Looks like a py.exe call? Use py.exe to get the executable. * Starts with "py" something? Looks like a python command. Try to find it in PATH, and use it directly. * Search for "python" and "pythonX.Y" executables in PATH to find a match. * Nothing fits, return None.
def update_time_login(u_name):
    """Record the current timestamp as the user's last login time."""
    (TabMember
     .update(time_login=tools.timestamp())
     .where(TabMember.user_name == u_name)
     .execute())
Update the login time for user.
def default_filename_decoder():
    """Build a decoder parsing CWR filenames in the old or new convention.

    :return: a CWR filename decoder handling both conventions
    """
    grammar_factory = default_filename_grammar_factory()
    return FileNameDecoder(
        grammar_factory.get_rule('filename_old'),
        grammar_factory.get_rule('filename_new'),
    )
Creates a decoder which parses CWR filenames following the old or the new convention. :return: a CWR filename decoder for the old and the new conventions
def ImportDNS(self, config, token=None):
    """Import a dns configuration file into the helper.

    Note: this requires the latest token; run the GrabDNS command first
    to obtain it.
    """
    if not token:
        raise Exception("You must have the dns token set first.")
    self.dns = CotendoDNS([token, config])
    return True
Import a dns configuration file into the helper Note: This requires that you have the latest token. To get the latest token, run the GrabDNS command first.
def _get_sortgo(self):
    """Return the GO-term sort function: the explicit 'sortgo' kwarg if
    given, otherwise the group-default sorter (newline-terminated)."""
    kws = self.datobj.kws
    if 'sortgo' not in kws:
        return self.datobj.grprdflt.gosubdag.prt_attr['sort'] + "\n"
    return kws['sortgo']
Get function for sorting GO terms in a list of namedtuples.
def elimination_order_width(G, order):
    """Calculate the width of the tree decomposition induced by a
    variable elimination order.

    Parameters
    ----------
    G : NetworkX graph
        The graph on which to compute the width of the tree decomposition.
    order : list
        The elimination order. Must be a list of all of the variables in G.

    Returns
    -------
    treewidth : int
        The width of the tree decomposition induced by order.
    """
    adjacency = {node: set(G[node]) for node in G}
    width = 0
    for node in order:
        try:
            degree = len(adjacency[node])
        except KeyError:
            raise ValueError('{} is in order but not in G'.format(node))
        # The width is the largest degree seen at elimination time.
        width = max(width, degree)
        _elim_adj(adjacency, node)
    if adjacency:
        raise ValueError('not all nodes in G were in order')
    return width
Calculates the width of the tree decomposition induced by a variable elimination order. Parameters ---------- G : NetworkX graph The graph on which to compute the width of the tree decomposition. order : list The elimination order. Must be a list of all of the variables in G. Returns ------- treewidth : int The width of the tree decomposition induced by order. Examples -------- This example computes the width of the tree decomposition for the :math:`K_4` complete graph induced by an elimination order found through the min-width heuristic. >>> import dwave_networkx as dnx >>> import networkx as nx >>> K_4 = nx.complete_graph(4) >>> dnx.min_width_heuristic(K_4) (3, [1, 2, 0, 3]) >>> dnx.elimination_order_width(K_4, [1, 2, 0, 3]) 3
def ms_cutlo(self, viewer, event, data_x, data_y):
    """An interactive way to set the low cut level.

    Mouse handler: on press, remember the drag origin and current cut
    levels; on move, adjust the low cut level; on release, clear the
    on-screen message.
    """
    if not self.cancut:
        return True
    x, y = self.get_win_xy(viewer)
    if event.state == 'move':
        self._cutlow_xy(viewer, x, y)
    elif event.state == 'down':
        # Remember the drag origin and the starting cut levels.
        self._start_x, self._start_y = x, y
        self._loval, self._hival = viewer.get_cut_levels()
    else:
        viewer.onscreen_message(None)
    return True
An interactive way to set the low cut level.
def addAPK(self, filename, data):
    """Add an APK file to the Session and run analysis on it.

    :param filename: (file)name of APK file
    :param data: binary data of the APK file
    :return: a tuple of SHA256 Checksum and APK Object
    """
    digest = hashlib.sha256(data).hexdigest()
    log.debug("add APK:%s" % digest)
    apk = APK(data, True)
    self.analyzed_apk[digest] = [apk]
    self.analyzed_files[filename].append(digest)
    self.analyzed_digest[digest] = filename
    dx = Analysis()
    self.analyzed_vms[digest] = dx
    # Analyze every classes.dex in the APK into the same Analysis object.
    for dex in apk.get_all_dex():
        self.addDEX(filename, dex, dx)
    log.debug("added APK:%s" % digest)
    return digest, apk
Add an APK file to the Session and run analysis on it. :param filename: (file)name of APK file :param data: binary data of the APK file :return: a tuple of SHA256 Checksum and APK Object
def deadline(self):
    """Return the deadline; defaults (and caches) next day if none set."""
    deadline = self._deadline
    if not deadline:
        deadline = self.now + timezone.timedelta(days=1)
        self._deadline = deadline
    return deadline
Return next day as deadline if no deadline provided.
def _append_number(self, value, _file):
    """Write a number to the output file, preceded by a single space.

    Keyword arguments:
    * value - number, content to be dumped
    * _file - FileIO, output file
    """
    _file.write(' {text}'.format(text=value))
Call this function to write number contents. Keyword arguments: * value - number, content to be dumped * _file - FileIO, output file
def csw_global_dispatch_by_catalog(request, catalog_slug):
    """pycsw wrapper dispatching a CSW request against a specific catalog."""
    catalog = get_object_or_404(Catalog, slug=catalog_slug)
    if catalog:
        # Rebuild the absolute endpoint URL without trailing slashes.
        url = settings.SITE_URL.rstrip('/') + request.path.rstrip('/')
        return csw_global_dispatch(request, url=url, catalog_id=catalog.id)
pycsw wrapper for catalogs
def parse_int_string(int_string: str) -> List[int]:
    """Given a string like "1 23 4-8 32 1", return a unique list of the
    integers and inclusive ranges it contains.

    Non-numeric tokens (including ranges with non-numeric endpoints,
    which previously crashed with an uncaught ValueError) are logged and
    ignored.  The result is unique but not necessarily sorted.
    """
    cleaned = " ".join(int_string.strip().split())
    # Normalize spaced range dashes ("4 - 8" -> "4-8") and commas.
    cleaned = cleaned.replace(" - ", "-")
    cleaned = cleaned.replace(",", " ")
    tokens = cleaned.split(" ")
    indices: Set[int] = set()
    for token in tokens:
        if "-" in token:
            endpoints = token.split("-")
            if len(endpoints) != 2:
                LOG.info(f"Dropping '{token}' as invalid - weird range.")
                continue
            try:
                start = int(endpoints[0])
                end = int(endpoints[1]) + 1
            except ValueError:
                LOG.info(f"Dropping '{token}' as invalid - not an int.")
                continue
            # In-place update instead of rebuilding the set for each range.
            indices.update(range(start, end))
        else:
            try:
                indices.add(int(token))
            except ValueError:
                LOG.info(f"Dropping '{token}' as invalid - not an int.")
    return list(indices)
Given a string like "1 23 4-8 32 1", return a unique list of those integers in the string and the integers in the ranges in the string. Non-numbers ignored. Not necessarily sorted
def _validate_example(rh, method, example_type):
    """Validate the method's example payload against its schema.

    :returns: Pretty-printed JSON example if it exists and validates,
        otherwise None
    :raises ValidationError: if the example does not match the schema
    """
    example = getattr(method, example_type + "_example")
    schema = getattr(method, example_type + "_schema")
    if example is None:
        return None
    try:
        validate(example, schema)
    except ValidationError as exc:
        # Re-raise with the handler/method identified for easier debugging.
        raise ValidationError(
            "{}_example for {}.{} could not be validated.\n{}".format(
                example_type, rh.__name__, method.__name__, str(exc)
            )
        )
    return json.dumps(example, indent=4, sort_keys=True)
Validates example against schema :returns: Formatted example if example exists and validates, otherwise None :raises ValidationError: If example does not validate against the schema
def register(ctx, model, type, trait, manufacturer, product_name, description, device, nickname, client_type):
    """Registers a device model and instance.

    Builds an authorized session from the stored credentials, then invokes
    the ``register_model`` and ``register_device`` subcommands in turn.

    Device model fields can only contain letters, numbers, and the symbols
    period (.), hyphen (-), underscore (_), space ( ) and plus (+); the
    first character must be a letter or number.  Device IDs may contain
    letters, numbers, period, hyphen, underscore and plus; nicknames may
    contain letters, numbers and spaces.
    """
    ctx.obj['SESSION'] = google.auth.transport.requests.AuthorizedSession(
        ctx.obj['CREDENTIALS']
    )
    ctx.invoke(register_model, model=model, type=type, trait=trait,
               manufacturer=manufacturer, product_name=product_name,
               description=description)
    ctx.invoke(register_device, device=device, model=model,
               nickname=nickname, client_type=client_type)
Registers a device model and instance. Device model fields can only contain letters, numbers, and the following symbols: period (.), hyphen (-), underscore (_), space ( ) and plus (+). The first character of a field must be a letter or number. Device instance fields must start with a letter or number. The device ID can only contain letters, numbers, and the following symbols: period (.), hyphen (-), underscore (_), and plus (+). The device nickname can only contain numbers, letters, and the space ( ) symbol.
def ConvertToTemplate(self,visibility,description=None,password=None):
    """Converts existing server to a template.

    visibility is one of 'private' or 'shared'.  description and password
    default to the server's current description and the stored
    credentials' password.

    >>> d = clc.v2.Datacenter()
    >>> clc.v2.Server(alias='BTDI',id='WA1BTDIAPI207').ConvertToTemplate("private","my template")
    0
    """
    if visibility not in ('private','shared'):
        raise(clc.CLCException("Invalid visibility - must be private or shared"))
    if not password:
        # Fall back to the password stored with the account credentials.
        password = self.Credentials()['password']
    if not description:
        description = self.description
    return(clc.v2.Requests(clc.v2.API.Call('POST','servers/%s/%s/convertToTemplate' % (self.alias,self.id),
                                           json.dumps({"description": description, "visibility": visibility, "password": password}),
                                           session=self.session),
                           alias=self.alias,
                           session=self.session))
Converts existing server to a template. visibility is one of private or shared. >>> d = clc.v2.Datacenter() >>> clc.v2.Server(alias='BTDI',id='WA1BTDIAPI207').ConvertToTemplate("private","my template") 0
def cli(patterns, times, json, csv, rst, md, ref, unit, precision, debug):
    """Execute minibench benchmarks.

    Collects benchmark files matching ``patterns`` (defaulting to
    ``**/*.bench.py``), attaches one reporter per requested output
    format, and runs them.
    """
    if ref:
        # Optional reference report to compare the new results against.
        ref = JSON.load(ref)
    filenames = []
    reporters = [CliReporter(ref=ref, debug=debug, unit=unit, precision=precision)]
    kwargs = {}
    for pattern in patterns or ['**/*.bench.py']:
        filenames.extend(resolve_pattern(pattern))
    if json:
        reporters.append(JsonReporter(json, precision=precision))
    if csv:
        reporters.append(CsvReporter(csv, precision=precision))
    if rst:
        reporters.append(RstReporter(rst, precision=precision))
    if md:
        reporters.append(MarkdownReporter(md, precision=precision))
    if times:
        kwargs['times'] = times
    runner = BenchmarkRunner(*filenames, reporters=reporters, debug=debug)
    runner.run(**kwargs)
Execute minibench benchmarks
def poll(self):
    """Check if the child process has terminated.

    Sets and returns the returncode attribute; marks the task done when
    the process has exited.
    """
    returncode = self.process.poll()
    self._returncode = returncode
    if returncode is not None:
        self.set_status(self.S_DONE, "status set to Done")
    return returncode
Check if child process has terminated. Set and return returncode attribute.
def get_context_json(self, context):
    """Build the base skeleton of a JSON answer for the given context."""
    table = {
        'head': self.__jcontext_tablehead(context),
        'body': None,
        'header': None,
        'summary': None,
    }
    return {
        'meta': self.__jcontext_metadata(context),
        'filter': self.__jcontext_filter(context),
        'table': table,
    }
Return a base answer for a json answer
def _get_event_id(object_type: str) -> str:
    """Return a unique event id for the given object type.

    Args:
        object_type (str): Type of object

    Returns:
        str, event id
    """
    key = _keys.event_counter(object_type)
    # Watch the counter key so the read/increment pair below executes as
    # one atomic transaction.
    DB.watch(key, pipeline=True)
    count = DB.get_value(key)
    DB.increment(key)
    DB.execute()
    if count is None:
        # First event ever recorded for this object type.
        count = 0
    return '{}_event_{:08d}'.format(object_type, int(count))
Return an event key for the event on the object type. This must be a unique event id for the object. Args: object_type (str): Type of object Returns: str, event id
def append(self, element):
    """Append a PileupElement to this Pileup.

    If an identical PileupElement is already part of this Pileup, do
    nothing.
    """
    assert element.locus == self.locus, (
        "Element locus (%s) != Pileup locus (%s)"
        % (element.locus, self.locus))
    # elements is a dict used as an ordered set; the value is irrelevant.
    self.elements[element] = None
Append a PileupElement to this Pileup. If an identical PileupElement is already part of this Pileup, do nothing.
def getWorkingCollisionBoundsInfo(self):
    """Query collision-bounds quads from the working copy.

    Returns the number of Quads if the buffer points to null. Otherwise it
    returns Quads into the buffer up to the max specified from the working
    copy.
    """
    fn = self.function_table.getWorkingCollisionBoundsInfo
    pQuadsBuffer = HmdQuad_t()
    punQuadsCount = c_uint32()
    result = fn(byref(pQuadsBuffer), byref(punQuadsCount))
    return result, pQuadsBuffer, punQuadsCount.value
Returns the number of Quads if the buffer points to null. Otherwise it returns Quads into the buffer up to the max specified from the working copy.
def get(self, request, *_args, **_kwargs):
    """Respond to a UserInfo request.

    Two optional query parameters are accepted, ``scope`` and ``claims``.
    Returns the user's claims as JSON, or a 400 response when the openid
    scope is missing or the claims cannot be produced.
    """
    access_token = self.access_token
    scope_string = request.GET.get('scope')
    scope_request = scope_string.split() if scope_string else None
    claims_string = request.GET.get('claims')
    claims_request = json.loads(claims_string) if claims_string else None
    if not provider.scope.check(constants.OPEN_ID_SCOPE, access_token.scope):
        return self._bad_request('Missing openid scope.')
    try:
        claims = self.userinfo_claims(access_token, scope_request, claims_request)
    # Fixed: the original used Python-2-only "except ValueError, exception"
    # syntax, which is a SyntaxError on Python 3.
    except ValueError as exception:
        return self._bad_request(str(exception))
    response = JsonResponse(claims)
    return response
Respond to a UserInfo request. Two optional query parameters are accepted, scope and claims. See the references above for more details.
def unescape(msg, extra_format_dict=None):
    """Take a girc-escaped message and return a raw IRC message.

    Args:
        msg: escaped message text.
        extra_format_dict: optional extra escape-key replacements; entries
            from the module-level ``format_dict`` take precedence, matching
            the original lookup order.
    """
    # Merge into a fresh dict.  The original declared a mutable default
    # argument ({}) and mutated it in place with .update(format_dict),
    # which also clobbered any dict the caller passed in.
    merged_format_dict = dict(extra_format_dict) if extra_format_dict else {}
    merged_format_dict.update(format_dict)
    new_msg = ''
    while len(msg):
        char = msg[0]
        msg = msg[1:]
        if char == escape_character:
            escape_key = msg[0]
            msg = msg[1:]
            if escape_key == escape_character:
                # A doubled escape char produces a literal escape char.
                new_msg += escape_character
            elif escape_key == '{':
                # Extended key of the form ${name}: collect until '}'.
                buf = ''
                new_char = ''
                while True:
                    new_char = msg[0]
                    msg = msg[1:]
                    if new_char == '}':
                        break
                    else:
                        buf += new_char
                new_msg += _get_from_format_dict(merged_format_dict, buf)
            else:
                new_msg += _get_from_format_dict(merged_format_dict, escape_key)
                if escape_key == 'c':
                    # Colour escape: consume the colour digits that follow.
                    fill_last = len(msg) and msg[0] in digits
                    colours, msg = extract_girc_colours(msg, fill_last)
                    new_msg += colours
        else:
            new_msg += char
    return new_msg
Takes a girc-escaped message and returns a raw IRC message