code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def find_sdk_dir(self): if not SCons.Util.can_read_reg: debug('find_sdk_dir(): can not read registry') return None hkey = self.HKEY_FMT % self.hkey_data debug('find_sdk_dir(): checking registry:{}'.format(hkey)) try: sdk_dir = common.read_reg(hkey) ...
Try to find the MS SDK from the registry. Return None if failed or the directory does not exist.
def _eta_from_phi(self):
    """Recompute `eta` from the current `phi` values.

    eta[w] = 1 - phi[w] / prod(eta[:w]); the running product of the
    already-computed entries is kept in a local accumulator.  The
    resulting array is validated with `_checkParam`.
    """
    self.eta = scipy.ndarray(N_NT - 1, dtype='float')
    running_product = 1.0
    for index in range(N_NT - 1):
        self.eta[index] = 1.0 - self.phi[index] / running_product
        running_product *= self.eta[index]
    _checkParam('eta', self.eta, self.PARAMLIMITS, self.PARAMTYPES)
Update `eta` using current `phi`.
def data_from_bytes(self, byte_representation):
    """Decode *byte_representation* using this resource's encoding and
    delegate to `data_from_string` to build the resource data."""
    decoded_text = byte_representation.decode(self.encoding)
    return self.data_from_string(decoded_text)
Converts the given bytes representation to resource data.
def _resolve_atomtypes(topology): for atom in topology.atoms(): atomtype = [rule_name for rule_name in atom.whitelist - atom.blacklist] if len(atomtype) == 1: atom.id = atomtype[0] elif len(atomtype) > 1: raise FoyerError("Found multiple types for atom {} ({}): {}.".f...
Determine the final atomtypes from the white- and blacklists.
def pop(self, key=__marker, default=__marker): heap = self._heap position = self._position if key is self.__marker: if not heap: raise KeyError('pqdict is empty') key = heap[0].key del self[key] return key try: p...
If ``key`` is in the pqdict, remove it and return its priority value, else return ``default``. If ``default`` is not provided and ``key`` is not in the pqdict, raise a ``KeyError``. If ``key`` is not provided, remove the top item and return its key, or raise ``KeyError`` if the pqdict i...
def post_collection(self, collection, body):
    """POST *body* (a list) to the v1 endpoint for *collection*.

    Thin transport wrapper: validates the arguments, builds the full
    URI and hands the request to the underlying service.  The body is
    expected to already be fully generated (without ids).
    """
    assert isinstance(body, list), "POST requires body to be a list"
    assert collection.startswith('/'), "Collections must start with /"
    endpoint = '/v1' + collection
    return self.service._post(self.uri + endpoint, body)
Creates a new collection. This is mostly just transport layer and passes collection and body along. It presumes the body already has generated. The collection is *not* expected to have the id.
def init_vagrant(self, vagrant_file): if self.inherit_image: image_name, image_tag = str(self.inherit_image).split(":") else: image_name = self.get_arca_base_name() image_tag = self.get_python_base_tag(self.get_python_version()) logger.info("Creating Vagrantfi...
Creates a Vagrantfile in the target dir, with only the base image pulled. Copies the runner script to the directory so it's accessible from the VM.
def _join_list(lst, oxford=False):
    """Join *lst* into an English phrase ("a, b and c").

    With ``oxford=True`` a serial comma precedes the final "and".
    Handles the empty, single- and two-element cases.
    """
    if not lst:
        return ''
    if len(lst) == 1:
        return lst[0]
    if len(lst) == 2:
        return lst[0] + ' and ' + lst[1]
    head = ', '.join(lst[:-1])
    if oxford:
        head += ','
    return head + ' and ' + lst[-1]
Join a list of words in a grammatically correct way.
def Start(self):
    """Query the client for available Volume Shadow Copies via WMI.

    Resets the flow state, then issues the WMI query; results are
    delivered to the ListDeviceDirectories state.
    """
    self.state.shadows = []
    self.state.raw_device = None
    self.CallClient(
        server_stubs.WmiQuery,
        query="SELECT * FROM Win32_ShadowCopy",
        next_state="ListDeviceDirectories")
Query the client for available Volume Shadow Copies using a WMI query.
def parse_manifest(template_lines):
    """Return the file names selected by the given MANIFEST.in lines.

    Blank lines are ignored; every other line is fed to distutils'
    template-line processor.
    """
    file_list = distutils.filelist.FileList()
    for template_line in template_lines:
        if not template_line.strip():
            continue
        file_list.process_template_line(template_line)
    return file_list.files
List of file names included by the MANIFEST.in template lines.
def _merge_user_attrs(self, attrs_backend, attrs_out, backend_name): for attr in attrs_backend: if attr in self.attributes.backend_attributes[backend_name]: attrid = self.attributes.backend_attributes[backend_name][attr] if attrid not in attrs_out: ...
merge attributes from one backend search to the attributes dict output
def get_estimates_without_scope_in_month(self, customer): estimates = self.get_price_estimates_for_customer(customer) if not estimates: return [] tables = {model: collections.defaultdict(list) for model in self.get_estimated_models()} dates = set() f...
It is expected that valid row for each month contains at least one price estimate for customer, service setting, service, service project link, project and resource. Otherwise all price estimates in the row should be deleted.
def only_path(self): start = [v for v in self.nodes if self.nodes[v].get('start', False)] if len(start) != 1: raise ValueError("graph does not have exactly one start node") path = [] [v] = start while True: path.append(v) u = v vs ...
Finds the only path from the start node. If there is more than one, raises ValueError.
def ean_13(name=None):
    """Build the grammar for a 13-digit EAN barcode field.

    :param name: optional field name (defaults to 'EAN 13 Field')
    :return: grammar with results name 'ean_13'
    """
    field_name = 'EAN 13 Field' if name is None else name
    field = basic.numeric(13).setName(field_name)
    return field.setResultsName('ean_13')
Creates the grammar for an EAN 13 code. These are the codes on thirteen digits barcodes. :param name: name for the field :return: grammar for an EAN 13 field
def get_correlation_table(self, chain=0, parameters=None, caption="Parameter Correlations", label="tab:parameter_correlations"):
    """Return a LaTeX table of parameter correlations for *chain*.

    Correlations come from `get_correlations`; rendering is delegated
    to the generic 2-D LaTeX table helper.
    """
    names, correlations = self.get_correlations(chain=chain, parameters=parameters)
    return self._get_2d_latex_table(names, correlations, caption, label)
Gets a LaTeX table of parameter correlations. Parameters ---------- chain : int|str, optional The chain index or name. Defaults to first chain. parameters : list[str], optional The list of parameters to compute correlations. Defaults to all parameters ...
def get_virtualenv_path(self, requirements_option: RequirementsOptions, requirements_hash: Optional[str]) -> Path: if requirements_option == RequirementsOptions.no_requirements: venv_name = "no_requirements" else: venv_name = requirements_hash return Path(self._arca.base_...
Returns the path to the virtualenv for the current state of the repository.
def neighborhood(self, node, degree=4): assert self.by_name[node.name] == node already_visited = frontier = set([node.name]) for _ in range(degree): neighbor_names = set() for node_name in frontier: outgoing = set(n.name for n in self.by_input[node_name]) incoming = set(self.by_n...
Return the names of all nodes within `degree` hops of `node`, expanding a frontier over both incoming and outgoing edges (breadth-first traversal).
async def _assert_link_secret(self, action: str) -> str: rv = await self.wallet.get_link_secret_label() if rv is None: LOGGER.debug('HolderProver._assert_link_secret: action %s requires link secret but it is not set', action) raise AbsentLinkSecret('Action {} requires link secret...
Return current wallet link secret label. Raise AbsentLinkSecret if link secret is not set. :param action: action requiring link secret
def sort(self, values):
    """Sort *values* in place by running them through the network.

    Each level of the network is a collection of (wire1, wire2)
    comparators; a compare-exchange is applied for every comparator.
    """
    for comparators in self:
        for low_wire, high_wire in comparators:
            if values[low_wire] > values[high_wire]:
                values[low_wire], values[high_wire] = values[high_wire], values[low_wire]
Sort the values in-place based on the connectors in the network.
def add_child(self, child):
    """Append *child* to this node's children.

    :raises TypeError: if *child* is not a DependencyNode.
    """
    if isinstance(child, DependencyNode):
        self._children.append(child)
    else:
        raise TypeError('"child" must be a DependencyNode')
Add a child node
def fmt_row(self, columns, dimensions, row, **settings): cells = [] i = 0 for column in columns: cells.append(self.fmt_cell( row[i], dimensions[i], column, **settings[self.SETTING_TEXT_FORMATING] ...
Format single table row.
def addUser(self, username, password, firstname, lastname, email, role): self._invites.append({ "username":username, "password":password, "firstname":firstname, "lastname":lastname, "fullname":"%s %s" % (firstname, lastn...
adds a user to the invitation list
async def post(self): self.validate_ip() dispatcher = self.get_dispatcher() update = await self.parse_update(dispatcher.bot) results = await self.process_update(update) response = self.get_response(results) if response: web_response = response.get_web_response...
Process POST request if one of handler returns instance of :class:`aiogram.dispatcher.webhook.BaseResponse` return it to webhook. Otherwise do nothing (return 'ok') :return: :class:`aiohttp.web.Response`
def sign_message(message, private_key, public_key=None):
    """Sign *message* (bytes) with an ed25519-blake2 key pair.

    If *public_key* is omitted it is derived from *private_key*.

    .. warning:: delegates to the "unsafe" signature primitive; not
       safe for secret keys or secret data — testing only.
    """
    derived_public = public_key if public_key is not None else private_to_public_key(private_key)
    return ed25519_blake2.signature_unsafe(message, private_key, derived_public)
Signs a `message` using `private_key` and `public_key` .. warning:: Not safe to use with secret keys or secret data. See module docstring. This function should be used for testing only. :param message: the message to sign :type message: bytes :param private_key: private key used to ...
def _get_paths():
    """Build (test_data_dir, test_data_file) for the bundled Plate01
    FCS test data, anchored at this module's own directory.

    Done inside a function to keep the module namespace clean.
    """
    import os
    here = os.path.dirname(os.path.abspath(__file__))
    data_dir = os.path.join(here, 'tests', 'data', 'Plate01')
    data_file = os.path.join(data_dir, 'RFP_Well_A3.fcs')
    return data_dir, data_file
Generate paths to test data. Done in a function to protect namespace a bit.
def get_model(self): if hasattr(self, 'model') and self.model: return self.model try: none = self.get_queryset().none() return none.model except Exception: raise ImproperlyConfigured( "Integrator: Unable to determine the model with ...
Return the class Model used by this Agnocomplete
def json_description(shape, **metadata):
    """Return a JSON string describing an image: its *shape* plus any
    extra keyword metadata (e.g. ``axes='YXS'``)."""
    description = dict(metadata, shape=shape)
    return json.dumps(description)
Return JSON image description from data shape and other metadata. Return UTF-8 encoded JSON. >>> json_description((256, 256, 3), axes='YXS') # doctest: +SKIP b'{"shape": [256, 256, 3], "axes": "YXS"}'
def serialize(self):
    """Serialize this entity to a JSON document for the X-Ray backend.

    Uses jsonpickle with ``unpicklable=False``.  On failure the
    exception is logged and None is returned (best-effort behaviour).
    """
    try:
        document = jsonpickle.encode(self, unpicklable=False)
    except Exception:
        log.exception("got an exception during serialization")
        return None
    return document
Serialize to JSON document that can be accepted by the X-Ray backend service. It uses jsonpickle to perform serialization.
def older_message(m, lastm): atts = {'time_boot_ms' : 1.0e-3, 'time_unix_usec' : 1.0e-6, 'time_usec' : 1.0e-6} for a in atts.keys(): if hasattr(m, a): mul = atts[a] t1 = m.getattr(a) * mul t2 = lastm.getattr(a) * mul if t2 >= t1 and...
return true if m is older than lastm by timestamp
def load_image(self, imagepath, width=None, height=None): if width: self.width = width self.canvas["width"] = width if height: self.height = height self.canvas["height"] = height self.image = imagepath size = (self.width, self.height) ...
Loads new image into canvas, updating size if needed.
def _setup_ipc(self): log.debug('Setting up the internal IPC proxy') self.ctx = zmq.Context() self.sub = self.ctx.socket(zmq.SUB) self.sub.bind(PUB_PX_IPC_URL) self.sub.setsockopt(zmq.SUBSCRIBE, b'') log.debug('Setting HWM for the proxy frontend: %d', self.hwm) tr...
Setup the IPC PUB and SUB sockets for the proxy.
def prepare_injection_directions(self): if hasattr(self, 'pop_injection_directions') and self.pop_injection_directions: ValueError("Looks like a bug in calling order/logics") ary = [] if (isinstance(self.adapt_sigma, CMAAdaptSigmaTPA) or self.opts['mean_shift_line_sam...
provide genotypic directions for TPA and selective mirroring, with no specific length normalization, to be used in the coming iteration. Details: This method is called in the end of `tell`. The result is assigned to ``self.pop_injection_directions`` and used in `ask_geno...
def process_message(self, message, *args, **kwargs): if not message.level in PERSISTENT_MESSAGE_LEVELS: return message user = kwargs.get("user") or self.get_user() try: anonymous = user.is_anonymous() except TypeError: anonymous = user.is_anonymous ...
If the message's level is one of the persistent levels, convert the message to a model instance and save it.
def add(self, client_id, email_address, name, access_level, password): body = { "EmailAddress": email_address, "Name": name, "AccessLevel": access_level, "Password": password} response = self._post("/clients/%s/people.json" % ...
Adds a person to a client. Password is optional and if not supplied, an invitation will be emailed to the person
def invalid_return_type_error(
    return_type: GraphQLObjectType, result: Any, field_nodes: List[FieldNode]
) -> GraphQLError:
    """Build the GraphQLError reported when a resolver returns a value
    that does not match the expected object type."""
    message = f"Expected value of type '{return_type.name}' but got: {inspect(result)}."
    return GraphQLError(message, field_nodes)
Create a GraphQLError for an invalid return type.
def _dims2shape(*dims): if not dims: raise ValueError("expected at least one dimension spec") shape = list() for dim in dims: if isinstance(dim, int): dim = (0, dim) if isinstance(dim, tuple) and len(dim) == 2: if dim[0] < 0: raise ValueError("...
Convert input dimensions to a shape.
def status_server(self, port): if self.status_server_started == False: self.status_server_started = True try: self.status_service = binwalk.core.statuserver.StatusServer(port, self) except Exception as e: binwalk.core.common.warning("Failed to ...
Starts the progress bar TCP service on the specified port. This service will only be started once per instance, regardless of the number of times this method is invoked. Failure to start the status service is considered non-critical; that is, a warning will be displayed to the user, but...
def validateIP(value, blank=False, strip=None, allowlistRegexes=None, blocklistRegexes=None, excMsg=None): _validateGenericParameters(blank=blank, strip=strip, allowlistRegexes=allowlistRegexes, blocklistRegexes=blocklistRegexes) returnNow, value = _prevalidationCheck(value, blank, strip, allowlistRegexes, bloc...
Raises ValidationException if value is not an IPv4 or IPv6 address. Returns the value argument. * value (str): The value being validated as an IP address. * blank (bool): If True, a blank string will be accepted. Defaults to False. * strip (bool, str, None): If None, whitespace is stripped from value. ...
def get_performance_data(self, project, **params):
    """Fetch performance data for *project* and wrap each series.

    Which signatures are fetched can be restricted via *params*.

    :return: dict mapping signature -> PerformanceSeries
    """
    raw = self._get_json(self.PERFORMANCE_DATA_ENDPOINT, project, **params)
    return {signature: PerformanceSeries(series) for signature, series in raw.items()}
Gets a dictionary of PerformanceSeries objects You can specify which signatures to get by passing signature to this function
def partial(self, fn, *user_args, **user_kwargs): self.get_annotations(fn) def lazy_injection_fn(*run_args, **run_kwargs): arg_pack = getattr(lazy_injection_fn, 'arg_pack', None) if arg_pack is not None: pack_args, pack_kwargs = arg_pack else: ...
Return function with closure to lazily inject annotated callable. Repeat calls to the resulting function will reuse injections from the first call. Positional arguments are provided in this order: 1. positional arguments provided by injector 2. positional arguments provided in...
def sample(self, probs, _covs, idxs, epsilons):
    """Sample ``self.loss_ratios`` with the given probabilities.

    *_covs* is ignored (kept only for API consistency); *epsilons*
    seed the distribution via `set_distribution` before sampling.
    """
    self.set_distribution(epsilons)
    return self.distribution.sample(self.loss_ratios, probs)
Sample the .loss_ratios with the given probabilities. :param probs: array of E' floats :param _covs: ignored, it is there only for API consistency :param idxs: array of E booleans with E >= E' :param epsilons: array of E floats :return...
def Scale(self, factor):
    """Return a copy of this object with every x multiplied by *factor*;
    the original is left untouched."""
    scaled = self.Copy()
    scaled.xs = [factor * x for x in self.xs]
    return scaled
Multiplies the xs by a factor. factor: what to multiply by
def is_disconnected(self, node_id):
    """Return whether the connection for *node_id* exists and reports
    itself disconnected/failed; unknown node ids report False."""
    connection = self._conns.get(node_id)
    return connection is not None and connection.disconnected()
Check whether the node connection has been disconnected or failed. A disconnected node has either been closed or has failed. Connection failures are usually transient and can be resumed in the next ready() call, but there are cases where transient failures need to be caught and re-acted...
def get_fw_dev_map(self, fw_id):
    """Return (obj_dict, mgmt_ip) for the entry whose fw_id_lst contains
    *fw_id*, or (None, None) if no entry lists that firewall."""
    for container in self.res.values():
        if fw_id in container.get('fw_id_lst'):
            return container.get('obj_dict'), container.get('mgmt_ip')
    return None, None
Return the object dict and mgmt ip for a firewall.
def title(self, gender: Optional[Gender] = None, title_type: Optional[TitleType] = None) -> str: gender_key = self._validate_enum(gender, Gender) title_key = self._validate_enum(title_type, TitleType) titles = self._data['title'][gender_key][title_key] return self.random.ch...
Generate a random title for name. You can generate random prefix or suffix for name using this method. :param gender: The gender. :param title_type: TitleType enum object. :return: The title. :raises NonEnumerableError: if gender or title_type in incorrect format. ...
def line( loc, strg ):
    """Return the line of *strg* containing character position *loc*,
    where newlines act as line separators."""
    line_start = strg.rfind("\n", 0, loc) + 1
    line_end = strg.find("\n", loc)
    return strg[line_start:] if line_end < 0 else strg[line_start:line_end]
Returns the line of text containing loc within a string, counting newlines as line separators.
def close(self, code: int = None, reason: str = None) -> None:
    """Close the underlying WebSocket connection, if open, forwarding
    the optional RFC 6455 status *code* and textual *reason*.

    Safe to call repeatedly; subsequent calls are no-ops.
    """
    connection = self.ws_connection
    if connection:
        connection.close(code, reason)
        self.ws_connection = None
Closes this Web Socket. Once the close handshake is successful the socket will be closed. ``code`` may be a numeric status code, taken from the values defined in `RFC 6455 section 7.4.1 <https://tools.ietf.org/html/rfc6455#section-7.4.1>`_. ``reason`` may be a textual message a...
def quoted_split(string, sep, quotes='"'): start = None escape = False quote = False for i, c in enumerate(string): if start is None: start = i if escape: escape = False elif quote: if c == '\\': escape = True elif c...
Split a string on the given separation character, but respecting double-quoted sections of the string. Returns an iterator. :param string: The string to split. :param sep: The character separating sections of the string. :param quotes: A string specifying all legal quote characters. :returns: An ...
def file_mtime(file_path):
    """Return the last-modified time of *file_path* according to git.

    This is the timestamp of the last commit that touched the file in
    the repository, not the filesystem mtime.

    :param file_path: file path relative to the executable.
    :raises IOError: if the file does not exist.
    :return: datetime.datetime of the last commit touching the file.
    """
    if not os.path.isfile(file_path):
        raise IOError('File "%s" does not exist.' % file_path)
    # --format=%ct prints the committer date as a unix timestamp;
    # -1 limits output to the most recent commit for this file.
    ut = subprocess.check_output(['git', 'log', '-1', '--format=%ct', file_path]).strip()
    return datetime.fromtimestamp(int(ut))
Returns the file modified time. This is with regards to the last modification the file has had in the droopescan repo, rather than actual file modification time in the filesystem. @param file_path: file path relative to the executable. @return datetime.datetime object.
def DeserializeMessage(self, response_type, data): try: message = encoding.JsonToMessage(response_type, data) except (exceptions.InvalidDataFromServerError, messages.ValidationError, ValueError) as e: raise exceptions.InvalidDataFromServerError( 'E...
Deserialize the given data as method_config.response_type.
def overwrite_docs(self, doc):
    """Append *doc* to the docstring list, first removing any existing
    element with the same doctype and pointsto (i.e. overwrite it)."""
    for index, existing in enumerate(self.docstring):
        if existing.doctype == doc.doctype and existing.pointsto == doc.pointsto:
            del self.docstring[index]
            break
    self.docstring.append(doc)
Adds the specified DocElement to the docstring list. However, if an element with the same xml tag and pointsto value already exists, it will be overwritten.
def start(self, device):
    """Start serving access to *device* over this BLE virtual interface.

    Delegates startup to the parent virtual interface, then turns on
    BLE advertising so clients can discover the device.

    Args:
        device (VirtualIOTileDevice): The device we will be providing
            access to.
    """
    super(NativeBLEVirtualInterface, self).start(device)
    self.set_advertising(True)
Start serving access to this VirtualIOTileDevice Args: device (VirtualIOTileDevice): The device we will be providing access to
def get_all_project_owners(project_ids=None, **kwargs): projowner_qry = db.DBSession.query(ProjectOwner) if project_ids is not None: projowner_qry = projowner_qry.filter(ProjectOwner.project_id.in_(project_ids)) project_owners_i = projowner_qry.all() return [JSONObject(project_owner_i) for projec...
Get the project owner entries for all the requested projects. If the project_ids argument is None, return all the owner entries for ALL projects
def process_order(self, order): try: dt_orders = self._orders_by_modified[order.dt] except KeyError: self._orders_by_modified[order.dt] = OrderedDict([ (order.id, order), ]) self._orders_by_id[order.id] = order else: sel...
Keep track of an order that was placed. Parameters ---------- order : zp.Order The order to record.
def service_list():
    """Return the list of "services" reported by the REST server.

    Queries the ``service/list`` endpoint (URL taken from DETAILS) and
    returns the decoded JSON payload.
    """
    response = salt.utils.http.query(DETAILS['url'] + 'service/list',
                                     decode_type='json',
                                     decode=True)
    return response['dict']
List "services" on the REST server
def _depth_first_search(self, target_id, layer_id_list, node_list): assert len(node_list) <= self.n_nodes u = node_list[-1] if u == target_id: return True for v, layer_id in self.adj_list[u]: layer_id_list.append(layer_id) node_list.append(v) ...
Search for all the layers and nodes down the path. A recursive function to search all the layers and nodes between the node in the node_list and the node with target_id.
def calc_qdga2_v1(self): der = self.parameters.derived.fastaccess old = self.sequences.states.fastaccess_old new = self.sequences.states.fastaccess_new if der.kd2 <= 0.: new.qdga2 = new.qdgz2 elif der.kd2 > 1e200: new.qdga2 = old.qdga2+new.qdgz2-old.qdgz2 else: d_temp = (...
Perform the runoff concentration calculation for "fast" direct runoff. The working equation is the analytical solution of the linear storage equation under the assumption of constant change in inflow during the simulation time step. Required derived parameter: |KD2| Required state sequence:...
def untrack(context, file_names):
    """Tell the detected version control system to forget each file in
    *file_names*.

    The files themselves are neither created nor deleted; only the VCS
    stops maintaining versions (git rm --cached / hg forget).
    """
    context.obj.find_repo_type()
    for file_name in file_names:
        if context.obj.vc_name == 'git':
            context.obj.call(['git', 'rm', '--cached', file_name])
        elif context.obj.vc_name == 'hg':
            context.obj.call(['hg', 'forget', file_name])
Forget about tracking each file in the list file_names Tracking does not create or delete the actual file, it only tells the version control system whether to maintain versions (to keep track) of the file.
def update_bgp_peer(self, bgp_peer_id, body=None):
    """Update the BGP peer identified by *bgp_peer_id* with a PUT
    request; *body* carries the fields to change."""
    path = self.bgp_peer_path % bgp_peer_id
    return self.put(path, body=body)
Update a BGP peer.
def get_sunrise_time(self, timeformat='unix'):
    """Return the GMT sunrise time, or None when unknown.

    :param timeformat: 'unix' (default) for UNIX time, or 'iso' for an
        ISO8601-formatted ``YYYY-MM-DD HH:MM:SS+00`` string.
    """
    if self._sunrise_time is None:
        return None
    return timeformatutils.timeformat(self._sunrise_time, timeformat)
Returns the GMT time of sunrise :param timeformat: the format for the time value. May be: '*unix*' (default) for UNIX time or '*iso*' for ISO8601-formatted string in the format ``YYYY-MM-DD HH:MM:SS+00`` :type timeformat: str :returns: an int or a str or None :ra...
def build_vec(self): for item in all_calls: self.__dict__[item] = [] for dev in self.devices: for item in all_calls: if self.system.__dict__[dev].n == 0: val = False else: val = self.system.__dict__[dev].call...
build call validity vector for each device
def _pdf(self, phi):
    """Evaluate the *unnormalized* flow PDF at angles *phi*:
    1 + 2 * sum_n v_n * cos(n * phi)."""
    harmonics = np.cos(np.outer(phi, self._n))
    return 1. + 2. * np.inner(self._vn, harmonics)
Evaluate the _unnormalized_ flow PDF.
def get_projected_player_game_stats_by_team(self, season, week, team_id):
    """Return projected player game stats for one team in the given
    season and week (the 'projections' API group)."""
    endpoint = "PlayerGameProjectionStatsByTeam/{season}/{week}/{team_id}"
    return self._method_call(endpoint, "projections",
                             season=season, week=week, team_id=team_id)
Projected Player Game Stats by Team
def mask_image_data(data): if data.bands.size in (2, 4): if not np.issubdtype(data.dtype, np.integer): raise ValueError("Only integer datatypes can be used as a mask.") mask = data.data[-1, :, :] == np.iinfo(data.dtype).min data = data.astype(np.float64) masked_data = da....
Mask image data if alpha channel is present.
def validate_format(self, allowed_formats):
    """Abort unless this export's format is allowed for its type.

    Returns None on success; otherwise reports an error via `ui.error`
    and terminates the process with exit status 1.
    """
    if self.format in allowed_formats:
        return
    ui.error("Export type '{0}' does not accept '{1}' format, only: "
             "{2}".format(self.type, self.format, allowed_formats))
    sys.exit(1)
Validate the allowed formats for a specific type.
def to_base64(self, skip=()):
    """Return this object's JSON representation as base64-encoded bytes.

    :param skip: field names to omit from the JSON serialization.
    """
    json_document = self.to_json(skip=skip)
    return base64.b64encode(ensure_bytes(json_document, encoding='utf-8'))
Serialize this object to JSON (optionally skipping fields) and return the base64-encoded bytes.
def set (self, id, param, value):
    """Set the configuration parameter *param* for *id*.

    Values are stored as ``self.params_[param][id] = value``.
    (``basestring`` / ``is_iterable_typed`` indicate Python-2-era
    Boost.Build code.)
    """
    assert isinstance(id, basestring)
    assert isinstance(param, basestring)
    # value must be an iterable of strings
    assert is_iterable_typed(value, basestring)
    self.params_.setdefault(param, {})[id] = value
Sets the value of a configuration parameter.
def child(self):
    """Return this (root) entry's single child entry from the stream
    directory, or None when child_id is NOSTREAM (no child)."""
    if self.child_id == NOSTREAM:
        return None
    return self.stream.directory[self.child_id]
Root entry object has only one child entry and no siblings.
def iter(self, query, *parameters, **kwargs): cursor = self._cursor() try: self._execute(cursor, query, parameters or None, kwargs) if cursor.description: column_names = [column.name for column in cursor.description] while True: ...
Returns a generator for records from the query.
def _set_result_from_operation(self): with self._completion_lock: if not self._operation.done or self._result_set: return if self._operation.HasField("response"): response = protobuf_helpers.from_any_pb( self._result_type, self._operati...
Set the result or exception from the operation if it is complete.
def load_from_JSON(json_filename): try: jsonfilecontent = json.loads(open(json_filename, 'r').read()) except ValueError as exc: raise CredentialsFormatError(msg="Invalid JSON syntax: "+str(exc)) instance = PIDClientCredentials(credentials_filename=json_filename,**jsonfile...
Create a new instance of a PIDClientCredentials with information read from a local JSON file. :param json_filename: The path to the json credentials file. The json file should have the following format: .. code:: json { "handle_s...
def create_ports(port, mpi, rank): if port == "random" or port is None: ports = {} else: port = int(port) ports = { "REQ": port + 0, "PUSH": port + 1, "SUB": port + 2 } if mpi == 'all': fo...
create a list of ports for the current rank
def write_header(self):
    """Seek to the start of the file and (re)write the MAR header.

    The header carries the MAR magic bytes and the offset at which the
    index data can be found.
    """
    self.fileobj.seek(0)
    serialized = mar_header.build(dict(index_offset=self.last_offset))
    self.fileobj.write(serialized)
Write the MAR header to the file. The MAR header includes the MAR magic bytes as well as the offset to where the index data can be found.
def pull_dependencies(self, nodes): visitor = DependencyFinderVisitor() for node in nodes: visitor.visit(node) for dependency in 'filters', 'tests': mapping = getattr(self, dependency) for name in getattr(visitor, dependency): if name not in ma...
Pull all the dependencies.
def splittermixerfieldlists(data, commdct, objkey): objkey = objkey.upper() objindex = data.dtls.index(objkey) objcomms = commdct[objindex] theobjects = data.dt[objkey] fieldlists = [] for theobject in theobjects: fieldlist = list(range(1, len(theobject))) fieldlists.append(field...
Return, for each object of the given object key, the list of field indices from 1 up to the object's length minus one.
def render(self, surf): pos, size = self.topleft, self.size if not self.flags & self.NO_SHADOW: if self.flags & self.NO_ROUNDING: pygame.draw.rect(surf, LIGHT_GREY, (pos + self._bg_delta, size)) else: roundrect(surf, (pos + self._bg_delta, size), L...
Render the button on a surface.
def fit_model(y, x, yMaxLag, xMaxLag, includesOriginalX=True, noIntercept=False, sc=None): assert sc != None, "Missing SparkContext" jvm = sc._jvm jmodel = jvm.com.cloudera.sparkts.models.AutoregressionX.fitModel(_nparray2breezevector(sc, y.toArray()), _nparray2breezematrix(sc, x.toArray()), yMaxLag, xMaxLa...
Fit an autoregressive model with additional exogenous variables. The model predicts a value at time t of a dependent variable, Y, as a function of previous values of Y, and a combination of previous values of exogenous regressors X_i, and current values of exogenous regressors X_i. This is a generalization ...
def merge_task_lists(runset_results, tasks): for runset in runset_results: dic = dict([(run_result.task_id, run_result) for run_result in reversed(runset.results)]) runset.results = [] for task in tasks: run_result = dic.get(task) if run_result is None: ...
Set the filelists of all RunSetResult elements so that they contain the same files in the same order. For missing files a dummy element is inserted.
def _wrap_element(self, element): def dirty_callback(): self._set_dirty() if isinstance(element, list): element = ProxyList(element, dirty_callback=dirty_callback) elif isinstance(element, dict): element = ProxyDict(element, dirty_callback=dirty_callback) ...
We want to know if an item is modified that is stored in this dict. If the element is a list or dict, we wrap it in a ProxyList or ProxyDict, and if it is modified execute a callback that updates this instance. If it is a ZenpyObject, then the callback updates the parent object.
def iter_subscriptions(self, login=None, number=-1, etag=None):
    """Iterate over repositories subscribed to by ``login`` or the
    authenticated user.

    :param str login: (optional) user whose subscriptions to list;
        when falsy, the authenticated user's subscriptions are used.
    :param int number: (optional) number of repositories to return;
        -1 (default) returns all.
    :param str etag: (optional) ETag from a previous request.
    """
    if login:
        return self.user(login).iter_subscriptions()
    url = self._build_url('user', 'subscriptions')
    return self._iter(int(number), url, Repository, etag=etag)
Iterate over repositories subscribed to by ``login`` or the authenticated user. :param str login: (optional), name of user whose subscriptions you want to see :param int number: (optional), number of repositories to return. Default: -1 returns all repositories :p...
def get_queryset(self):
    "Restrict to a single kind of event, if any, and include Venue data."
    qs = super().get_queryset()
    kind = self.get_event_kind()
    if kind is not None:
        qs = qs.filter(kind=kind)
    # select_related avoids a per-event query when accessing the venue.
    qs = qs.select_related('venue')
    return qs
Restrict to a single kind of event, if any, and include Venue data.
def good_classmethod_decorator(decorator): def new_decorator(cls, f): g = decorator(cls, f) g.__name__ = f.__name__ g.__doc__ = f.__doc__ g.__dict__.update(f.__dict__) return g new_decorator.__name__ = decorator.__name__ new_decorator.__doc__ = decorator.__doc__ n...
This decorator makes class method decorators behave well wrt to decorated class method names, doc, etc.
def list_firmware_images(self, **kwargs):
    """List all firmware images.

    :param int limit: number of firmware images to retrieve
    :param str order: ordering by time, 'desc' or 'asc'
    :param str after: get firmware images after the given image_id
    :param dict filters: dictionary of filters to apply
    :return: PaginatedResponse of FirmwareImage objects
    """
    kwargs = self._verify_sort_options(kwargs)
    kwargs = self._verify_filters(kwargs, FirmwareImage, True)
    api = self._get_api(update_service.DefaultApi)
    return PaginatedResponse(api.firmware_image_list, lwrap_type=FirmwareImage, **kwargs)
List all firmware images. :param int limit: number of firmware images to retrieve :param str order: ordering of images when ordered by time. 'desc' or 'asc' :param str after: get firmware images after given `image_id` :param dict filters: Dictionary of filters to apply :return: ...
def docs_cli(ctx, recreate, gen_index, run_doctests):
    """Build project documentation (reference docs, then sphinx).

    Does nothing when a subcommand was invoked; otherwise delegates to
    peltak's docs logic.
    """
    if ctx.invoked_subcommand:
        return
    # imported lazily so the CLI stays fast when this command is unused
    from peltak.logic import docs
    docs.docs(recreate, gen_index, run_doctests)
Build project documentation. This command will run sphinx-refdoc first to generate the reference documentation for the code base. Then it will run sphinx to generate the final docs. You can configure the directory that stores the docs source (index.rst, conf.py, etc.) using the DOC_SRC_PATH conf variab...
def network_pf(network, snapshots=None, skip_pre=False, x_tol=1e-6, use_seed=False):
    """Run a full non-linear power flow for a generic network.

    Thin wrapper delegating to `_network_prepare_and_run_pf` with
    ``linear=False``.

    :param snapshots: subset (or single element) of network.snapshots;
        defaults to all snapshots.
    :param skip_pre: skip the preliminary computation steps.
    :param x_tol: solver tolerance.
    :param use_seed: reuse a previous solution as starting point.
    """
    return _network_prepare_and_run_pf(network, snapshots, skip_pre,
                                       linear=False, x_tol=x_tol, use_seed=use_seed)
Full non-linear power flow for generic network. Parameters ---------- snapshots : list-like|single snapshot A subset or an elements of network.snapshots on which to run the power flow, defaults to network.snapshots skip_pre: bool, default False Skip the preliminary steps of comp...
def smoothEstimate(self, nodeShape, estimatedNodeCount): weightedEstimate = (1 - self.betaInertia) * estimatedNodeCount + \ self.betaInertia * self.previousWeightedEstimate[nodeShape] self.previousWeightedEstimate[nodeShape] = weightedEstimate return self._round(weight...
Smooth out fluctuations in the estimate for this node compared to previous runs. Returns an integer.
def get_dataset_split(tmp_dir, split, use_control_set): if not use_control_set: dataset_split = { problem.DatasetSplit.TRAIN: [ f for f in tf.gfile.Glob( os.path.join(tmp_dir, "train-novels/*/*.txt")) ], problem.DatasetSplit.EVAL: [ os.path.join(tmp_...
Gives the file paths with regards to the given split. Args: tmp_dir: temp directory split: dataset split use_control_set: uses control dataset if true. Returns: list of file paths.
def shift(self, periods, axis=0, fill_value=None): new_values, fill_value = maybe_upcast(self.values, fill_value) f_ordered = new_values.flags.f_contiguous if f_ordered: new_values = new_values.T axis = new_values.ndim - axis - 1 if np.prod(new_values.shape): ...
shift the block by periods, possibly upcast
def md_to_pdf(input_name, output_name):
    """Convert the MarkDown file *input_name* to a PDF via pandoc.

    ``.pdf`` is appended to *output_name* unless it already ends with
    it.  Paths are relative to the caller's working directory.

    Fix: the original built a shell command by string concatenation
    (``os.system``), which broke on paths with spaces and allowed
    shell injection via file names; this uses an argument list with
    ``shell=False`` semantics instead.
    """
    import subprocess
    if not output_name.endswith('.pdf'):
        output_name += '.pdf'
    subprocess.run(['pandoc', input_name, '-o', output_name])
Converts an input MarkDown file to a PDF of the given output name. Parameters ========== input_name : String Relative file location of the input file to where this function is being called. output_name : String Relative file location of the output file to where this function is being called. N...
def get_display_name(value):
    """Parse a display-name (RFC 5322: display-name = phrase) from the
    start of *value*.

    Returns a DisplayName token built from the parsed phrase (content
    and defects copied over) plus the unparsed remainder of *value*.
    """
    display_name = DisplayName()
    token, value = get_phrase(value)
    display_name.extend(token[:])
    display_name.defects = token.defects[:]
    return display_name, value
display-name = phrase Because this is simply a name-rule, we don't return a display-name token containing a phrase, but rather a display-name token with the content of the phrase.
def from_emcee(sampler=None, *, var_names=None, arg_names=None, coords=None, dims=None):
    """Convert emcee sampler results into an InferenceData object.

    :param sampler: fitted emcee.EnsembleSampler.
    :param var_names: (optional) names for variables in the sampler.
    :param arg_names: (optional) names for args in the sampler.
    :param coords: (optional) coordinate values for dimensions.
    :param dims: (optional) mapping of variable names to dimensions.
    """
    return EmceeConverter(
        sampler=sampler, var_names=var_names, arg_names=arg_names, coords=coords, dims=dims
    ).to_inference_data()
Convert emcee data into an InferenceData object. Parameters ---------- sampler : emcee.EnsembleSampler Fitted sampler from emcee. var_names : list[str] (Optional) A list of names for variables in the sampler arg_names : list[str] (Optional) A list of names for args in the sa...
def make_gym_env(env_id, num_env=2, seed=123, wrapper_kwargs=None, start_index=0): if wrapper_kwargs is None: wrapper_kwargs = {} def make_env(rank): def _thunk(): env = gym.make(env_id) env.seed(seed + rank) return env return _thunk set_global_see...
Create a wrapped, SubprocVecEnv for Gym Environments.
def __exportUsers(self, sort, limit=0): position = 1 dataUsers = self.getSortedUsers(sort) if limit: dataUsers = dataUsers[:limit] exportedUsers = [] for u in dataUsers: userExported = u.export() userExported["position"] = position ...
Export the users to a dictionary. :param sort: field to sort the users :type sort: str. :return: exported users. :rtype: dict.
def subscribe(self, topic, qos=0): result, mid = self.client.subscribe(topic=topic, qos=qos) if result == MQTT_ERR_SUCCESS: self.topics[topic] = TopicQos(topic=topic, qos=qos) logger.debug('Subscribed to topic: {0}, qos: {1}' .format(topic, qos)) ...
Subscribe to a certain topic. :param topic: a string specifying the subscription topic to subscribe to. :param qos: the desired quality of service level for the subscription. Defaults to 0. :rtype: (int, int) :result: (result, mid) A topic is a ...
def clear(self):
    """Reset this config variable to its default: an empty buffer when
    no default exists, otherwise a fresh copy of the default bytes."""
    default = self.default_value
    self.current_value = bytearray() if default is None else bytearray(default)
Clear this config variable to its reset value.
def get_parents(obj, **kwargs):
    """Return a string describing *obj*'s class hierarchy (MRO).

    Each MRO entry is cleaned up with `extract_type` to strip the
    "<class ...>" wrapper.

    :param num_of_mro: how many MRO entries to include (default 5).
    """
    depth = kwargs.get("num_of_mro", 5)
    mro = getmro(obj.__class__)
    names = [extract_type(str(entry)) for entry in mro[:depth]]
    return "Hierarchy: {}".format(', '.join(names))
Return the MRO of an object. Do regex on each element to remove the "<class..." bit.
def rpop(self, key, *, encoding=_NOTSET):
    """Remove and return the last element of the list stored at *key*
    (Redis RPOP), decoded with *encoding* when given."""
    return self.execute(b'RPOP', key, encoding=encoding)
Removes and returns the last element of the list stored at key.
def getitem_column_array(self, key): numeric_indices = list(self.columns.get_indexer_for(key)) def getitem(df, internal_indices=[]): return df.iloc[:, internal_indices] result = self.data.apply_func_to_select_indices( 0, getitem, numeric_indices, keep_remaining=False ...
Get column data for target labels. Args: key: Target labels by which to retrieve data. Returns: A new QueryCompiler.
def memory_usage(self, **kwargs):
    """Return the per-column memory usage as a new QueryCompiler.

    Builds a map-reduce over ``DataFrame.memory_usage`` and reduces it
    along axis 0 (full-axis reduction).
    """
    def memory_usage_builder(df, **builder_kwargs):
        # applied per partition; forwards the caller's kwargs (e.g. deep=)
        return df.memory_usage(**builder_kwargs)

    reducer = self._build_mapreduce_func(memory_usage_builder, **kwargs)
    return self._full_axis_reduce(0, reducer)
Returns the memory usage of each column. Returns: A new QueryCompiler object containing the memory usage of each column.
def on_click(self, event): if event['button'] == 1 and 'button1' in self.options: subprocess.call(self.options['button1'].split()) elif event['button'] == 2 and 'button2' in self.options: subprocess.call(self.options['button2'].split()) elif event['button'] == 3 and 'butt...
A function that should be overwritten by a plugin that wishes to react to events, if it wants to perform any action other than running the supplied command related to a button. event: A dictionary passed from i3bar (after being decoded from JSON) that has the folowing format: e...