code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def make_library(self, diffuse_yaml, catalog_yaml, binning_yaml): ret_dict = {} components_dict = Component.build_from_yamlfile(binning_yaml) diffuse_ret_dict = make_diffuse_comp_info_dict(GalpropMapManager=self._gmm, DiffuseModelManager=sel...
Build up the library of all the components Parameters ---------- diffuse_yaml : str Name of the yaml file with the library of diffuse component definitions catalog_yaml : str Name of the yaml file with the library of catalog split definitions binning_ya...
def rowsAfterValue(self, value, count): if value is None: query = self.inequalityQuery(None, count, True) else: pyvalue = self._toComparableValue(value) currentSortAttribute = self.currentSortColumn.sortAttribute() query = self.inequalityQuery(currentSortA...
Retrieve some rows at or after a given sort-column value. @param value: Starting value in the index for the current sort column at which to start returning results. Rows with a column value for the current sort column which is greater than or equal to this value will be returned. ...
def suspend(name, call=None): if call != 'action': raise SaltCloudSystemExit( 'The suspend action must be called with ' '-a or --action.' ) vm_properties = [ "name", "summary.runtime.powerState" ] vm_list = salt.utils.vmware.get_mors_with_propertie...
To suspend a VM using its name CLI Example: .. code-block:: bash salt-cloud -a suspend vmname
def process_header(self, data): metadata = { "datacolumns": data.read_chunk("I"), "firstyear": data.read_chunk("I"), "lastyear": data.read_chunk("I"), "annualsteps": data.read_chunk("I"), } if metadata["annualsteps"] != 1: raise Invalid...
Reads the first part of the file to get some essential metadata # Returns return (dict): the metadata in the header
def binary_to_int(binary_list, lower_bound=0, upper_bound=None): if binary_list == []: return lower_bound else: integer = int(''.join([str(bit) for bit in binary_list]), 2) if (upper_bound is not None) and integer + lower_bound > upper_bound: return upper_bound - (integer % (upper_bo...
Return the base 10 integer corresponding to a binary list. The maximum value is determined by the number of bits in binary_list, and upper_bound. The greater allowed by the two. Args: binary_list: list<int>; List of 0s and 1s. lower_bound: Minimum value for output, inclusive. A b...
def quote(code):
    """Return *code* wrapped as ``u"..."`` unless quoting is impossible.

    Non-string input (anything without ``rstrip``) is returned unchanged.
    Code that is empty, already quoted, or contains a double quote is
    returned right-stripped but otherwise untouched.
    """
    try:
        stripped = code.rstrip()
    except AttributeError:
        # Not string-like -- leave it untouched.
        return code
    if not stripped or '"' in stripped:
        return stripped
    if stripped[0] + stripped[-1] in ('""', "''", "u'", '"'):
        # Already quoted (or u'-prefixed) -- nothing to do.
        return stripped
    return 'u"' + stripped + '"'
Returns quoted code if not already quoted and if possible Parameters ---------- code: String \tCode that is quoted
def connect(self, peer_address): self._sock.connect(peer_address) peer_address = self._sock.getpeername() BIO_dgram_set_connected(self._wbio.value, peer_address) assert self._wbio is self._rbio if self._do_handshake_on_connect: self.do_handshake()
Client-side UDP connection establishment This method connects this object's underlying socket. It subsequently performs a handshake if do_handshake_on_connect was set during initialization. Arguments: peer_address - address tuple of server peer
def coerce_to_synchronous(func): if inspect.iscoroutinefunction(func): @functools.wraps(func) def sync_wrapper(*args, **kwargs): loop = asyncio.get_event_loop() try: loop.run_until_complete(func(*args, **kwargs)) finally: loop.close...
Given a function that might be async, wrap it in an explicit loop so it can be run in a synchronous context.
def git_exec(self, command, **kwargs): from .cli import verbose_echo command.insert(0, self.git) if kwargs.pop('no_verbose', False): verbose = False else: verbose = self.verbose verbose_echo(' '.join(command), verbose, self.fake) if not self.fake: ...
Execute git commands
def vm_config(name, main, provider, profile, overrides):
    """Build the final vm config dict.

    Layers are merged in increasing priority: main < provider < profile <
    overrides, and the vm name is stamped on last.

    :param str name: The name of the vm
    :param dict main: The main cloud config
    :param dict provider: The provider config
    :param dict profile: The profile config
    :param dict overrides: The vm's config overrides
    """
    merged = main.copy()
    for layer in (provider, profile):
        merged = salt.utils.dictupdate.update(merged, layer)
    merged.update(overrides)
    merged['name'] = name
    return merged
Create vm config. :param str name: The name of the vm :param dict main: The main cloud config :param dict provider: The provider config :param dict profile: The profile config :param dict overrides: The vm's config overrides
def side_by_side(left, right): r left_lines = list(left.split('\n')) right_lines = list(right.split('\n')) diff = abs(len(left_lines) - len(right_lines)) if len(left_lines) > len(right_lines): fill = ' ' * len(right_lines[0]) right_lines += [fill] * diff elif len(right_lines) > l...
r"""Put two boxes next to each other. Assumes that all lines in the boxes are the same width. Example: >>> left = 'A \nC ' >>> right = 'B\nD' >>> print(side_by_side(left, right)) A B C D <BLANKLINE>
def setText(self, text): if self.text() == text: return self.touch() maxSize = len(self.text()) + 1 self.device.press('KEYCODE_DEL', adbclient.DOWN_AND_UP, repeat=maxSize) self.device.press('KEYCODE_FORWARD_DEL', adbclient.DOWN_AND_UP, repeat=maxSize) self.typ...
This function makes sure that any previously entered text is deleted before setting the value of the field.
def _is_path(instance, attribute, s, exists=True): "Validator for path-yness" if not s: return if exists: if os.path.exists(s): return else: raise OSError("path does not exist") else: raise TypeError("Not a path?")
Validator for path-yness
def unwrap(self, value, session=None):
    """Build a python dict from a list of ``{'k': ..., 'v': ...}`` entries.

    The input is validated first; each key is unwrapped with ``key_type``
    and each value with ``value_type``.
    """
    self.validate_unwrap(value)
    return {
        self.key_type.unwrap(entry['k'], session=session):
            self.value_type.unwrap(entry['v'], session=session)
        for entry in value
    }
Expects a list of dictionaries with ``k`` and ``v`` set to the keys and values that will be unwrapped into the output python dictionary should have. Validates the input and then constructs the dictionary from the list.
def convert(source, to, format=None, extra_args=(), encoding='utf-8'):
    """Convert `source` from `format` to the `to` format via pandoc.

    `source` may be either a file path or a literal string; `extra_args`
    are forwarded to pandoc untouched.
    """
    reader, processor = _read_file, _process_file
    return _convert(reader, processor, source, to, format, extra_args,
                    encoding=encoding)
Convert given `source` from `format` `to` another. `source` may be either a file path or a string to be converted. It's possible to pass `extra_args` if needed. In case `format` is not provided, it will try to infer the format based on given `source`. Raises OSError if pandoc is not found! Make sure...
def add_new_entry(self, entry):
    """Add a completely new entry to this El Torito Section Header.

    Parameters:
     entry - The new EltoritoEntry object to add to the list of entries.
    Returns:
     Nothing.
    """
    if not self._initialized:
        raise pycdlibexception.PyCdlibInternalError(
            'El Torito Section Header not yet initialized')
    self.section_entries.append(entry)
    self.num_section_entries += 1
A method to add a completely new entry to the list of entries of this header. Parameters: entry - The new EltoritoEntry object to add to the list of entries. Returns: Nothing.
def error(self, msg, n):
    """Raise a SyntaxError located at AST node *n*.

    Line and column come from ``n``; the filename from this compiler's
    ``compile_info``.  NOTE(review): the builtin SyntaxError accepts no
    ``filename`` keyword -- presumably this targets an interpreter-level
    SyntaxError class; confirm against the surrounding project.
    """
    raise SyntaxError(msg, n.lineno, n.col_offset,
                      filename=self.compile_info.filename)
Raise a SyntaxError with the lineno and col_offset set to n's.
def fetch(self, recursive=1, exclude_children=False, exclude_back_refs=False): if not self.path.is_resource and not self.path.is_uuid: self.check() params = {} if exclude_children: params['exclude_children'] = True if exclude_back_refs: params['exclude...
Fetch resource from the API server :param recursive: level of recursion for fetching resources :type recursive: int :param exclude_children: don't get children references :type exclude_children: bool :param exclude_back_refs: don't get back_refs references :type exclude_...
def get(self, request): if not self.hosts: return self._get(request.path, request.method, "") try: return self._get( request.path, request.method, request.headers.get("Host", "") ) except NotFound: return self._get(request.path, req...
Get a request handler based on the URL of the request, or raises an error :param request: Request object :return: handler, arguments, keyword arguments
def service_start(name):
    """Start a "service" on the REST server.

    Issues a query to ``<base url>service/start/<name>`` and returns the
    decoded JSON payload (the ``dict`` key of the query result).
    """
    # DETAILS['url'] is the proxy's base URL, configured elsewhere in this
    # module; decode_type='json' makes salt parse the body for us.
    r = salt.utils.http.query(DETAILS['url']+'service/start/'+name, decode_type='json', decode=True)
    return r['dict']
Start a "service" on the REST server
def put_json(self, url, data, cls=None, **kwargs):
    """PUT JSON-encoded *data* to the api-server and return the decoded reply.

    :param url: resource location (eg: "/type/uuid")
    :type url: str
    :param cls: JSONEncoder class
    :type cls: JSONEncoder
    """
    payload = to_json(data, cls=cls)
    kwargs.update(data=payload, headers=self.default_headers)
    response = self.put(url, **kwargs)
    return response.json()
PUT data to the api-server :param url: resource location (eg: "/type/uuid") :type url: str :param cls: JSONEncoder class :type cls: JSONEncoder
def parse_request(self):
    """Parse the request, then route to a more specific ``do_*`` handler.

    After ``BaseHTTPRequestHandler`` parses the request line, the first
    path segment is appended to the HTTP method name (e.g. ``GET_uri1``);
    when a matching ``do_<METHOD>_<segment>`` handler exists,
    ``self.command`` is rewritten so the dispatcher invokes it instead of
    the plain ``do_<METHOD>``.
    """
    parsed_ok = BaseHTTPRequestHandler.parse_request(self)
    if not parsed_ok:
        return parsed_ok
    first_segment = self.path.lstrip('/').split('/')[0]
    candidate = self.command
    if first_segment:
        candidate += '_' + first_segment
    if hasattr(self, 'do_' + candidate):
        self.command = candidate
    return parsed_ok
Override parse_request method to enrich basic functionality of `BaseHTTPRequestHandler` class Original class can only invoke do_GET, do_POST, do_PUT, etc method implementations if they are defined. But we would like to have at least some simple routing mechanism, i.e.: GET /uri1/part2 request s...
def GetFlagSuggestions(attempt, longopt_list): if len(attempt) <= 2 or not longopt_list: return [] option_names = [v.split('=')[0] for v in longopt_list] distances = [(_DamerauLevenshtein(attempt, option[0:len(attempt)]), option) for option in option_names] distances.sort(key=lambda t: t[0]) ...
Get helpful similar matches for an invalid flag.
def get_object(self, base_: Type, qualifier: str = None) -> Any: egg_ = self._find_egg(base_, qualifier) if egg_ is None: raise UnknownDependency('Unknown dependency %s' % base_) scope_id = getattr(egg_.egg, '__haps_custom_scope', INSTANCE_SCOPE) try: _scope = sel...
Get instance directly from the container. If the qualifier is not None, proper method to create/retrieve instance is used. :param base_: `base` of this object :param qualifier: optional qualifier :return: object instance
def lookup(source, keys, fallback=None):
    """Traverse *source*, looking up each key in *keys* in turn.

    Works with anything subscriptable (dicts, lists, tuples, ...).  Any
    lookup failure -- a missing key, an out-of-range index, or a
    non-subscriptable intermediate value -- yields *fallback* instead of
    raising.

    :param source: nested structure to descend into
    :param keys: iterable of keys/indices, applied left to right
    :param fallback: value returned when the path cannot be resolved
    """
    try:
        for key in keys:
            source = source[key]
        return source
    # IndexError added: integer lookups into sequences previously escaped
    # the handler and raised, contradicting the documented contract.
    except (KeyError, IndexError, AttributeError, TypeError):
        return fallback
Traverses the source, looking up each key. Returns the fallback value (None by default) if it can't find anything, instead of raising an exception.
def dump_hex(ofd, start, len_, prefix=0): prefix_whitespaces = ' ' * prefix limit = 16 - (prefix * 2) start_ = start[:len_] for line in (start_[i:i + limit] for i in range(0, len(start_), limit)): hex_lines, ascii_lines = list(), list() for c in line: hex_lines.append('{0:02...
Convert `start` to hex and logs it, 16 bytes per log statement. https://github.com/thom311/libnl/blob/libnl3_2_25/lib/msg.c#L760 Positional arguments: ofd -- function to call with arguments similar to `logging.debug`. start -- bytearray() or bytearray_ptr() instance. len_ -- size of `start` (integ...
def launch_app(app_path, params=[], time_before_kill_app=15): import subprocess try: res = subprocess.call([app_path, params], timeout=time_before_kill_app, shell=True) print('res = ', res) if res == 0: return True else: return False except Exception a...
start an app
def getTreeWalker(treeType, implementation=None, **kwargs): treeType = treeType.lower() if treeType not in treeWalkerCache: if treeType == "dom": from . import dom treeWalkerCache[treeType] = dom.TreeWalker elif treeType == "genshi": from . import genshi ...
Get a TreeWalker class for various types of tree with built-in support :arg str treeType: the name of the tree type required (case-insensitive). Supported values are: * "dom": The xml.dom.minidom DOM implementation * "etree": A generic walker for tree implementations exposing an ...
def event(self, event_name):
    """Record one occurrence of *event_name*, then maybe log progress.

    Args:
      event_name: str. A name for a type of events; identical names are
        aggregated into a single running count.
    """
    self._event_dict[event_name] = self._event_dict.get(event_name, 0) + 1
    self._log_progress_if_interval_elapsed()
Register an event that occurred during processing of a task of the given type. Args: event_name: str A name for a type of events. Events of the same type are displayed as a single entry and a total count of occurrences.
def instantiate_child(self, nurest_object, from_template, response_choice=None, async=False, callback=None, commit=True): if not from_template.id: raise InternalConsitencyError("Cannot instantiate a child from a template with no ID: %s." % from_template) nurest_object.template_id = from_temp...
Instantiate an nurest_object from a template object Args: nurest_object: the NURESTObject object to add from_template: the NURESTObject template object callback: callback containing the object and the connection Returns: Returns t...
def _get_mouse_cursor(self): if self.mouse_cursor is not None: return self.mouse_cursor elif self.interactive and self.draggable: return gdk.CursorType.FLEUR elif self.interactive: return gdk.CursorType.HAND2
Determine mouse cursor. By default look for self.mouse_cursor is defined and take that. Otherwise use gdk.CursorType.FLEUR for draggable sprites and gdk.CursorType.HAND2 for interactive sprites. Defaults to the scene's cursor.
def tar_archive(context): logger.debug("start") mode = get_file_mode_for_writing(context) for item in context['tar']['archive']: destination = context.get_formatted_string(item['out']) source = context.get_formatted_string(item['in']) with tarfile.open(destination, mode) as archive_m...
Archive specified path to a tar archive. Args: context: dictionary-like. context is mandatory. context['tar']['archive'] must exist. It's a dictionary. keys are the paths to archive. values are the destination output paths. Example: tar: archive:...
def XML(content, source=None):
    """Parse XML text with ET.XML, handling ParseError user-friendly.

    ``source`` is an optional label (e.g. a filename) passed to the
    reporter.
    """
    try:
        parsed = ET.XML(content)
    except ET.ParseError as err:
        # Delegate to the friendly reporter; it is expected to raise.
        x_parse_error(err, content, source)
    return parsed
Parses the XML text using the ET.XML function, but handling the ParseError in a user-friendly way.
def remove_xml_element(name, tree): remove = tree.findall( ".//{{http://soap.sforce.com/2006/04/metadata}}{}".format(name) ) if not remove: return tree parent_map = {c: p for p in tree.iter() for c in p} for elem in remove: parent = parent_map[elem] parent.remove(elem...
Removes XML elements from an ElementTree content tree
def list_devices(self, **kwargs): kwargs = self._verify_sort_options(kwargs) kwargs = self._verify_filters(kwargs, Device, True) api = self._get_api(device_directory.DefaultApi) return PaginatedResponse(api.device_list, lwrap_type=Device, **kwargs)
List devices in the device catalog. Example usage, listing all registered devices in the catalog: .. code-block:: python filters = { 'state': {'$eq': 'registered' } } devices = api.list_devices(order='asc', filters=filters) for idx, d in enumerate(devices): ...
def append(self, item):
    """Append a `Monomer` to the `Polymer`.

    Notes
    -----
    Does not update labelling.
    """
    if not isinstance(item, Monomer):
        raise TypeError(
            'Only Monomer objects can be appended to an Polymer.')
    self._monomers.append(item)
Appends a `Monomer to the `Polymer`. Notes ----- Does not update labelling.
def convertDay(self, day, prefix="", weekday=False): def sameDay(d1, d2): d = d1.day == d2.day m = d1.month == d2.month y = d1.year == d2.year return d and m and y tom = self.now + datetime.timedelta(days=1) if sameDay(day, self.now): r...
Convert a datetime object representing a day into a human-ready string that can be read, spoken aloud, etc. Args: day (datetime.date): A datetime object to be converted into text. prefix (str): An optional argument that prefixes the converted string. For example,...
def delete(name, wait=False, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: conn.delete_cache_cluster(name) if not wait: log.info('Deleted cache cluster %s.', name) return True while Tr...
Delete a cache cluster. CLI example:: salt myminion boto_elasticache.delete myelasticache
def peak_interval(self, name, alpha=_alpha, npoints=_npoints, **kwargs):
    """Calculate the peak interval for parameter *name*.

    Extra keyword arguments are forwarded to ``self.get`` when fetching
    the samples; the module-level ``peak_interval`` does the actual work.
    """
    samples = self.get(name, **kwargs)
    return peak_interval(samples, alpha, npoints)
Calculate peak interval for parameter.
def global_symbols_in_children(self):
    """Collect referenced-but-undeclared symbols from all children.

    The intended use case is to ban these names from being used as
    remapped symbol values.
    """
    return set().union(*(
        child.global_symbols | child.global_symbols_in_children
        for child in self.children))
This is based on all children referenced symbols that have not been declared. The intended use case is to ban the symbols from being used as remapped symbol values.
def K_to_F(self, K, method='doubling'):
    """Compute agent 1's best value-maximizing response F, given K.

    Parameters
    ----------
    K : array_like(float, ndim=2)
        A j x n array
    method : str, optional(default='doubling')
        Solution method used in solving the associated Riccati
        equation.

    Returns
    -------
    F : array_like(float, ndim=2)
        Agent 1's response to K
    P : array_like(float, ndim=2)
        Stationary solution of the associated Riccati equation
    """
    # K distorts the dynamics (enters A through C) and shrinks the
    # penalty R by beta*theta*K'K before the standard LQ solve.
    A1 = self.A + dot(self.C, K)
    B1 = self.B
    Q1 = self.Q
    R1 = self.R - self.beta * self.theta * dot(K.T, K)
    lq = LQ(Q1, R1, A1, B1, beta=self.beta)
    P, F, d = lq.stationary_values(method=method)
    return F, P
Compute agent 1's best value-maximizing response F, given K. Parameters ---------- K : array_like(float, ndim=2) A j x n array method : str, optional(default='doubling') Solution method used in solving the associated Riccati equation, str in {'doublin...
def _pagination(self): oldest = self.oldest newest = self.newest base = {key: val for key, val in self.spec.items() if key not in OFFSET_PRIORITY} oldest_neighbor = View({ **base, 'before': oldest, 'order': 'newest' }).first if ...
Compute the neighboring pages from this view. Returns a tuple of older page, newer page.
def string(_object): if is_callable(_object): _validator = _object @wraps(_validator) def decorated(value): ensure(isinstance(value, basestring), "not of type string") return _validator(value) return decorated ensure(isinstance(_object, basestring), "not o...
Validates a given input is of type string. Example usage:: data = {'a' : 21} schema = (string, 21) You can also use this as a decorator, as a way to check for the input before it even hits a validator you may be writing. .. note:: If the argument is a callable, the decorating...
def decode_path(file_path):
    """Turn a path name into unicode (text).

    ``bytes`` input is decoded with the filesystem encoding; ``None`` and
    text input are returned unchanged.
    """
    if file_path is None:
        return None
    # ``bytes`` is ``str`` on Py2 and ``bytes`` on Py3 -- exactly what
    # six.binary_type aliases -- so the six indirection was unnecessary.
    if isinstance(file_path, bytes):
        file_path = file_path.decode(sys.getfilesystemencoding())
    return file_path
Turn a path name into unicode.
def expand_indent(line):
    r"""Return the amount of indentation.

    Tabs are expanded to the next multiple of 8.

    >>> expand_indent('    ')
    4
    >>> expand_indent('\t')
    8
    """
    # Fast path: pure-space indentation needs no per-character walk.
    if '\t' not in line:
        return len(line) - len(line.lstrip())
    indent = 0
    for ch in line:
        if ch == '\t':
            # Jump to the next tab stop (multiple of 8).
            indent = indent - indent % 8 + 8
        elif ch == ' ':
            indent += 1
        else:
            break
    return indent
r"""Return the amount of indentation. Tabs are expanded to the next multiple of 8. >>> expand_indent(' ') 4 >>> expand_indent('\t') 8 >>> expand_indent(' \t') 8 >>> expand_indent(' \t') 16
def get_jump_target_maps(code, opc): offset2prev = {} prev_offset = -1 for offset, op, arg in unpack_opargs_bytecode(code, opc): if prev_offset >= 0: prev_list = offset2prev.get(offset, []) prev_list.append(prev_offset) offset2prev[offset] = prev_list if o...
Returns a dictionary where the key is an offset and the values are a list of instruction offsets which can get run before that instruction. This includes jump instructions as well as non-jump instructions. Therefore, the keys of the dictionary are reachable instructions. The values of the dictionary may...
def match(self, subject: Union[Expression, FlatTerm]) -> Iterator[Tuple[T, Substitution]]: for index in self._match(subject): pattern, label = self._patterns[index] subst = Substitution() if subst.extract_substitution(subject, pattern.expression): for constrai...
Match the given subject against all patterns in the net. Args: subject: The subject that is matched. Must be constant. Yields: A tuple :code:`(final label, substitution)`, where the first component is the final label associated with the pattern as gi...
def loads(cls, data): rep = cbor.loads(data) if not isinstance(rep, Sequence): raise SerializationError('expected a CBOR list') if len(rep) != 2: raise SerializationError('expected a CBOR list of 2 items') metadata = rep[0] if 'v' not in metadata: ...
Create a feature collection from a CBOR byte string.
def _process_scrape_info(self, scraper: BaseScraper, scrape_result: ScrapeResult, item_session: ItemSession): if not scrape_result: return 0, 0 num_inline = 0 num_linked = 0 for link_context in scrape_result.link_conte...
Collect the URLs from the scrape info dict.
def status_unpin(self, id):
    """Unpin a pinned status for the logged-in user.

    Returns a `toot dict`_ with the status that used to be pinned.
    """
    status_id = self.__unpack_id(id)
    return self.__api_request(
        'POST', '/api/v1/statuses/{0}/unpin'.format(str(status_id)))
Unpin a pinned status for the logged-in user. Returns a `toot dict`_ with the status that used to be pinned.
def schema(self): if not hasattr(self, "_schema"): ret = None o = self._type if isinstance(o, type): ret = getattr(o, "schema", None) elif isinstance(o, Schema): ret = o else: module, klass = utils.get_ob...
return the schema instance if this is reference to another table
def _make_ctx_options(ctx_options, config_cls=ContextOptions): if not ctx_options: return None for key in list(ctx_options): translation = _OPTION_TRANSLATIONS.get(key) if translation: if translation in ctx_options: raise ValueError('Cannot specify %s and %s at the same time' % ...
Helper to construct a ContextOptions object from keyword arguments. Args: ctx_options: A dict of keyword arguments. config_cls: Optional Configuration class to use, default ContextOptions. Note that either 'options' or 'config' can be used to pass another Configuration object, but not both. If another ...
def get_sync_start_position(self, document, lineno): " Scan backwards, and find a possible position to start. " pattern = self._compiled_pattern lines = document.lines for i in range(lineno, max(-1, lineno - self.MAX_BACKWARDS), -1): match = pattern.match(lines[i]) ...
Scan backwards, and find a possible position to start.
def if_then(self, classical_reg, if_program, else_program=None): else_program = else_program if else_program is not None else Program() label_then = LabelPlaceholder("THEN") label_end = LabelPlaceholder("END") self.inst(JumpWhen(target=label_then, condition=unpack_classical_reg(classical...
If the classical register at index classical reg is 1, run if_program, else run else_program. Equivalent to the following construction: .. code:: IF [c]: instrA... ELSE: instrB... => JUMP-WHEN @THEN [c] ...
def _islots(self): if "__slots__" not in self.locals: return None for slots in self.igetattr("__slots__"): for meth in ITER_METHODS: try: slots.getattr(meth) break except exceptions.AttributeInferenceError: ...
Return an iterator with the inferred slots.
def collect(self, force=False):
    """Run ``collect_impl`` and cache the results as this change's children.

    The tuple of generated changes is computed once and memoized; pass
    ``force=True`` to recompute.
    """
    cached = self.changes
    if cached and not force:
        return cached
    self.changes = tuple(self.collect_impl())
    return self.changes
calls collect_impl and stores the results as the child changes of this super-change. Returns a tuple of the data generated from collect_impl. Caches the result rather than re-computing each time, unless force is True
def insertDatastore(self, index, store):
    """Insert datastore `store` into this collection at `index`.

    Raises TypeError when `store` is not a Datastore.
    """
    if isinstance(store, Datastore):
        self._stores.insert(index, store)
    else:
        raise TypeError("stores must be of type %s" % Datastore)
Inserts datastore `store` into this collection at `index`.
async def wait_for_group(self, container, networkid, timeout = 120): if networkid in self._current_groups: return self._current_groups[networkid] else: if not self._connection.connected: raise ConnectionResetException groupchanged = VXLANGroupChanged.c...
Wait for a VXLAN group to be created
def result(self, timeout=None):
    """Retrieve (and cache) the result of the call.

    :param timeout: The time to wait for a result from the server.
                    Raises :exc:`RTMPTimeoutError` on timeout.
    """
    if not self.done:
        self._result = self.conn.process_packets(
            transaction_id=self.transaction_id, timeout=timeout)
        self.done = True
    return self._result
Retrieves the result of the call. :param timeout: The time to wait for a result from the server. Raises :exc:`RTMPTimeoutError` on timeout.
def get_club_members(self, club_id, limit=None): result_fetcher = functools.partial(self.protocol.get, '/clubs/{id}/members', id=club_id) return BatchedResultsIterator(entity=model.Athlete, bind_client=self, ...
Gets the member objects for specified club ID. http://strava.github.io/api/v3/clubs/#get-members :param club_id: The numeric ID for the club. :type club_id: int :param limit: Maximum number of athletes to return. (default unlimited) :type limit: int :return: An iterat...
def _get_validate(data): if data.get("vrn_file") and tz.get_in(["config", "algorithm", "validate"], data): return utils.deepish_copy(data) elif "group_orig" in data: for sub in multi.get_orig_items(data): if "validate" in sub["config"]["algorithm"]: sub_val = utils.de...
Retrieve items to validate, from single samples or from combined joint calls.
def is_downloaded(self, file_path):
    """Check whether the data file at *file_path* is already downloaded."""
    if not os.path.exists(file_path):
        return False
    self.chatbot.logger.info('File is already downloaded')
    return True
Check if the data file is already downloaded.
def isrot(m, ntol, dtol):
    """Indicate whether a 3x3 matrix is a rotation matrix.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/isrot_c.html

    :param m: A matrix to be tested (3x3 array of floats).
    :param ntol: Tolerance for the norms of the columns of m.
    :param dtol: Tolerance for the determinant (see the NAIF docs above).
    :return: True if m is a rotation matrix.
    """
    # Marshal Python values into the ctypes shapes the CSPICE call expects.
    m = stypes.toDoubleMatrix(m)
    ntol = ctypes.c_double(ntol)
    dtol = ctypes.c_double(dtol)
    return bool(libspice.isrot_c(m, ntol, dtol))
Indicate whether a 3x3 matrix is a rotation matrix. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/isrot_c.html :param m: A matrix to be tested. :type m: 3x3-Element Array of floats :param ntol: Tolerance for the norms of the columns of m. :type ntol: float :param dtol: ...
def _get_voltage_magnitude_var(self, buses, generators): Vm = array([b.v_magnitude for b in buses]) for g in generators: Vm[g.bus._i] = g.v_magnitude Vmin = array([b.v_min for b in buses]) Vmax = array([b.v_max for b in buses]) return Variable("Vm", len(buses), Vm, Vm...
Returns the voltage magnitude variable set.
def broadcast_channel(message, channel):
    """Broadcast *message* to every client on *channel*.

    Uses the first socket registered for the channel to perform the
    broadcast; that socket itself is included in the recipients.
    """
    try:
        first_client = CHANNELS.get(channel, [])[0]
        sock = CLIENTS[first_client][1]
    except (IndexError, KeyError):
        raise NoSocket("There are no clients on the channel: " + channel)
    sock.send_and_broadcast_channel(message, channel)
Find the first socket for the given channel, and use it to broadcast to the channel, including the socket itself.
def make_prediction_pipeline(pipeline, args): predicted_values, errors = ( pipeline | 'Read CSV Files' >> beam.io.ReadFromText(str(args.predict_data), strip_trailing_newlines=True) | 'Batch Input' >> beam.ParDo(EmitAsBatchDoFn(args.batch_size)) | 'Run TF ...
Builds the prediction pipeline. Reads the csv files, prepends a ',' if the target column is missing, run prediction, and then prints the formated results to a file. Args: pipeline: the pipeline args: command line args
def didLastExecutedUpgradeSucceeded(self) -> bool: lastEventInfo = self.lastActionEventInfo if lastEventInfo: ev_data = lastEventInfo.data currentPkgVersion = NodeControlUtil.curr_pkg_info(ev_data.pkg_name)[0] if currentPkgVersion: return currentPkgVer...
Checks last record in upgrade log to find out whether it is about scheduling upgrade. If so - checks whether current version is equals to the one in that record :returns: upgrade execution result
def _pull_player_data(self): player_info = self._retrieve_html_page() if not player_info: return self._parse_player_information(player_info) all_stats = self._combine_all_stats(player_info) setattr(self, '_season', list(all_stats.keys())) return all_stats
Pull and aggregate all player information. Pull the player's HTML stats page and parse unique properties, such as the player's height, weight, and name. Next, combine all stats for all seasons plus the player's career stats into a single object which can easily be iterated upon. ...
def get_config(config_schema, env=None):
    """Parse config from the environment against *config_schema*.

    Args:
        config_schema: mapping of environment keys to envpy Schema
            objects describing the expected values.
        env: optional mapping used instead of ``os.environ``.
    """
    environment = os.environ if env is None else env
    return parser.parse_env(config_schema, environment)
Parse config from the environment against a given schema Args: config_schema: A dictionary mapping keys in the environment to envpy Schema objects describing the expected value. env: An optional dictionary used to override the environment rather than ...
def parse_options(given, available):
    """Yield ``--key=value`` flags for truthy options present in *available*.

    Options are emitted in sorted key order; falsy values and unknown
    keys are skipped.
    """
    for key in sorted(given):
        value = given[key]
        if value and key in available:
            yield "--{0}={1}".format(key, value)
Given a mapping of options, yield a "--key=value" flag for each option with a truthy value whose key is in the available set.
def export_to_hdf5(network, path, export_standard_types=False, **kwargs): kwargs.setdefault('complevel', 4) basename = os.path.basename(path) with ExporterHDF5(path, **kwargs) as exporter: _export_to_exporter(network, exporter, basename=basename, export_standard_types=exp...
Export network and components to an HDF store. Both static and series attributes of components are exported, but only if they have non-default values. If path does not already exist, it is created. Parameters ---------- path : string Name of hdf5 file to which to export (if it exists,...
def protected_view(view, info): if info.options.get('protected'): def wrapper_view(context, request): response = _advice(request) if response is not None: return response else: return view(context, request) return wrapper_view r...
allows adding `protected=True` to a view_config`
def deps_tree(self): dependencies = self.dependencies + [self.name] if self.repo == "sbo": for dep in dependencies: deps = Requires(flag="").sbo(dep) if dep not in self.deps_dict.values(): self.deps_dict[dep] = Utils().dimensional_list(deps...
Package dependencies image map file
def wait_for_parent_image_build(self, nvr): self.log.info('Waiting for Koji build for parent image %s', nvr) poll_start = time.time() while time.time() - poll_start < self.poll_timeout: build = self.koji_session.getBuild(nvr) if build: self.log.info('Paren...
Given image NVR, wait for the build that produced it to show up in koji. If it doesn't within the timeout, raise an error. :return build info dict with 'nvr' and 'id' keys
def save(self, fname):
    """Serialize the figure's XML tree and write it to file *fname*."""
    serialized = etree.tostring(self.root, xml_declaration=True,
                                standalone=True, pretty_print=True)
    with open(fname, 'wb') as handle:
        handle.write(serialized)
Save figure to a file
def polymer_to_reference_axis_distances(p, reference_axis, tag=True, reference_axis_name='ref_axis'): if not len(p) == len(reference_axis): raise ValueError( "The reference axis must contain the same number of points " "as the Polymer primitive.") prim_cas = p.primitive.coordinat...
Returns distances between the primitive of a Polymer and a reference_axis. Notes ----- Distances are calculated between each point of the Polymer primitive and the corresponding point in reference_axis. In the special case of the helical barrel, if the Polymer is a helix and the reference_axis ...
def put_info(self, key, value):
    """Store a piece of associated information on the task.

    Delegates to ``instance.put_task_info`` using this task's name.

    :param key: info key
    :param value: info value
    :return: whatever ``put_task_info`` returns
    """
    return self.instance.put_task_info(self.name, key, value)
Put associated information of the task.
def locations_to_cache(locations, latest=False): cum_cache = lal.Cache() for source in locations: flist = glob.glob(source) if latest: def relaxed_getctime(fn): try: return os.path.getctime(fn) except OSError: re...
Return a cumulative cache file build from the list of locations Parameters ---------- locations : list A list of strings containing files, globs, or cache files used to build a combined lal cache file object. latest : Optional, {False, Boolean} Only return a cache with the most rece...
def component(self, extra_params=None):
    """Return the Component currently assigned to the Ticket, or None."""
    component_id = self.get('component_id', None)
    if not component_id:
        return None
    matches = self.space.components(id=component_id,
                                    extra_params=extra_params)
    if matches:
        return matches[0]
    return None
The Component currently assigned to the Ticket
def tag_stuff(self): for item in self.input_stream: if 'tags' not in item: item['tags'] = set() for tag_method in self.tag_methods: item['tags'].add(tag_method(item)) if None in item['tags']: item['tags'].remove(None) ...
Look through my input stream for the fields to be tagged
def _ensure_value_is_valid(self, value): if not isinstance(value, self.__class__.value_type): raise TypeError('{0} is not valid collection value, instance ' 'of {1} required'.format( value, self.__class__.value_type)) return value
Ensure that value is a valid collection's value.
def _get_request_args(method, **kwargs): args = [ ('api_key', api_key), ('format', 'json'), ('method', method), ('nojsoncallback', '1'), ] if kwargs: for key, value in kwargs.iteritems(): args.append((key, value)) args.sort(key=lambda tup: tup[0]) ...
Use `method` and other settings to produce a flickr API arguments. Here also use json as the return type. :param method: The method provided by flickr, ex: flickr.photosets.getPhotos :type method: str :param kwargs: Other settings :type kwargs: dict :return: An argument list used fo...
def add_xml_declaration(fn):
    """Decorator prepending an XML version declaration to FN's output."""
    @wraps(fn)
    def wrapper(*args, **kwargs):
        return '<?xml version="1.0" encoding="UTF-8"?>\n\n' + fn(*args, **kwargs)
    return wrapper
Decorator to add header with XML version declaration to output from FN.
def _parse_config(self): config = self.get_block('mlag configuration') cfg = dict() cfg.update(self._parse_domain_id(config)) cfg.update(self._parse_local_interface(config)) cfg.update(self._parse_peer_address(config)) cfg.update(self._parse_peer_link(config)) cfg...
Parses the mlag global configuration Returns: dict: A dict object that is intended to be merged into the resource dict
def from_pydatetime(cls, pydatetime):
    """Create a sql datetime2 object from a Python datetime, ignoring timezone.

    @param pydatetime: Python datetime object
    @return: sql datetime2 object
    """
    # NOTE(review): this passes the bound `date`/`time` methods, not the
    # results of calling them — confirm from_pydate/from_pytime expect that.
    date_part = Date.from_pydate(pydatetime.date)
    time_part = Time.from_pytime(pydatetime.time)
    return cls(date=date_part, time=time_part)
Creates sql datetime2 object from Python datetime object ignoring timezone @param pydatetime: Python datetime object @return: sql datetime2 object
def init_logs(args, tool="NanoPlot"): start_time = dt.fromtimestamp(time()).strftime('%Y%m%d_%H%M') logname = os.path.join(args.outdir, args.prefix + tool + "_" + start_time + ".log") handlers = [logging.FileHandler(logname)] if args.verbose: handlers.append(logging.StreamHandler()) logging....
Initiate log file and log arguments.
def process_geneways_files(input_folder=data_folder, get_evidence=True):
    """Read in Geneways data from *input_folder* and build a processor for it.

    Parameters
    ----------
    input_folder : Optional[str]
        Folder in which to search for the Geneways extraction data files.
    get_evidence : Optional[bool]
        Whether to attempt to gather evidence for the extractions.

    Returns
    -------
    GenewaysProcessor
        The processor constructed over the Geneways data.
    """
    return GenewaysProcessor(input_folder, get_evidence)
Reads in Geneways data and returns a list of statements. Parameters ---------- input_folder : Optional[str] A folder in which to search for Geneways data. Looks for these Geneways extraction data files: human_action.txt, human_actionmention.txt, human_symbols.txt. Omit this ...
def subset_sum(x, R):
    """Subset-sum decision by meet-in-the-middle (splitting).

    :param x: table of values
    :param R: target value
    :returns bool: True iff some subsequence of x has total sum R
                   (the empty subsequence counts, so R == 0 is always True)
    :complexity: O(n * 2^(n/2)) time, O(2^(n/2)) space
    """
    def _all_subset_sums(values):
        # Every achievable subset sum of *values*, including 0 (empty subset).
        sums = [0]
        for v in values:
            sums += [s + v for s in sums]
        return sums

    k = len(x) // 2
    Y = sorted(_all_subset_sums(x[:k]))
    Z = sorted(R - s for s in _all_subset_sums(x[k:]))
    i = 0
    j = 0
    # Y[i] == Z[j] means a left-half subset plus a right-half subset
    # together sum to exactly R.
    while i < len(Y) and j < len(Z):
        if Y[i] == Z[j]:
            return True
        elif Y[i] < Z[j]:
            i += 1
        else:
            j += 1
    return False
Subsetsum by splitting :param x: table of values :param R: target value :returns bool: if there is a subsequence of x with total sum R :complexity: :math:`O(n^{\\lceil n/2 \\rceil})`
def save(self, file_path): try: file_path = os.path.abspath(file_path) with open(file_path, 'wb') as df: pickle.dump((self.__data, self.__classes, self.__labels, self.__dtype, self.__description, self.__num_features, ...
Method to save the dataset to disk. Parameters ---------- file_path : str File path to save the current dataset to Raises ------ IOError If saving to disk is not successful.
def to_disk(self, path, exclude=tuple(), disable=None): if disable is not None: deprecation_warning(Warnings.W014) exclude = disable path = util.ensure_path(path) serializers = OrderedDict() serializers["tokenizer"] = lambda p: self.tokenizer.to_disk(p, exclude=["...
Save the current state to a directory. If a model is loaded, this will include the model. path (unicode or Path): Path to a directory, which will be created if it doesn't exist. exclude (list): Names of components or serialization fields to exclude. DOCS: https://spacy.io/...
def start(self): super().start() try: initial = self._get_initial_context() self._stack = ContextCurrifier(self.wrapped, *initial.args, **initial.kwargs) if isconfigurabletype(self.wrapped): try: self.wrapped = self.wrapped(_final=T...
Starts this context, a.k.a the phase where you setup everything which will be necessary during the whole lifetime of a transformation. The "ContextCurrifier" is in charge of setting up a decorating stack, that includes both services and context processors, and will call the actual node callable...
def _format_command_usage(commands): if not commands: return "" command_usage = "\nCommands:\n" cmd_len = max([len(c) for c in commands] + [8]) command_doc = OrderedDict( [(cmd_name, _get_first_line_of_docstring(cmd_doc)) for cmd_name, cmd_doc in commands.items()]) for cmd_n...
Construct the Commands-part of the usage text. Parameters ---------- commands : dict[str, func] dictionary of supported commands. Each entry should be a tuple of (name, function). Returns ------- str Text formatted as a description of the commands.
def get_job_statuses(github_token, api_url, build_id, polling_interval, job_number): auth = get_json('{api_url}/auth/github'.format(api_url=api_url), data={'github_token': github_token})['access_token'] while True: build = get_json('{api_url}/builds/{build_id}'.f...
Wait for all the travis jobs to complete. Once the other jobs are complete, return a list of booleans, indicating whether or not the job was successful. Ignore jobs marked "allow_failure".
def offset(self, index=0): eta = self._geometry[self.camera][index]["ra"] xi = self._geometry[self.camera][index]["dec"] ra = self.origin.ra - (eta/math.cos(self.dec.radian))*units.degree dec = self.origin.dec - xi * units.degree + 45 * units.arcsec self._coordinate = SkyCoord(ra...
Offset the camera pointing to be centred on a particular CCD.
def _init_taxids(taxid, taxids): ret = set() if taxids is not None: if taxids is True: return True if isinstance(taxids, int): ret.add(taxids) else: ret.update(taxids) if taxid is not None: ret.add(ta...
Return taxid set
def tostring(self):
    """Return a pretty-printed string output for the rpc reply."""
    raw = etree.tostring(self.__doc)
    # Re-parse with blank text stripped so pretty_print can reindent cleanly.
    stripped = etree.XML(raw, etree.XMLParser(remove_blank_text=True))
    return etree.tostring(stripped, pretty_print=True)
Return a pretty-printed string output for the rpc reply.
def check_error_response(self, body, status):
    """Raise an exception if the backend response was an error.

    Args:
      body: A string containing the backend response body.
      status: A string containing the backend response status line,
        e.g. '200 OK'.

    Raises:
      BackendError: if the numeric status code is 300 or greater.
    """
    code = status.split(' ', 1)[0]
    if int(code) >= 300:
        raise errors.BackendError(body, status)
Raise an exception if the response from the backend was an error. Args: body: A string containing the backend response body. status: A string containing the backend response status. Raises: BackendError if the response is an error.
def target(self, project_module): assert isinstance(project_module, basestring) if project_module not in self.module2target: self.module2target[project_module] = \ b2.build.targets.ProjectTarget(project_module, project_module, self.attribute(proj...
Returns the project target corresponding to the 'project-module'.
def _parse_banners(self): motd_value = login_value = None matches = re.findall('^banner\s+(login|motd)\s?$\n(.*?)$\nEOF$\n', self.config, re.DOTALL | re.M) for match in matches: if match[0].strip() == "motd": motd_value = match[1] ...
Parses the global config and returns the value for both motd and login banners. Returns: dict: The configured value for motd and login banners. If the banner is not set it will return a value of None for that key. The returned dict object is intended to...
def as_symbols(self):
    """Set of symbols required by this Require.

    :return: set of ``(symbol type, symbol name)`` tuples
    """
    symbols = {('type', name) for name in self.types}
    symbols |= {('enum', name) for name in self.enums}
    symbols |= {('command', name) for name in self.commands}
    return symbols
Set of symbols required by this Require :return: set of ``(symbol type, symbol name)`` tuples