code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def policy_and_value_net(rng_key, batch_observations_shape, num_actions, bottom_layers=None):
    """A policy and value net function.

    Builds a network that stacks the optional `bottom_layers` and then
    branches into two parallel heads: a log-softmax policy head over
    `num_actions` and a scalar value head.

    Returns:
        (params, net): the initialized parameters and the layer stack.
    """
    cur_layers = []
    if bottom_layers is not None:
        cur_layers.extend(bottom_layers)
    # Branch into two heads: policy (Dense -> LogSoftmax) and value (Dense(1)).
    cur_layers.extend([layers.Branch(), layers.Parallel(
        layers.Serial(layers.Dense(num_actions), layers.LogSoftmax()),
        layers.Dense(1)
    )])
    net = layers.Serial(*cur_layers)
    return net.initialize(batch_observations_shape, rng_key), net
A policy and value net function.
def zlist(self, name_start, name_end, limit=10):
    """Return up to ``limit`` zset names in ``(name_start, name_end]``, ascending.

    Empty strings mean -inf / +inf for the lower / upper bound respectively.

    :param string name_start: lower bound (not included) of zset names
    :param string name_end: upper bound (included) of zset names
    :param int limit: maximum number of names returned
    :return: a list of zset names
    :rtype: list
    """
    limit = get_positive_integer('limit', limit)
    return self.execute_command('zlist', name_start, name_end, limit)
Return a list of the top ``limit`` zset's name between ``name_start`` and ``name_end`` in ascending order .. note:: The range is (``name_start``, ``name_end``]. The ``name_start`` isn't in the range, but ``name_end`` is. :param string name_start: The lower bound(not included) of zset names to be returned, empty string ``''`` means -inf :param string name_end: The upper bound(included) of zset names to be returned, empty string ``''`` means +inf :param int limit: number of elements will be returned. :return: a list of zset's name :rtype: list >>> ssdb.zlist('zset_ ', 'zset_z', 10) ['zset_1', 'zset_2'] >>> ssdb.zlist('zset_ ', '', 3) ['zset_1', 'zset_2'] >>> ssdb.zlist('', 'aaa_not_exist', 10) []
def GetLoggingLocation():
    """Search the call stack for the log collector's location marker.

    Walks caller frames belonging to this same file, looking for a local
    variable named 'cdbg_logging_location'.

    Returns:
        (pathname, lineno, func_name) from the marker, or
        (None, None, None) if no well-formed 3-tuple marker is found.
    """
    frame = inspect.currentframe()
    this_file = frame.f_code.co_filename
    frame = frame.f_back
    while frame:
        # Only inspect frames executing code from this file.
        if this_file == frame.f_code.co_filename:
            if 'cdbg_logging_location' in frame.f_locals:
                ret = frame.f_locals['cdbg_logging_location']
                if len(ret) != 3:
                    return (None, None, None)
                return ret
        frame = frame.f_back
    return (None, None, None)
Search for and return the file and line number from the log collector. Returns: (pathname, lineno, func_name) The full path, line number, and function name for the logpoint location.
def subscribe_and_validate(self, topic, qos, payload, timeout=1):
    """Subscribe to `topic` and wait until the expected `payload` arrives.

    Requires an established MQTT connection. Polls the client loop until
    `self._verified` is set by the message handler or `timeout` elapses.

    Raises:
        AssertionError: if the payload is not seen within `timeout`.
    """
    seconds = convert_time(timeout)
    self._verified = False
    logger.info('Subscribing to topic: %s' % topic)
    self._mqttc.subscribe(str(topic), int(qos))
    self._payload = str(payload)
    self._mqttc.on_message = self._on_message
    timer_start = time.time()
    while time.time() < timer_start + seconds:
        if self._verified:
            break
        self._mqttc.loop()
    if not self._verified:
        raise AssertionError("The expected payload didn't arrive in the topic")
Subscribe to a topic and validate that the specified payload is received within timeout. It is required that a connection has been established using `Connect` keyword. The payload can be specified as a python regular expression. If the specified payload is not received within timeout, an AssertionError is thrown. `topic` topic to subscribe to `qos` quality of service for the subscription `payload` payload (message) that is expected to arrive `timeout` time to wait for the payload to arrive Examples: | Subscribe And Validate | test/test | 1 | test message |
def validate_cmap(val):
    """Validate a colormap.

    Accepts either a colormap name (validated as a string) or a
    :class:`matplotlib.colors.Colormap` instance, which is returned as-is.

    Raises:
        ValueError: if `val` is neither a valid string nor a Colormap.
    """
    from matplotlib.colors import Colormap
    try:
        return validate_str(val)
    except ValueError:
        if not isinstance(val, Colormap):
            raise ValueError(
                "Could not find a valid colormap!")
        return val
Validate a colormap Parameters ---------- val: str or :class:`mpl.colors.Colormap` Returns ------- str or :class:`mpl.colors.Colormap` Raises ------ ValueError
def export_module_spec_with_checkpoint(module_spec, checkpoint_path, export_path, scope_prefix=""):
    """Export the given checkpoint as a tfhub module with the given spec."""
    with tf.Graph().as_default():
        m = hub.Module(module_spec)
        # Map (optionally prefixed) checkpoint variable names to module variables.
        assign_map = {
            scope_prefix + name: value for name, value in m.variable_map.items()
        }
        tf.train.init_from_checkpoint(checkpoint_path, assign_map)
        init_op = tf.initializers.global_variables()
        with tf.Session() as session:
            session.run(init_op)
            m.export(export_path, session)
Exports given checkpoint as tfhub module with given spec.
def integer(prompt=None, empty=False):
    """Prompt the user for an integer, re-prompting on invalid input.

    Parameters
    ----------
    prompt : str, optional
        Use an alternative prompt.
    empty : bool, optional
        Allow an empty response.

    Returns
    -------
    int or None
        An int if the user entered a valid integer; None if the user
        pressed only Enter and ``empty`` was True.
    """
    # FIX: the original re-prompted via unbounded recursion, which can
    # raise RecursionError after ~1000 invalid entries. Loop instead;
    # the observable re-prompt behavior is unchanged.
    while True:
        s = _prompt_input(prompt)
        if empty and not s:
            return None
        try:
            return int(s)
        except ValueError:
            continue
Prompt an integer. Parameters ---------- prompt : str, optional Use an alternative prompt. empty : bool, optional Allow an empty response. Returns ------- int or None An int if the user entered a valid integer. None if the user pressed only Enter and ``empty`` was True.
def add_edge_fun(graph):
    """Return a function that adds an edge to `graph`, checking only the out node.

    :param graph: A directed graph.
    :type graph: networkx.classes.digraph.DiGraph
    :return: A function add_edge(u, v, **attr) that mutates the graph.
    :rtype: callable
    """
    # Bind the graph's internal dicts once for direct mutation in the closure.
    succ, pred, node = graph._succ, graph._pred, graph._node

    def add_edge(u, v, **attr):
        # Only the out node `v` is created if missing; `u` is assumed present.
        if v not in succ:
            succ[v], pred[v], node[v] = {}, {}, {}
        succ[u][v] = pred[v][u] = attr

    return add_edge
Returns a function that adds an edge to the `graph` checking only the out node. :param graph: A directed graph. :type graph: networkx.classes.digraph.DiGraph :return: A function that adds an edge to the `graph`. :rtype: callable
def wait_for(func):
    """Decorator retrying `func` on AssertionError for a time interval.

    Adds a `timeout` kwarg to the wrapped function (default TIMEOUT,
    in seconds); retries every CHECK_EVERY seconds until success or
    until the timeout elapses, then re-raises the last AssertionError.
    """
    @wraps(func)
    def wrapped(*args, **kwargs):
        timeout = kwargs.pop('timeout', TIMEOUT)
        start = None
        while True:
            try:
                return func(*args, **kwargs)
            except AssertionError:
                # Start the clock on the first failure, not at call time.
                if not start:
                    start = time()
                if time() - start < timeout:
                    sleep(CHECK_EVERY)
                    continue
                else:
                    raise
    return wrapped
A decorator to invoke a function, retrying on assertion errors for a specified time interval. Adds a kwarg `timeout` to `func` which is a number of seconds to try for (default 15).
def _gcd_array(X):
    """Return the largest real h such that all elements of X are integer multiples of h."""
    greatest_common_divisor = 0.0
    # Fold _gcd over the elements; _gcd(0.0, x) is assumed to return x.
    for x in X:
        greatest_common_divisor = _gcd(greatest_common_divisor, x)
    return greatest_common_divisor
Return the largest real value h such that all elements in x are integer multiples of h.
def print_update(self):
    """Print interim backup status information (counts, sizes, throughput)."""
    print("\r\n")
    now = datetime.datetime.now()
    print("Update info: (from: %s)" % now.strftime("%c"))
    current_total_size = self.total_stined_bytes + self.total_new_bytes
    if self.total_errored_items:
        print(" * WARNING: %i omitted files!" % self.total_errored_items)
    print(" * fast backup: %i files" % self.total_fast_backup)
    print(
        " * new content saved: %i files (%s %.1f%%)" % (
            self.total_new_file_count,
            human_filesize(self.total_new_bytes),
            to_percent(self.total_new_bytes, current_total_size),
        )
    )
    print(
        " * stint space via hardlinks: %i files (%s %.1f%%)" % (
            self.total_file_link_count,
            human_filesize(self.total_stined_bytes),
            to_percent(self.total_stined_bytes, current_total_size),
        )
    )
    duration = default_timer() - self.start_time
    # Throughput in MB/s over the whole run so far.
    performance = current_total_size / duration / 1024.0 / 1024.0
    print(" * present performance: %.1fMB/s\n" % performance)
print some status information in between.
def filter_parts(cls, part_info):
    """Filter the part_info dict, keeping only instances of our class.

    Args:
        part_info (dict): {part_name: [Info] or None} as returned from
            Controller.run_hook()

    Returns:
        OrderedDict: {part_name: [info]} where each info is an instance of cls.
    """
    filtered = OrderedDict()
    for part_name, info_list in part_info.items():
        # Skip parts that produced nothing (None) or errored (Exception).
        if info_list is None or isinstance(info_list, Exception):
            continue
        info_list = [i for i in info_list if isinstance(i, cls)]
        if info_list:
            filtered[part_name] = info_list
    return filtered
Filter the part_info dict looking for instances of our class Args: part_info (dict): {part_name: [Info] or None} as returned from Controller.run_hook() Returns: dict: {part_name: [info]} where info is a subclass of cls
def inspect_hash(path):
    """Calculate the SHA-256 hex digest of a database file, reading in chunks."""
    m = hashlib.sha256()
    with path.open("rb") as fp:
        # Stream the file in HASH_BLOCK_SIZE chunks to bound memory use.
        while True:
            data = fp.read(HASH_BLOCK_SIZE)
            if not data:
                break
            m.update(data)
    return m.hexdigest()
Calculate the hash of a database, efficiently.
def get_parent_element(self):
    """Return the parent element to set attributes on.

    Signature and Audit elements share sub-elements; `self.ref_state`
    selects whether the audit record or the signature is the parent.
    """
    return {AUDIT_REF_STATE: self.context.audit_record,
            SIGNATURE_REF_STATE: self.context.signature}[self.ref_state]
Signatures and Audit elements share sub-elements, we need to know which to set attributes on
def chosen_view_factory(chooser_cls):
    """Return a ChosenView class that extends the specified chooser class.

    :param chooser_cls: the class to extend.
    :rtype: class
    """
    class ChosenView(chooser_cls):
        def get(self, request, *args, **kwargs):
            # Render the chosen object back to the modal workflow as JSON.
            self.object = self.get_object()
            return render_modal_workflow(
                self.request,
                None,
                '{0}/chosen.js'.format(self.template_dir),
                {'obj': self.get_json(self.object)}
            )

        def get_object(self, queryset=None):
            if queryset is None:
                queryset = self.get_queryset()
            pk = self.kwargs.get('pk', None)
            try:
                return queryset.get(pk=pk)
            except self.models.DoesNotExist:
                raise Http404()

        def post(self, request, *args, **kwargs):
            # POST behaves exactly like GET for this modal.
            return self.get(request, *args, **kwargs)

    return ChosenView
Returns a ChosenView class that extends specified chooser class. :param chooser_cls: the class to extend. :rtype: class.
def get_l(self):
    """Return the left border of the cell.

    Implemented as the right border of the neighboring cell to the left.
    """
    cell_left = CellBorders(self.cell_attributes, *self.cell.get_left_key_rect())
    return cell_left.get_r()
Returns the left border of the cell
def duplicates_removed(it, already_seen=()):
    """Return a list with duplicates removed from the iterable `it`.

    Items contained in `already_seen` are excluded as well.
    Order of first occurrence is preserved.
    """
    result = []
    seen = set()
    for item in it:
        if item not in seen and item not in already_seen:
            seen.add(item)
            result.append(item)
    return result
Returns a list with duplicates removed from the iterable `it`. Order is preserved.
def dtypes(self):
    """Series of NumPy dtypes present in the DataFrame, indexed by column names.

    Returns
    -------
    Series
    """
    return Series(np.array(list(self._gather_dtypes().values()), dtype=np.bytes_),
                  self.keys())
Series of NumPy dtypes present in the DataFrame with index of column names. Returns ------- Series
def getDocPath(fn, root=None):
    """Return the full path of the documentation data file `fn`.

    Searches upward from `root` (default: the current working directory)
    for a ``docdata`` directory containing the file.

    Args:
        fn (str): Name of the file to retrieve the full path for.
        root (str): Optional root path to start the search from.

    Returns:
        str: A file path.

    Raises:
        ValueError: if no docdata directory is found, the resolved path
            escapes the docdata directory, or the file does not exist.
    """
    cwd = pathlib.Path(os.getcwd())
    if root:
        cwd = pathlib.Path(root)
    # Walk up the directory tree until a 'docdata' directory is found.
    while True:
        dpath = cwd.joinpath('docdata')
        if dpath.is_dir():
            break
        parent = cwd.parent
        if parent == cwd:  # Reached the filesystem root without a match.
            raise ValueError(f'Unable to find data directory from {os.getcwd()}.')
        cwd = parent
    fpath = os.path.abspath(os.path.join(dpath.as_posix(), fn))
    # Guard against '../'-style directory traversal in fn.
    if not fpath.startswith(dpath.as_posix()):
        raise ValueError(f'Path escaping detected: {fn}')
    if not os.path.isfile(fpath):
        raise ValueError(f'File does not exist: {fn}')
    return fpath
Helper for getting a documentation data file path. Args: fn (str): Name of the file to retrieve the full path for. root (str): Optional root path to look for a docdata directory in. Notes: Defaults to looking for the ``docdata`` directory in the current working directory. This behavior works fine for notebooks nested in the docs directory of synapse, but the root directory that is searched may be overridden by providing an alternative root. Returns: str: A file path. Raises: ValueError: if the file does not exist or directory traversal is attempted.
async def download_file(self, Bucket, Key, Filename, ExtraArgs=None, Callback=None, Config=None):
    """Download an S3 object to a local file.

    Similar behavior to S3Transfer's download_file() method, except that
    parameters are capitalized. Streams the object into `Filename`
    via download_fileobj.
    """
    with open(Filename, 'wb') as open_file:
        await download_fileobj(self, Bucket, Key, open_file,
                               ExtraArgs=ExtraArgs, Callback=Callback,
                               Config=Config)
Download an S3 object to a file. Usage:: import boto3 s3 = boto3.resource('s3') s3.meta.client.download_file('mybucket', 'hello.txt', '/tmp/hello.txt') Similar behavior as S3Transfer's download_file() method, except that parameters are capitalized.
def load(stream, overrides=None, **kwargs):
    """Load a YAML configuration from a string or file-like object.

    Parameters
    ----------
    stream : str or object
        Valid YAML text, or an object supporting the .readlines() interface.
    overrides : dict, optional
        Overrides to apply, keyed by dot-delimited paths to the target
        parameter, e.g. "model.corruptor.corruption_level".

    Returns
    -------
    graph : dict or object
        The fully instantiated object graph.

    Notes
    -----
    Other keyword arguments are passed on to `yaml.load`.
    """
    global is_initialized
    if not is_initialized:
        initialize()
    if isinstance(stream, basestring):
        string = stream
    else:
        string = '\n'.join(stream.readlines())
    proxy_graph = yaml.load(string, **kwargs)
    # Imported lazily to avoid a circular import at module load time.
    from . import init
    init_dict = proxy_graph.get('init', {})
    init(**init_dict)
    if overrides is not None:
        handle_overrides(proxy_graph, overrides)
    return instantiate_all(proxy_graph)
Loads a YAML configuration from a string or file-like object. Parameters ---------- stream : str or object Either a string containing valid YAML or a file-like object supporting the .read() interface. overrides : dict, optional A dictionary containing overrides to apply. The location of the override is specified in the key as a dot-delimited path to the desired parameter, e.g. "model.corruptor.corruption_level". Returns ------- graph : dict or object The dictionary or object (if the top-level element specified an Python object to instantiate). Notes ----- Other keyword arguments are passed on to `yaml.load`.
def update(self, title=None, body=None, state=None):
    """Update this pull request on the server.

    :param str title: (optional), title of the pull
    :param str body: (optional), body of the pull request
    :param str state: (optional), ('open', 'closed')
    :returns: bool -- True if the server accepted the update
    """
    data = {'title': title, 'body': body, 'state': state}
    json = None
    # Only send fields the caller actually provided.
    self._remove_none(data)
    if data:
        json = self._json(self._patch(self._api, data=dumps(data)), 200)
    if json:
        self._update_(json)
        return True
    return False
Update this pull request. :param str title: (optional), title of the pull :param str body: (optional), body of the pull request :param str state: (optional), ('open', 'closed') :returns: bool
def _attachToObject(self, anchorObj, relationName):
    """Attach the rabalist to a raba object; only attached rabalists can be saved.

    Raises:
        ValueError: if an element violates the list/relation constraints,
            or if the list is already attached to a different object.
    """
    if self.anchorObj == None:
        self.relationName = relationName
        self.anchorObj = anchorObj
        self._setNamespaceConAndConf(anchorObj._rabaClass._raba_namespace)
        self.tableName = self.connection.makeRabaListTableName(self.anchorObj._rabaClass.__name__, self.relationName)
        # Validate every element against the list/relation constraints.
        faultyElmt = self._checkSelf()
        if faultyElmt != None:
            raise ValueError("Element %s violates specified list or relation constraints" % faultyElmt)
    elif self.anchorObj is not anchorObj:
        # Re-attaching to a different anchor is forbidden.
        raise ValueError("Ouch: attempt to steal rabalist, use RabaLict.copy() instead.\nthief: %s\nvictim: %s\nlist: %s" % (anchorObj, self.anchorObj, self))
Attaches the rabalist to a raba object. Only attached rabalists can be saved
def generate_template(self, channeldir, filename, header):
    """Create an empty template CSV called `filename` with columns `header`.

    The file is placed as a sibling of the directory `channeldir`.
    Does nothing if the file already exists.
    """
    file_path = get_metadata_file_path(channeldir, filename)
    if not os.path.exists(file_path):
        with open(file_path, 'w') as csv_file:
            csvwriter = csv.DictWriter(csv_file, header)
            csvwriter.writeheader()
Create empty template .csv file called `filename` as siblings of the directory `channeldir` with header fields specified in `header`.
def doArc8(arcs, domains, assignments):
    """Perform the ARC-8 arc-consistency algorithm, pruning domains in place.

    @attention: Currently unused.

    Returns:
        False as soon as some variable's domain becomes empty,
        True otherwise.
    """
    check = dict.fromkeys(domains, True)
    while check:
        variable, _ = check.popitem()
        # Skip variables with no arcs or that are already assigned.
        if variable not in arcs or variable in assignments:
            continue
        domain = domains[variable]
        arcsvariable = arcs[variable]
        for othervariable in arcsvariable:
            arcconstraints = arcsvariable[othervariable]
            if othervariable in assignments:
                otherdomain = [assignments[othervariable]]
            else:
                otherdomain = domains[othervariable]
            if domain:
                # Iterate over a copy since values may be hidden while looping.
                for value in domain[:]:
                    assignments[variable] = value
                    if otherdomain:
                        for othervalue in otherdomain:
                            assignments[othervariable] = othervalue
                            for constraint, variables in arcconstraints:
                                if not constraint(
                                    variables, domains, assignments, True
                                ):
                                    break
                            else:
                                # All constraints passed: `value` has support.
                                break
                        else:
                            # No supporting othervalue found: prune `value`.
                            domain.hideValue(value)
                        del assignments[othervariable]
                del assignments[variable]
            if not domain:
                return False
    return True
Perform the ARC-8 arc checking algorithm and prune domains @attention: Currently unused.
def help_text(self, name, text, text_kind='plain', trim_pfx=0):
    """Provide help text for the user, converting RST to HTML via docutils.

    Parameters
    ----------
    name : str
        Category of help to show.
    text : str
        The text to show.
    text_kind : str (optional)
        One of 'plain', 'html', 'rst'. Default is 'plain'.
    trim_pfx : int (optional)
        Number of spaces to trim off the beginning of each line of text.
    """
    if trim_pfx > 0:
        text = toolbox.trim_prefix(text, trim_pfx)
    if text_kind == 'rst':
        # Convert RST to HTML; on failure, fall through and show as text.
        try:
            overrides = {'input_encoding': 'ascii',
                         'output_encoding': 'utf-8'}
            text_html = publish_string(text, writer_name='html',
                                       settings_overrides=overrides)
            text = text_html.decode('utf-8')
            text_kind = 'html'
        except Exception as e:
            self.logger.error("Error converting help text to HTML: %s" % (
                str(e)))
    else:
        # NOTE(review): 'plain' and 'html' inputs hit this raise even
        # though the docstring lists them as supported — confirm intent.
        raise ValueError(
            "I don't know how to display text of kind '%s'" % (text_kind))
    if text_kind == 'html':
        self.help(text=text, text_kind='html')
    else:
        self.show_help_text(name, text)
Provide help text for the user. This method will convert the text as necessary with docutils and display it in the WBrowser plugin, if available. If the plugin is not available and the text is type 'rst' then the text will be displayed in a plain text widget. Parameters ---------- name : str Category of help to show. text : str The text to show. Should be plain, HTML or RST text text_kind : str (optional) One of 'plain', 'html', 'rst'. Default is 'plain'. trim_pfx : int (optional) Number of spaces to trim off the beginning of each line of text.
def _process_image_file(fobj, session, filename):
    """Decode an image file from the dataset and re-encode it as JPEG."""
    image = _decode_image(fobj, session, filename=filename)
    return _encode_jpeg(image)
Process image files from the dataset.
def copy(self, to_name):
    """Copy this multireddit to `to_name`.

    Convenience wrapper around
    :meth:`.MultiredditMixin.copy_multireddit`, supplying this
    multireddit's author and name as the source.
    """
    return self.reddit_session.copy_multireddit(self._author, self.name, to_name)
Copy this multireddit. Convenience function that utilizes :meth:`.MultiredditMixin.copy_multireddit` populating both the `from_redditor` and `from_name` parameters.
def intermediate_cpfs(self) -> List[CPF]:
    """Return the intermediate-fluent CPFs, sorted by (level, name)."""
    _, cpfs = self.cpfs
    interm_cpfs = [cpf for cpf in cpfs if cpf.name in self.intermediate_fluents]
    # Level ordering first, then alphabetical as a tiebreaker.
    interm_cpfs = sorted(interm_cpfs,
                         key=lambda cpf: (self.intermediate_fluents[cpf.name].level, cpf.name))
    return interm_cpfs
Returns list of intermediate-fluent CPFs in level order.
def create_from_tuple(cls, tube, the_tuple):
    """Create a Task instance from a database tuple.

    Returns None when `the_tuple` is None.

    Raises:
        Queue.ZeroTupleException: when the tuple has no rows.
    """
    if the_tuple is None:
        return
    if not the_tuple.rowcount:
        raise Queue.ZeroTupleException("Error creating task")
    row = the_tuple[0]
    return cls(
        tube,
        task_id=row[0],
        state=row[1],
        data=row[2]
    )
Create task from tuple. Returns `Task` instance.
def augmentation_transform(self, data, label):
    """Transform `data` and `label` by applying each configured augmentation in order (override)."""
    for aug in self.auglist:
        data, label = aug(data, label)
    return (data, label)
Overrides the base implementation. Transforms input data and label with the specified augmentations, applied in order.
def run_command(self, command_name, config_updates=None, named_configs=(), args=(), meta_info=None):
    """Run the command with the given name.

    .. note:: Deprecated in Sacred 0.7, to be removed in 1.0.
              Use run() instead, which also handles command names.
    """
    import warnings
    warnings.warn("run_command is deprecated. Use run instead",
                  DeprecationWarning)
    # Note the argument-order difference: run() takes meta_info before args.
    return self.run(command_name, config_updates, named_configs,
                    meta_info, args)
Run the command with the given name. .. note:: Deprecated in Sacred 0.7 run_command() will be removed in Sacred 1.0. It is replaced by run() which can now also handle command_names.
def system_update_column_family(self, cf_def):
    """Update properties of a column family (asynchronous).

    Parameters:
     - cf_def

    Returns:
        A Deferred that fires with the new schema id.
    """
    # Register a Deferred under a fresh sequence id before sending,
    # so the response handler can find and fire it.
    self._seqid += 1
    d = self._reqs[self._seqid] = defer.Deferred()
    self.send_system_update_column_family(cf_def)
    return d
updates properties of a column family. returns the new schema id. Parameters: - cf_def
def add_ok_action(self, action_arn=None):
    """Add an OK action (an SNS topic ARN) to this alarm.

    :type action_arn: str
    :param action_arn: SNS topic to notify when the alarm enters the OK
        state. A falsy value is silently ignored.
    """
    if not action_arn:
        return
    # The API expects the string 'true', not a boolean.
    self.actions_enabled = 'true'
    self.ok_actions.append(action_arn)
Adds an ok action, represented as an SNS topic, to this alarm. What to do when the ok state is reached. :type action_arn: str :param action_arn: SNS topic to which notification should be sent if the alarm goes to the OK state.
def create(cls, obj):
    """Create a new prototype object with `obj` as its source prototype.

    Bypasses __init__ and only sets __proto__; call __init__ explicitly
    afterwards if initialization is required.
    """
    self = cls.__new__(cls)
    self.__proto__ = obj
    return self
Create a new prototype object with the argument as the source prototype. .. Note: This does not `initialize` the newly created object any more than setting its prototype. Calling the __init__ method is usually unnecessary as all initialization data should be in the original prototype object already. If required, call __init__ explicitly: >>> proto_obj = MyProtoObj(1, 2, 3) >>> obj = MyProtoObj.create(proto_obj) >>> obj.__init__(1, 2, 3)
def check_all_local(self):
    """Check or uncheck all local event parameters to match the global 'all_local' box."""
    all_local_chk = self.event['global']['all_local'].isChecked()
    for buttons in self.event['local'].values():
        buttons[0].setChecked(all_local_chk)
        # The second button is only enabled while the first is checked.
        buttons[1].setEnabled(buttons[0].isChecked())
Check or uncheck all local event parameters.
def checkversion(doc, metadata, enable_dev):
    """Check the cwlVersion of the given CWL document.

    Returns:
        (cdoc, version): the (possibly $graph-wrapped) document and the
        validated version string.

    Raises:
        validate.ValidationException: for unrecognized versions, or for
            development/deprecated versions when `enable_dev` is False.
    """
    cdoc = None
    if isinstance(doc, CommentedSeq):
        if not isinstance(metadata, CommentedMap):
            raise Exception("Expected metadata to be CommentedMap")
        # Wrap the sequence of processes in a $graph document while
        # preserving line/column info from the original metadata.
        lc = metadata.lc
        metadata = copy.deepcopy(metadata)
        metadata.lc.data = copy.copy(lc.data)
        metadata.lc.filename = lc.filename
        metadata[u"$graph"] = doc
        cdoc = metadata
    elif isinstance(doc, CommentedMap):
        cdoc = doc
    else:
        raise Exception("Expected CommentedMap or CommentedSeq")

    version = metadata[u"cwlVersion"]
    cdoc["cwlVersion"] = version

    if version not in UPDATES:
        if version in DEVUPDATES:
            if enable_dev:
                pass
            else:
                raise validate.ValidationException(
                    u"Version '%s' is a development or deprecated version.\n "
                    "Update your document to a stable version (%s) or use "
                    "--enable-dev to enable support for development and "
                    "deprecated versions." % (version, ", ".join(
                        list(UPDATES.keys()))))
        else:
            raise validate.ValidationException(
                u"Unrecognized version %s" % version)

    return (cdoc, version)
Checks the validity of the version of the give CWL document. Returns the document and the validated version string.
def ensure_clean_git(operation='operation'):
    """Exit the process if git reports unstaged or uncommitted changes."""
    # `git diff-index --quiet` returns non-zero when the working tree is dirty.
    if os.system('git diff-index --quiet HEAD --'):
        print("Unstaged or uncommitted changes detected. {} aborted.".format(
            operation.capitalize()))
        sys.exit()
Verify that git has no uncommitted changes
def restore(self, request, *args, **kwargs):
    """Admin view for restoring a soft-deleted object from its change view."""
    paths = request.path_info.split('/')
    # The object id sits two path segments before the "restore" segment.
    object_id_index = paths.index("restore") - 2
    object_id = paths[object_id_index]
    obj = super(VersionedAdmin, self).get_object(request, object_id)
    obj.restore()
    # Rebuild the admin changelist URL to redirect back to.
    admin_wordIndex = object_id_index - 3
    path = "/%s" % ("/".join(paths[admin_wordIndex:object_id_index]))
    opts = self.model._meta
    msg_dict = {
        'name': force_text(opts.verbose_name),
        'obj': format_html('<a href="{}">{}</a>', urlquote(request.path), obj),
    }
    msg = format_html(_('The {name} "{obj}" was restored successfully.'), **msg_dict)
    self.message_user(request, msg, messages.SUCCESS)
    return HttpResponseRedirect(path)
View for restoring object from change view
def clean(self):
    """Remove intermediate files created during matching.

    For 'match' entries, `self.catalog_names[f]` is a single file path;
    for other entries it is a collection of catalog file paths.
    """
    for f in self.catalog_names:
        if 'match' in f:
            if os.path.exists(self.catalog_names[f]):
                log.info('Deleting intermediate match file: %s' % self.catalog_names[f])
                os.remove(self.catalog_names[f])
        else:
            # FIX: the original iterated `for extn in f`, which walks the
            # characters of the key string, and formatted a path with %d
            # (TypeError). Iterate the catalog paths stored under the key
            # and format them with %s.
            for extn in self.catalog_names[f]:
                if os.path.exists(extn):
                    log.info('Deleting intermediate catalog: %s' % extn)
                    os.remove(extn)
Remove intermediate files created.
def write_generator_data(self, file):
    """Write the case's generator data as CSV, one row per generator."""
    writer = self._get_writer(file)
    # Header row: bus id followed by each generator attribute name.
    writer.writerow(["bus"] + GENERATOR_ATTRS)
    for g in self.case.generators:
        i = g.bus._i
        writer.writerow([i] + [getattr(g, a) for a in GENERATOR_ATTRS])
Write generator data as CSV.
def suspend(rq, ctx, duration):
    """Suspend all workers for `duration` via the rq CLI suspend command."""
    ctx.invoke(
        rq_cli.suspend,
        duration=duration,
        **shared_options(rq)
    )
Suspends all workers.
def fromstring(text, schema=None):
    """Parse a KML text string, optionally validating it against `schema`.

    :param text: KML document as a string.
    :param schema: optional schema object whose ``.schema`` attribute is
        used to build a validating parser.
    """
    if schema:
        parser = objectify.makeparser(schema=schema.schema)
        return objectify.fromstring(text, parser=parser)
    else:
        return objectify.fromstring(text)
Parses a KML text string This function parses a KML text string and optionally validates it against a provided schema object
def flatten(nested):
    """Return a flat list of all non-list leaves of `nested`, in order.

    Only `list` instances are descended into; any other element is
    appended to the result unchanged.
    """
    flattened = []

    def _walk(items):
        # Recurse into lists, collect everything else.
        for element in items:
            if isinstance(element, list):
                _walk(element)
            else:
                flattened.append(element)

    _walk(nested)
    return flattened
Return a flatten version of the nested argument
def _run_sync(self, method: Callable, *args, **kwargs) -> Any:
    """Run `method` synchronously on the client's event loop (testing helper).

    Connects first if not yet connected and quits after the call.

    Raises:
        RuntimeError: if the event loop is already running.
    """
    if self.loop.is_running():
        raise RuntimeError("Event loop is already running.")
    if not self.is_connected:
        self.loop.run_until_complete(self.connect())
    task = asyncio.Task(method(*args, **kwargs), loop=self.loop)
    result = self.loop.run_until_complete(task)
    self.loop.run_until_complete(self.quit())
    return result
Utility method to run commands synchronously for testing.
def deserialize(stream_or_string, **options):
    """Deserialize a YAML string or stream into a Python data structure.

    :param stream_or_string: stream or string to deserialize.
    :param options: options passed through to yaml.load.

    Raises:
        DeserializationError: wrapping any underlying YAML error, with
            line information for scanner errors.
    """
    options.setdefault('Loader', Loader)
    try:
        return yaml.load(stream_or_string, **options)
    except ScannerError as error:
        log.exception('Error encountered while deserializing')
        # Translate the raw scanner problem into a friendlier message.
        err_type = ERROR_MAP.get(error.problem, 'Unknown yaml render error')
        line_num = error.problem_mark.line + 1
        raise DeserializationError(err_type, line_num, error.problem_mark.buffer)
    except ConstructorError as error:
        log.exception('Error encountered while deserializing')
        raise DeserializationError(error)
    except Exception as error:
        log.exception('Error encountered while deserializing')
        raise DeserializationError(error)
Deserialize any string of stream like object into a Python data structure. :param stream_or_string: stream or string to deserialize. :param options: options given to lower yaml module.
def get_desktop_for_window(self, window):
    """Return the desktop number the window is on.

    Uses _NET_WM_DESKTOP of the EWMH spec; if the desktop environment
    does not support it, the returned value is unmodified (0).

    :param window: the window to query
    """
    desktop = ctypes.c_long(0)
    _libxdo.xdo_get_desktop_for_window(
        self._xdo, window, ctypes.byref(desktop))
    return desktop.value
Get the desktop a window is on. Uses _NET_WM_DESKTOP of the EWMH spec. If your desktop does not support ``_NET_WM_DESKTOP``, then '*desktop' remains unmodified. :param wid: the window to query
def find_lib_directory(self):
    """Find the optimal lib directory for this version.

    Prefers an exact micro, then minor, then major version match;
    otherwise falls back to the first directory containing any of those
    version strings as a substring. Returns None if nothing matches.
    """
    lib_directory = None
    if self.lib_micro_version in self.lib_directories:
        lib_directory = self.lib_micro_version
    elif self.lib_minor_version in self.lib_directories:
        lib_directory = self.lib_minor_version
    elif self.lib_major_version in self.lib_directories:
        lib_directory = self.lib_major_version
    else:
        # Substring fallback, most specific version string first.
        for lv in [self.lib_micro_version, self.lib_minor_version, self.lib_major_version]:
            for d in self.lib_directories:
                if lv in d:
                    lib_directory = d
                    break
            else:
                continue
            break  # Found a substring match; stop searching.
    return lib_directory
Find the optimal lib directory.
def acquire(graftm_package_path):
    """Acquire (load) a GraftM package from `graftm_package_path`.

    Dispatches on the package's version key and validates its contents.

    Parameters
    ----------
    graftm_package_path: str
        path to base directory of the GraftM package

    Raises
    ------
    InsufficientGraftMPackageException: for unsupported versions.
    """
    # FIX: the original leaked the file handle returned by open();
    # use a context manager so it is closed promptly.
    contents_path = os.path.join(graftm_package_path,
                                 GraftMPackage._CONTENTS_FILE_NAME)
    with open(contents_path) as contents_file:
        contents_hash = json.load(contents_file)
    v = contents_hash[GraftMPackage.VERSION_KEY]
    logging.debug("Loading version %i GraftM package: %s" % (v, graftm_package_path))
    if v == 2:
        pkg = GraftMPackageVersion2()
    elif v == 3:
        pkg = GraftMPackageVersion3()
    else:
        raise InsufficientGraftMPackageException("Bad version: %s" % v)
    pkg._contents_hash = contents_hash
    pkg._base_directory = graftm_package_path
    pkg.check_universal_keys(v)
    pkg.check_required_keys(GraftMPackage._REQUIRED_KEYS[str(v)])
    return pkg
Acquire a new graftm Package Parameters ---------- graftm_output_path: str path to base directory of graftm
def GetCustomerIDs(client):
    """Retrieve all non-manager CustomerIds in the account hierarchy.

    Note that your configuration file must specify a client_customer_id
    belonging to an AdWords manager account.

    Args:
        client: an AdWordsClient instance.

    Raises:
        Exception: if no CustomerIds could be found.

    Returns:
        A multiprocessing.Queue containing all CustomerIds.
    """
    managed_customer_service = client.GetService('ManagedCustomerService', version='v201809')
    offset = 0
    # CanManageClients == False selects leaf (client) accounts only.
    selector = {
        'fields': ['CustomerId'],
        'predicates': [{
            'field': 'CanManageClients',
            'operator': 'EQUALS',
            'values': [False]
        }],
        'paging': {
            'startIndex': str(offset),
            'numberResults': str(PAGE_SIZE)
        }
    }
    queue = multiprocessing.Queue()
    more_pages = True
    while more_pages:
        page = managed_customer_service.get(selector)
        if page and 'entries' in page and page['entries']:
            for entry in page['entries']:
                queue.put(entry['customerId'])
        else:
            raise Exception('Can\'t retrieve any customer ID.')
        # Advance the page window.
        offset += PAGE_SIZE
        selector['paging']['startIndex'] = str(offset)
        more_pages = offset < int(page['totalNumEntries'])
    return queue
Retrieves all CustomerIds in the account hierarchy. Note that your configuration file must specify a client_customer_id belonging to an AdWords manager account. Args: client: an AdWordsClient instance. Raises: Exception: if no CustomerIds could be found. Returns: A Queue instance containing all CustomerIds in the account hierarchy.
def getargspec(func):
    """Variation of inspect.getargspec that works for more functions.

    Works for Cythonized, non-cpdef functions that expose argspec
    information but are not accepted by getargspec, and for Python 3
    functions with annotations (annotations are ignored).
    Keyword-only arguments are not supported.
    """
    if inspect.ismethod(func):
        func = func.__func__
    # Cython functions are not considered 'Python functions' by inspect,
    # but still expose a __code__ object.
    try:
        code = func.__code__
    except AttributeError:
        raise TypeError("{!r} is not a Python function".format(func))
    if hasattr(code, "co_kwonlyargcount") and code.co_kwonlyargcount > 0:
        raise ValueError("keyword-only arguments are not supported by getargspec()")
    args, varargs, varkw = inspect.getargs(code)
    # NOTE(review): inspect.ArgSpec and inspect.getargs were removed in
    # Python 3.11 — confirm the supported interpreter range.
    return inspect.ArgSpec(args, varargs, varkw, func.__defaults__)
Variation of inspect.getargspec that works for more functions. This function works for Cythonized, non-cpdef functions, which expose argspec information but are not accepted by getargspec. It also works for Python 3 functions that use annotations, which are simply ignored. However, keyword-only arguments are not supported.
def add_user_to_group(iam_client, user, group, quiet=False):
    """Add an IAM user to an IAM group.

    :param iam_client: IAM API client
    :param user: name of the user to add
    :param group: name of the group to add the user to
    :param quiet: suppress the informational message when True
    """
    if not quiet:
        printInfo('Adding user to group %s...' % group)
    iam_client.add_user_to_group(GroupName=group, UserName=user)
Add an IAM user to an IAM group :param iam_client: :param group: :param user: :param user_info: :param dry_run: :return:
def _run_mpi_cmd(self, cmd):
    """Run `cmd` under mpiexec with the configured number of processes.

    Logs the command line, captured stdout/stderr, and elapsed time.
    """
    log("Number of Processes: {0}".format(self.num_processors))
    time_start = datetime.utcnow()
    # Prepend the mpiexec launcher and process count.
    cmd = [self.mpiexec_path, '-n', str(self.num_processors)] + cmd
    log("Command Line: {0}".format(" ".join(cmd)))
    process = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=False)
    out, err = process.communicate()
    if out:
        log("OUTPUT:")
        # communicate() returns bytes; split on byte newlines.
        for line in out.split(b'\n'):
            log(line)
    if err:
        log(err, severity="WARNING")
    log("Time to complete: {0}".format(datetime.utcnow() - time_start))
This runs the command you send in
def namedb_get_num_names_in_namespace(cur, namespace_id, current_block):
    """Return the number of unexpired names in the given namespace."""
    # The expiry condition is generated as a parameterized SQL fragment.
    unexpired_query, unexpired_args = namedb_select_where_unexpired_names(current_block)
    query = "SELECT COUNT(name_records.name) FROM name_records JOIN namespaces ON name_records.namespace_id = namespaces.namespace_id WHERE name_records.namespace_id = ? AND " + unexpired_query + " ORDER BY name;"
    args = (namespace_id,) + unexpired_args
    num_rows = namedb_select_count_rows(cur, query, args, count_column='COUNT(name_records.name)')
    return num_rows
Get the number of names in a given namespace
def search(self, query: 're.Pattern') -> 'Iterable[_WorkTitles]':
    """Search titles using a compiled RE query.

    Yields a WorkTitles for each work that has at least one title
    matching `query`. Each work is yielded at most once.
    """
    titles: 'Titles'
    for titles in self._titles_list:
        title: 'AnimeTitle'
        for title in titles.titles:
            if query.search(title.title):
                yield WorkTitles(
                    aid=titles.aid,
                    main_title=_get_main_title(titles.titles),
                    titles=[t.title for t in titles.titles],
                )
                # FIX: the original used `continue`, which kept scanning
                # the remaining titles of the SAME work and yielded a
                # duplicate WorkTitles for every additional match.
                # `break` moves on to the next work after the first hit.
                break
Search titles using a compiled RE query.
def match_conditions(self, filepath, sourcedir=None, nopartial=True, exclude_patterns=[], excluded_libdirs=[]):
    """Return True if `filepath` matches all required conditions.

    Conditions checked in order: allowed stylesheet extension; not a
    partial source (when `nopartial`); not inside an excluded lib
    directory; not matching an exclude pattern (requires `sourcedir`).

    Args:
        filepath (str): Absolute filepath to match against conditions.

    Keyword Arguments:
        sourcedir (str or None): Absolute sources directory path; when
            None the exclude_patterns are not matched.
        nopartial (bool): Partial sources fail the match when True.
        exclude_patterns (list): Glob patterns that disqualify a path.
        excluded_libdirs (list): Directories whose contents are skipped.

    Returns:
        bool: True if all conditions match, else False.
    """
    # NOTE(review): the mutable default arguments ([]) are shared across
    # calls — safe only as long as they are never mutated in place.
    # Normalize lib dirs to end with a separator for startswith checks.
    excluded_libdirs = [os.path.join(d, "") for d in excluded_libdirs]
    filename, ext = os.path.splitext(filepath)
    ext = ext[1:]
    if ext not in self.FINDER_STYLESHEET_EXTS:
        return False
    if nopartial and self.is_partial(filepath):
        return False
    if any(
        filepath.startswith(excluded_path)
        for excluded_path in paths_by_depth(excluded_libdirs)
    ):
        return False
    if sourcedir and exclude_patterns:
        candidates = [sourcedir] + excluded_libdirs
        relative_path = self.get_relative_from_paths(filepath, candidates)
        if not self.is_allowed(relative_path, excludes=exclude_patterns):
            return False
    return True
Find if a filepath match all required conditions. Available conditions are (in order): * Is allowed file extension; * Is a partial source; * Is from an excluded directory; * Is matching an exclude pattern; Args: filepath (str): Absolute filepath to match against conditions. Keyword Arguments: sourcedir (str or None): Absolute sources directory path. Can be ``None`` but then the exclude_patterns won't be matched against (because this method require to distinguish source dir from lib dirs). nopartial (bool): Accept partial sources if ``False``. Default is ``True`` (partial sources fail matchind condition). See ``Finder.is_partial()``. exclude_patterns (list): List of glob patterns, if filepath match one these pattern, it wont match conditions. See ``Finder.is_allowed()``. excluded_libdirs (list): A list of directory to match against filepath, if filepath starts with one them, it won't match condtions. Returns: bool: ``True`` if match all conditions, else ``False``.
def _tseitin(ex, auxvarname, auxvars=None):
    """Convert a factored expression to a literal plus a list of constraints.

    Recursively replaces each non-literal subexpression with a fresh
    auxiliary variable, collecting (auxvar, function) constraint pairs.
    """
    if isinstance(ex, Literal):
        # Literals need no auxiliary variable and add no constraints.
        return ex, list()
    else:
        if auxvars is None:
            auxvars = list()

        lits = list()
        constraints = list()
        for x in ex.xs:
            lit, subcons = _tseitin(x, auxvarname, auxvars)
            lits.append(lit)
            constraints.extend(subcons)

        # Allocate the next auxiliary variable index for this subexpression.
        auxvarindex = len(auxvars)
        auxvar = exprvar(auxvarname, auxvarindex)
        auxvars.append(auxvar)

        f = ASTOPS[ex.ASTOP](*lits)
        constraints.append((auxvar, f))
        return auxvar, constraints
Convert a factored expression to a literal, and a list of constraints.
def is_ancestor_of(self, other, include_self=False):
    """Return True if this node is an ancestor of `other`.

    Delegates to ``other.is_descendant_of``.
    """
    return other.is_descendant_of(self, include_self=include_self)
Is this node an ancestor of `other`?
def type_from_ast(schema, type_node):
    """Get the GraphQL type definition from an AST node.

    Given a schema and an AST node describing a type, return a GraphQLType
    definition which applies to that type. List and non-null wrappers are
    applied recursively; named types are looked up in the schema and None
    is returned when not found.

    Raises:
        TypeError: for unexpected node types.
    """
    if isinstance(type_node, ListTypeNode):
        inner_type = type_from_ast(schema, type_node.type)
        return GraphQLList(inner_type) if inner_type else None
    if isinstance(type_node, NonNullTypeNode):
        inner_type = type_from_ast(schema, type_node.type)
        return GraphQLNonNull(inner_type) if inner_type else None
    if isinstance(type_node, NamedTypeNode):
        return schema.get_type(type_node.name.value)
    raise TypeError(
        f"Unexpected type node: '{inspect(type_node)}'."
    )
Get the GraphQL type definition from an AST node. Given a Schema and an AST node describing a type, return a GraphQLType definition which applies to that type. For example, if provided the parsed AST node for `[User]`, a GraphQLList instance will be returned, containing the type called "User" found in the schema. If a type called "User" is not found in the schema, then None will be returned.
def get_available_positions(self):
    """Return the available slot positions: 'new' plus any gaps in the layout."""
    available_positions = ["new"]
    layout = self.context.getLayout()
    used_positions = [int(slot["position"]) for slot in layout]
    if used_positions:
        # Positions up to the current maximum that are not occupied.
        used = [
            pos for pos in range(1, max(used_positions) + 1)
            if pos not in used_positions]
        available_positions.extend(used)
    return available_positions
Return a list of empty slot numbers
def get_subdirectories(directory):
    """Return the names of subdirectories of `directory`, skipping __pycache__."""
    subdirs = []
    for entry in os.listdir(directory):
        if entry == '__pycache__':
            continue
        if os.path.isdir(os.path.join(directory, entry)):
            subdirs.append(entry)
    return subdirs
Get subdirectories without pycache
def get_token(username, length=20, timeout=20):
    """Obtain an access token that can be passed to a websocket client.

    The token maps back to *username* in redis and expires after
    *timeout* seconds.
    """
    client = get_redis_client()
    token = get_random_string(length)
    key = 'token:{}'.format(token)
    # Store the mapping, then bound its lifetime.
    client.set(key, username)
    client.expire(key, timeout)
    return token
Obtain an access token that can be passed to a websocket client.
def generate_sibling_distance(self):
    """Generate a dict of per-topic partition-count differences.

    For every ordered broker pair (dest, source) and every topic, distance
    is dest's partition count minus source's; negative means the
    destination holds fewer partitions of that topic.

    returns: dict {dest: {source: {topic: distance}}}
    """
    distance = defaultdict(lambda: defaultdict(dict))
    topics = {partition.topic for partition in self.partitions}
    for src in self.brokers:
        for dst in self.brokers:
            if src == dst:
                continue
            for topic in topics:
                distance[dst][src][topic] = (
                    dst.count_partitions(topic)
                    - src.count_partitions(topic))
    return distance
Generate a dict containing the distance computed as difference in in number of partitions of each topic from under_loaded_brokers to over_loaded_brokers. Negative distance means that the destination broker has got less partitions of a certain topic than the source broker. returns: dict {dest: {source: {topic: distance}}}
def sync_user_email_addresses(user):
    """Keep user.email in sync with user.emailaddress_set.

    Under some circumstances the user.email may not have ended up as an
    EmailAddress record, e.g. in the case of manually created admin users.
    """
    from .models import EmailAddress
    email = user_email(user)
    if not email:
        return
    if EmailAddress.objects.filter(user=user, email__iexact=email).exists():
        # Already tracked for this user; nothing to do.
        return
    if app_settings.UNIQUE_EMAIL \
            and EmailAddress.objects.filter(email__iexact=email).exists():
        # Email is claimed by another account; cannot create a duplicate.
        return
    EmailAddress.objects.create(user=user,
                                email=email,
                                primary=False,
                                verified=False)
Keep user.email in sync with user.emailaddress_set. Under some circumstances the user.email may not have ended up as an EmailAddress record, e.g. in the case of manually created admin users.
def populate_host_templates(host_templates,
                            hardware_ids=None,
                            virtual_guest_ids=None,
                            ip_address_ids=None,
                            subnet_ids=None):
    """Populate the given host_templates array with the IDs provided.

    Appends one ``{'objectType': ..., 'id': ...}`` dict per id, in the
    order hardware, virtual guests, IP addresses, subnets (same order as
    before; the four duplicated branches are collapsed into one loop).

    :param host_templates: The array to which host templates will be added
    :param hardware_ids: A List of SoftLayer_Hardware ids
    :param virtual_guest_ids: A List of SoftLayer_Virtual_Guest ids
    :param ip_address_ids: A List of SoftLayer_Network_Subnet_IpAddress ids
    :param subnet_ids: A List of SoftLayer_Network_Subnet ids
    """
    id_groups = (
        ('SoftLayer_Hardware', hardware_ids),
        ('SoftLayer_Virtual_Guest', virtual_guest_ids),
        ('SoftLayer_Network_Subnet_IpAddress', ip_address_ids),
        ('SoftLayer_Network_Subnet', subnet_ids),
    )
    for object_type, ids in id_groups:
        if ids is None:
            continue
        host_templates.extend(
            {'objectType': object_type, 'id': object_id}
            for object_id in ids)
Populate the given host_templates array with the IDs provided :param host_templates: The array to which host templates will be added :param hardware_ids: A List of SoftLayer_Hardware ids :param virtual_guest_ids: A List of SoftLayer_Virtual_Guest ids :param ip_address_ids: A List of SoftLayer_Network_Subnet_IpAddress ids :param subnet_ids: A List of SoftLayer_Network_Subnet ids
def send(tag, data=None):
    """Send an event with the given tag and optional data dict directly to
    the master event bus.

    Useful from the shell via salt-run and from orchestration states where
    the ``fire_event`` requisite cannot attach custom data.  Tags are sent
    as-is, without any namespacing prefix.
    """
    payload = data or {}
    bus = salt.utils.event.get_master_event(__opts__,
                                            __opts__['sock_dir'],
                                            listen=False)
    return bus.fire_event(payload, tag)
Send an event with the given tag and data. This is useful for sending events directly to the master from the shell with salt-run. It is also quite useful for sending events in orchestration states where the ``fire_event`` requisite isn't sufficient because it does not support sending custom data with the event. Note that event tags will *not* be namespaced like events sent with the ``fire_event`` requisite! Whereas events produced from ``fire_event`` are prefixed with ``salt/state_result/<jid>/<minion_id>/<name>``, events sent using this runner module will have no such prefix. Make sure your reactors don't expect a prefix! :param tag: the tag to send with the event :param data: an optional dictionary of data to send with the event CLI Example: .. code-block:: bash salt-run event.send my/custom/event '{"foo": "bar"}' Orchestration Example: .. code-block:: yaml # orch/command.sls run_a_command: salt.function: - name: cmd.run - tgt: my_minion - arg: - exit {{ pillar['exit_code'] }} send_success_event: salt.runner: - name: event.send - tag: my_event/success - data: foo: bar - require: - salt: run_a_command send_failure_event: salt.runner: - name: event.send - tag: my_event/failure - data: baz: qux - onfail: - salt: run_a_command .. code-block:: bash salt-run state.orchestrate orch.command pillar='{"exit_code": 0}' salt-run state.orchestrate orch.command pillar='{"exit_code": 1}'
def _handle_userCount(self, data):
    """Handle a user-count change: store it and forward it to listeners."""
    room = self.room
    room.user_count = data
    # Publish the freshly stored value downstream.
    self.conn.enqueue_data("user_count", room.user_count)
Handle user count changes
def empty(self, node):
    """Calculate empty space as a fraction of total space.

    Returns 0 when the node has no (or zero) overall space.
    """
    total = self.overall(node)
    if not total:
        return 0
    used = self.children_sum(self.children(node), node)
    return (total - used) / float(total)
Calculate empty space as a fraction of total space
def QueueResponse(self, response, timestamp=None):
    """Queue the message on the flow's state.

    When no timestamp is given, the flow's frozen timestamp is used.
    """
    when = self.frozen_timestamp if timestamp is None else timestamp
    self.response_queue.append((response, when))
Queues the message on the flow's state.
def clean():
    """Clean data created by this script.

    Deletes all queues, then all jobs, then all persons (same order as
    before).
    """
    for model in (MyQueue, MyJob, Person):
        for instance in model.collection().instances():
            instance.delete()
Clean data created by this script
def split(self, tValues):
    """Split the segment according to the t values.

    ``tValues`` is an iterable of interpolation factors in (0, 1)
    (presumably ascending — TODO confirm with callers).

    * "curve": delegates to ``bezierTools.splitCubicAtT`` with the previous
      on-curve point, the two off-curve handles and the final on-curve point.
    * "line": returns a list of ``[start, end]`` point pairs obtained by
      linear interpolation along the line.
    * "qcurve" and any other segment type: NotImplementedError.
    """
    if self.segmentType == "curve":
        # Cubic: previous on-curve, two handles, final on-curve.
        on1 = self.previousOnCurve
        off1 = self.points[0].coordinates
        off2 = self.points[1].coordinates
        on2 = self.points[2].coordinates
        return bezierTools.splitCubicAtT(on1, off1, off2, on2, *tValues)
    elif self.segmentType == "line":
        segments = []
        x1, y1 = self.previousOnCurve
        x2, y2 = self.points[0].coordinates
        dx = x2 - x1
        dy = y2 - y1
        # Walk along the line, emitting one [previous, new] pair per t.
        pp = x1, y1
        for t in tValues:
            np = (x1+dx*t, y1+dy*t)
            segments.append([pp, np])
            pp = np
        # Final piece runs from the last split point to the end point.
        segments.append([pp, (x2, y2)])
        return segments
    elif self.segmentType == "qcurve":
        # Quadratic-curve splitting has not been implemented yet.
        raise NotImplementedError
    else:
        # Unknown segment type.
        raise NotImplementedError
Split the segment according to the t values.
def from_credentials(credentials, loop: asyncio.AbstractEventLoop = None):
    """Return a new async API object from an existing Credentials object.

    :param credentials: The existing saved credentials.
    :param loop: The asyncio loop; defaults to the current event loop,
        resolved at call time.
    :return: A new API object populated with MyGeotab credentials.
    """
    # The original default of ``asyncio.get_event_loop()`` was evaluated
    # once at import time, which binds every call to whatever loop existed
    # when the module loaded (and is deprecated behavior).  Resolve the
    # loop at call time instead; passing an explicit loop is unchanged.
    if loop is None:
        loop = asyncio.get_event_loop()
    return API(username=credentials.username,
               password=credentials.password,
               database=credentials.database,
               session_id=credentials.session_id,
               server=credentials.server,
               loop=loop)
Returns a new async API object from an existing Credentials object. :param credentials: The existing saved credentials. :param loop: The asyncio loop. :return: A new API object populated with MyGeotab credentials.
def is_absolute_path(path):
    """Check if given path is absolute.

    On Windows absolute paths start with a drive letter; backslashes are
    normalized to slashes before the final check.  On all other systems
    absolute paths start with a slash.
    """
    if os.name == 'nt':
        if re.match(r"[a-zA-Z]:", path):
            return True
        # Normalize Windows separators so "\foo" also counts as absolute.
        path = path.replace("\\", "/")
    return path.startswith("/")
Check if given path is absolute. On Windows absolute paths start with a drive letter. On all other systems absolute paths start with a slash.
def has_nvme_ssd(self):
    """Return True if any drive is an SSD whose protocol is NVMe."""
    return any(
        drive.media_type == constants.MEDIA_TYPE_SSD
        and drive.protocol == constants.PROTOCOL_NVMe
        for drive in self._drives_list())
Return True if any drive is an SSD whose protocol is NVMe.
def remove(self, class_name, name):
    """Removes a single component from the network.

    Removes it from component DataFrames.

    Parameters
    ----------
    class_name : string
        Component class name
    name : string
        Component name

    Examples
    --------
    >>> network.remove("Line","my_line 12345")
    """
    if class_name not in self.components:
        logger.error("Component class {} not found".format(class_name))
        return None
    # Drop the row from the static component DataFrame.
    self.df(class_name).drop(name, inplace=True)
    # Drop the matching column from every time-series DataFrame.
    for frame in itervalues(self.pnl(class_name)):
        if name in frame:
            frame.drop(name, axis=1, inplace=True)
Removes a single component from the network. Removes it from component DataFrames. Parameters ---------- class_name : string Component class name name : string Component name Examples -------- >>> network.remove("Line","my_line 12345")
def delete_jdbc_connection_pool(name, target='server', cascade=False, server=None):
    """Delete a JDBC connection pool on the given target."""
    payload = {'target': target, 'cascade': cascade}
    return _delete_element(name, 'resources/jdbc-connection-pool', payload,
                           server)
Delete a JDBC pool
def replaceWith(self, el):
    """Replace value in this element with values from `el`.

    This is useful when you don't want to change all references to the
    object.

    Args:
        el (obj): :class:`HTMLElement` instance.
    """
    # Plain attributes are copied directly...
    for attr in ("childs", "params", "endtag", "openertag"):
        setattr(self, attr, getattr(el, attr))
    # ...while the cached properties are refreshed from el's accessors.
    self._tagname = el.getTagName()
    self._element = el.tagToString()
    self._istag = el.isTag()
    self._isendtag = el.isEndTag()
    self._iscomment = el.isComment()
    self._isnonpairtag = el.isNonPairTag()
Replace value in this element with values from `el`. This useful when you don't want change all references to object. Args: el (obj): :class:`HTMLElement` instance.
def find(self, path):
    """Return the node for a dotted path, or None.

    Lookup always starts from the root of the tree; a missing component
    or a float leaf encountered mid-path yields None.
    """
    # Climb to the root first.
    node = self
    while node._parent:
        node = node._parent
    # Then descend component by component.
    for component in path.split('.'):
        node = node._tree.get(component, None)
        if node is None or type(node) is float:
            return None
    return node
Return the node for a path, or None.
def split_scoped_hparams(scopes, merged_hparams):
    """Split single HParams with scoped keys into multiple.

    Keys of the form "scope.key" are routed to per-scope HParams objects,
    returned in the same order as *scopes*.
    """
    per_scope = {scope: {} for scope in scopes}
    for scoped_key, value in six.iteritems(merged_hparams.values()):
        # "scope.key" -> bucket under scope with the remainder as the key.
        scope, _, key = scoped_key.partition(".")
        per_scope[scope][key] = value
    return [hparam.HParams(**per_scope[scope]) for scope in scopes]
Split single HParams with scoped keys into multiple.
def copy(self):
    """Return a clone of this retry manager."""
    # NOTE(review): max_jitter is divided by 100 here, which suggests the
    # constructor scales it back up — confirm against Retry.__init__.
    params = dict(
        max_tries=self.max_tries,
        delay=self.delay,
        backoff=self.backoff,
        max_jitter=self.max_jitter / 100.0,
        max_delay=self.max_delay,
        sleep_func=self.sleep_func,
        deadline=self.deadline,
        retry_exceptions=self.retry_exceptions,
    )
    return Retry(**params)
Return a clone of this retry manager
def get_function_source(ft, **kwargs) -> str:
    """Return a link to `ft` in source code, or '' if it cannot be found."""
    try:
        line_number = inspect.getsourcelines(ft)[1]
    except Exception:
        # Builtins, C extensions, etc. have no retrievable source.
        return ''
    module_path = get_module_name(ft).replace('.', '/') + '.py'
    return get_source_link(module_path, line_number, **kwargs)
Returns link to `ft` in source code.
def export_as_json(self):
    """Export the whole roster as currently stored on the client side into
    a JSON-compatible dictionary and return that dictionary.
    """
    items = {}
    for jid, item in self.items.items():
        # JIDs become string keys so the result is JSON-serializable.
        items[str(jid)] = item.export_as_json()
    return {"items": items, "ver": self.version}
Export the whole roster as currently stored on the client side into a JSON-compatible dictionary and return that dictionary.
def avail_images(call=None):
    """Return available Linode images, keyed by their label.

    CLI Example:

    .. code-block:: bash

        salt-cloud --list-images my-linode-config
        salt-cloud -f avail_images my-linode-config
    """
    if call == 'action':
        raise SaltCloudException(
            'The avail_images function must be called with -f or --function.'
        )
    response = _query('avail', 'distributions')
    return {item['LABEL']: item for item in response['DATA']}
Return available Linode images. CLI Example: .. code-block:: bash salt-cloud --list-images my-linode-config salt-cloud -f avail_images my-linode-config
def apply(self, styler):
    """Apply the stored formatter (and its args/kwargs) to a pandas Styler."""
    formatted = styler.format(self.formatter, *self.args, **self.kwargs)
    return formatted
Apply Summary over Pandas Styler
def main(argv=None):
    """ben-sh entry point.

    With ``-g``, generate a template campaign file; otherwise run the
    campaign described by CAMPAIGN_FILE.
    """
    arguments = cli_common(__doc__, argv=argv)
    campaign_file = arguments['CAMPAIGN_FILE']
    if arguments['-g']:
        # Generation mode: write a fresh campaign template, refusing to
        # overwrite an existing file.
        if osp.exists(campaign_file):
            raise Exception('Campaign file already exists')
        with open(campaign_file, 'w') as ostr:
            Generator().write(ostr)
    else:
        node = arguments.get('-n')
        output_dir = arguments.get('--output-dir')
        exclude_nodes = arguments.get('--exclude-nodes')
        srun_tag = arguments.get('--srun')
        driver = CampaignDriver(
            campaign_file,
            node=node,
            output_dir=output_dir,
            srun=srun_tag,
            exclude_nodes=exclude_nodes,
        )
        # Execute the campaign.
        driver()
        if argv is not None:
            # Programmatic invocation: hand the driver back to the caller.
            return driver
        # CLI invocation: write the resulting campaign path to the
        # requested file descriptor (stdout by default).
        campaign_fd = int(arguments.get('--campaign-path-fd') or 1)
        message = (osp.abspath(driver.campaign_path) + '\n').encode()
        os.write(campaign_fd, message)
ben-sh entry point
def main():
    """moban program entry point.

    Parses command-line options, loads template engines, then renders
    either from a moban file (if one is given or found) or purely from
    command-line arguments, exiting with the configured exit code on
    known errors.
    """
    parser = create_parser()
    options = vars(parser.parse_args())
    # --force bypasses the template/output hash cache.
    HASH_STORE.IGNORE_CACHE_FILE = options[constants.LABEL_FORCE]
    moban_file = options[constants.LABEL_MOBANFILE]
    load_engine_factory_and_engines()
    if moban_file is None:
        # Fall back to the conventional moban file in the working directory.
        moban_file = mobanfile.find_default_moban_file()
    if moban_file:
        try:
            count = handle_moban_file(moban_file, options)
            moban_exit(options[constants.LABEL_EXIT_CODE], count)
        except (
            exceptions.DirectoryNotFound,
            exceptions.NoThirdPartyEngine,
            exceptions.MobanfileGrammarException,
        ) as e:
            # Known failure modes: report and exit with the error status.
            reporter.report_error_message(str(e))
            moban_exit(options[constants.LABEL_EXIT_CODE], constants.ERROR)
    else:
        try:
            count = handle_command_line(options)
            moban_exit(options[constants.LABEL_EXIT_CODE], count)
        except exceptions.NoTemplate as e:
            reporter.report_error_message(str(e))
            moban_exit(options[constants.LABEL_EXIT_CODE], constants.ERROR)
program entry point
def merge_noun_chunks(doc):
    """Merge noun chunks into a single token.

    doc (Doc): The Doc object.
    RETURNS (Doc): The Doc object with merged noun chunks.

    DOCS: https://spacy.io/api/pipeline-functions#merge_noun_chunks
    """
    # Noun chunks require a dependency parse; pass through otherwise.
    if not doc.is_parsed:
        return doc
    with doc.retokenize() as retokenizer:
        for chunk in doc.noun_chunks:
            root = chunk.root
            retokenizer.merge(chunk, attrs={"tag": root.tag, "dep": root.dep})
    return doc
Merge noun chunks into a single token. doc (Doc): The Doc object. RETURNS (Doc): The Doc object with merged noun chunks. DOCS: https://spacy.io/api/pipeline-functions#merge_noun_chunks
def _get_supercell_size(self, s1, s2):
    """Return the supercell size, and whether it should be applied to s1.

    The size ratio ``fu`` is computed according to ``self._supercell_size``
    ('num_sites', 'num_atoms', 'volume', or an element/species symbol).
    If fu >= 2/3 (including fu == 1), s1_supercell is returned as True,
    to avoid ambiguity.

    Raises:
        ValueError: if ``self._supercell_size`` is not a recognized mode
            or a resolvable element/species.
    """
    if self._supercell_size == 'num_sites':
        fu = s2.num_sites / s1.num_sites
    elif self._supercell_size == 'num_atoms':
        fu = s2.composition.num_atoms / s1.composition.num_atoms
    elif self._supercell_size == 'volume':
        fu = s2.volume / s1.volume
    else:
        try:
            el = get_el_sp(self._supercell_size)
            fu = s2.composition[el] / s1.composition[el]
        except Exception as exc:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit propagate; chain the cause for easier debugging.
            raise ValueError('Invalid argument for supercell_size.') from exc

    if fu < 2/3:
        # s2 is the smaller cell: invert the ratio and scale s1 instead.
        return int(round(1/fu)), False
    else:
        return int(round(fu)), True
Returns the supercell size, and whether the supercell should be applied to s1. If fu == 1, s1_supercell is returned as true, to avoid ambiguity.
def __need_ssl(self, host_and_port=None):
    """Whether the given host/port pair needs SSL.

    :param (str,int) host_and_port: the host/port pair to check,
        default current_host_and_port
    """
    target = host_and_port or self.current_host_and_port
    return target in self.__ssl_params
Whether current host needs SSL or not. :param (str,int) host_and_port: the host/port pair to check, default current_host_and_port
def copy(self):
    """Make a copy of this instance.

    Copies the local data stored as simple types and copies the client
    attached to this instance.

    :rtype: :class:`~google.cloud.spanner_v1.instance.Instance`
    :returns: A copy of the current instance.
    """
    cloned_client = self._client.copy()
    return self.__class__(
        self.instance_id,
        cloned_client,
        self.configuration_name,
        node_count=self.node_count,
        display_name=self.display_name,
    )
Make a copy of this instance. Copies the local data stored as simple types and copies the client attached to this instance. :rtype: :class:`~google.cloud.spanner_v1.instance.Instance` :returns: A copy of the current instance.
def logout(self):
    """Log the current user out.

    Raises:
        RuntimeError: if no user is logged in, or the server rejects
            the logout request.
    """
    if not self.logged_in:
        raise RuntimeError("User is not logged in")
    if self.conn.connected:
        params = {"room": self.conn.room.room_id}
        # Ask the API to log out first; only on success tear down the
        # connection-side session state.
        resp = self.conn.make_api_call("logout", params)
        if not resp.get("success", False):
            raise RuntimeError(
                f"Logout unsuccessful: "
                f"{resp['error'].get('message') or resp['error']}"
            )
        self.conn.make_call("logout", params)
        # Drop the session cookie so subsequent requests are anonymous.
        self.conn.cookies.pop("session")
    self.logged_in = False
Logs your user out
def acquire_write(self):
    """Acquire a write lock.

    Only one thread can hold this lock, and only when no read locks are
    also held.  Blocks until every reader and any current writer have
    released the lock.
    """
    self.monitor.acquire()
    # rwlock appears to encode the lock state: 0 = free, -1 = writer
    # active, positive = reader count — confirm against acquire_read.
    while self.rwlock != 0:
        self.writers_waiting += 1
        # Wait to be signalled; loop because another waiter may win the
        # race after the wakeup.
        self.writers_ok.wait()
        self.writers_waiting -= 1
    self.rwlock = -1
    self.monitor.release()
Acquire a write lock. Only one thread can hold this lock, and only when no read locks are also held.
def AddAdGroup(self, client_customer_id, campaign_id, name, status):
    """Create a new ad group.

    Args:
        client_customer_id: str Client Customer Id used to create the AdGroup.
        campaign_id: str Id of the campaign to use.
        name: str Name to assign to the AdGroup.
        status: str Status to assign to the AdGroup when it is created.
    """
    self.client.SetClientCustomerId(client_customer_id)
    service = self.client.GetService('AdGroupService')
    operation = {
        'operator': 'ADD',
        'operand': {
            'campaignId': campaign_id,
            'name': name,
            'status': status
        }
    }
    service.mutate([operation])
Create a new ad group. Args: client_customer_id: str Client Customer Id used to create the AdGroup. campaign_id: str Id of the campaign to use. name: str Name to assign to the AdGroup. status: str Status to assign to the AdGroup when it is created.
def set_install_id(filename, install_id):
    """Set install id for library named in `filename`.

    Parameters
    ----------
    filename : str
        filename of library
    install_id : str
        install id for library `filename`

    Raises
    ------
    InstallNameError
        if `filename` has no install id
    """
    current = get_install_id(filename)
    if current is None:
        raise InstallNameError('{0} has no install id'.format(filename))
    back_tick(['install_name_tool', '-id', install_id, filename])
Set install id for library named in `filename` Parameters ---------- filename : str filename of library install_id : str install id for library `filename` Raises ------ RuntimeError if `filename` has not install id
def show_service_profile(self, flavor_profile, **_params):
    """Fetches information for a certain Neutron service flavor profile."""
    path = self.service_profile_path % flavor_profile
    return self.get(path, params=_params)
Fetches information for a certain Neutron service flavor profile.
def make_logger(scraper):
    """Create two log handlers, one emitting info-level output to the
    console, the other storing all logging in a JSON file which is later
    used to generate reports.
    """
    root = logging.getLogger('')
    root.setLevel(logging.DEBUG)
    # Quiet down requests' chatter.
    logging.getLogger("requests").setLevel(logging.WARNING)

    # Full debug stream into the JSON report file.
    file_handler = logging.FileHandler(log_path(scraper))
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(jsonlogger.JsonFormatter(make_json_format()))
    root.addHandler(file_handler)

    # Human-readable info-level stream on the console.
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(
        logging.Formatter('%(name)s [%(levelname)-8s]: %(message)s'))
    root.addHandler(console_handler)

    return TaskAdapter(logging.getLogger(scraper.name), scraper)
Create two log handlers, one to emit info-level output to the console, the other to store all logging in a JSON file which will later be used to generate reports.
def send_mail_worker(config, mail, event):
    """Worker task to send out an email; blocks the process unless threaded.

    Returns a ``(success, log, event)`` tuple: *success* is False only on
    a mailserver timeout, *log* is a human-readable transcript, *event*
    is passed through unchanged.
    """
    log = ""
    try:
        # Choose the transport based on configuration; both use a 30s
        # socket timeout.
        if config.mail_ssl:
            server = SMTP_SSL(config.mail_server,
                              port=config.mail_server_port, timeout=30)
        else:
            server = SMTP(config.mail_server,
                          port=config.mail_server_port, timeout=30)
        if config.mail_tls:
            log += 'Starting TLS\n'
            server.starttls()
        if config.mail_username != '':
            log += 'Logging in with ' + str(config.mail_username) + "\n"
            server.login(config.mail_username, config.mail_password)
        else:
            log += 'No username, trying anonymous access\n'
        log += 'Sending Mail\n'
        response_send = server.send_message(mail)
        server.quit()
    except timeout as e:
        # Presumably socket.timeout — only timeouts are handled here;
        # other SMTP errors propagate to the caller.  TODO confirm.
        log += 'Could not send email to enrollee, mailserver timeout: ' + str(e) + "\n"
        return False, log, event
    log += 'Server response:' + str(response_send)
    return True, log, event
Worker task to send out an email, which blocks the process unless it is threaded
def set_visible_func(self, visible_func):
    """Set the function to decide visibility of an item.

    :param visible_func: A callable that returns a boolean result to
        decide if an item should be visible, for example::

            def is_visible(item):
                return True
    """
    model_filter = self.model_filter
    model_filter.set_visible_func(
        self._internal_visible_func,
        visible_func,
    )
    self._visible_func = visible_func
    # Re-evaluate visibility of every row with the new predicate.
    model_filter.refilter()
Set the function to decide visibility of an item :param visible_func: A callable that returns a boolean result to decide if an item should be visible, for example:: def is_visible(item): return True
def touch():
    """Create a new bucket, commit it, and print it in green."""
    from .models import Bucket
    new_bucket = Bucket.create()
    db.session.commit()
    click.secho(str(new_bucket), fg='green')
Create new bucket.
def on_sighup(self, signal_unused, frame_unused):
    """Reload the configuration.

    :param int signal_unused: Unused signal number
    :param frame frame_unused: Unused frame the signal was caught in
    """
    # Push changed HTTP server settings onto the live server object.
    for name in self.http_config:
        desired = self.http_config[name]
        if getattr(self.http_server, name) != desired:
            LOGGER.debug('Changing HTTPServer %s setting', name)
            setattr(self.http_server, name, desired)

    # Push changed application settings.
    for name in self.settings:
        desired = self.settings[name]
        if self.app.settings[name] != desired:
            LOGGER.debug('Changing Application %s setting', name)
            self.app.settings[name] = desired

    # Rebuild the routing table from the (possibly updated) config.
    self.app.handlers = []
    self.app.named_handlers = {}
    routes = self.namespace.config.get(config.ROUTES)
    self.app.add_handlers(".*$", self.app.prepare_routes(routes))

    LOGGER.info('Configuration reloaded')
Reload the configuration :param int signal_unused: Unused signal number :param frame frame_unused: Unused frame the signal was caught in