code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def excluded(filename):
    """Check if options.exclude contains a pattern that matches filename."""
    basename = os.path.basename(filename)
    for pattern in options.exclude:
        if fnmatch(basename, pattern):
            return True
    # Explicit False instead of the original implicit None so callers get a
    # real boolean (both are falsy, so this is backward compatible).
    return False
def free_vpcid_for_switch(vpc_id, nexus_ip):
    """Free a vpc id for the given switch_ip."""
    LOG.debug("free_vpcid_for_switch() called")
    # vpc_id 0 is the "no vpc allocated" sentinel; nothing to release.
    if vpc_id != 0:
        update_vpc_entry([nexus_ip], vpc_id, False, False)
def wrap_as_node(self, func):
    """Wrap a function as a node."""
    name = self.get_name(func)

    @wraps(func)
    def wrapped(*args, **kwargs):
        """Wrapped version of func."""
        message = self.get_message_from_call(*args, **kwargs)
        self.logger.info('calling "%s" with %r', name, message)
        result = func(message)
        if isinstance(result, GeneratorType):
            # Drain the generator, skipping NoResult sentinels.
            results = [self.wrap_result(name, item)
                       for item in result if item is not NoResult]
            self.logger.debug('%s returned generator yielding %d items',
                              func, len(results))
            for item in results:
                self.route(name, item)
            return tuple(results)
        if result is NoResult:
            return result
        result = self.wrap_result(name, result)
        self.logger.debug('%s returned single value %s', func, result)
        self.route(name, result)
        return result

    return wrapped
def read(database, table, key):
    """Does a single read operation."""
    # NOTE(review): the query is built by string interpolation; if table/key
    # can be untrusted this is SQL-injectable -- confirm inputs are trusted
    # or switch to parameterized queries.
    with database.snapshot() as snapshot:
        result = snapshot.execute_sql(
            'SELECT u.* FROM %s u WHERE u.id="%s"' % (table, key))
        for row in result:
            key = row[0]
            for i in range(NUM_FIELD):
                field = row[i + 1]
def humanize_timedelta(seconds):
    """Creates a string representation of timedelta."""
    hours, remainder = divmod(seconds, 3600)
    days, hours = divmod(hours, 24)
    minutes, seconds = divmod(remainder, 60)
    # Only the two largest non-zero units are reported, matching the
    # original tiered formatting.
    if days:
        parts = ['{}d'.format(days)]
        if hours:
            parts.append('{}h'.format(hours))
        if minutes:
            parts.append('{}m'.format(minutes))
        return ' '.join(parts)
    if hours:
        parts = ['{}h'.format(hours)]
        if minutes:
            parts.append('{}m'.format(minutes))
        return ' '.join(parts)
    if minutes:
        parts = ['{}m'.format(minutes)]
        if seconds:
            parts.append('{}s'.format(seconds))
        return ' '.join(parts)
    return '{}s'.format(seconds)
def drop_nodes(self) -> None:
    """Drop all nodes in the database."""
    start = time.time()
    self.session.query(Node).delete()
    self.session.commit()
    log.info('dropped all nodes in %.2f seconds', time.time() - start)
def pack(self):
    """Return binary format of packet.

    The returned string is the binary format of the packet with stuffing
    and framing applied. It is ready to be sent to the GPS.
    """
    try:
        candidates = get_structs_for_fields([self.fields[0]])
    except TypeError:
        raise PackError(self)
    if candidates == []:
        # Retry with the first two fields before giving up.
        try:
            candidates = get_structs_for_fields(
                [self.fields[0], self.fields[1]])
        except (IndexError, TypeError):
            raise PackError(self)
    for candidate in candidates:
        try:
            return candidate.pack(*self.fields)
        except struct.error:
            pass
    raise PackError(self)
def pfadd(self, key, *elements):
    """Add all the element arguments to the HyperLogLog stored at `key`.

    Returns ``1`` if the approximated cardinality changed after executing
    the command, ``0`` otherwise. An empty HyperLogLog structure is created
    automatically if the key does not exist.

    .. note:: **Time complexity**: ``O(1)`` to add every element.

    :param key: The key to add the elements to
    :type key: :class:`str`, :class:`bytes`
    :param elements: One or more elements to add
    :type elements: :class:`str`, :class:`bytes`
    :rtype: bool
    :raises: :exc:`~tredis.exceptions.RedisError`
    """
    command = [b'PFADD', key]
    command.extend(elements)
    return self._execute(command, 1)
def labels():
    """Path to labels file."""
    here = path.dirname(path.realpath(__file__))
    target = path.join(here, path.pardir, '../gzoo_data',
                       'train_solution.csv')
    return path.normpath(target)
def get_child(parent, tag_name, root_or_cache, namespace):
    """Get first sub-child or referenced sub-child with given name."""
    if parent is None:
        return None
    found = parent.find('.//' + namespace + tag_name)
    if found is not None:
        return found
    # Fall back to dereferencing a "-REF" element when no direct child.
    reference = parent.find('.//' + namespace + tag_name + '-REF')
    if reference is None:
        return None
    if isinstance(root_or_cache, ArTree):
        return get_cached_element_by_path(root_or_cache, reference.text)
    return get_element_by_path(root_or_cache, reference.text, namespace)
def font(self):
    """Font object controlling text format defaults for this chart."""
    txPr = self._chartSpace.get_or_add_txPr()
    defRPr = txPr.p_lst[0].get_or_add_pPr().get_or_add_defRPr()
    return Font(defRPr)
def _visit_body(self, node):
    """Traverse the body of the node manually.

    If the first node is an expression which contains a string or bytes it
    marks that as a docstring.
    """
    body = node.body
    if (body and isinstance(body[0], ast.Expr)
            and self.is_base_string(body[0].value)):
        body[0].value.is_docstring = True
        self.visit(body[0].value)
    for sub_node in body:
        self.visit(sub_node)
def decipher_block(self, state):
    """Perform AES block decipher on input."""
    # NOTE(review): a wrong-sized block is only logged, not raised, and the
    # routine still proceeds -- confirm this is intentional.
    if len(state) != 16:
        Log.error(u"Expecting block of 16")
    self._add_round_key(state, self._Nr)
    for round_no in range(self._Nr - 1, 0, -1):
        self._i_shift_rows(state)
        self._i_sub_bytes(state)
        self._add_round_key(state, round_no)
        self._mix_columns(state, True)
    # Final round has no inverse MixColumns step.
    self._i_shift_rows(state)
    self._i_sub_bytes(state)
    self._add_round_key(state, 0)
    return state
def setup_shot_page(self, ):
    """Create and set the model on the shot page

    :returns: None
    :rtype: None
    :raises: None
    """
    resize_mode = QtGui.QHeaderView.ResizeToContents
    self.shot_asset_treev.header().setResizeMode(resize_mode)
    self.shot_task_tablev.horizontalHeader().setResizeMode(resize_mode)
def valuemap(f):
    """Decorator to help PEG functions handle value conversions."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        if 'value' not in kwargs:
            return f(*args, **kwargs)
        val = kwargs.pop('value')
        parser = f(*args, **kwargs)

        def valued_f(*args, **kwargs):
            s, obj, span = parser(*args, **kwargs)
            # A callable value transforms the parse result; anything else
            # replaces it outright.
            if callable(val):
                return PegreResult(s, val(obj), span)
            return PegreResult(s, val, span)

        return valued_f

    return wrapper
def generichash_blake2b_update(state, data):
    """Update the blake2b hash state

    :param state: a initialized Blake2bState object as returned from
                  :py:func:`.crypto_generichash_blake2b_init`
    :type state: :py:class:`.Blake2State`
    :param data:
    :type data: bytes
    """
    ensure(isinstance(state, Blake2State),
           'State must be a Blake2State object',
           raising=exc.TypeError)
    ensure(isinstance(data, bytes),
           'Input data must be a bytes sequence',
           raising=exc.TypeError)
    rc = lib.crypto_generichash_blake2b_update(
        state._statebuf, data, len(data))
    ensure(rc == 0, 'Unexpected failure', raising=exc.RuntimeError)
def set_signal(self, signal, name):
    r"""Attach a signal to the graph.

    Attached signals can be accessed (and modified or deleted) through the
    :attr:`signals` dictionary.

    Parameters
    ----------
    signal : array_like
        A sequence that assigns a value to each vertex. The value of the
        signal at vertex `i` is ``signal[i]``.
    name : String
        Name of the signal used as a key in the :attr:`signals` dictionary.

    Examples
    --------
    >>> graph = graphs.Sensor(10)
    >>> signal = np.arange(graph.n_vertices)
    >>> graph.set_signal(signal, 'mysignal')
    >>> graph.signals
    {'mysignal': array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])}

    """
    signal = self._check_signal(signal)
    self.signals[name] = signal
def update(self, params, ignore_set=False, overwrite=False):
    """Set instance values from dictionary.

    :param dict params: Click context params.
    :param bool ignore_set: Skip already-set values instead of raising
        AttributeError.
    :param bool overwrite: Allow overwriting already-set values.
    """
    log = logging.getLogger(__name__)
    valid = {i[0] for i in self}
    for key, value in params.items():
        if not hasattr(self, key):
            raise AttributeError("'{}' object has no attribute '{}'".format(
                self.__class__.__name__, key))
        if key not in valid:
            message = "'{}' object does not support item assignment on '{}'"
            raise AttributeError(
                message.format(self.__class__.__name__, key))
        if key in self._already_set:
            if ignore_set:
                log.debug('%s already set in config, skipping.', key)
                continue
            if not overwrite:
                message = ("'{}' object does not support item "
                           "re-assignment on '{}'")
                raise AttributeError(
                    message.format(self.__class__.__name__, key))
        setattr(self, key, value)
        self._already_set.add(key)
def submit(self, q, context=None, task_name="casjobs", estimate=30):
    """Submit a job to CasJobs.

    ## Arguments
    * `q` (str): The SQL query.

    ## Keyword Arguments
    * `context` (str): Casjobs context used for this query.
    * `task_name` (str): The task name.
    * `estimate` (int): Estimate of the time this job will take (in minutes).

    ## Returns
    * `job_id` (int): The submission ID.
    """
    params = {
        "qry": q,
        "context": context or self.context,
        "taskname": task_name,
        "estimate": estimate,
    }
    response = self._send_request("SubmitJob", params=params)
    return int(self._parse_single(response.text, "long"))
def getMechanismName(self):
    """Return the authentication mechanism name."""
    if self._server_side:
        mech = self._authenticator.current_mech
        return mech.getMechanismName() if mech else None
    # Client side stores the mechanism name directly (may be absent).
    return getattr(self._authenticator, 'authMech', None)
def tag_add(package, tag, pkghash):
    """Add a new tag for a given package hash.

    Unlike versions, tags can have an arbitrary format, and can be modified
    and deleted. When a package is pushed, it gets the "latest" tag.
    """
    team, owner, pkg = parse_package(package)
    session = _get_session(team)
    url = "{url}/api/tag/{owner}/{pkg}/{tag}".format(
        url=get_registry_url(team), owner=owner, pkg=pkg, tag=tag)
    payload = json.dumps(dict(hash=_match_hash(package, pkghash)))
    session.put(url, data=payload)
def check_ssl():
    """Attempts to import SSL or raises an exception."""
    try:
        import ssl
    # Was a bare ``except:`` which also swallowed KeyboardInterrupt and
    # SystemExit; only an import failure is meaningful here.
    except ImportError:
        log.warning('Error importing SSL module', stack_info=True)
        print(SSL_ERROR_MESSAGE)
        sys.exit(1)
    else:
        log.info('SSL module is available')
        return ssl
def _jobStoreClasses(self):
    """
    A list of concrete AbstractJobStore implementations whose dependencies
    are installed.

    :rtype: list[AbstractJobStore]
    """
    # Hoisted out of the loop: the import statement is loop-invariant.
    from importlib import import_module
    jobStoreClassNames = (
        "toil.jobStores.azureJobStore.AzureJobStore",
        "toil.jobStores.fileJobStore.FileJobStore",
        "toil.jobStores.googleJobStore.GoogleJobStore",
        "toil.jobStores.aws.jobStore.AWSJobStore",
        "toil.jobStores.abstractJobStore.JobStoreSupport")
    jobStoreClasses = []
    for className in jobStoreClassNames:
        moduleName, className = className.rsplit('.', 1)
        try:
            module = import_module(moduleName)
        except ImportError:
            # Missing optional extras are expected; skip silently-ish.
            logger.debug("Unable to import '%s' as is expected if the "
                         "corresponding extra was omitted at installation "
                         "time.", moduleName)
        else:
            jobStoreClasses.append(getattr(module, className))
    return jobStoreClasses
def gen_all_voltages_for_injections(self, injections_raw):
    """For a given set of current injections AB, generate all possible
    unique potential measurements.

    After Noel and Xu, 1991, for N electrodes the number of possible
    voltage dipoles for a given current dipole is
    :math:`(N - 2)(N - 3) / 2`, including normal and reciprocal
    measurements. All generated measurements are added to the instance;
    use ConfigManager.split_into_normal_and_reciprocal() to separate them.

    Parameters
    ----------
    injections_raw : numpy.ndarray
        Kx2 array holding K current injection dipoles A-B

    Returns
    -------
    configs : numpy.ndarray
        Nx4 array holding all possible measurement configurations
    """
    injections = injections_raw.astype(int)
    N = self.nr_electrodes
    all_quadpoles = []
    for idipole in injections:
        current = np.sort(idipole) - 1
        # Voltage electrodes are all electrodes except the current pair.
        velecs = list(range(1, N + 1))
        del velecs[current[1]]
        del velecs[current[0]]
        for voltage in itertools.permutations(velecs, 2):
            all_quadpoles.append(
                (idipole[0], idipole[1], voltage[0], voltage[1]))
    configs_unsorted = np.array(all_quadpoles)
    configs_sorted = np.hstack((
        np.sort(configs_unsorted[:, 0:2], axis=1),
        np.sort(configs_unsorted[:, 2:4], axis=1),
    ))
    configs = self.remove_duplicates(configs_sorted)
    self.add_to_configs(configs)
    self.remove_duplicates()
    return configs
def prepare_inventory(self):
    """Prepares the inventory default under ``private_data_dir`` if it's
    not overridden by the constructor.
    """
    if self.inventory is None:
        self.inventory = os.path.join(self.private_data_dir, "inventory")
def write(self, value):
    """If capacity != -1 and length of file > capacity it is time to
    switch to the temp-file strategy.
    """
    if self.capacity > 0 and self.strategy == 0:
        value_length = len(value)
        if value_length >= self.capacity:
            switch_strategy = True
        else:
            # Seek to EOF to measure the current size.
            self.seek(0, 2)
            switch_strategy = (self.tell() + value_length) >= self.capacity
        if switch_strategy:
            self.makeTempFile()
    if not isinstance(value, six.binary_type):
        value = value.encode('utf-8')
    self._delegate.write(value)
def add(self, value):
    """Add new record to history. Record will be added to the end

    :param value: new record
    :return: int record position in history
    """
    position = len(self.__history)
    self.__history.append(value)
    return position
def argument_request_user(obj, func_name):
    """Pass request.user as an argument to the given function call."""
    func = getattr(obj, func_name)
    request = threadlocals.request()
    # Outside a request cycle there is no user; implicitly return None.
    if request:
        return func(request.user)
async def get_update_info(self, from_network=True) -> SoftwareUpdateInfo:
    """Get information about updates."""
    # The service expects the flag as a lowercase string.
    network_flag = "true" if from_network else "false"
    info = await self.services["system"]["getSWUpdateInfo"](
        network=network_flag)
    return SoftwareUpdateInfo.make(**info)
def display_information_message_bar(
        title=None,
        message=None,
        more_details=None,
        button_text=tr('Show details ...'),
        duration=8,
        iface_object=iface):
    """Display an information message bar.

    :param title: The title of the message bar.
    :type title: basestring
    :param message: The message inside the message bar.
    :type message: basestring
    :param more_details: The message inside the 'Show details' button.
    :type more_details: basestring
    :param button_text: The text of the button if 'more_details' is not
        empty.
    :type button_text: basestring
    :param duration: The duration for the display, default is 8 seconds.
    :type duration: int
    :param iface_object: The QGIS IFace instance. Note that we cannot use
        qgis.utils.iface since it is not available in our test environment.
    :type iface_object: QgisInterface
    """
    iface_object.messageBar().clearWidgets()
    widget = iface_object.messageBar().createMessage(title, message)
    if more_details:
        button = QPushButton(widget)
        button.setText(button_text)
        button.pressed.connect(
            lambda: display_information_message_box(
                title=title, message=more_details))
        widget.layout().addWidget(button)
    iface_object.messageBar().pushWidget(widget, Qgis.Info, duration)
def close(self):
    """Stops accepting new connections, cancels all currently running
    requests.

    Request handlers are able to handle `CancelledError` and exit properly.
    """
    if self._server is None:
        raise RuntimeError('Server is not started')
    self._server.close()
    for handler in self._handlers:
        handler.close()
def formvalue(form, key):
    """Get value with given key from WSGI form."""
    value = form.get(key)
    # Multi-valued fields arrive as lists; take the first entry.
    if isinstance(value, list):
        return value[0]
    return value
def newest(cls, session):
    """Fetches the latest media added to MAL.

    :type session: :class:`myanimelist.session.Session`
    :param session: A valid MAL session

    :rtype: :class:`.Media`
    :return: the newest media on MAL

    :raises: :class:`.MalformedMediaPageError`
    """
    media_type = cls.__name__.lower()
    page = session.session.get(
        u'http://myanimelist.net/' + media_type +
        u'.php?o=9&c[]=a&c[]=d&cv=2&w=1').text
    soup = utilities.get_clean_dom(page)
    latest_entry = soup.find(u"div", {u"class": u"hoverinfo"})
    if not latest_entry:
        raise MalformedMediaPageError(
            0, page, u"No media entries found on recently-added page")
    # The rel attribute is e.g. "#12345"; strip the leading character.
    latest_id = int(latest_entry[u'rel'][1:])
    return getattr(session, media_type)(latest_id)
def unsubscribe(self, message, handler):
    """Removes handler from message listeners.

    :param str message: Name of message to unsubscribe handler from.
    :param callable handler: Callable that should be removed as handler
        for `message`.
    """
    with self._lock:
        self._subscribers[message].remove(WeakCallable(handler))
def shutdown(url=None):
    """Stops the Host passed by parameter or all of them if none is
    specified, stopping at the same time all its actors.

    Should be called at the end of its usage, to finish correctly all the
    connections and threads.
    """
    global core_type
    if url is None:
        for host in util.hosts.values():
            host.shutdown()
        core_type = None
    else:
        util.hosts[url].shutdown()
def get_schema(self, schema_id):
    """Retrieves the schema with the given schema_id from the registry and
    returns it as a `dict`.
    """
    response = requests.get(self._url('/schemas/ids/{}', schema_id))
    raise_if_failed(response)
    # The registry wraps the schema as a JSON string inside a JSON object.
    return json.loads(response.json()['schema'])
def html_parts(input_string, source_path=None, destination_path=None,
               input_encoding='unicode', doctitle=1,
               initial_header_level=1):
    """Given an input string, returns a dictionary of HTML document parts.

    Dictionary keys are the names of parts, and values are Unicode strings;
    encoding is up to the client.

    Parameters:

    - `input_string`: A multi-line text string; required.
    - `source_path`: Path to the source file or object. Optional, but
      useful for diagnostic output (system messages).
    - `destination_path`: Path to the file or object which will receive
      the output; optional. Used for determining relative paths
      (stylesheets, source links, etc.).
    - `input_encoding`: The encoding of `input_string`. If it is an
      encoded 8-bit string, provide the correct encoding. If it is a
      Unicode string, use "unicode", the default.
    - `doctitle`: Disable the promotion of a lone top-level section title
      to document title; enabled by default.
    - `initial_header_level`: The initial level for header elements
      (e.g. 1 for "<h1>").
    """
    overrides = {
        'input_encoding': input_encoding,
        'doctitle_xform': doctitle,
        'initial_header_level': initial_header_level,
        'report_level': 5,
    }
    return core.publish_parts(
        source=input_string,
        source_path=source_path,
        destination_path=destination_path,
        writer_name='html',
        settings_overrides=overrides)
def create_serving_logger() -> Logger:
    """Create a logger for serving.

    This creates a logger named quart.serving.
    """
    logger = getLogger('quart.serving')
    # Only set a level if none was configured by the application.
    if logger.level == NOTSET:
        logger.setLevel(INFO)
    logger.addHandler(serving_handler)
    return logger
def iterate_with_exp_backoff(base_iter,
                             max_num_tries=6,
                             max_backoff=300.0,
                             start_backoff=4.0,
                             backoff_multiplier=2.0,
                             frac_random_backoff=0.25):
    """Iterate with exponential backoff on failures.

    Useful to wrap results of datastore Query.fetch to avoid 429 error.

    Args:
      base_iter: basic iterator of generator object
      max_num_tries: maximum number of tries for each request
      max_backoff: maximum backoff, in seconds
      start_backoff: initial value of backoff
      backoff_multiplier: backoff multiplier
      frac_random_backoff: fraction of the value of random part of the
        backoff

    Yields:
      values yielded by base iterator
    """
    try_number = 0
    if hasattr(base_iter, '__iter__'):
        base_iter = iter(base_iter)
    while True:
        try:
            yield next(base_iter)
            # A successful fetch resets the retry counter.
            try_number = 0
        except StopIteration:
            break
        except TooManyRequests as e:
            logging.warning('TooManyRequests error: %s', tb.format_exc())
            if try_number >= max_num_tries:
                logging.error(
                    'Number of tries exceeded, too many requests: %s', e)
                raise
            sleep_time = start_backoff * math.pow(
                backoff_multiplier, try_number)
            sleep_time *= (1.0 + frac_random_backoff * random.random())
            sleep_time = min(sleep_time, max_backoff)
            logging.warning('Too many requests error, '
                            'retrying with exponential backoff %.3f',
                            sleep_time)
            time.sleep(sleep_time)
            try_number += 1
def setitem(self, key, value):
    """Maps dictionary keys to values for assignment.

    Called for dictionary style access with assignment.
    """
    with self.lock:
        self.tbl[key] = value
def _is_word_type(token_type):
    """Return true if this is a word-type token."""
    word_types = (TokenType.Word,
                  TokenType.QuotedLiteral,
                  TokenType.UnquotedLiteral,
                  TokenType.Number,
                  TokenType.Deref)
    return token_type in word_types
def load_patt(filename):
    """Loads a file that was saved with the save_patt routine."""
    with open(filename) as f:
        lines = f.readlines()
    # First line holds the pattern dimensions.
    header = lines[0].split(',')
    patt = np.zeros([int(header[0]), int(header[1])], dtype=np.complex128)
    for line in lines[1:]:
        parts = line.split(',')
        n = int(parts[0])
        m = int(parts[1])
        patt[n, m] = float(parts[2]) + 1j * float(parts[3])
    return sp.ScalarPatternUniform(patt, doublesphere=False)
def consume_value(self, ctx, opts):
    """Retrieve default value and display it when prompt is disabled."""
    value = click.Option.consume_value(self, ctx, opts)
    if not value:
        gandi = ctx.obj
        value = gandi.get(self.name)
        if value is not None:
            self.display_value(ctx, value)
        elif self.default is None and self.required:
            metavar = ''
            if self.type.name not in ['integer', 'text']:
                metavar = self.make_metavar()
            gandi.echo('%s %s' % (self.help, metavar))
    return value
def start_capture(self):
    """Start capturing the rectangle."""
    previous_map_tool = self.canvas.mapTool()
    # Remember the active tool (unless it is already ours) so it can be
    # restored afterwards.
    if previous_map_tool != self.tool:
        self.previous_map_tool = previous_map_tool
    self.canvas.setMapTool(self.tool)
    self.hide()
def to_spans(self):
    """Convert the tree to a set of nonterms and spans."""
    spans = set()
    self._convert_to_spans(self.tree, 1, spans)
    return spans
def _get_erred_shared_settings_module(self):
    """Returns a LinkList based module which contains link to shared
    service setting instances in ERRED state.
    """
    result_module = modules.LinkList(
        title=_('Shared provider settings in erred state'))
    result_module.template = 'admin/dashboard/erred_link_list.html'
    erred_state = structure_models.SharedServiceSettings.States.ERRED
    queryset = structure_models.SharedServiceSettings.objects
    erred_count = queryset.filter(state=erred_state).count()
    if not erred_count:
        result_module.pre_content = _('Nothing found.')
        return result_module
    result_module.title = '%s (%s)' % (result_module.title, erred_count)
    for service_settings in queryset.filter(state=erred_state).iterator():
        module_child = self._get_link_to_instance(service_settings)
        module_child['error'] = service_settings.error_message
        result_module.children.append(module_child)
    return result_module
def grant(self, auth, resource, permissions, ttl=None, defer=False):
    """Grant resources with specific permissions and return a token.

    Args:
        auth: <cik>
        resource: Alias or ID of resource.
        permissions: permissions of resources.
        ttl: Time To Live.
    """
    args = [resource, permissions]
    # TTL is optional and travels as a trailing options dict.
    if ttl is not None:
        args.append({"ttl": ttl})
    return self._call('grant', auth, args, defer)
def wait(self, timeout=None):
    """Block until all jobs in the ThreadPool are finished.

    Beware that this can make the program run into a deadlock if another
    thread adds new jobs to the pool!

    # Raises
    Timeout: If the timeout is exceeded.
    """
    if not self.__running:
        raise RuntimeError("ThreadPool ain't running")
    self.__queue.wait(timeout)
def route(self, origin, message):
    """Using the routing dictionary, dispatch a message to all subscribers.

    :param origin: name of the origin node
    :type origin: :py:class:`str`

    :param message: message to dispatch
    :type message: :py:class:`emit.message.Message` or subclass
    """
    self.resolve_node_modules()
    if not self.routing_enabled:
        return
    for destination in self.routes.get(origin, set()):
        self.logger.debug('routing "%s" -> "%s"', origin, destination)
        self.dispatch(origin, destination, message)
def make_vertical_bar(percentage, width=1):
    """Draws a vertical bar made of unicode characters.

    :param percentage: A value between 0 and 100
    :param width: How many characters wide the bar should be.
    :returns: Bar as a String
    """
    bar = ' _▁▂▃▄▅▆▇█'
    # Map the percentage onto a glyph index, clamping to the bar's ends.
    level = int(percentage // 10)
    if level < 0:
        glyph = bar[0]
    elif level >= len(bar):
        glyph = bar[-1]
    else:
        glyph = bar[level]
    return glyph * width
def getfile(self, section, option, raw=False, vars=None, fallback="",
            validate=False):
    """A convenience method which coerces the option in the specified
    section to a file.
    """
    value = self.get(section, option, raw=raw, vars=vars, fallback=fallback)
    value = self._convert_to_path(value)
    # When validation is requested, a non-existent file yields the fallback.
    return value if not validate or os.path.isfile(value) else fallback
def css(request):
    """Custom CSS for TinyMCE 4 widget

    By default it fixes widget's position in Django Admin

    :param request: Django http request
    :type request: django.http.request.HttpRequest
    :return: Django http response with CSS file for TinyMCE 4
    :rtype: django.http.HttpResponse
    """
    if 'grappelli' in settings.INSTALLED_APPS:
        margin_left = 0
    elif VERSION[:2] <= (1, 8):
        margin_left = 110
    else:
        margin_left = 170
    context = {
        'margin_left': margin_left,
        'responsive_admin': VERSION[:2] >= (2, 0),
    }
    body = render_to_string('tinymce/tinymce4.css', context=context,
                            request=request)
    return HttpResponse(body, content_type='text/css; charset=utf-8')
def _get_generator(self, name):
    """Load the generator plugin registered under *name* in ``self.group``.

    Returns None implicitly when no matching entry point is found.
    """
    for entry_point in pkg_resources.iter_entry_points(self.group,
                                                       name=None):
        if entry_point.name == name:
            return entry_point.load()
def generate_random_string(template_dict, key='start'):
    """Generates a random excuse from a simple template dict.

    Based off of drow's generator.js (public domain). Grok it here:
    http://donjon.bin.sh/code/random/generator.js

    Args:
        template_dict: Dict with template strings.
        key: String with the starting index for the dict.
            (Default: 'start')

    Returns:
        Generated string.
    """
    result = random.choice(template_dict.get(key))
    # Recursively expand {token} placeholders found in the chosen template.
    for match in token_regex.findall(result):
        replacement = generate_random_string(template_dict, match) or match
        result = result.replace('{{{0}}}'.format(match), replacement)
    return result
def SetConfiguredUsers(self, users):
    """Set the list of configured Google user accounts.

    Args:
      users: list, the username strings of the Linux accounts.
    """
    prefix = self.logger.name + '-'
    with tempfile.NamedTemporaryFile(
            mode='w', prefix=prefix, delete=True) as updated_users:
        updated_users_file = updated_users.name
        for user in users:
            updated_users.write(user + '\n')
        updated_users.flush()
        if not os.path.exists(self.google_users_dir):
            os.makedirs(self.google_users_dir)
        shutil.copy(updated_users_file, self.google_users_file)
    file_utils.SetPermissions(
        self.google_users_file, mode=0o600, uid=0, gid=0)
def copy(self) -> "Feed":
    """Return a copy of this feed, that is, a feed with all the same
    attributes.
    """
    other = Feed(dist_units=self.dist_units)
    for key in set(cs.FEED_ATTRS) - {"dist_units"}:
        value = getattr(self, key)
        # DataFrames get a cheap pandas copy; groupby objects need a
        # full deep copy.
        if isinstance(value, pd.DataFrame):
            value = value.copy()
        elif isinstance(value, pd.core.groupby.DataFrameGroupBy):
            value = deepcopy(value)
        setattr(other, key, value)
    return other
def drop(self, async_=False, if_exists=False, **kw):
    """Drop this table.

    :param async_: run asynchronously if True
    :return: None
    """
    # Accept the legacy 'async' keyword for backward compatibility.
    async_ = kw.get('async', async_)
    return self.parent.delete(self, async_=async_, if_exists=if_exists)
def wait_until_not_present(self, locator, timeout=None):
    """Waits for an element to no longer be present

    @type locator: webdriverwrapper.support.locator.Locator
    @param locator: the locator or css string to search for the element
    @type timeout: int
    @param timeout: the maximum number of seconds the driver will wait
        before timing out
    @rtype: webdriverwrapper.WebElementWrapper
    @return: Returns the element found
    """
    timeout = timeout if timeout is not None else self.timeout
    this = self

    def wait():
        return WebDriverWait(self.driver, timeout).until(
            lambda d: not this.is_present(locator))

    return self.execute_and_handle_webdriver_exceptions(
        wait, timeout, locator,
        'Timeout waiting for element not to be present')
def parse(cls, string):
    """Parse a string and create a metric."""
    # Raw strings for the regex parts (the originals relied on '\.'/'\s'
    # surviving in a non-raw literal).
    match = re.match(
        r'^(?P<name>[A-Za-z0-9\.\-_]+)\s+'
        r'(?P<value>[0-9\.]+)\s+'
        r'(?P<timestamp>[0-9\.]+)(\n?)$',
        string)
    try:
        groups = match.groupdict()
        return Metric(groups['name'],
                      groups['value'],
                      float(groups['timestamp']))
    # AttributeError: no regex match (match is None); ValueError: bad
    # float. Previously a bare ``except:`` which hid unrelated errors.
    except (AttributeError, ValueError):
        raise DiamondException(
            "Metric could not be parsed from string: %s." % string)
def match(fullname1, fullname2, strictness='default', options=None):
    """Takes two names and returns true if they describe the same person.

    :param string fullname1: first human name
    :param string fullname2: second human name
    :param string strictness: strictness settings to use
    :param dict options: custom strictness settings updates
    :return bool: the names match
    """
    if options is None:
        settings = SETTINGS[strictness]
    else:
        # Never mutate the shared settings; patch a deep copy instead.
        settings = deepcopy(SETTINGS[strictness])
        deep_update_dict(settings, options)
    return Name(fullname1).deep_compare(Name(fullname2), settings)
def _lookup_nexus_bindings(query_type, session=None, **bfilter):
    """Look up 'query_type' Nexus bindings matching the filter.

    :param query_type: 'all', 'one' or 'first'
    :param session: db session
    :param bfilter: filter for bindings query
    :returns: bindings if query gave a result, else
              raise NexusPortBindingNotFound.
    """
    if session is None:
        session = bc.get_reader_session()
    # Resolve e.g. .all() / .one() / .first() dynamically by name.
    query_method = getattr(
        session.query(nexus_models_v2.NexusPortBinding).filter_by(**bfilter),
        query_type)
    try:
        bindings = query_method()
        if bindings:
            return bindings
    except sa_exc.NoResultFound:
        pass
    raise c_exc.NexusPortBindingNotFound(**bfilter)
def _write_string(self, string, pos_x, pos_y, height, color, bold=False,
                  align_right=False, depth=0.):
    """Write a string

    Writes a string with a simple OpenGL method in the given size at the
    given position.

    :param string: The string to draw
    :param pos_x: x starting position
    :param pos_y: y starting position
    :param height: desired height
    :param bold: flag whether to use a bold font
    :param depth: the Z layer
    """
    stroke_width = height / 5. if bold else height / 8.
    color.set()
    self._set_closest_stroke_width(stroke_width)
    glMatrixMode(GL_MODELVIEW)
    glPushMatrix()
    pos_y -= height
    if align_right:
        width = self._string_width(string, height)
        glTranslatef(pos_x - width, pos_y, depth)
    else:
        glTranslatef(pos_x, pos_y, depth)
    # GLUT_STROKE_ROMAN glyphs are ~119.05 units tall; scale to `height`.
    font_height = 119.5
    scale_factor = height / font_height
    glScalef(scale_factor, scale_factor, scale_factor)
    for c in string:
        glutStrokeCharacter(GLUT_STROKE_ROMAN, ord(c))
    glPopMatrix()
def basename(path):
    """Rightmost part of path after separator."""
    trimmed = path.strip(SEP)
    sep_index = trimmed.rfind(SEP)
    # No separator left: return the input unchanged.
    if sep_index < 0:
        return path
    return trimmed[sep_index + 1:]
def findOverlap(x_mins, y_mins, min_distance):
    """Finds overlapping solutions, deletes multiples and deletes
    non-solutions and if it is not a solution, deleted as well.
    """
    n = len(x_mins)
    duplicates = []
    # Compare each candidate against all earlier ones; keep the first of
    # any cluster closer than min_distance in both coordinates.
    for i in range(1, n):
        for j in range(i):
            if (abs(x_mins[i] - x_mins[j]) < min_distance
                    and abs(y_mins[i] - y_mins[j]) < min_distance):
                duplicates.append(i)
                break
    x_mins = np.delete(x_mins, duplicates, axis=0)
    y_mins = np.delete(y_mins, duplicates, axis=0)
    return x_mins, y_mins
def details_dict(obj, existing, ignore_missing, opt):
    """Print a colored line-by-line diff between *existing* and *obj*.

    Changed keys print the old value in red and the new one in green;
    keys missing from *obj* print in red unless *ignore_missing* is set;
    keys new in *obj* print in green.
    """
    existing = dict_unicodeize(existing)
    obj = dict_unicodeize(obj)
    for key, old in iteritems(existing):
        old_val = normalize_val(old)
        if key in obj:
            new_val = normalize_val(obj.get(key))
            if old_val != new_val:
                print(maybe_colored("-- %s: %s" % (key, old_val), 'red', opt))
                print(maybe_colored("++ %s: %s" % (key, new_val), 'green', opt))
        elif not ignore_missing:
            print(maybe_colored("-- %s: %s" % (key, old_val), 'red', opt))
    for key, new in iteritems(obj):
        if key not in existing:
            print(maybe_colored("++ %s: %s" % (key, normalize_val(new)), 'green', opt))
    return
Output the changes, if any, for a dict
def get_corrected_commands(self, command):
    """Yield corrected commands built from ``get_new_command``.

    :type command: Command
    :rtype: Iterable[CorrectedCommand]
    """
    candidates = self.get_new_command(command)
    if not isinstance(candidates, list):
        candidates = (candidates,)
    # Later candidates get progressively lower priority (higher value).
    for rank, script in enumerate(candidates, start=1):
        yield CorrectedCommand(script=script,
                               side_effect=self.side_effect,
                               priority=rank * self.priority)
Returns generator with corrected commands. :type command: Command :rtype: Iterable[CorrectedCommand]
def resolve(self, function):
    """Resolve a function exported by this module.

    @type  function: str or int
    @param function: Name (str) or ordinal (int) of the exported function.

    @rtype:  int
    @return: Memory address of the exported function in the process,
        or None on error.
    """
    filename = self.get_filename()
    if not filename:
        return None
    try:
        # Fast path: the module is already loaded in this process.
        hlib = win32.GetModuleHandle(filename)
        address = win32.GetProcAddress(hlib, function)
    except WindowsError:
        try:
            # Fallback: map the module without running DllMain or
            # resolving its dependencies, just to read the export table.
            hlib = win32.LoadLibraryEx(filename, win32.DONT_RESOLVE_DLL_REFERENCES)
            try:
                address = win32.GetProcAddress(hlib, function)
            finally:
                # Always unload the temporarily mapped module.
                win32.FreeLibrary(hlib)
        except WindowsError:
            return None
    if address in (None, 0):
        return None
    # Rebase the locally resolved address into the target process's
    # address space using this module's load base (lpBaseOfDll).
    return address - hlib + self.lpBaseOfDll
Resolves a function exported by this module. @type function: str or int @param function: str: Name of the function. int: Ordinal of the function. @rtype: int @return: Memory address of the exported function in the process. Returns None on error.
def run_validation(options):
    """Validate files (or stdin) based on command line options.

    Args:
        options: An instance of ``ValidationOptions`` for this run.

    Returns:
        list of per-file validation results.
    """
    if options.files == sys.stdin:
        stdin_results = validate(options.files, options)
        wrapped = FileValidationResults(is_valid=stdin_results.is_valid,
                                        filepath='stdin',
                                        object_results=stdin_results)
        return [wrapped]
    file_names = get_json_files(options.files, options.recursive)
    return [validate_file(name, options) for name in file_names]
Validate files based on command line options. Args: options: An instance of ``ValidationOptions`` containing options for this validation run.
def request_quotes(tickers_list, selected_columns=['*']):
    """Request Yahoo Finance recent quotes via YQL.

    :param tickers_list: tickers to query
    :type tickers_list: list of str
    :param selected_columns: columns to return, defaults to ['*']
    :type selected_columns: list of str, optional
    :returns: list of quote dicts (always a list, even for one ticker)
    :rtype: list
    :raises RequestError: if the YQL request yields no response
    """
    __validate_list(tickers_list)
    __validate_list(selected_columns)
    query = 'select {cols} from yahoo.finance.quotes where symbol in ({vals})'
    query = query.format(
        cols=', '.join(selected_columns),
        vals=', '.join('"{0}"'.format(s) for s in tickers_list)
    )
    response = __yahoo_request(query)
    if not response:
        raise RequestError('Unable to process the request. Check if the ' +
                           'columns selected are valid.')
    quote = response['quote']
    # YQL collapses a single result to a bare dict; normalize to a list.
    # Idiom fix: use isinstance instead of `not type(x) is list`.
    if not isinstance(quote, list):
        return [quote]
    return quote
Request Yahoo Finance recent quotes. Returns quotes information from YQL. The columns to be requested are listed at selected_columns. Check `here <http://goo.gl/8AROUD>`_ for more information on YQL. >>> request_quotes(['AAPL'], ['Name', 'PreviousClose']) { 'PreviousClose': '95.60', 'Name': 'Apple Inc.' } :param table: Table name. :type table: string :param tickers_list: List of tickers that will be returned. :type tickers_list: list of strings :param selected_columns: List of columns to be returned, defaults to ['*'] :type selected_columns: list of strings, optional :returns: Requested quotes. :rtype: json :raises: TypeError, TypeError
def minimize(self, tolerance=None, max_iterations=None):
    """Minimize the system's energy.

    Runs until *tolerance* is met or *max_iterations* are performed;
    instance defaults are used for any argument left as None.
    """
    tol = self.minimization_tolerance if tolerance is None else tolerance
    iters = (self.minimization_max_iterations
             if max_iterations is None else max_iterations)
    self.simulation.minimizeEnergy(tol * u.kilojoules_per_mole, iters)
Minimize energy of the system until meeting `tolerance` or performing `max_iterations`.
def login_required(http_method_handler):
    """Decorator restricting a REST handler method to authenticated users.

    The handler's ``__provider_config__`` must carry an authentication
    provider; otherwise, or when the current user is not authenticated,
    a prestans AuthenticationError (HTTP 401) is raised. The service does
    not redirect to a login page -- that is the client's responsibility.
    """
    @wraps(http_method_handler)
    def secure_http_method_handler(self, *args, **kwargs):
        provider = self.__provider_config__.authentication
        if not provider:
            _message = "Service available to authenticated users only, no auth context provider set in handler"
            error = prestans.exception.AuthenticationError(_message)
            error.request = self.request
            raise error
        if not provider.is_authenticated_user():
            error = prestans.exception.AuthenticationError()
            error.request = self.request
            raise error
        http_method_handler(self, *args, **kwargs)
    return secure_http_method_handler
provides a decorator for RESTRequestHandler methods to check for authenticated users

The RESTRequestHandler subclass must have an auth_context instance; refer to prestans.auth for the parent class definition. If the decorator is used and no auth_context is provided, the client will be denied access.

The handler will return a 401 Unauthorized if the user is not logged in; the service does not redirect to a login handler page — this is the client's responsibility.

The auth_context_handler instance provides a method called get_current_user; use this to obtain a reference to an authenticated user profile.

If all goes well, the original handler definition is executed.
def close(self):
    """Release all resources associated with this factory.

    Closes the cursor and exits the resource manager, remembering any
    exception raised along the way and re-raising it only after both
    cleanup steps have run and the references are cleared.
    """
    if self.mdr is None:
        # Already closed; closing twice is a no-op.
        return
    exc = (None, None, None)
    try:
        self.cursor.close()
    except:
        # Remember the failure but keep cleaning up.
        exc = sys.exc_info()
    try:
        # Mirror the context-manager protocol: a truthy __exit__ return
        # suppresses the pending exception.
        if self.mdr.__exit__(*exc):
            exc = (None, None, None)
    except:
        exc = sys.exc_info()
    self.mdr = None
    self.cursor = None
    if exc != (None, None, None):
        # Re-raise the captured exception with its traceback (py2/py3).
        six.reraise(*exc)
Release all resources associated with this factory.
def activities(self):
    """Access the activities (lazily constructed and cached).

    :returns: twilio.rest.taskrouter.v1.workspace.activity.ActivityList
    :rtype: twilio.rest.taskrouter.v1.workspace.activity.ActivityList
    """
    if self._activities is None:
        workspace_sid = self._solution['sid']
        self._activities = ActivityList(self._version,
                                        workspace_sid=workspace_sid, )
    return self._activities
Access the activities :returns: twilio.rest.taskrouter.v1.workspace.activity.ActivityList :rtype: twilio.rest.taskrouter.v1.workspace.activity.ActivityList
def id(self, id):
    """Sets the id of this BulkResponse.

    Bulk ID

    :param id: The id of this BulkResponse.
    :type: str
    :raises ValueError: if *id* is None or does not start with 32
        alphanumeric characters.
    """
    if id is None:
        raise ValueError("Invalid value for `id`, must not be `None`")
    # The None case was already handled above, so the redundant
    # `id is not None and` guard from the original is dropped.
    # NOTE(review): the pattern is unanchored at the end, so only the
    # first 32 characters are validated; longer values still pass --
    # confirm that is intended before tightening it.
    if not re.search('^[A-Za-z0-9]{32}', id):
        raise ValueError("Invalid value for `id`, must be a follow pattern or equal to `/^[A-Za-z0-9]{32}/`")
    self._id = id
Sets the id of this BulkResponse. Bulk ID :param id: The id of this BulkResponse. :type: str
def registerLoggers(info, error, debug):
    """Install this module's logging callbacks.

    Each callback must accept ``(message, **kwargs)`` and is invoked for
    its respective severity (info, error or debug).
    """
    global log_info, log_error, log_debug
    log_info, log_error, log_debug = info, error, debug
Add logging functions to this module. Functions will be called on various severities (log, error, or debug respectively). Each function must have the signature: fn(message, **kwargs) If Python str.format()-style placeholders are in message, kwargs will be interpolated.
def AsCGI(nsdict={}, typesmodule=None, rpc=False, modules=None):
    """Dispatch a SOAP request within a CGI script (Python 2 syntax).

    Reads the request from stdin/environment, parses the SOAP envelope
    (including MIME multipart attachments) and dispatches it, sending a
    SOAP Fault on any parse or method error.

    :param nsdict: namespace dict handed to the dispatcher
        (NOTE(review): mutable default, shared across calls -- confirm
        callers never mutate it)
    :param typesmodule: module providing typecode definitions
    :param rpc: whether to use RPC-style dispatch
    :param modules: modules searched for the service implementation
    """
    if os.environ.get('REQUEST_METHOD') != 'POST':
        _CGISendFault(Fault(Fault.Client, 'Must use POST'))
        return
    ct = os.environ['CONTENT_TYPE']
    try:
        if ct.startswith('multipart/'):
            # SOAP with attachments: pull the envelope out of the MIME parts.
            cid = resolvers.MIMEResolver(ct, sys.stdin)
            xml = cid.GetSOAPPart()
            ps = ParsedSoap(xml, resolver=cid.Resolve)
        else:
            length = int(os.environ['CONTENT_LENGTH'])
            ps = ParsedSoap(sys.stdin.read(length))
    except ParseException, e:
        _CGISendFault(FaultFromZSIException(e))
        return
    _Dispatch(ps, modules, _CGISendXML, _CGISendFault, nsdict=nsdict,
              typesmodule=typesmodule, rpc=rpc)
Dispatch within a CGI script.
def _init_weights(self, X): X = np.asarray(X, dtype=np.float64) if self.scaler is not None: X = self.scaler.fit_transform(X) if self.initializer is not None: self.weights = self.initializer(X, self.num_neurons) for v in self.params.values(): v['value'] = v['orig'] return X
Set the weights and normalize data before starting training.
def get_linked_version(doi):
    """Get the original link behind the DOI.

    :param doi: A canonical DOI.
    :returns: The canonical URL behind the DOI, or ``None`` when the
        request fails.
    """
    try:
        return requests.head(to_url(doi)).headers.get("location")
    except RequestException:
        return None
Get the original link behind the DOI. :param doi: A canonical DOI. :returns: The canonical URL behind the DOI, or ``None``. >>> get_linked_version('10.1209/0295-5075/111/40005') 'http://stacks.iop.org/0295-5075/111/i=4/a=40005?key=crossref.9ad851948a976ecdf216d4929b0b6f01'
def find_rings(self, including=None):
    """Find ring (cycle) structures in the MoleculeGraph.

    :param including: optional list of site indices; when given, only
        rings containing at least one of those sites are returned.
    :return: list of rings, each expressed as a list of
        ``(node, node)`` edges closing the cycle.
    """
    directed = self.graph.to_undirected().to_directed()
    # simple_cycles on the doubled digraph yields each cycle in both
    # directions plus trivial 2-cycles; keep one representative of each
    # cycle longer than two nodes (dedup by sorted node list).
    seen_keys = []
    unique_cycles = []
    for cycle in nx.simple_cycles(directed):
        if len(cycle) <= 2:
            continue
        key = sorted(cycle)
        if key not in seen_keys:
            seen_keys.append(key)
            unique_cycles.append(cycle)
    if including is None:
        cycles_nodes = unique_cycles
    else:
        cycles_nodes = []
        for wanted in including:
            for cycle in unique_cycles:
                if wanted in cycle and cycle not in cycles_nodes:
                    cycles_nodes.append(cycle)
    cycles_edges = []
    for cycle in cycles_nodes:
        edges = [(cycle[pos - 1], node) for pos, node in enumerate(cycle)]
        cycles_edges.append(edges)
    return cycles_edges
Find ring structures in the MoleculeGraph. :param including: list of site indices. If including is not None, then find_rings will only return those rings including the specified sites. By default, this parameter is None, and all rings will be returned. :return: dict {index:cycle}. Each entry will be a ring (cycle, in graph theory terms) including the index found in the Molecule. If there is no cycle including an index, the value will be an empty list.
def _get_line_no_from_comments(py_line):
    """Return the line number parsed from the trailing comment, or 0."""
    match = LINECOL_COMMENT_RE.match(py_line)
    return int(match.group(1)) if match else 0
Return the line number parsed from the comment or 0.
def start_server(self):
    """Create and start the pcaspy server for the interface's PVs.

    Does nothing if the server already exists.

    .. note:: The server only processes requests when :meth:`handle`
        is called regularly.
    """
    if self._server is not None:
        return
    self._server = SimpleServer()
    pvdb = {name: pv.config for name, pv in self.interface.bound_pvs.items()}
    self._server.createPV(prefix=self._options.prefix, pvdb=pvdb)
    self._driver = PropertyExposingDriver(interface=self.interface,
                                          device_lock=self.device_lock)
    self._driver.process_pv_updates(force=True)
    served = ', '.join(self._options.prefix + pv
                       for pv in self.interface.bound_pvs.keys())
    self.log.info('Started serving PVs: %s', served)
Creates a pcaspy-server. .. note:: The server does not process requests unless :meth:`handle` is called regularly.
def has_port_by_name(self, port_name):
    """Check if this component has a port with the given name."""
    with self._mutex:
        return bool(self.get_port_by_name(port_name))
Check if this component has a port by the given name.
def _build_cached_instances(self): connection = self._connect() reservations = connection.get_all_reservations() cached_instances = {} for rs in reservations: for vm in rs.instances: cached_instances[vm.id] = vm return cached_instances
Build lookup table of VM instances known to the cloud provider. The returned dictionary links VM id with the actual VM object.
def file(cls, path, encoding=None, parser=None):
    """Register a file as a configuration source.

    Files are parsed as literal python dicts by default.

    Args:
        path: path of the file to be parsed
        encoding: file encoding; defaults to 'raw'. Built-ins include
            'ini', 'json' and 'yaml'; custom values pair with *parser*.
        parser: optional parser callable for a custom encoding; must
            return a dict when called with the file's contents.
    """
    source = file.File(path, encoding, parser)
    cls.__hierarchy.append(source)
Set a file as a source. File are parsed as literal python dicts by default, this behaviour can be configured. Args: path: The path to the file to be parsed encoding: The encoding of the file. Defaults to 'raw'. Available built-in values: 'ini', 'json', 'yaml'. Custom value can be used in conjunction with parser. parser: A parser function for a custom encoder. It is expected to return a dict containing the parsed values when called with the contents of the file as an argument.
def authenticate(self):
    """Send an authentication request and return its unique id."""
    request_id = self.new_unique_id()
    self._send({
        'op': 'authentication',
        'id': request_id,
        'appKey': self.app_key,
        'session': self.session_token,
    })
    return request_id
Authentication request.
def subject(self) -> Optional[UnstructuredHeader]:
    """The ``Subject`` header, or ``None`` when absent or empty."""
    try:
        headers = self[b'subject']
        return cast(UnstructuredHeader, headers[0])
    except (KeyError, IndexError):
        return None
The ``Subject`` header.
def setup_method_options(method, tuning_options):
    """Prepare method-specific optimizer options.

    :param method: optimizer name as accepted by scipy.optimize.minimize
    :param tuning_options: object carrying ``tune_params`` and ``eps``
    :return: dict of keyword options for the chosen method
    """
    # Budget: one iteration per point in the full parameter grid.
    budget = numpy.prod([len(values)
                         for values in tuning_options.tune_params.values()])
    kwargs = {'maxiter': budget}
    if method in ("Nelder-Mead", "Powell"):
        kwargs['maxfev'] = budget
    elif method == "L-BFGS-B":
        kwargs['maxfun'] = budget
    if method in ("CG", "BFGS", "L-BFGS-B", "TNC", "SLSQP"):
        kwargs['eps'] = tuning_options.eps
    elif method == "COBYLA":
        kwargs['rhobeg'] = tuning_options.eps
    return kwargs
prepare method specific options
def _receive_with_timeout(self, socket, timeout_s, use_multipart=False):
    """Return data received on *socket*, or raise when *timeout_s* expires.

    Polls in short intervals (self.intervals_ms) instead of one long
    poll so that Ctrl-C handling can take place.

    :param socket: zmq socket to read from
    :param timeout_s: seconds to wait, or config.FOREVER to wait forever
    :param use_multipart: receive a multipart message instead of one frame
    :raises core.SocketTimedOutError: nothing arrived within timeout_s
    :raises core.SocketInterruptedError: interrupted by Ctrl-C; carries
        the seconds elapsed so far
    """
    if timeout_s is config.FOREVER:
        timeout_ms = config.FOREVER
    else:
        timeout_ms = int(1000 * timeout_s)
    poller = zmq.Poller()
    poller.register(socket, zmq.POLLIN)
    ms_so_far = 0
    try:
        for interval_ms in self.intervals_ms(timeout_ms):
            sockets = dict(poller.poll(interval_ms))
            ms_so_far += interval_ms
            if socket in sockets:
                if use_multipart:
                    return socket.recv_multipart()
                else:
                    return socket.recv()
        else:
            # for/else: every interval elapsed without any data arriving.
            raise core.SocketTimedOutError(timeout_s)
    except KeyboardInterrupt:
        raise core.SocketInterruptedError(ms_so_far / 1000.0)
Check for socket activity and either return what's received on the socket or time out if timeout_s expires without anything on the socket. This is implemented in loops of self.try_length_ms milliseconds to allow Ctrl-C handling to take place.
def compile(self, model):
    """Compile previously parsed statements into a SensorGraph.

    ``parse_file`` must have been called first. The result is stored in
    ``self.sensor_graph`` and can be inspected before running any
    optimization passes.

    Args:
        model (DeviceModel): device model to compile the graph for.
    """
    storage = SensorLog(InMemoryStorageEngine(model), model)
    self.sensor_graph = SensorGraph(storage, model)
    allocator = StreamAllocator(self.sensor_graph, model)
    self._scope_stack = [RootScope(self.sensor_graph, allocator)]
    for statement in self.statements:
        statement.execute(self.sensor_graph, self._scope_stack)
    self.sensor_graph.initialize_remaining_constants()
    self.sensor_graph.sort_nodes()
Compile this file into a SensorGraph. You must have preivously called parse_file to parse a sensor graph file into statements that are then executed by this command to build a sensor graph. The results are stored in self.sensor_graph and can be inspected before running optimization passes. Args: model (DeviceModel): The device model that we should compile this sensor graph for.
def get_essential_properties(self):
    """Gather essential scheduling properties required by ironic.

    :returns: dict with 'properties' (memory size, disk size, cpu count
        and arch) and 'macs' (port mac addresses).
    :raises: IloError if iLO returns an error in command execution.
    """
    data = self.get_host_health_data()
    memory_mb = self._parse_memory_embedded_health(data)
    cpus, cpu_arch = self._parse_processor_embedded_health(data)
    local_gb = self._parse_storage_embedded_health(data)
    macs = self._parse_nics_embedded_health(data)
    properties = {'memory_mb': memory_mb,
                  'cpus': cpus,
                  'cpu_arch': cpu_arch,
                  'local_gb': local_gb}
    return {'properties': properties, 'macs': macs}
Gets essential scheduling properties as required by ironic :returns: a dictionary of server properties like memory size, disk size, number of cpus, cpu arch, port numbers and mac addresses. :raises:IloError if iLO returns an error in command execution.
def organize_commands(corrected_commands):
    """Yield sorted commands without duplicates.

    The first corrected command is yielded immediately; the remaining
    ones are de-duplicated against it and yielded in priority order.

    :type corrected_commands: Iterable[thefuck.types.CorrectedCommand]
    :rtype: Iterable[thefuck.types.CorrectedCommand]
    """
    try:
        first_command = next(corrected_commands)
        yield first_command
    except StopIteration:
        return
    without_duplicates = {
        command for command in sorted(
            corrected_commands, key=lambda command: command.priority)
        if command != first_command}
    sorted_commands = sorted(
        without_duplicates,
        key=lambda corrected_command: corrected_command.priority)
    # Bug fix: the original format string had no '{}' placeholder, so the
    # joined command list was silently dropped from the debug output.
    logs.debug(u'Corrected commands: {}'.format(
        ', '.join(u'{}'.format(cmd)
                  for cmd in [first_command] + sorted_commands)))
    for command in sorted_commands:
        yield command
Yields sorted commands without duplicates. :type corrected_commands: Iterable[thefuck.types.CorrectedCommand] :rtype: Iterable[thefuck.types.CorrectedCommand]
def autolink(self, raw_url, is_email):
    """Filter links generated by the ``autolink`` extension."""
    if not self.check_url(raw_url):
        # Disallowed URL: render the raw form, escaped, in angle brackets.
        return escape_html('<%s>' % raw_url)
    prefix = 'mailto:' if is_email else ''
    url = escape_html(self.rewrite_url(prefix + raw_url))
    return '<a href="%s">%s</a>' % (url, escape_html(raw_url))
Filters links generated by the ``autolink`` extension.
def unregister(self, thread):
    """Unregister an existing thread so it is no longer available.

    Mainly used during plugin deactivation; a warning is logged when the
    thread is unknown.

    :param thread: key identifying the thread to remove
    """
    if thread not in self.threads:
        self.log.warning("Can not unregister thread %s" % thread)
    else:
        del self.threads[thread]
        # Consistency fix: the original logged this line via `self.__log`
        # while the warning above used `self.log`; use one logger for both.
        self.log.debug("Thread %s got unregistered" % thread)
Unregisters an existing thread, so that this thread is no longer available. This function is mainly used during plugin deactivation. :param thread: Name of the thread
def append_tier(self, coro, **kwargs):
    """Add a tier that implicitly sources from the current tail tier."""
    tail = self.tiers[-1] if self.tiers else None
    return self.add_tier(coro, source=tail, **kwargs)
Implicitly source from the tail tier like a pipe.
def get_network_name(self):
    """Return the name of the network.

    Example
    ---------------
    >>> from pgmpy.readwrite import BIFReader
    >>> reader = BIF.BifReader("bif_test.bif")
    >>> reader.get_network_name()
    'Dog-Problem'
    """
    start = self.network.find('network')
    end = self.network.find('}\n', start)
    declaration = self.network[start:end]
    grammar = Suppress('network') + Word(alphanums + '_' + '-') + '{'
    return grammar.searchString(declaration)[0][0]
Returns the name of the network

Example
---------------
>>> from pgmpy.readwrite import BIFReader
>>> reader = BIF.BifReader("bif_test.bif")
>>> reader.get_network_name()
'Dog-Problem'
def job_callback(self, job):
    """Register a finished job's result and wake the dispatch loop.

    Does the book keeping for a finished job: decrements the running-job
    counter, forwards the result to the user-supplied result logger (when
    one was specified), records it with its iteration and the config
    generator, and notifies the master thread when there is capacity to
    run another job.

    :param job: the finished job
    """
    self.logger.debug('job_callback for %s started'%str(job.id))
    with self.thread_cond:
        self.logger.debug('job_callback for %s got condition'%str(job.id))
        self.num_running_jobs -= 1
        # Idiom fix: `x is not None` instead of `not x is None`.
        if self.result_logger is not None:
            self.result_logger(job)
        self.iterations[job.id[0]].register_result(job)
        self.config_generator.new_result(job)
        if self.num_running_jobs <= self.job_queue_sizes[0]:
            self.logger.debug("HBMASTER: Trying to run another job!")
            self.thread_cond.notify()
    self.logger.debug('job_callback for %s finished'%str(job.id))
method to be called when a job has finished this will do some book keeping and call the user defined new_result_callback if one was specified
def get_neighbor_attribute_map(neigh_ip_address, route_dist=None, route_family=VRF_RF_IPV4):
    """Return the neighbor's ORG attribute map for the given key, or []."""
    core = CORE_MANAGER.get_core_service()
    peer = core.peer_manager.get_by_addr(neigh_ip_address)
    if route_dist is None:
        at_maps_key = const.ATTR_MAPS_LABEL_DEFAULT
    else:
        at_maps_key = ':'.join([route_dist, route_family])
    at_maps = peer.attribute_maps.get(at_maps_key)
    if not at_maps:
        return []
    return at_maps.get(const.ATTR_MAPS_ORG_KEY)
Returns the neighbor attribute_map for the given IP address, if one exists.
def expand_abbreviations(txt, fields):
    """Expand abbreviations in a format string.

    A placeholder whose name is a unique prefix of exactly one entry in
    *fields* is expanded to that entry; names that match no field, or
    match several, are left unchanged.

    Example:
        >>> fields = ("hey", "there", "dude")
        >>> expand_abbreviations("hello {d}", fields)
        'hello dude'

    Args:
        txt (str): Format string.
        fields (list of str): Fields to expand to.

    Returns:
        Expanded string.
    """
    def _expand(matchobj):
        name = matchobj.group("var")
        if name not in fields:
            candidates = [f for f in fields if f.startswith(name)]
            if len(candidates) == 1:
                name = candidates[0]
        return "{%s}" % name
    return re.sub(FORMAT_VAR_REGEX, _expand, txt)
Expand abbreviations in a format string. If an abbreviation does not match a field, or matches multiple fields, it is left unchanged. Example: >>> fields = ("hey", "there", "dude") >>> expand_abbreviations("hello {d}", fields) 'hello dude' Args: txt (str): Format string. fields (list of str): Fields to expand to. Returns: Expanded string.
def sed(match, replacement, path, modifiers=""):
    """Perform sed text substitution on *path* (in place).

    :param match: extended-regex pattern passed to ``sed -r``
    :param replacement: replacement text
    :param path: file to edit in place (``sed -i``)
    :param modifiers: optional sed flags (e.g. ``g``)
    :raises SubprocessError: if the sed command exits non-zero
    """
    # NOTE(review): pattern/replacement/path are interpolated unescaped
    # into a shell=True command line; callers must not pass untrusted
    # input here (quoting/injection risk) -- confirm call sites.
    cmd = "sed -r -i 's/%s/%s/%s' %s" % (match, replacement, modifiers, path)
    process = Subprocess(cmd, shell=True)
    ret, out, err = process.run(timeout=60)
    if ret:
        raise SubprocessError("Sed command failed!")
Perform sed text substitution.
def get_version(path, default="master"):
    """Return the stripped contents of a VERSION file, or *default*.

    :param path: path to the VERSION file
    :param default: value returned when the file is missing or empty
    """
    if not os.path.exists(path):
        return default
    contents = file_to_string(path)
    if not contents:
        return default
    return contents.strip()
Return the version from a VERSION file