code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def _get_cache_dir(candidate): if candidate: return candidate import distutils.dist import distutils.command.build build_cmd = distutils.command.build.build(distutils.dist.Distribution()) build_cmd.finalize_options() cache_dir = os.path.abspath(build_cmd.build_temp) try: os.makedirs(cache_dir) except OSError as error: if error.errno != errno.EEXIST: raise error return cache_dir
Get the current cache directory.
def watch_instances(self, flag):
    """Enable or disable watching of this Class's Instances.

    :param flag: truthy to start watching, falsy to stop.
    """
    # Delegates straight to the CLIPS C API; flag is coerced to int.
    lib.EnvSetDefclassWatchInstances(self._env, int(flag), self._cls)
Set whether or not the Class Instances are being watched.
def decode_embedded_strs(src):
    """Recursively decode embedded bytes to str on Python 3.

    Dicts and lists are walked via the matching helpers; bytes that fail
    UTF-8 decoding are returned untouched. On Python 2 *src* is returned
    as-is, since str/bytes are not distinct there.
    """
    if not six.PY3:
        return src
    if isinstance(src, dict):
        return _decode_embedded_dict(src)
    if isinstance(src, list):
        return _decode_embedded_list(src)
    if isinstance(src, bytes):
        try:
            return src.decode()
        except UnicodeError:
            return src
    return src
Convert embedded bytes to strings if possible. This is necessary because Python 3 makes a distinction between these types. This wouldn't be needed if we used "use_bin_type=True" when encoding and "encoding='utf-8'" when decoding. Unfortunately, this would break backwards compatibility due to a change in wire protocol, so this less-than-ideal solution is used instead.
def fetch(self):
    """Fetch a FunctionVersionInstance.

    :returns: Fetched FunctionVersionInstance
    :rtype: twilio.rest.serverless.v1.service.function.function_version.FunctionVersionInstance
    """
    params = values.of({})
    payload = self._version.fetch(
        'GET',
        self._uri,
        params=params,
    )
    # Rebuild the instance with the identifiers carried in the solution.
    return FunctionVersionInstance(
        self._version,
        payload,
        service_sid=self._solution['service_sid'],
        function_sid=self._solution['function_sid'],
        sid=self._solution['sid'],
    )
Fetch a FunctionVersionInstance :returns: Fetched FunctionVersionInstance :rtype: twilio.rest.serverless.v1.service.function.function_version.FunctionVersionInstance
def make_reverse_dict(in_dict, warn=True):
    """Invert a cluster dictionary.

    Each key of *in_dict* maps to a list of members; the result maps
    every member back to its cluster key. On collision the last key seen
    wins, optionally printing a warning first.
    """
    reverse = {}
    for cluster_key, members in in_dict.items():
        for member in members:
            if member in reverse and warn:
                print("Dictionary collision %i" % member)
            reverse[member] = cluster_key
    return reverse
Build a reverse dictionary from a cluster dictionary Parameters ---------- in_dict : dict(int:[int,]) A dictionary of clusters. Each cluster is a source index and the list of other source in the cluster. Returns ------- out_dict : dict(int:int) A single valued dictionary pointing from source index to cluster key for each source in a cluster. Note that the key does not point to itself.
def info(self):
    """Return info on all registered checkers, sorted by (namespace, name).

    :returns: a list of CheckerInfo
    """
    checkers = list(self._checkers.values())
    checkers.sort(key=lambda checker: (checker.ns, checker.name))
    return checkers
Returns information on all the registered checkers. Sorted by namespace and then name :returns a list of CheckerInfo
def inband_solarflux(self, rsr, scale=1.0, **options):
    """Derive the in-band solar flux for a given instrument relative
    spectral response, valid for an earth-sun distance of one AU.

    :param rsr: instrument relative spectral response
    :param scale: scale factor passed to the band calculation
    """
    # True selects the solar-flux branch of the shared band calculation.
    return self._band_calculations(rsr, True, scale, **options)
Derive the inband solar flux for a given instrument relative spectral response valid for an earth-sun distance of one AU.
def consensus(self, vs=None):
    r"""Return the consensus of the function over the variables *vs*.

    The consensus of :math:`f` with respect to :math:`x_i` is
    :math:`C_{x_i}(f) = f_{x_i} \cdot f_{x_i'}`, equivalent to universal
    quantification over the given variables.
    """
    # BUG FIX: the source carried a stray bare `r` expression here (a
    # leftover raw-docstring prefix) that would raise NameError; removed.
    return functools.reduce(operator.and_, self.iter_cofactors(vs))
r"""Return the consensus of a function over a sequence of N variables. The *vs* argument is a sequence of :math:`N` Boolean variables. The *consensus* of :math:`f(x_1, x_2, \dots, x_i, \dots, x_n)` with respect to variable :math:`x_i` is: :math:`C_{x_i}(f) = f_{x_i} \cdot f_{x_i'}` This is the same as the universal quantification operator: :math:`\forall \{x_1, x_2, \dots\} \: f`
def on_compiled(self, name=None, key_schema=None, value_schema=None, as_mapping_key=None):
    """Record compilation metadata on this marker and return it.

    CompiledSchema may call this several times with incomplete argument
    sets (e.g. first name+key_schema, later value_schema), so each
    attribute is only filled in while still unset.

    :param name: Human-friendly marker name
    :param key_schema: Compiled key schema
    :param value_schema: Compiled value schema (may legitimately be absent)
    :param as_mapping_key: Whether the marker is used as a mapping key
    :rtype: Marker
    """
    self.name = name if self.name is None else self.name
    self.key_schema = key_schema if self.key_schema is None else self.key_schema
    self.value_schema = value_schema if self.value_schema is None else self.value_schema
    if as_mapping_key:
        self.as_mapping_key = True
    return self
When CompiledSchema compiles this marker, it sets informational values onto it. Note that arguments may be provided in two incomplete sets, e.g. (name, key_schema, None) and then (None, None, value_schema). Thus, all assignments must be handled individually. It is possible that a marker may have no `value_schema` at all: e.g. in the case of { Extra: Reject } -- `Reject` will have no value schema, but `Extra` will have compiled `Reject` as the value. :param key_schema: Compiled key schema :type key_schema: CompiledSchema|None :param value_schema: Compiled value schema :type value_schema: CompiledSchema|None :param name: Human-friendly marker name :type name: unicode|None :param as_mapping_key: Whether it's used as a mapping key? :type as_mapping_key: bool|None :rtype: Marker
def remove_constraint(self, *names):
    """Remove the named constraint(s) from every population and from self.

    Populations lacking a given constraint are logged and left untouched.
    See :func:`vespa.stars.StarPopulation.remove_constraint`.
    """
    for name in names:
        for population in self.poplist:
            if name not in population.constraints:
                logging.info('%s model does not have %s constraint' % (population.model, name))
                continue
            population.remove_constraint(name)
        if name in self.constraints:
            self.constraints.remove(name)
Removes constraint from each population. See :func:`vespa.stars.StarPopulation.remove_constraint`.
def update_node(self, job_record):
    """Attach *job_record* to the tree node for its timeperiod.

    :raises ValueError: if the record's process is not in the hierarchy.
    """
    if job_record.process_name not in self.process_hierarchy:
        raise ValueError('unable to update the node due to unknown process: {0}'.format(job_record.process_name))
    time_qualifier = self.process_hierarchy[job_record.process_name].process_entry.time_qualifier
    node = self._get_node(time_qualifier, job_record.timeperiod)
    node.job_record = job_record
Updates job record property for a tree node associated with the given Job
def read_json(fn):
    """Read pyquil.operator_estimation objects from the JSON file *fn*.

    See :py:func:`to_json` for the inverse operation.
    """
    with open(fn) as handle:
        return json.load(handle, object_hook=_operator_object_hook)
Convenience method to read pyquil.operator_estimation objects from a JSON file. See :py:func:`to_json`.
def subpacket_prefix_len(item):
    """Prefix *item* with its length per RFC 4880 section 5.2.3.1."""
    size = len(item)
    if size >= 8384:
        # Five-octet form: 0xFF marker followed by a big-endian uint32.
        header = b'\xFF' + struct.pack('>L', size)
    elif size >= 192:
        # Two-octet form encodes (size - 192) across two bytes.
        reduced = size - 192
        header = struct.pack('BB', (reduced // 256) + 192, reduced % 256)
    else:
        # One-octet form for sizes 0..191.
        header = struct.pack('B', size)
    return header + item
Prefix subpacket length according to RFC 4880 section-5.2.3.1.
def body(self, data, data_type, **kwargs):
    """Serialize data intended for a request body.

    :param data: The data to be serialized.
    :param str data_type: The type to be serialized from.
    :rtype: dict
    :raises SerializationError: if serialization fails.
    :raises ValidationError: if data is None (the body is required).
    """
    if data is None:
        raise ValidationError("required", "body", True)

    # Strip collection markers ([] / {}) to find the underlying model type.
    internal_data_type = data_type.strip('[]{}')
    internal_data_type = self.dependencies.get(internal_data_type, None)
    if internal_data_type and not isinstance(internal_data_type, Enum):
        try:
            # Round-trip the payload through a deserializer so loosely
            # cased input dicts are normalized onto the model first.
            deserializer = Deserializer(self.dependencies)
            deserializer.additional_properties_detection = False
            if issubclass(internal_data_type, Model) and internal_data_type.is_xml_model():
                # XML models only resolve attribute-style keys.
                deserializer.key_extractors = [
                    attribute_key_case_insensitive_extractor,
                ]
            else:
                deserializer.key_extractors = [
                    rest_key_case_insensitive_extractor,
                    attribute_key_case_insensitive_extractor,
                    last_rest_key_case_insensitive_extractor
                ]
            data = deserializer._deserialize(data_type, data)
        except DeserializationError as err:
            raise_with_traceback(
                SerializationError, "Unable to build a model: "+str(err), err)

    if self.client_side_validation:
        errors = _recursive_validate(data_type, data_type, data)
        if errors:
            raise errors[0]
    return self._serialize(data, data_type, **kwargs)
Serialize data intended for a request body. :param data: The data to be serialized. :param str data_type: The type to be serialized from. :rtype: dict :raises: SerializationError if serialization fails. :raises: ValueError if data is None
def vowels(self):
    """Return a new IPAString containing only the vowels of this string.

    :rtype: IPAString
    """
    return IPAString(ipa_chars=[c for c in self.ipa_chars if c.is_vowel])
Return a new IPAString, containing only the vowels in the current string. :rtype: IPAString
def paretoint(avg, alpha):
    """Return a power-law random integer averaging *avg*.

    *alpha* (must be > 1) shapes the Pareto curve; the closer to 1, the
    heavier the tail. The raw variate is rescaled by the distribution's
    mean so the expectation equals *avg*.
    """
    distribution_mean = alpha / (alpha - 1)
    return int(random.paretovariate(alpha) * avg / distribution_mean)
Returns a random integer that's avg on average, following a power law. alpha determines the shape of the power curve. alpha has to be larger than 1. The closer alpha is to 1, the higher the variation of the returned numbers.
def get_project_by_network_id(network_id, **kwargs):
    """Return a readable project that owns the given network, or None.

    Candidate projects are fetched for the requesting user; any the user
    cannot read are logged and skipped.

    :param network_id: id of the network to look up
    :param user_id: (kwarg) id of the requesting user
    """
    user_id = kwargs.get('user_id')
    projects_i = db.DBSession.query(Project).join(ProjectOwner).join(
        Network, Project.id == Network.project_id).filter(
            Network.id == network_id,
            ProjectOwner.user_id == user_id).order_by('name').all()

    ret_project = None
    for project_i in projects_i:
        try:
            project_i.check_read_permission(user_id)
            ret_project = project_i
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; narrowed to Exception.
            log.info("Can't return project %s. User %s does not have "
                     "permission to read it.", project_i.id, user_id)
    return ret_project
get a project complexmodel by a network_id
def get_json(self):
    """Build the JSON payload describing this iSCSI target.

    Static addressing fields are only included when DHCP is disabled;
    CHAP fields only when the corresponding credential is set.
    """
    payload = {
        'DHCPUsage': self.dhcp_usage,
        'AuthenticationMethod': self.auth_method,
    }
    if not self.dhcp_usage:
        payload['Name'] = self.iqn
        payload['IPv4Address'] = self.ip
        payload['PortNumber'] = self.port
        payload['BootLUN'] = self.lun
    if self.chap_user:
        payload['ChapUserName'] = self.chap_user
    if self.chap_secret:
        payload['ChapSecret'] = self.chap_secret
    if self.mutual_chap_secret:
        payload['MutualChapSecret'] = self.mutual_chap_secret
    return payload
Create JSON data for iSCSI target. :returns: JSON data for iSCSI target as follows: { "DHCPUsage":{ }, "Name":{ }, "IPv4Address":{ }, "PortNumber":{ }, "BootLUN":{ }, "AuthenticationMethod":{ }, "ChapUserName":{ }, "ChapSecret":{ }, "MutualChapSecret":{ } }
def handle(self, *args, **options):
    """Run do_index_command on each specified index and log the output.

    TransportError from Elasticsearch is caught and logged as a warning
    so the remaining indexes are still processed.
    """
    for index in options.pop("indexes"):
        data = {}
        try:
            data = self.do_index_command(index, **options)
        except TransportError as ex:
            logger.warning("ElasticSearch threw an error: %s", ex)
            data = {"index": index, "status": ex.status_code, "reason": ex.error}
        finally:
            # Always emit the outcome, success or failure.
            logger.info(data)
Run do_index_command on each specified index and log the output.
def init_app(self, app):
    """Initialize a Flask application for this Prometheus reporter.

    Intended for the Flask "app factory" pattern together with
    ``PrometheusMetrics(app=None, ...)``; otherwise it is called
    automatically. See
    http://flask.pocoo.org/docs/1.0/patterns/appfactories/

    :param app: the Flask application
    """
    if self.path:
        self.register_endpoint(self.path, app)
    if self._export_defaults:
        self.export_defaults(
            self.buckets, self.group_by,
            self._defaults_prefix, app
        )
This callback can be used to initialize an application for the use with this prometheus reporter setup. This is usually used with a flask "app factory" configuration. Please see: http://flask.pocoo.org/docs/1.0/patterns/appfactories/ Note, that you need to use `PrometheusMetrics(app=None, ...)` for this mode, otherwise it is called automatically. :param app: the Flask application
def get_relationship_search_session_for_family(self, family_id=None, proxy=None):
    """Get the ``OsidSession`` for the relationship search service of the
    given family.

    arg: family_id (osid.id.Id): the ``Id`` of the family
    arg: proxy (osid.proxy.Proxy): a proxy
    return: (osid.relationship.RelationshipSearchSession)
    raise: NullArgument - ``family_id`` is null/empty
    raise: Unimplemented - relationship search is not supported
    raise: OperationFailed - unable to complete request
    """
    if not family_id:
        # FIX: was `raise NullArgument` (the bare class); instantiate for
        # consistency with the other raises in this method.
        raise NullArgument()
    if not self.supports_relationship_search():
        raise Unimplemented()
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed()
    proxy = self._convert_proxy(proxy)
    try:
        session = sessions.RelationshipSearchSession(family_id, proxy=proxy, runtime=self._runtime)
    except AttributeError:
        raise OperationFailed()
    return session
Gets the ``OsidSession`` associated with the relationship search service for the given family. arg: family_id (osid.id.Id): the ``Id`` of the family arg: proxy (osid.proxy.Proxy): a proxy return: (osid.relationship.RelationshipSearchSession) - a ``RelationshipSearchSession`` raise: NotFound - no ``Family`` found by the given ``Id`` raise: NullArgument - ``family_id`` or ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_relationship_search()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_relationship_search()`` and ``supports_visible_federation()`` are ``true``*
def register_model_once(cls, ModelClass, **kwargs):
    """Register *ModelClass* unless it is already registered.

    Tweaked version of ``AnyUrlField.register_model`` that only registers
    the given model after checking that it is not already registered.

    BUG FIX: the original condition was inverted -- it warned about an
    "already registered" model exactly when the registry had NO entry
    (``get_for_model(...) is None``) and registered models that were
    already present. The check is flipped so an unregistered model gets
    registered and a registered one triggers the warning.
    """
    if cls._static_registry.get_for_model(ModelClass) is not None:
        # logger.warning replaces the deprecated logger.warn alias.
        logger.warning("Model is already registered with {0}: '{1}'"
                       .format(cls, ModelClass))
    else:
        cls.register_model.register(ModelClass, **kwargs)
Tweaked version of `AnyUrlField.register_model` that only registers the given model after checking that it is not already registered.
def fix_tags_on_cands_missing_reals(user_id, vos_dir, property):
    """Clear the given property tag on candidate files with no MPC file.

    At the moment this just checks for a single user's missing reals;
    it would be easy to generalise to all users.
    """
    con = context.get_context(vos_dir)
    user_progress = []
    listing = con.get_listing(tasks.get_suffix('reals'))
    mpc_listing = con.get_listing('mpc')
    for filename in listing:
        if filename.startswith('fk'):
            continue
        user = storage.get_property(con.get_full_path(filename), property)
        if user is None:
            continue
        is_present = False
        for mpcfile in [f for f in mpc_listing if not f.startswith('fk')]:
            if mpcfile.startswith(filename):
                # BUG FIX: Python 2 `print` statements converted to calls.
                print(filename, user, 'exists!', mpcfile)
                is_present = True
        if not is_present:
            user_progress.append(filename)
            print(filename, user, 'no mpc file')
            storage.set_property(con.get_full_path(filename), property, None)
    print('Fixed files:', len(user_progress))
    return
At the moment this just checks for a single user's missing reals. Easy to generalise it to all users.
def handleResponse(self, response):
    """Dispatch a broker response to the deferred awaiting its correlationId.

    The requestId is extracted from the message; unmatched responses are
    logged as warnings and dropped.
    """
    requestId = KafkaCodec.get_response_correlation_id(response)
    tReq = self.requests.pop(requestId, None)
    if tReq is None:
        # reprlib keeps the logged payload bounded in size.
        log.warning('Unexpected response with correlationId=%d: %r',
                    requestId, reprlib.repr(response))
    else:
        tReq.d.callback(response)
Handle the response string received by KafkaProtocol. Ok, we've received the response from the broker. Find the requestId in the message, lookup & fire the deferred with the response.
def create_condition(self, service_id, version_number, name, _type, statement, priority="10", comment=None):
    """Create a new condition on the given service version.

    :returns: the created FastlyCondition.
    """
    body = self._formdata({
        "name": name,
        "type": _type,
        "statement": statement,
        "priority": priority,
        "comment": comment,
    }, FastlyCondition.FIELDS)
    content = self._fetch("/service/%s/version/%d/condition" % (service_id, version_number), method="POST", body=body)
    return FastlyCondition(self, content)
Creates a new condition.
def get_pager(self, *path, **kwargs):
    """Lazily page through all results a resource can provide.

    The effective page size is taken from ``page_size``, then ``limit``,
    then the adapter's default.
    """
    page_size = kwargs.pop('page_size', None)
    limit = kwargs.pop('limit', None)
    kwargs['limit'] = page_size or limit or self.default_page_size
    return self.adapter.get_pager(self.get, path, kwargs)
A generator for all the results a resource can provide. The pages are lazily loaded.
def to_coo(self):
    """Return the frame's contents as a sparse SciPy COO matrix.

    The dtype is the lowest common denominator of the column dtypes
    (implicit upcasting).

    :raises ImportError: if scipy is not installed.
    """
    try:
        from scipy.sparse import coo_matrix
    except ImportError:
        raise ImportError('Scipy is not installed')
    dtype = find_common_type(self.dtypes)
    if isinstance(dtype, SparseDtype):
        dtype = dtype.subtype
    cols, rows, datas = [], [], []
    for col, name in enumerate(self):
        s = self[name]
        # Row indices of the stored (non-fill) values of this column.
        row = s.sp_index.to_int_index().indices
        cols.append(np.repeat(col, len(row)))
        rows.append(row)
        datas.append(s.sp_values.astype(dtype, copy=False))
    cols = np.concatenate(cols)
    rows = np.concatenate(rows)
    datas = np.concatenate(datas)
    return coo_matrix((datas, (rows, cols)), shape=self.shape)
Return the contents of the frame as a sparse SciPy COO matrix. .. versionadded:: 0.20.0 Returns ------- coo_matrix : scipy.sparse.spmatrix If the caller is heterogeneous and contains booleans or objects, the result will be of dtype=object. See Notes. Notes ----- The dtype will be the lowest-common-denominator type (implicit upcasting); that is to say if the dtypes (even of numeric types) are mixed, the one that accommodates all will be chosen. e.g. If the dtypes are float16 and float32, dtype will be upcast to float32. By numpy.find_common_type convention, mixing int64 and and uint64 will result in a float64 dtype.
def get_backup_paths(cls, block_id, impl, working_dir):
    """Return the backup file paths for *block_id*.

    Each state path produces a sibling in the backups directory whose
    basename is suffixed with ``.bak.<block_id>``.
    """
    backup_dir = config.get_backups_directory(impl, working_dir)
    return [
        os.path.join(backup_dir,
                     os.path.basename(state_path) + (".bak.%s" % block_id))
        for state_path in cls.get_state_paths(impl, working_dir)
    ]
Get the set of backup paths, given the virtualchain implementation module and block number
def html_load_time(self):
    """Return the aggregate (mean) HTML load time for all pages,
    rounded to the configured precision."""
    return round(mean(self.get_load_times('html')), self.decimal_precision)
Returns aggregate html load time for all pages.
def save(self, msg=None):
    """Commit this item's data to the repo.

    Git objects are immutable, so saving means adding a new item.

    :param msg: commit message; defaults to "Saving <name>".
    """
    message = 'Saving %s' % self.name if msg is None else msg
    log.debug(message)
    self.repo.addItem(self, message)
Modify item data and commit to repo. Git objects are immutable, to save means adding a new item :param msg: Commit message.
def from_annotype(cls, anno, writeable, **kwargs):
    """Return an instance of this class built from an Anno.

    A widget tag is attached when the default widget is not NONE.
    """
    ret = cls(description=anno.description, writeable=writeable, **kwargs)
    widget = ret.default_widget()
    if widget != Widget.NONE:
        ret.set_tags([widget.tag()])
    return ret
Return an instance of this class from an Anno
def _update_separator(self, offset): offset_line = self.handles['offset_line'] if offset == 0: offset_line.set_visible(False) else: offset_line.set_visible(True) if self.invert_axes: offset_line.set_xdata(offset) else: offset_line.set_ydata(offset)
Compute colorbar offset and update separator line if map is non-zero.
def get_blocks_byte_array(self, buffer=False):
    """Return this chunk's block data as raw bytes.

    :param buffer: when True, wrap the payload in a BytesIO prefixed by
        its big-endian int32 length.
    """
    if buffer:
        length = len(self.blocksList)
        return BytesIO(pack(">i", length) + self.get_blocks_byte_array())
    # BUG FIX: array.tostring() was removed in Python 3.9; tobytes() is
    # the exact, long-standing replacement.
    return array.array('B', self.blocksList).tobytes()
Return the block data of this chunk as raw bytes (or, when buffer=True, as a BytesIO prefixed with the big-endian int32 length).
def add_showcase(self, showcase, showcases_to_check=None):
    """Add dataset to showcase.

    Args:
        showcase (Union[Showcase,Dict,str]): Showcase id, or showcase
            metadata from a Showcase object or dictionary.
        showcases_to_check (List[Showcase]): Showcases against which to
            check for an existing association. Defaults to the showcases
            already containing the dataset.

    Returns:
        bool: True if the showcase was added, False if already present.
    """
    dataset_showcase = self._get_dataset_showcase_dict(showcase)
    if showcases_to_check is None:
        showcases_to_check = self.get_showcases()
    # FIX: the loop previously reused (shadowed) the `showcase` parameter
    # as its loop variable and then rebound it again below; distinct
    # names avoid the confusion without changing behavior.
    for existing in showcases_to_check:
        if dataset_showcase['showcase_id'] == existing['id']:
            return False
    new_showcase = hdx.data.showcase.Showcase(
        {'id': dataset_showcase['showcase_id']}, configuration=self.configuration)
    new_showcase._write_to_hdx('associate', dataset_showcase, 'package_id')
    return True
Add dataset to showcase Args: showcase (Union[Showcase,Dict,str]): Either a showcase id or showcase metadata from a Showcase object or dictionary showcases_to_check (List[Showcase]): list of showcases against which to check existence of showcase. Defaults to showcases containing dataset. Returns: bool: True if the showcase was added, False if already present
def reconnect(self):
    """Reconnect the stream if the connection is closed.

    Generator-based coroutine (``yield from``).
    """
    if self._connection.closed():
        self._connection.reset()
        # NOTE(review): nesting reconstructed from the "if needed"
        # contract -- reset and connect both run only when closed; confirm.
        yield from self._connection.connect()
Connected the stream if needed. Coroutine.
def _build_inheritance_chain(cls, bases, *names, merge=False): result = [] for name in names: maps = [] for base in bases: bmap = getattr(base, name, None) if bmap is not None: assert isinstance(bmap, (dict, ChainMap)) if len(bmap): if isinstance(bmap, ChainMap): maps.extend(bmap.maps) else: maps.append(bmap) result.append(ChainMap({}, *maps)) if merge: result = [dict(map) for map in result] if len(names) == 1: return result[0] return result
For all of the names build a ChainMap containing a map for every base class.
def add_text(self, setting, width=300, height=100, multiline=False):
    """Add a text input line for *setting* to its tab panel.

    :param multiline: use a multi-line TextCtrl of the given height
        instead of a single-line control.
    """
    tab = self.panel(setting.tab)
    if multiline:
        ctrl = wx.TextCtrl(tab, -1, "", size=(width, height),
                           style=wx.TE_MULTILINE | wx.TE_PROCESS_ENTER)
    else:
        # -1 height lets the platform pick the native single-line size.
        ctrl = wx.TextCtrl(tab, -1, "", size=(width, -1))
    self._add_input(setting, ctrl)
add a text input line
def arithm_expr_parse(line):
    """Construct an arithmetic expression tree from a token list.

    :param line: list of token strings containing the expression
    :returns: expression tree of nested (left, op, right) tuples, ints
        and variable-name strings
    :complexity: linear
    """
    vals = []
    ops = []
    # ';' acts as a sentinel that forces all pending operators to reduce.
    # NOTE(review): `priority` is assumed to be a module-level
    # operator-precedence table containing '(' and ';' -- confirm.
    for tok in line + [';']:
        if tok in priority:
            # Reduce while the stacked operator binds at least as tightly.
            while (tok != '(' and ops and priority[ops[-1]] >= priority[tok]):
                right = vals.pop()
                left = vals.pop()
                vals.append((left, ops.pop(), right))
            if tok == ')':
                ops.pop()  # discard the matching '('
            else:
                ops.append(tok)
        elif tok.isdigit():
            vals.append(int(tok))
        else:
            vals.append(tok)  # variable name
    return vals.pop()
Constructs an arithmetic expression tree :param line: list of token strings containing the expression :returns: expression tree :complexity: linear
def get_spell_damage(self, amount: int) -> int:
    """Return the damage *amount* will deal after applying SPELLPOWER
    (additive) and SPELLPOWER_DOUBLE (doubling shift)."""
    boosted = amount + self.spellpower
    return boosted << self.controller.spellpower_double
Returns the amount of damage \a amount will do, taking SPELLPOWER and SPELLPOWER_DOUBLE into account.
def _get_background_color(self):
    """Return the cell background color as an (r, g, b) tuple of floats
    in [0, 1]."""
    color = self.cell_attributes[self.key]["bgcolor"]
    # color_pack2rgb yields 0-255 channels; normalize to unit range.
    return tuple(c / 255.0 for c in color_pack2rgb(color))
Returns background color rgb tuple of right line
def _get_point_data_handler_for(self, point):
    """Return (creating on demand) the PointDataObjectHandler for *point*.

    Used by point instances and data callbacks.
    """
    # NOTE(review): the registry itself is used as a context manager,
    # presumably providing its own locking -- confirm.
    with self.__point_data_handlers:
        try:
            return self.__point_data_handlers[point]
        except KeyError:
            # setdefault keeps the first handler if one raced in meanwhile.
            return self.__point_data_handlers.setdefault(point, PointDataObjectHandler(point, self))
Used by point instances and data callbacks
def showMenu(self, point):
    """Display the context menu for this filter widget at *point*."""
    menu = QMenu(self)
    acts = {}
    acts['edit'] = menu.addAction('Edit quick filter...')
    # exec_ blocks until the user picks an action (or dismisses the menu).
    trigger = menu.exec_(self.mapToGlobal(point))
    if trigger == acts['edit']:
        text, accepted = XTextEdit.getText(self.window(), 'Edit Format', 'Format:',
                                           self.filterFormat(), wrapped=False)
        if accepted:
            self.setFilterFormat(text)
Displays the menu for this filter widget.
def Any(a, axis, keep_dims):
    """Any-reduction op; returns a one-element tuple of outputs.

    An ndarray axis is converted to a tuple for numpy's multi-axis form.
    """
    reduce_axis = tuple(axis) if isinstance(axis, np.ndarray) else axis
    return np.any(a, axis=reduce_axis, keepdims=keep_dims),
Any reduction op.
def on_menu_import_meas_file(self, event):
    """Open a measurement file, point self.WD and self.magic_file at it,
    and reset the backend."""
    meas_file = self.choose_meas_file()
    WD = os.path.split(meas_file)[0]
    self.WD = WD
    self.magic_file = meas_file
    self.reset_backend()
Open measurement file, reset self.magic_file and self.WD, and reset everything.
def construct_field(model_name, field_name, field_type, all_models, **kwargs):
    """Build a Statik field from its declared type string.

    Args:
        model_name: Name of the model owning this field.
        field_name: Name of the field being built.
        field_type: Type declaration; a plain type, a foreign model name,
            or a ``Model[] -> backref`` many-to-many declaration.
        all_models: Names of all known models, for foreign-key lookups.
    """
    parts = field_type.split('->')
    base_type = parts[0].strip().split('[]')[0].strip()
    back_populates = parts[1].strip() if len(parts) > 1 else None
    error_context = kwargs.pop('error_context', StatikErrorContext())

    field_kwargs = copy(kwargs)
    field_kwargs['back_populates'] = back_populates

    if base_type not in FIELD_TYPES and base_type not in all_models:
        raise InvalidFieldTypeError(
            model_name,
            field_name,
            context=error_context
        )
    if base_type in FIELD_TYPES:
        return FIELD_TYPES[base_type](field_name, **field_kwargs)
    if parts[0].strip().endswith('[]'):
        return StatikManyToManyField(field_name, base_type, **field_kwargs)
    return StatikForeignKeyField(field_name, base_type, **field_kwargs)
Helper function to build a field from the given field name and type. Args: model_name: The name of the model for which we're building this field. field_name: The name of the field to build. field_type: A string indicator as to which field type must be built. all_models: A list containing the names of all of the models, which will help us when building foreign key lookups.
def update_labels(self, func):
    """Map *func* over the baseline LabelArray and every adjustment value,
    in place.

    :raises TypeError: when the underlying data is not a LabelArray.
    """
    if not isinstance(self.data, LabelArray):
        raise TypeError(
            'update_labels only supported if data is of type LabelArray.'
        )
    self._data = self._data.map(func)
    for row_adjustments in self.adjustments.values():
        for adjustment in row_adjustments:
            adjustment.value = func(adjustment.value)
Map a function over baseline and adjustment values in place. Note that the baseline data values must be a LabelArray.
def main(argv: Optional[Sequence[str]] = None) -> None:
    """Parse CLI arguments and process the homework assignment."""
    parser = ArgumentParser(description="Convert Jupyter Notebook assignments to PDFs")
    parser.add_argument("--hw", type=int, required=True, dest="hw_num",
                        help="Homework number to convert")
    parser.add_argument("-p", "--problems", type=int, nargs="*", dest="problems",
                        help="Problem numbers to convert")
    parser.add_argument("--by-hand", type=int, nargs="*", dest="by_hand",
                        help="Problem numbers to be completed by hand")
    args = parser.parse_args(argv)
    prefix = Path(f"homework/homework-{args.hw_num}")
    process(args.hw_num, args.problems, prefix=prefix, by_hand=args.by_hand)
Parse arguments and process the homework assignment.
def add_repo_to_team(self, auth, team_id, repo_name):
    """Add or update a repo on a team.

    :param auth.Authentication auth: authentication object, must be admin-level
    :param str team_id: Team's id
    :param str repo_name: Name of the repo to add to the team
    :raises NetworkFailure: on a communication error with the server
    :raises ApiFailure: if the request cannot be serviced
    """
    endpoint = "/admin/teams/{t}/repos/{r}".format(t=team_id, r=repo_name)
    self.put(endpoint, auth=auth)
Add or update repo from team. :param auth.Authentication auth: authentication object, must be admin-level :param str team_id: Team's id :param str repo_name: Name of the repo to be added to the team :raises NetworkFailure: if there is an error communicating with the server :raises ApiFailure: if the request cannot be serviced
def register_precmd_hook(self, func: Callable[[plugin.PrecommandData], plugin.PrecommandData]) -> None:
    """Register *func* to be called before each command function.

    The hook's signature is validated against plugin.PrecommandData
    before it is appended.
    """
    self._validate_prepostcmd_hook(func, plugin.PrecommandData)
    self._precmd_hooks.append(func)
Register a hook to be called before the command function.
def _register(self, assignment):
    """Register an Assignment in the _positive or _negative index.

    New assignments are intersected with whatever is already stored for
    the same package; a term that becomes positive is promoted out of
    the negative index.
    """
    name = assignment.dependency.name
    old_positive = self._positive.get(name)
    if old_positive is not None:
        # Already positively constrained: just narrow the existing term.
        self._positive[name] = old_positive.intersect(assignment)
        return

    # NOTE(review): `ref` holds the same value as `name` here; comparable
    # implementations key the negative index by the dependency's ref
    # rather than its name -- confirm this is intended.
    ref = assignment.dependency.name
    negative_by_ref = self._negative.get(name)
    old_negative = None if negative_by_ref is None else negative_by_ref.get(ref)
    if old_negative is None:
        term = assignment
    else:
        term = assignment.intersect(old_negative)

    if term.is_positive():
        # Promote: a positive term supersedes any negative entries.
        if name in self._negative:
            del self._negative[name]
        self._positive[name] = term
    else:
        if name not in self._negative:
            self._negative[name] = {}
        self._negative[name][ref] = term
Registers an Assignment in _positive or _negative.
def runInactiveDeviceCleanup(self):
    """Delete inactive devices first by quota, then by age, using the
    limits configured when create was called.

    Generator-style (Twisted) coroutine.
    """
    yield self.deleteInactiveDevicesByQuota(
        self.__inactive_per_jid_max,
        self.__inactive_global_max
    )
    yield self.deleteInactiveDevicesByAge(self.__inactive_max_age)
Runs both the deleteInactiveDevicesByAge and the deleteInactiveDevicesByQuota methods with the configuration that was set when calling create.
def advance2(self, height, ignore_overflow=False):
    """Advance the cursor by *height*.

    Returns True on success. When the move would overrun the container:
    with ``ignore_overflow`` the cursor is clamped to the remaining
    height (still True); otherwise nothing moves and False is returned.
    """
    if height <= self.remaining_height:
        self._self_cursor.grow(height)
        return True
    if ignore_overflow:
        self._self_cursor.grow(float(self.remaining_height))
        return True
    return False
Advance the cursor by `height`. Returns `True` on success. Returns `False` if this would cause the cursor to point beyond the bottom of the container.
def get_user(user=None):
    """Resolve *user* to a user object.

    :param user: A user id (string), MemberData object, or None for the
        current user
    :returns: Plone User (PlonePAS) / Propertied User
        (PluggableAuthService), or a falsy value when a login name
        cannot be resolved
    """
    if user is None:
        user = getSecurityManager().getUser()
    elif isinstance(user, MemberData):
        user = user.getUser()
    elif isinstance(user, basestring):
        # NOTE(review): `basestring` is Python 2 only.
        user = get_member_by_login_name(get_portal(), user, False)
        if user:
            user = user.getUser()
    return user
Get the user object :param user: A user id, memberdata object or None for the current user :returns: Plone User (PlonePAS) / Propertied User (PluggableAuthService)
def convert_to_sngl_inspiral_table(params, proc_id):
    """Convert (mass1, mass2, spin1z, spin2z) rows into a sngl_inspiral
    table with mass/spin parameters populated and event IDs assigned.

    Parameters
    ----------
    params : iterable
        Each entry is a sequence [mass1, mass2, spin1z, spin2z].
    proc_id : ilwd char
        Process ID stamped onto every row.

    Returns
    -------
    SnglInspiralTable
        Bank of templates in SnglInspiralTable format.
    """
    sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
    col_names = ['mass1','mass2','spin1z','spin2z']
    for values in params:
        tmplt = return_empty_sngl()
        tmplt.process_id = proc_id
        for colname, value in zip(col_names, values):
            setattr(tmplt, colname, value)
        # Derived mass parameters from the component masses.
        tmplt.mtotal, tmplt.eta = pnutils.mass1_mass2_to_mtotal_eta(
            tmplt.mass1, tmplt.mass2)
        tmplt.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
            tmplt.mass1, tmplt.mass2)
        tmplt.template_duration = 0
        tmplt.event_id = sngl_inspiral_table.get_next_id()
        sngl_inspiral_table.append(tmplt)
    return sngl_inspiral_table
Convert a list of m1,m2,spin1z,spin2z values into a basic sngl_inspiral table with mass and spin parameters populated and event IDs assigned Parameters ----------- params : iterable Each entry in the params iterable should be a sequence of [mass1, mass2, spin1z, spin2z] in that order proc_id : ilwd char Process ID to add to each row of the sngl_inspiral table Returns ---------- SnglInspiralTable Bank of templates in SnglInspiralTable format
def _threaded(self, *args, **kwargs): for target in self.targets: result = target(*args, **kwargs) self.queue.put(result)
Call the target and put the result in the Queue.
def UpdateUser(self, user, ssh_keys):
    """Update a Linux user with authorized SSH keys.

    Args:
        user: string, the name of the Linux user account.
        ssh_keys: list, the SSH key strings associated with the user.

    Returns:
        bool, True if the user account updated successfully.
    """
    if not bool(USER_REGEX.match(user)):
        self.logger.warning('Invalid user account name %s.', user)
        return False
    if not self._GetUser(user):
        # The user does not exist: create it, add it to the configured
        # groups and grant sudo; bail out on any failure.
        if not (self._AddUser(user)
                and self._UpdateUserGroups(user, self.groups)):
            return False
        if not self._UpdateSudoer(user, sudoer=True):
            return False
    pw_entry = self._GetUser(user)
    if pw_entry and os.path.basename(pw_entry.pw_shell) == 'nologin':
        # Respect accounts that explicitly opted out of logins.
        message = 'Not updating user %s. User set `nologin` as login shell.'
        self.logger.debug(message, user)
        return True
    try:
        self._UpdateAuthorizedKeys(user, ssh_keys)
    except (IOError, OSError) as e:
        message = 'Could not update the authorized keys file for user %s. %s.'
        self.logger.warning(message, user, str(e))
        return False
    else:
        return True
Update a Linux user with authorized SSH keys. Args: user: string, the name of the Linux user account. ssh_keys: list, the SSH key strings associated with the user. Returns: bool, True if the user account updated successfully.
def create_xml_string(self):
    """Create a UNTL document string from this UNTL metadata root object.

    Example::

        untl_xml_string = metadata_root_object.create_xml_string()
    """
    root = self.create_xml()
    # NOTE(review): lxml's tostring returns bytes on Python 3, which
    # would make this str + bytes concatenation fail -- this looks like
    # Python 2 era code; confirm.
    xml = '<?xml version="1.0" encoding="UTF-8"?>\n' + tostring(
        root, pretty_print=True
    )
    return xml
Create a UNTL document in a string from a UNTL metadata root object. untl_xml_string = metadata_root_object.create_xml_string()
def lookup(self, value):
    """Return the first key whose stored value equals *value*, or None."""
    # BUG FIX: `iteritems()` is Python 2 only and raises AttributeError
    # on Python 3; `items()` behaves identically on both.
    for key, stored in self.items():
        if value == stored:
            return key
    return None
Return the first key in the dict whose value equals the given value, or None if no such key exists.
def eval_grad(self):
    """Compute the gradient in the spatial domain for variable Y:
    D^T (D Y - S)."""
    residual = self.D.dot(self.Y) - self.S
    return self.D.T.dot(residual)
Compute gradient in spatial domain for variable Y.
def _next_timestamp(self, now, last): if now > last: self.last = now return now else: self._maybe_warn(now=now) self.last = last + 1 return self.last
Returns the timestamp that should be used if ``now`` is the current time and ``last`` is the last timestamp returned by this object. Intended for internal and testing use only; to generate timestamps, call an instantiated ``MonotonicTimestampGenerator`` object. :param int now: an integer to be used as the current time, typically representing the current time in microseconds since the UNIX epoch :param int last: an integer representing the last timestamp returned by this object
def _handle_processing_error(err, errstream, client):
    """Handle a ProcessingError: deliver failed events to the error
    stream when one is configured (otherwise re-raise at the end), and
    log every failure that is not an OutOfOrderError.
    """
    errors = sorted(err.events, key=operator.attrgetter("index"))
    failed = [e.event for e in errors]
    # NOTE(review): `silent` is computed but never used below -- confirm.
    silent = all(isinstance(e.error, OutOfOrderError) for e in errors)
    if errstream:
        _deliver_errored_events(errstream, failed)
        must_raise = False
    else:
        must_raise = True
    for _, event, error, tb in errors:
        if isinstance(error, OutOfOrderError):
            # Out-of-order events are expected and not worth logging.
            continue
        try:
            # NOTE(review): six.reraise itself raises, so the outer
            # `raise` on its return value never executes -- confirm.
            raise six.reraise(*tb)
        except Exception as err:
            if client:
                client.captureException()
            msg = "{}{}: {}".format(type(err).__name__, err.args, json.dumps(event, indent=4))
            rlogger.error(msg, exc_info=tb)
    if must_raise:
        raise
Handle ProcessingError exceptions.
def x10(self, feature):
    """Return an X10 device matching *feature* (case-insensitive).

    Current features: OnOff, Dimmable. When no known product matches,
    a fresh X10Product is constructed for the feature.
    """
    wanted = feature.lower()
    match = None
    for product in self._x10_products:
        if wanted == product.feature:
            match = product
    if not match:
        match = X10Product(feature, None)
    return match
Return an X10 device based on a feature. Current features: - OnOff - Dimmable
def send(self, content):
    """Write *content* from the SSH client to the forked command's stdin.

    :param str content: string to be sent to the forked command
    :returns: the number of bytes accepted, i.e. ``len(content)``
    :raises ProxyCommandFailure: when the underlying write fails
    """
    try:
        self.process.stdin.write(content)
    except IOError as error:
        # Surface the failure together with the full command line.
        raise ProxyCommandFailure(" ".join(self.cmd), error.strerror)
    else:
        return len(content)
Write the content received from the SSH client to the standard input of the forked command. :param str content: string to be sent to the forked command
def gaussian_kernel(gstd):
    """Generate an odd-sized truncated Gaussian filter kernel.

    The kernel has a cutoff at 3*gstd and is normalized to sum to 1.

    Parameters
    ----------
    gstd : float
        Standard deviation of the filter.

    Returns
    -------
    g : ndarray
        Array with kernel coefficients.
    """
    # BUG FIX: the tap count was a float, which modern NumPy rejects as
    # the `num` argument of linspace; cast to int (value is unchanged).
    taps = int(np.ceil(gstd * 3) * 2 + 1)
    x = np.linspace(-(taps - 1) / 2, (taps - 1) / 2, taps, endpoint=True)
    kernel = np.exp(-.5 * ((x / gstd) ** 2))
    return kernel / np.sum(kernel)
Generate odd sized truncated Gaussian The generated filter kernel has a cutoff at $3\sigma$ and is normalized to sum to 1 Parameters ------------- gstd : float Standard deviation of filter Returns ------------- g : ndarray Array with kernel coefficients
def find_all_matches(text_log_error, matchers):
    """Yield *unsaved* TextLogErrorMatch instances for every matcher that
    scores the given error."""
    for matcher_func in matchers:
        results = matcher_func(text_log_error)
        if not results:
            continue
        for score, classified_failure_id in results:
            yield TextLogErrorMatch(
                score=score,
                matcher_name=matcher_func.__name__,
                classified_failure_id=classified_failure_id,
                text_log_error=text_log_error,
            )
Find matches for the given error using the given matcher classes Returns *unsaved* TextLogErrorMatch instances.
def get_listing_calendar(self, listing_id, starting_date=datetime.datetime.now(), calendar_months=6):
    """Fetch the host availability calendar for *listing_id* as JSON.

    NOTE(review): `calendar_months` is accepted but never used -- the
    window is hard-coded to 30 days from `starting_date`; confirm intent.
    WARNING: the `datetime.now()` default is evaluated once at import
    time, not per call.
    """
    params = {
        '_format': 'host_calendar_detailed'
    }
    starting_date_str = starting_date.strftime("%Y-%m-%d")
    ending_date_str = (
        starting_date + datetime.timedelta(days=30)).strftime("%Y-%m-%d")
    r = self._session.get(API_URL + "/calendars/{}/{}/{}".format(
        str(listing_id), starting_date_str, ending_date_str), params=params)
    r.raise_for_status()
    return r.json()
Get host availability calendar for a given listing
def get_files_from_storage(paths):
    """Yield storage files for *paths*, each with its name set to the
    basename (so the name does not include the path).

    NOTE(review): the try wraps only the `yield`, so the ClientError
    handler fires for errors raised while the consumer processes the
    file, not for the `open` call itself -- confirm this is intended.
    """
    for path in paths:
        f = default_storage.open(path)
        f.name = os.path.basename(path)
        try:
            yield f
        except ClientError:
            logger.exception("File not found: %s", path)
Return S3 file where the name does not include the path.
def start_stream_subscriber(self):
    """Start the stream consumer's main loop, once.

    Called after the stream consumer has been set up with the correct
    callbacks.
    """
    if not self._stream_process_started:
        if sys.platform.startswith("win"):
            self._stream_process_started = True
            # On Windows the stream runs inline rather than in a process.
            self._stream()
        # NOTE(review): there is no else -- on Windows both the inline
        # stream and the process start below execute; confirm intent.
        self._stream_process_started = True
        self._stream_process.start()
Starts the stream consumer's main loop. Called when the stream consumer has been set up with the correct callbacks.
def _get_type_list(self, props): type_list = [] for k, v in list(props.items()): t = self._get_property_type(v) if t is not None: type_list.append(t) return sorted(type_list)
Return a list of non-primitive types used by this object.
def write(self, path, data, offset, fh):
    """FUSE write handler: append *data* to the staged file for *path*.

    Bumps the tracked st_size under the attribute lock and reports the
    whole buffer as written.
    """
    nbytes = len(data)
    with self.attr_lock:
        entry = self.attr[path]
        staged = entry[STAGED_KEY]
        if not staged.closed:
            entry[BASE_KEY].st_size += nbytes
            staged.write(data)
    return nbytes
This is a readonly filesystem right now
def set_meta(self, name, format, *args):
    """Set certificate metadata from a formatted string."""
    handle = self._as_parameter_
    return lib.zcert_set_meta(handle, name, format, *args)
Set certificate metadata from formatted string.
def _operations_from_methods(handler_class):
    """Yield {http_method: operation} dicts parsed from the handler.

    Only HTTP methods whose docstring contains loadable YAML produce an
    operation.

    :param handler_class:
    :type handler_class: RequestHandler descendant
    """
    for verb in yaml_utils.PATH_KEYS:
        handler_method = getattr(handler_class, verb)
        spec = yaml_utils.load_yaml_from_docstring(handler_method.__doc__)
        if spec:
            yield {verb: spec}
Generator of operations described in handler's http methods :param handler_class: :type handler_class: RequestHandler descendant
def missing_or_other_newer(path, other_path, cwd=None):
    """Return True if *path* is missing or older than *other_path*.

    Parameters
    ==========
    path: string
        path which might be missing or too old
    other_path: string
        reference path
    cwd: string
        working directory (root of relative paths)
    """
    if not cwd:
        cwd = '.'
    target = get_abspath(path, cwd=cwd)
    reference = get_abspath(other_path, cwd=cwd)
    if not os.path.exists(target):
        return True
    # 1e-6 s slack guards against near-equal timestamps.
    return os.path.getmtime(reference) - 1e-6 >= os.path.getmtime(target)
Investigate if path is non-existent or older than provided reference path. Parameters ========== path: string path to file which might be missing or too old other_path: string reference path cwd: string working directory (root of relative paths) Returns ======= True if path is older or missing.
def Notify(self, message_type, subject, msg, source):
    """Send an AFF4-based notification to the user in the UI.

    Args:
      message_type: One of rdf_flows.Notification.notification_types, e.g.
        "ViewObject", "HostInformation", "GrantAccess", optionally with an
        added ":<suffix>" part, e.g. "ViewObject:TYPE_CLIENT_INTERROGATED".
      subject: The subject to use, normally a URN.
      msg: The message to display.
      source: The class doing the notification.

    Raises:
      TypeError: On invalid message_type.
    """
    pending = self.Get(self.Schema.PENDING_NOTIFICATIONS)
    if pending is None:
        pending = self.Schema.PENDING_NOTIFICATIONS()

    # Only the part before an optional ":suffix" is validated against the
    # known notification types.
    if message_type.split(
        ":", 2)[0] not in rdf_flows.Notification.notification_types:
        raise TypeError("Invalid notification type %s" % message_type)

    pending.Append(
        type=message_type,
        subject=subject,
        message=msg,
        source=source,
        timestamp=int(time.time() * 1e6))  # microseconds since the epoch

    # Bound the pending queue: drop the oldest entries beyond 50.
    while len(pending) > 50:
        pending.Pop(0)

    self.Set(self.Schema.PENDING_NOTIFICATIONS, pending)
Send an AFF4-based notification to the user in the UI. Args: message_type: One of aff4_grr.Notification.notification_types e.g. "ViewObject", "HostInformation", "GrantAccess" or the same with an added ":[new-style notification type] suffix, e.g. "ViewObject:TYPE_CLIENT_INTERROGATED". subject: The subject to use, normally a URN. msg: The message to display. source: The class doing the notification. Raises: TypeError: On invalid message_type.
def orientation(self, image, geometry, options):
    """Wrapper for ``_orientation``.

    If the 'orientation' option (default taken from
    settings.THUMBNAIL_ORIENTATION) is enabled, delegate to
    ``_orientation``; otherwise return the image untouched.
    """
    if options.get('orientation', settings.THUMBNAIL_ORIENTATION):
        return self._orientation(image)
    # NOTE(review): ``reoriented`` is only set on the skip path here --
    # presumably _orientation() manages the flag itself when it runs;
    # confirm against that implementation.
    self.reoriented = True
    return image
Wrapper for ``_orientation``
def flush(self):
    """Flush pending puts and deletes to Dynamo in one batch write."""
    dynamizer = self.connection.dynamizer
    batch = [encode_put(dynamizer, item) for item in self._to_put]
    batch.extend(encode_delete(dynamizer, item) for item in self._to_delete)
    self._write(batch)
    self._to_put = []
    self._to_delete = []
Flush pending items to Dynamo
def _hash(x, elementType, relicHashFunc):
    """Hash the bytes of *x* with *relicHashFunc*, returning an *elementType*.

    :param x: value convertible to ``bytes``
    :param elementType: ctypes type instantiated to receive the output
    :param relicHashFunc: C hash function taking (out, buf, buf_len)
    """
    # Bug fix: the original did ``map(barray.extend, bytes(x))`` which on
    # Python 3 is a lazy no-op (the map object is never consumed), so the
    # buffer stayed empty and every input hashed identically.
    barray = bytearray(bytes(x))
    result = elementType()
    buf = getBuffer(barray)
    relicHashFunc(byref(result), byref(buf), sizeof(buf))
    return result
Hash an array of bytes, @x, using @relicHashFunc and returns the result of @elementType.
def port(port_number, return_format=None):
    """Summary information about a particular port.

    In the returned data:
    Records: Total number of records for a given date.
    Targets: Number of unique destination IP addresses.
    Sources: Number of unique originating IPs.

    :param port_number: a string or integer port number
    """
    response = _get('port/{number}'.format(number=port_number), return_format)
    if 'bad port number' not in str(response):
        return response
    raise Error('Bad port number, {number}'.format(number=port_number))
Summary information about a particular port. In the returned data: Records: Total number of records for a given date. Targets: Number of unique destination IP addresses. Sources: Number of unique originating IPs. :param port_number: a string or integer port number
def get_notebook_item(name):
    """Get an item by *name* from the IPython notebook environment."""
    env = notebook_environment()
    item = google.datalab.utils.get_item(env, name)
    return item
Get an item from the IPython environment.
def do_header(self, node):
    """Emit a user-defined section header as a C-style comment.

    The header field should not be printed as a normal field, so its text
    is wrapped in ``/* ... */``.  If a ``description`` element follows
    (two siblings ahead, skipping the intervening text node), it is
    removed from the tree and emitted inside its own comment block.
    """
    data = self.extract_text(node)
    self.add_text('\n/*\n %s \n*/\n' % data)
    parent = node.parentNode
    idx = parent.childNodes.index(node)
    # Fix: the original tested ``len(...) >= idx + 2`` but then indexed
    # childNodes[idx + 2], which raises IndexError when len == idx + 2.
    if len(parent.childNodes) > idx + 2:
        nd = parent.childNodes[idx + 2]
        if nd.nodeName == 'description':
            nd = parent.removeChild(nd)
            self.add_text('\n/*')
            self.subnode_parse(nd)
            self.add_text('\n*/\n')
For a user defined section def a header field is present which should not be printed as such, so we comment it in the output.
def _show_selection(self, text, bbox):
    """Configure the selection canvas and place it over the given cell."""
    x, y, cell_w, cell_h = bbox
    text_width = self._font.measure(text)
    canvas = self._canvas
    canvas.configure(width=cell_w, height=cell_h)
    # Right-align the text and center it vertically within the cell.
    canvas.coords(canvas.text, cell_w - text_width, cell_h / 2 - 1)
    canvas.itemconfigure(canvas.text, text=text)
    canvas.place(in_=self._calendar, x=x, y=y)
Configure canvas for a new selection.
def dump_graph(self):
    """Dump a key-only representation of the schema to a dictionary.

    Every known relation is a key with a value of a list of keys it is
    referenced by.
    """
    with self.lock:
        entries = {}
        for key, relation in self.relations.items():
            entries[dot_separated(key)] = relation.dump_graph_entry()
        return entries
Dump a key-only representation of the schema to a dictionary. Every known relation is a key with a value of a list of keys it is referenced by.
def for_category(self, category, context=None):
    """Return the actions for *category* available in *context*.

    Actions are filtered via ``Action.available``.  When *context* is
    None the current action context is used.
    """
    assert self.installed(), "Actions not enabled on this application"
    if context is None:
        context = self.context
    candidates = self._state["categories"].get(category, [])
    return [action for action in candidates if action.available(context)]
Returns actions list for this category in current application. Actions are filtered according to :meth:`.Action.available`. if `context` is None, then current action context is used (:attr:`context`)
def match_option_with_value(arguments, option, value):
    """Check if a list of command line options contains an option/value pair.

    :param arguments: The command line arguments (a list of strings).
    :param option: The long option (a string).
    :param value: The expected value (a string).
    :returns: True if the pair appears either as ``--opt=value`` or as
        two consecutive arguments, False otherwise.
    """
    combined = '%s=%s' % (option, value)
    if combined in arguments:
        return True
    return contains_sublist(arguments, [option, value])
Check if a list of command line options contains an option with a value. :param arguments: The command line arguments (a list of strings). :param option: The long option (a string). :param value: The expected value (a string). :returns: :data:`True` if the command line contains the option/value pair, :data:`False` otherwise.
def load_combo_catalog():
    """Load a union of the user and global catalogs for convenience."""
    desc = 'Generated from data packages found on your intake search path'
    cat_dirs = []

    def add_globs(directory):
        # Pick up both common YAML extensions under a directory.
        cat_dirs.append(directory + '/*.yaml')
        cat_dirs.append(directory + '/*.yml')

    for base in (user_data_dir(), global_data_dir()):
        if os.path.isdir(base):
            add_globs(base)

    for entry in conf.get('catalog_path', []):
        if entry == '':
            continue
        if entry.endswith(('yaml', 'yml')):
            cat_dirs.append(entry)
        else:
            add_globs(entry)

    return YAMLFilesCatalog(cat_dirs, name='builtin', description=desc)
Load a union of the user and global catalogs for convenience
def reset(self, n_particles=None, only_params=None, reset_weights=True):
    """Draw all particle locations and weights fresh from the prior.

    :param int n_particles: Forces the size of the new particle set.
        If ``None``, the size of the particle set is not changed.
    :param slice only_params: Resets only some of the parameters.
        Cannot be set if ``n_particles`` is also given.
    :param bool reset_weights: Resets the weights as well as the
        particles.
    """
    if n_particles is not None and only_params is not None:
        raise ValueError("Cannot set both n_particles and only_params.")
    if n_particles is None:
        n_particles = self.n_particles

    if reset_weights:
        # Uniform weights summing to 1.
        self.particle_weights = np.ones((n_particles,)) / n_particles

    if only_params is None:
        # Full reset: replace every location (and reallocate, since
        # n_particles may have changed).
        sl = np.s_[:, :]
        self.particle_locations = np.zeros((n_particles,
                                            self.model.n_modelparams))
    else:
        # Partial reset: only the selected parameter columns are redrawn.
        sl = np.s_[:, only_params]

    self.particle_locations[sl] = self.prior.sample(n=n_particles)[sl]

    if self._canonicalize:
        # Map redrawn locations back into the model's canonical region.
        self.particle_locations[sl] = self.model.canonicalize(
            self.particle_locations[sl])
Causes all particle locations and weights to be drawn fresh from the initial prior. :param int n_particles: Forces the size of the new particle set. If `None`, the size of the particle set is not changed. :param slice only_params: Resets only some of the parameters. Cannot be set if ``n_particles`` is also given. :param bool reset_weights: Resets the weights as well as the particles.
def browse_node_lookup(self, ResponseGroup="BrowseNodeInfo", **kwargs):
    """Browse Node Lookup.

    Returns the specified browse node's name, children, and ancestors.
    Example:
        >>> api.browse_node_lookup(BrowseNodeId='163357')
    """
    raw = self.api.BrowseNodeLookup(ResponseGroup=ResponseGroup, **kwargs)
    root = objectify.fromstring(raw)
    request = root.BrowseNodes.Request
    # objectify exposes booleans as their string form here.
    if request.IsValid == 'False':
        error = request.Errors.Error
        raise BrowseNodeLookupException(
            "Amazon BrowseNode Lookup Error: '{0}', '{1}'".format(
                error.Code, error.Message))
    return [AmazonBrowseNode(node.BrowseNode) for node in root.BrowseNodes]
Browse Node Lookup. Returns the specified browse node's name, children, and ancestors. Example: >>> api.browse_node_lookup(BrowseNodeId='163357')
def _update_from_database(self):
    """Refresh ``_my_map`` from the latest stored state.

    Should be called prior to major object events so that an assessment
    taken on multiple devices stays reasonably synchronized.
    """
    collection = JSONClientValidated('assessment',
                                     collection='AssessmentSection',
                                     runtime=self._runtime)
    query = {'_id': self._my_map['_id']}
    self._my_map = collection.find_one(query)
Updates map to latest state in database. Should be called prior to major object events to assure that an assessment being taken on multiple devices are reasonably synchronized.
def get_oauth_url(self):
    """Return the request URL with OAuth 1.0a parameters appended.

    Any existing query-string parameters are preserved (and signed)
    ahead of the oauth_* keys.
    """
    params = OrderedDict()
    url = self.url
    if "?" in url:
        base = url[:url.find("?")]
        for key, value in parse_qsl(urlparse(url).query):
            params[key] = value
        url = base

    params["oauth_consumer_key"] = self.consumer_key
    params["oauth_timestamp"] = self.timestamp
    params["oauth_nonce"] = self.generate_nonce()
    params["oauth_signature_method"] = "HMAC-SHA256"
    # Signature must be computed over all preceding params.
    params["oauth_signature"] = self.generate_oauth_signature(params, url)

    return "%s?%s" % (url, urlencode(params))
Returns the URL with OAuth params
def residual_block(x, hparams):
    """A stack of convolution blocks with residual connection."""
    k = (hparams.kernel_height, hparams.kernel_width)
    # Three identical (dilation 1x1) layers, all with kernel k.
    dilations_and_kernels = [((1, 1), k) for _ in range(3)]
    y = common_layers.subseparable_conv_block(
        x,
        hparams.hidden_size,
        dilations_and_kernels,
        padding="SAME",
        separability=0,
        name="residual_block")
    # Residual add followed by layer normalization.
    x = common_layers.layer_norm(x + y, hparams.hidden_size, name="lnorm")
    # TF1-style dropout: second argument is keep_prob, hence 1 - dropout.
    return tf.nn.dropout(x, 1.0 - hparams.dropout)
A stack of convolution blocks with residual connection.
def correction(self, word):
    """The most probable correct spelling for the word.

    Args:
        word (str): The word to correct
    Returns:
        str: The most likely candidate
    """
    candidates = self.candidates(word)
    return max(candidates, key=self.word_probability)
The most probable correct spelling for the word Args: word (str): The word to correct Returns: str: The most likely candidate
def create_default_item_node(field, state):
    """Create a definition list item describing a Field's default value.

    Parameters
    ----------
    field : ``lsst.pex.config.Field``
        A configuration field.
    state : ``docutils.statemachine.State``
        Usually the directive's ``state`` attribute.

    Returns
    -------
    ``docutils.nodes.definition_list_item``
        Definition list item for the field's default value.
    """
    term = nodes.term(text="Default")
    definition = nodes.definition()
    definition.append(nodes.literal(text=repr(field.default)))
    item = nodes.definition_list_item()
    item.append(term)
    item.append(definition)
    return item
Create a definition list item node that describes the default value of a Field config. Parameters ---------- field : ``lsst.pex.config.Field`` A configuration field. state : ``docutils.statemachine.State`` Usually the directive's ``state`` attribute. Returns ------- ``docutils.nodes.definition_list_item`` Definition list item that describes the default value of a Field config.
def logodds(args):
    """
    %prog logodds cnt1 cnt2

    Compute log likelihood between two db.
    """
    from math import log
    from jcvi.formats.base import DictFile

    p = OptionParser(logodds.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 2:
        sys.exit(not p.print_help())

    cnt1, cnt2 = args
    d = DictFile(cnt2)
    # Fix: use a context manager so the input handle is always closed
    # (the original leaked the open file object).
    with open(cnt1) as fp:
        for row in fp:
            scf, c1 = row.split()
            c2 = d[scf]
            # Add-one smoothing avoids log(0) for zero counts.
            c1, c2 = float(c1) + 1, float(c2) + 1
            score = int(100 * (log(c1) - log(c2)))
            print("{0}\t{1}".format(scf, score))
%prog logodds cnt1 cnt2 Compute log likelihood between two db.
def cli(ctx):
    """
    This is a command line app to get useful stats from a
    trello board and report on them in useful ways.

    Requires the following environment varilables:

        TRELLOSTATS_APP_KEY=<your key here>
        TRELLOSTATS_APP_TOKEN=<your token here>
    """
    # Docstring reproduced verbatim: click renders it as --help text.
    ctx.obj = {
        'app_key': os.environ.get('TRELLOSTATS_APP_KEY'),
        'app_token': os.environ.get('TRELLOSTATS_APP_TOKEN'),
    }
    init_db(db_proxy)
This is a command line app to get useful stats from a trello board and report on them in useful ways. Requires the following environment varilables: TRELLOSTATS_APP_KEY=<your key here> TRELLOSTATS_APP_TOKEN=<your token here>
def _repr_html_(self):
    """Render the raster as an interactive folium map in Jupyter."""
    TileServer.run_tileserver(self, self.footprint())
    caption = "raster: %s" % self._filename
    folium_map = TileServer.folium_client(self, self.footprint(),
                                          capture=caption)
    return folium_map._repr_html_()
Required for jupyter notebook to show raster as an interactive map.
def get_config_string_option(parser: ConfigParser,
                             section: str,
                             option: str,
                             default: str = None) -> str:
    """Retrieve a string value from a parser.

    Args:
        parser: instance of :class:`ConfigParser`
        section: section name within config file
        option: option (variable) name within that section
        default: value to return if option is absent

    Returns:
        string value

    Raises:
        ValueError: if the section is absent
    """
    if parser.has_section(section):
        return parser.get(section, option, fallback=default)
    raise ValueError("config missing section: " + section)
Retrieves a string value from a parser. Args: parser: instance of :class:`ConfigParser` section: section name within config file option: option (variable) name within that section default: value to return if option is absent Returns: string value Raises: ValueError: if the section is absent
def get_projects(self, **kwargs):
    """Get a user's project.

    :param str login: User's login (Default: self._login)
    :return: JSON
    """
    login = kwargs.get('login', self._login)
    url = SEARCH_URL.format(login=login)
    return self._request_api(url=url).json()
Get a user's project. :param str login: User's login (Default: self._login) :return: JSON
def _preprocess_edges_for_pydot(edges_with_data):
    """Throw away all edge attributes except 'label', quoting everything."""
    for source, target, attrs in edges_with_data:
        kept = {}
        if 'label' in attrs:
            kept['label'] = quote_for_pydot(attrs['label'])
        yield quote_for_pydot(source), quote_for_pydot(target), kept
Throw away all edge attributes, except for 'label'.
def item_after(self, item):
    """Return the item after *item*, or None when at the end."""
    following = self._next_iter_for(item)
    if following is None:
        return None
    return self._object_at_iter(following)
The item after an item
def is_valid_geometry(geometry):
    """Confirm that the geometry type is Polygon or MultiPolygon.

    Args:
        geometry (BaseGeometry): BaseGeometry instance (e.g. Polygon)

    Returns:
        bool
    """
    # isinstance accepts a tuple of types; the if/else returning
    # True/False literals was redundant.
    return isinstance(geometry, (Polygon, MultiPolygon))
Confirm that the geometry type is of type Polygon or MultiPolygon. Args: geometry (BaseGeometry): BaseGeometry instance (e.g. Polygon) Returns: bool