code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def get_unique_connection_configs(config=None): if config is None: from .settings import QUEUES config = QUEUES connection_configs = [] for key, value in config.items(): value = filter_connection_params(value) if value not in connection_configs: connection_configs.append(value) return connection_configs
Returns a list of unique Redis connections from config
def DropTables(self): rows, _ = self.ExecuteQuery( "SELECT table_name FROM information_schema.tables " "WHERE table_schema='%s'" % self.database_name) for row in rows: self.ExecuteQuery("DROP TABLE `%s`" % row["table_name"])
Drop all existing tables.
async def addFeedData(self, name, items, seqn=None): return await self.core.addFeedData(name, items, seqn)
Add feed data to the cortex.
def plot_wigner_seitz(lattice, ax=None, **kwargs): ax, fig, plt = get_ax3d_fig_plt(ax) if "color" not in kwargs: kwargs["color"] = "k" if "linewidth" not in kwargs: kwargs["linewidth"] = 1 bz = lattice.get_wigner_seitz_cell() ax, fig, plt = get_ax3d_fig_plt(ax) for iface in range(len(bz)): for line in itertools.combinations(bz[iface], 2): for jface in range(len(bz)): if iface < jface and any( np.all(line[0] == x) for x in bz[jface]) \ and any(np.all(line[1] == x) for x in bz[jface]): ax.plot(*zip(line[0], line[1]), **kwargs) return fig, ax
Adds the skeleton of the Wigner-Seitz cell of the lattice to a matplotlib Axes Args: lattice: Lattice object ax: matplotlib :class:`Axes` or None if a new figure should be created. kwargs: kwargs passed to the matplotlib function 'plot'. Color defaults to black and linewidth to 1. Returns: matplotlib figure and matplotlib ax
def start_all_linking(self, mode, group): msg = StartAllLinking(mode, group) self.send_msg(msg)
Put the IM into All-Linking mode. Puts the IM into All-Linking mode for 4 minutes. Parameters: mode: 0 | 1 | 3 | 255 0 - PLM is responder 1 - PLM is controller 3 - Device that initiated All-Linking is Controller 255 = Delete All-Link group: All-Link group number (0 - 255)
def check_output(*args, **kwargs): if hasattr(subprocess, 'check_output'): return subprocess.check_output(stderr=subprocess.STDOUT, universal_newlines=True, *args, **kwargs) else: process = subprocess.Popen(*args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, **kwargs) output, _ = process.communicate() retcode = process.poll() if retcode: error = subprocess.CalledProcessError(retcode, args[0]) error.output = output raise error return output
Compatibility wrapper for Python 2.6 missin g subprocess.check_output
def update_state(url, state_obj): try: req = urllib2.Request(url) req.add_header('Content-Type', 'application/json') response = urllib2.urlopen(req, json.dumps(state_obj)) except urllib2.URLError as ex: raise ValueError(str(ex)) if response.code == 400: raise ValueError(response.message) elif response.code == 404: raise ValueError('unknown model run')
Update the state of a given model run. The state object is a Json representation of the state as created by the SCO-Server. Throws a ValueError if the resource is unknown or the update state request failed. Parameters ---------- url : string Url to POST model run create model run request state_obj : Json object State object serialization as expected by the API.
def perform(self): last_value = None last_step = None while self.items.qsize(): item = self.items.get() if item.flag == self.do: last_value = item.item(*item.args, **item.kwargs) last_step = item.message elif item.flag == self.expect: message = item.message local = {'value': last_value, 'expectation': item.item} expression = 'value {operator} expectation'.format(operator=item.operator) result = eval(expression, local) format_vars = { 'actual': last_value, 'expected': item.item, 'step': last_step, 'operator': item.operator } for var, val in format_vars.iteritems(): message = message.replace('{' + str(var) + '}', str(val)) assert result, message return last_value
Runs through all of the steps in the chain and runs each of them in sequence. :return: The value from the lat "do" step performed
def create_page_from_template(template_file, output_path): mkdir_p(os.path.dirname(output_path)) shutil.copy(os.path.join(livvkit.resource_dir, template_file), output_path)
Copy the correct html template file to the output directory
def _render_after(self, element): if element.inline_content: return "</%s>%s" % (element.tag, self.render_newlines()) elif element.self_close: return self.render_newlines() elif self.children: return "%s</%s>\n" % (self.spaces, element.tag) else: return "</%s>\n" % (element.tag)
Render closing tag
def add_comes_from(self, basic_block): if basic_block is None: return if self.lock: return if basic_block in self.comes_from: return self.lock = True self.comes_from.add(basic_block) basic_block.add_goes_to(self) self.lock = False
This simulates a set. Adds the basic_block to the comes_from list if not done already.
def get_backup_blocks(cls, impl, working_dir): ret = [] backup_dir = config.get_backups_directory(impl, working_dir) if not os.path.exists(backup_dir): return [] for name in os.listdir( backup_dir ): if ".bak." not in name: continue suffix = name.split(".bak.")[-1] try: block_id = int(suffix) except: continue backup_paths = cls.get_backup_paths(block_id, impl, working_dir) for p in backup_paths: if not os.path.exists(p): block_id = None continue if block_id is not None: ret.append(block_id) return ret
Get the set of block IDs that were backed up
def _finalize(self): "Computes _fobj, the completed hash." hobj = self._hobj for hashname in self._algorithms[1:]: fobj = hashlib.new(hashname) fobj.update(hobj.digest()) hobj = fobj self._fobj = hobj
Computes _fobj, the completed hash.
def _get_shareinfo(self, data_el): if (data_el is None) or not (isinstance(data_el, ET.Element)): return None return ShareInfo(self._xml_to_dict(data_el))
Simple helper which returns instance of ShareInfo class :param data_el: 'data' element extracted from _make_ocs_request :returns: instance of ShareInfo class
def extract(self, item): doc = Document(deepcopy(item['spider_response'].body)) description = doc.summary() article_candidate = ArticleCandidate() article_candidate.extractor = self._name article_candidate.title = doc.short_title() article_candidate.description = description article_candidate.text = self._text(item) article_candidate.topimage = self._topimage(item) article_candidate.author = self._author(item) article_candidate.publish_date = self._publish_date(item) article_candidate.language = self._language(item) return article_candidate
Creates an readability document and returns an ArticleCandidate containing article title and text. :param item: A NewscrawlerItem to parse. :return: ArticleCandidate containing the recovered article data.
def stats(syslog_ng_sbin_dir=None): try: ret = _run_command_in_extended_path(syslog_ng_sbin_dir, 'syslog-ng-ctl', ('stats',)) except CommandExecutionError as err: return _format_return_data(retcode=-1, stderr=six.text_type(err)) return _format_return_data(ret['retcode'], ret.get('stdout'), ret.get('stderr'))
Returns statistics from the running syslog-ng instance. If syslog_ng_sbin_dir is specified, it is added to the PATH during the execution of the command syslog-ng-ctl. CLI Example: .. code-block:: bash salt '*' syslog_ng.stats salt '*' syslog_ng.stats /home/user/install/syslog-ng/sbin
def __add_dependency(self, word_instance, sent_id): head = word_instance.__getattribute__(self.head_attr) deprel = word_instance.__getattribute__(self.deprel_attr) if head == '0': source_id = sent_id else: source_id = '{0}_t{1}'.format(sent_id, head) if source_id not in self.node: self.add_node(source_id, layers={self.ns}) target_id = '{0}_t{1}'.format(sent_id, word_instance.word_id) try: self.add_edge(source_id, target_id, layers={self.ns, self.ns+':dependency'}, relation_type=deprel, label=deprel, edge_type=EdgeTypes.dominance_relation) except AssertionError: print "source: {0}, target: {1}".format(source_id, target_id)
adds an ingoing dependency relation from the projected head of a token to the token itself.
def copy(self): return Poly(self.A.copy(), self.dim, self.shape, self.dtype)
Return a copy of the polynomial.
def lower_camel(string, prefix='', suffix=''): return require_valid(append_underscore_if_keyword(''.join( word.lower() if index == 0 else upper_case_first_char(word) for index, word in enumerate(en.words(' '.join([prefix, string, suffix])))) ))
Generate a camel-case identifier. Useful for unit test methods. Takes a string, prefix, and optional suffix. `prefix` can be set to `''`, though be careful - without a prefix, the function will throw `InvalidIdentifier` when your string starts with a number. Example: >>> lower_camel("User can login", prefix='test') 'testUserCanLogin'
def item_related_name(self): if not hasattr(self, '_item_related_name'): many_to_many_rels = \ get_section_many_to_many_relations(self.__class__) if len(many_to_many_rels) != 1: self._item_related_name = None else: self._item_related_name = many_to_many_rels[0].field.name return self._item_related_name
The ManyToMany field on the item class pointing to this class. If there is more than one field, this value will be None.
def facter_info(): with suppress(FileNotFoundError): proc = subprocess.Popen(['facter', '--yaml'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = proc.communicate() if not proc.returncode: data = serializer.load(stdout) return {'facter': data}
Returns data from facter.
def _regex_strings(self): domain = 0 if domain not in self.domains: self.register_domain(domain=domain) return self.domains[domain]._regex_strings
A property to link into IntentEngine's _regex_strings. Warning: this is only for backwards compatiblility and should not be used if you intend on using domains. Returns: the domains _regex_strings from its IntentEngine
def _linux_disks(): ret = {'disks': [], 'SSDs': []} for entry in glob.glob('/sys/block/*/queue/rotational'): try: with salt.utils.files.fopen(entry) as entry_fp: device = entry.split('/')[3] flag = entry_fp.read(1) if flag == '0': ret['SSDs'].append(device) log.trace('Device %s reports itself as an SSD', device) elif flag == '1': ret['disks'].append(device) log.trace('Device %s reports itself as an HDD', device) else: log.trace( 'Unable to identify device %s as an SSD or HDD. It does ' 'not report 0 or 1', device ) except IOError: pass return ret
Return list of disk devices and work out if they are SSD or HDD.
def describe_thing_type(thingTypeName, region=None, key=None, keyid=None, profile=None): try: conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) res = conn.describe_thing_type(thingTypeName=thingTypeName) if res: res.pop('ResponseMetadata', None) thingTypeMetadata = res.get('thingTypeMetadata') if thingTypeMetadata: for dtype in ('creationDate', 'deprecationDate'): dval = thingTypeMetadata.get(dtype) if dval and isinstance(dval, datetime.date): thingTypeMetadata[dtype] = '{0}'.format(dval) return {'thing_type': res} else: return {'thing_type': None} except ClientError as e: err = __utils__['boto3.get_error'](e) if e.response.get('Error', {}).get('Code') == 'ResourceNotFoundException': return {'thing_type': None} return {'error': err}
Given a thing type name describe its properties. Returns a dictionary of interesting properties. .. versionadded:: 2016.11.0 CLI Example: .. code-block:: bash salt myminion boto_iot.describe_thing_type mythingtype
def set(self, time): self._time = time self._pb.sec = int(self._time) self._pb.nsec = int((self._time - self._pb.sec) * 10 ** 9)
Sets time in seconds since Epoch Args: time (:obj:`float`): time in seconds since Epoch (see time.time()) Returns: None
def get_correctness(self, question_id): response = self.get_response(question_id) if response.is_answered(): item = self._get_item(response.get_item_id()) return item.get_correctness_for_response(response) raise errors.IllegalState()
get measure of correctness for the question
def set_windows_permissions(filename): if os.name == 'nt': try: everyone, domain, type = win32security.LookupAccountName( "", "Everyone") except Exception: everyone, domain, type = win32security.LookupAccountName ("", win32api.GetUserName()) sd = win32security.GetFileSecurity( filename, win32security.DACL_SECURITY_INFORMATION) dacl = sd.GetSecurityDescriptorDacl() dacl.AddAccessAllowedAce( win32security.ACL_REVISION, con.FILE_ALL_ACCESS, everyone) sd.SetSecurityDescriptorDacl(1, dacl, 0) win32security.SetFileSecurity( filename, win32security.DACL_SECURITY_INFORMATION, sd)
At least on windows 7 if a file is created on an Admin account, Other users will not be given execute or full control. However if a user creates the file himself it will work... So just always change permissions after creating a file on windows Change the permissions for Allusers of the application The Everyone Group Full access http://timgolden.me.uk/python/win32_how_do_i/add-security-to-a-file.html
def _should_proxy(self, attr): if attr in type(self).__notproxied__: return False if _oga(self, "__notproxied__") is True: return False return True
Determines whether `attr` should be looked up on the proxied object, or the proxy itself.
def slugify(field_name, slug_field_name=None, mutable=False): slug_field_name = slug_field_name or 'slug' def _set_slug(target, value, old_value, _, mutable=False): existing_slug = getattr(target, slug_field_name) if existing_slug and not mutable: return if value and (not existing_slug or value != old_value): setattr(target, slug_field_name, _slugify(value)) def wrapper(cls): event.listen(getattr(cls, field_name), 'set', partial(_set_slug, mutable=mutable)) return cls return wrapper
Class decorator to specify a field to slugify. Slugs are immutable by default unless mutable=True is passed. Usage:: @slugify('title') def Post(Model): title = Column(String(100)) slug = Column(String(100)) # pass a second argument to specify the slug attribute field: @slugify('title', 'title_slug') def Post(Model) title = Column(String(100)) title_slug = Column(String(100)) # optionally set mutable to True for a slug that changes every time # the slugified field changes: @slugify('title', mutable=True) def Post(Model): title = Column(String(100)) slug = Column(String(100))
def raw_incron(user): if __grains__['os_family'] == 'Solaris': cmd = 'incrontab -l {0}'.format(user) else: cmd = 'incrontab -l -u {0}'.format(user) return __salt__['cmd.run_stdout'](cmd, rstrip=False, runas=user, python_shell=False)
Return the contents of the user's incrontab CLI Example: .. code-block:: bash salt '*' incron.raw_incron root
def execute_callback(self, *args, **kwargs): response = self._sijax.execute_callback(*args, **kwargs) return _make_response(response)
Executes a callback and returns the proper response. Refer to :meth:`sijax.Sijax.execute_callback` for more details.
def from_xml(cls, xml_bytes): root = XML(xml_bytes) return cls(root.findtext('Bucket'), root.findtext('Key'), root.findtext('UploadId'))
Create an instance of this from XML bytes. @param xml_bytes: C{str} bytes of XML to parse @return: an instance of L{MultipartInitiationResponse}
def mac_addresses(self): mac_addresses = [self.findtext("general/mac_address")] if self.findtext("general/alt_mac_address"): mac_addresses.append(self.findtext("general/alt_mac_address")) return mac_addresses
Return a list of mac addresses for this device. Computers don't tell you which network device is which.
def add(self, key, metric, **dims): return super(MetricsRegistry, self).add( self.metadata.register(key, **dims), metric)
Adds custom metric instances to the registry with dimensions which are not created with their constructors default arguments
def set_weights(params, new_params): for param, new_param in zip(params, new_params): param.data.copy_(new_param.data)
Copies parameters from new_params to params :param params: dst parameters :param new_params: src parameters
def run_toy_HMC(gpu_id=None): X, Y, X_test, Y_test = load_toy() minibatch_size = Y.shape[0] noise_precision = 1 / 9.0 net = get_toy_sym(True, noise_precision) data_shape = (minibatch_size,) + X.shape[1::] data_inputs = {'data': nd.zeros(data_shape, ctx=dev(gpu_id)), 'teacher_output_label': nd.zeros((minibatch_size, 1), ctx=dev(gpu_id))} initializer = mx.init.Uniform(0.07) sample_pool = HMC(net, data_inputs=data_inputs, X=X, Y=Y, X_test=X_test, Y_test=Y_test, sample_num=300000, initializer=initializer, prior_precision=1.0, learning_rate=1E-3, L=10, dev=dev(gpu_id))
Run HMC on toy dataset
def mkdir(self, target_folder): self.printv("Making directory: %s" % target_folder) self.k.key = re.sub(r"^/|/$", "", target_folder) + "/" self.k.set_contents_from_string('') self.k.close()
Create a folder on S3. Examples -------- >>> s3utils.mkdir("path/to/my_folder") Making directory: path/to/my_folder
def _hijack_target(self): if self._target.is_class_or_module(): setattr(self._target.obj, self._method_name, self) elif self._attr.kind == 'property': proxy_property = ProxyProperty( double_name(self._method_name), self._original_method, ) setattr(self._target.obj.__class__, self._method_name, proxy_property) self._target.obj.__dict__[double_name(self._method_name)] = self else: self._target.obj.__dict__[self._method_name] = self if self._method_name in ['__call__', '__enter__', '__exit__']: self._target.hijack_attr(self._method_name)
Replaces the target method on the target object with the proxy method.
def edges(self): canonical_edges = set() for v1, neighbours in self._vertices.items(): for v2 in neighbours: edge = self.canonical_order((v1, v2)) canonical_edges.add(edge) return canonical_edges
Edges of this graph, in canonical order.
def find_data_files(self, package, src_dir): patterns = self._get_platform_patterns( self.package_data, package, src_dir, ) globs_expanded = map(glob, patterns) globs_matches = itertools.chain.from_iterable(globs_expanded) glob_files = filter(os.path.isfile, globs_matches) files = itertools.chain( self.manifest_files.get(package, []), glob_files, ) return self.exclude_data_files(package, src_dir, files)
Return filenames for package's data files in 'src_dir
def points(self, include_hidden=False): return sum(x.points for x in self.testable_results if include_hidden or not x.testable.is_hidden)
Return the number of points awarded to this submission.
def union(*sets, **kwargs): sets = _set_preprocess(sets, **kwargs) return as_index( _set_concatenate(sets), axis=0, base=True).unique
all unique items which occur in any one of the sets Parameters ---------- sets : tuple of indexable objects Returns ------- union of all items in all sets
def Forget(self, obj): obj = _get_idstr(obj) try: self.memo.remove(obj) except ValueError: pass
Forget we've seen this object.
def set_sleep(minutes): value = _validate_sleep(minutes) cmd = 'systemsetup -setsleep {0}'.format(value) salt.utils.mac_utils.execute_return_success(cmd) state = [] for check in (get_computer_sleep, get_display_sleep, get_harddisk_sleep): state.append(salt.utils.mac_utils.confirm_updated( value, check, )) return all(state)
Sets the amount of idle time until the machine sleeps. Sets the same value for Computer, Display, and Hard Disk. Pass "Never" or "Off" for computers that should never sleep. :param minutes: Can be an integer between 1 and 180 or "Never" or "Off" :ptype: int, str :return: True if successful, False if not :rtype: bool CLI Example: .. code-block:: bash salt '*' power.set_sleep 120 salt '*' power.set_sleep never
def get_host(self): host = self.get_as_nullable_string("host") host = host if host != None else self.get_as_nullable_string("ip") return host
Gets the host name or IP address. :return: the host name or IP address.
def login(self) -> bool: user_check = isinstance(self.username, str) and len(self.username) > 0 pass_check = isinstance(self.password, str) and len(self.password) > 0 if user_check and pass_check: response, _ = helpers.call_api( '/cloud/v1/user/login', 'post', json=helpers.req_body(self, 'login') ) if response and helpers.check_response(response, 'login'): self.token = response['result']['token'] self.account_id = response['result']['accountID'] self.enabled = True return True else: logger.error('Error logging in with username and password') return False else: if user_check is False: logger.error('Username invalid') if pass_check is False: logger.error('Password invalid') return False
Return True if log in request succeeds
def _call(self, x, out=None): def per_axis_interp(arg, out=None): if is_valid_input_meshgrid(arg, self.grid.ndim): input_type = 'meshgrid' else: input_type = 'array' interpolator = _PerAxisInterpolator( self.grid.coord_vectors, x, schemes=self.schemes, nn_variants=self.nn_variants, input_type=input_type) return interpolator(arg, out=out) return self.range.element(per_axis_interp, vectorized=True)
Create an interpolator from grid values ``x``. Parameters ---------- x : `Tensor` The array of values to be interpolated out : `FunctionSpaceElement`, optional Element in which to store the interpolator Returns ------- out : `FunctionSpaceElement` Per-axis interpolator for the grid of this operator. If ``out`` was provided, the returned object is a reference to it.
def GetDecrypter(cls, encryption_method, **kwargs): encryption_method = encryption_method.lower() decrypter = cls._decrypters.get(encryption_method, None) if not decrypter: return None return decrypter(**kwargs)
Retrieves the decrypter object for a specific encryption method. Args: encryption_method (str): encryption method identifier. kwargs (dict): keyword arguments depending on the decrypter. Returns: Decrypter: decrypter or None if the encryption method does not exists. Raises: CredentialError: if the necessary credentials are missing.
def remove_each(text, items, count=None, strip=False): for item in items: text = remove(text, item, count=count, strip=strip) return text
Like ``remove``, where each occurrence in ``items`` is ``what`` to remove.
def delete_library(self, library): lib = ArcticLibraryBinding(self, library) colname = lib.get_top_level_collection().name if not [c for c in lib._db.list_collection_names(False) if re.match(r"^{}([\.].*)?$".format(colname), c)]: logger.info('Nothing to delete. Arctic library %s does not exist.' % colname) logger.info('Dropping collection: %s' % colname) lib._db.drop_collection(colname) for coll in lib._db.list_collection_names(): if coll.startswith(colname + '.'): logger.info('Dropping collection: %s' % coll) lib._db.drop_collection(coll) if library in self._library_cache: del self._library_cache[library] del self._library_cache[lib.get_name()] self._cache.delete_item_from_key('list_libraries', self._sanitize_lib_name(library))
Delete an Arctic Library, and all associated collections in the MongoDB. Parameters ---------- library : `str` The name of the library. e.g. 'library' or 'user.library'
def percent_chance(self, pct): if pct <= 0: return False if pct >= 100: return True return pct / 100 < self.random()
Given a ``pct``% chance of something happening right now, decide at random whether it actually happens, and return ``True`` or ``False`` as appropriate. Values not between 0 and 100 are treated as though they were 0 or 100, whichever is nearer.
def fix_multiclass_predict_proba(y_proba, seen_classes, complete_classes ): assert set(complete_classes) >= set(seen_classes) y_proba_fixed = np.zeros( shape=(y_proba.shape[0], len(complete_classes)), dtype=y_proba.dtype, ) class_mapping = np.searchsorted(complete_classes, seen_classes) y_proba_fixed[:, class_mapping] = y_proba return y_proba_fixed
Add missing columns to predict_proba result. When a multiclass classifier is fit on a dataset which only contains a subset of possible classes its predict_proba result only has columns corresponding to seen classes. This function adds missing columns.
def admin_tools_render_menu(context, menu=None): if menu is None: menu = get_admin_menu(context) menu.init_with_context(context) has_bookmark_item = False bookmark = None if len([c for c in menu.children if isinstance(c, items.Bookmarks)]) > 0: has_bookmark_item = True url = context['request'].get_full_path() try: bookmark = Bookmark.objects.filter( user=context['request'].user, url=url )[0] except: pass context.update({ 'template': menu.template, 'menu': menu, 'has_bookmark_item': has_bookmark_item, 'bookmark': bookmark, 'admin_url': reverse('%s:index' % get_admin_site_name(context)), }) return context
Template tag that renders the menu, it takes an optional ``Menu`` instance as unique argument, if not given, the menu will be retrieved with the ``get_admin_menu`` function.
def __attr_index(self, attribute: str) -> Optional[int]: attr_index = None for i, (key_node, _) in enumerate(self.yaml_node.value): if key_node.value == attribute: attr_index = i break return attr_index
Finds an attribute's index in the yaml_node.value list.
def get_proc_return_status(self): if self._session is None: return None if not self._session.has_status: self._session.find_return_status() return self._session.ret_status if self._session.has_status else None
Last stored proc result
def upload_log_data(url, stream_or_file, config): try: logger.debug("Uploading log data to IOpipe") if isinstance(stream_or_file, StringIO): stream_or_file.seek(0) response = requests.put( url, data=stream_or_file, timeout=config["network_timeout"] ) else: with open(stream_or_file, "rb") as data: response = requests.put( url, data=data, timeout=config["network_timeout"] ) response.raise_for_status() except Exception as e: logger.debug("Error while uploading log data: %s", e) logger.exception(e) if hasattr(e, "response") and hasattr(e.response, "content"): logger.debug(e.response.content) else: logger.debug("Log data uploaded successfully") finally: if isinstance(stream_or_file, str) and os.path.exists(stream_or_file): os.remove(stream_or_file)
Uploads log data to IOpipe. :param url: The signed URL :param stream_or_file: The log data stream or file :param config: The IOpipe config
def simplify(self): node = self.node.simplify() if node is self.node: return self else: return _expr(node)
Return a simplified expression.
def generate_nonce_timestamp(): global count rng = botan.rng().get(30) uuid4 = uuid.uuid4().bytes tmpnonce = (bytes(str(count).encode('utf-8'))) + uuid4 + rng nonce = tmpnonce[:41] count += 1 return nonce
Generate unique nonce with counter, uuid and rng.
def _new_theme_part(cls, package): partname = package.next_partname('/ppt/theme/theme%d.xml') content_type = CT.OFC_THEME theme = CT_OfficeStyleSheet.new_default() return XmlPart(partname, content_type, theme, package)
Create and return a default theme part suitable for use with a notes master.
def instance(): start_time = time.time() print(_green("Started...")) env.host_string = _create_ec2_instance() print(_green("Waiting 30 seconds for server to boot...")) time.sleep(30) for item in tasks.configure_instance: try: print(_yellow(item['message'])) except KeyError: pass globals()["_" + item['action']](item['params']) end_time = time.time() print(_green("Runtime: %f minutes" % ((end_time - start_time) / 60))) print(_green("\nPLEASE ADD ADDRESS THIS TO YOUR ")), print(_yellow("project_conf.py")), print(_green(" FILE UNDER ")), print(_yellow("fabconf['EC2_INSTANCES'] : ")), print(_green(env.host_string))
Creates an EC2 instance from an Ubuntu AMI and configures it as a Django server with nginx + gunicorn
def treeWidgetChanged(self): ParameterItem.treeWidgetChanged(self) if self.widget is not None: tree = self.treeWidget() if tree is None: return tree.setItemWidget(self, 1, self.layoutWidget) self.displayLabel.hide() self.selected(False)
Called when this item is added or removed from a tree.
def remove_output_data_port(self, data_port_id, force=False, destroy=True): if data_port_id in self._output_data_ports: if destroy: self.remove_data_flows_with_data_port_id(data_port_id) self._output_data_ports[data_port_id].parent = None return self._output_data_ports.pop(data_port_id) else: raise AttributeError("output data port with name %s does not exit", data_port_id)
Remove an output data port from the state :param int data_port_id: the id of the output data port to remove :raises exceptions.AttributeError: if the specified input data port does not exist
def forward(self, X): return self.W(X).sum(dim=1) + self.b
Execute sparse linear layer Args: X: an [n, h] torch.LongTensor containing up to h indices of features whose weights should be looked up and used in a sparse linear multiplication.
def create_consumer(self): with self.connection_pool.acquire(block=True) as conn: yield self.consumer(conn)
Context manager that yields an instance of ``Consumer``.
async def verify_chain_of_trust(chain): log_path = os.path.join(chain.context.config["task_log_dir"], "chain_of_trust.log") scriptworker_log = logging.getLogger('scriptworker') with contextual_log_handler( chain.context, path=log_path, log_obj=scriptworker_log, formatter=AuditLogFormatter( fmt=chain.context.config['log_fmt'], datefmt=chain.context.config['log_datefmt'], ) ): try: await build_task_dependencies(chain, chain.task, chain.name, chain.task_id) await download_cot(chain) verify_cot_signatures(chain) await download_cot_artifacts(chain) task_count = await verify_task_types(chain) check_num_tasks(chain, task_count) await verify_worker_impls(chain) await trace_back_to_tree(chain) except (BaseDownloadError, KeyError, AttributeError) as exc: log.critical("Chain of Trust verification error!", exc_info=True) if isinstance(exc, CoTError): raise else: raise CoTError(str(exc)) log.info("Good.")
Build and verify the chain of trust. Args: chain (ChainOfTrust): the chain we're operating on Raises: CoTError: on failure
def add_route(self, handler, uri, methods=frozenset({'GET'}), host=None, strict_slashes=False): stream = False if hasattr(handler, 'view_class'): http_methods = ( 'GET', 'POST', 'PUT', 'HEAD', 'OPTIONS', 'PATCH', 'DELETE') methods = set() for method in http_methods: _handler = getattr(handler.view_class, method.lower(), None) if _handler: methods.add(method) if hasattr(_handler, 'is_stream'): stream = True if isinstance(handler, self.composition_view_class): methods = handler.handlers.keys() for _handler in handler.handlers.values(): if hasattr(_handler, 'is_stream'): stream = True break self.route(uri=uri, methods=methods, host=host, strict_slashes=strict_slashes, stream=stream)(handler) return handler
A helper method to register class instance or functions as a handler to the application url routes. :param handler: function or class instance :param uri: path of the URL :param methods: list or tuple of methods allowed, these are overridden if using a HTTPMethodView :param host: :return: function or class instance
def confirm_user(query): user = _query_to_user(query) if click.confirm(f'Are you sure you want to confirm {user!r}?'): if security_service.confirm_user(user): click.echo(f'Successfully confirmed {user!r} at ' f'{user.confirmed_at.strftime("%Y-%m-%d %H:%M:%S%z")}') user_manager.save(user, commit=True) else: click.echo(f'{user!r} has already been confirmed.') else: click.echo('Cancelled.')
Confirm a user account.
def parse_cl_args(arg_vector): parser = argparse.ArgumentParser(description='Compiles markdown files into html files for remark.js') parser.add_argument('source', metavar='source', help='the source to compile. If a directory is provided, all markdown files in that directory are compiled. Output is saved in the current working directory under a md2remark_build subdirectory.') return parser.parse_args(arg_vector)
Parses the command line arguments
def values_for_column(self, column_name, limit=10000):
    """Retrieve up to ``limit`` values for ``column_name`` from Druid.

    Issues a ``topN`` query over the datasource ranked by event count and
    returns the column values as a plain list.
    """
    logging.info(
        'Getting values for columns [{}] limited to [{}]'
        .format(column_name, limit))
    if self.fetch_values_from:
        # Human-readable start time configured on the datasource
        # (e.g. "7 days ago") — presumably; verify against config docs.
        from_dttm = utils.parse_human_datetime(self.fetch_values_from)
    else:
        # No configured start: scan from the Unix epoch.
        from_dttm = datetime(1970, 1, 1)

    qry = dict(
        datasource=self.datasource_name,
        granularity='all',
        # Druid interval string: "<start ISO>/<end ISO>".
        intervals=from_dttm.isoformat() + '/' + datetime.now().isoformat(),
        aggregations=dict(count=count('count')),
        dimension=column_name,
        metric='count',
        threshold=limit,
    )

    client = self.cluster.get_pydruid_client()
    # topn stores its result on the client; export_pandas reads it back.
    client.topn(**qry)
    df = client.export_pandas()
    return [row[column_name] for row in df.to_records(index=False)]
Retrieve some values for the given column
async def write_register(self, address, value, skip_encode=False):
    """Write a single modbus register via the shared request helper."""
    await self._request(
        'write_registers', address, value, skip_encode=skip_encode)
Write a modbus register.
def ParsePath(path, opts = None):
  """Parses given path into a stream of `PathComponent` instances.

  Args:
    path: A path to be parsed.
    opts: An `PathOpts` object.

  Yields:
    `PathComponent` instances corresponding to the components of the path.

  Raises:
    ValueError: If path contains more than one recursive component.
  """
  precondition.AssertType(path, Text)

  recursive_seen = False
  # Normalize platform separators so splitting on "/" always works.
  for item in path.replace(os.path.sep, "/").split("/"):
    component = ParsePathItem(item, opts=opts)
    if isinstance(component, RecursiveComponent):
      if recursive_seen:
        raise ValueError("path cannot have more than one recursive component")
      recursive_seen = True
    yield component
Parses given path into a stream of `PathComponent` instances. Args: path: A path to be parsed. opts: An `PathOpts` object. Yields: `PathComponent` instances corresponding to the components of the given path. Raises: ValueError: If path contains more than one recursive component.
def VcardFieldsEqual(field1, field2):
    """Compare two vCard fields given as lists of components.

    Component values are force-cast to ``str`` so that sub-objects like
    Name and Address compare by their string representation. Field
    parameters (e.g. TYPE) are not considered.

    Args:
      field1: list of vCard field components.
      field2: list of vCard field components.

    Returns:
      True if both fields contain the same set of component values.
    """
    # Direct set-equality return replaces the if/return True/else/False
    # anti-pattern; set comprehensions replace set([...]).
    return {str(f.value) for f in field1} == {str(f.value) for f in field2}
Compare vCard fields where the inputs are lists of components. TODO: handle parameters — are any used aside from 'TYPE'? Note: components are force-cast to string so sub-objects like Name and Address compare by their string form.
async def unformat(self):
    """Unformat this block device and refresh the cached handler data."""
    updated = await self._handler.unformat(
        system_id=self.node.system_id, id=self.id)
    self._data = updated
Unformat this block device.
def register_prefix(self, path):
    """Authorize a path and any subpaths for access by children.

    Repeat calls with the same path have no effect.

    :param str path: File path.
    """
    if path in self._prefixes:
        return
    LOG.debug('%r: registering prefix %r', self, path)
    self._prefixes.add(path)
Authorize a path and any subpaths for access by children. Repeat calls with the same path has no effect. :param str path: File path.
def editor_example():
    """Interact with holodeck worlds while they are being built in Unreal.

    Most people that use holodeck will not need this.
    """
    sensors = [Sensors.PIXEL_CAMERA, Sensors.LOCATION_SENSOR,
               Sensors.VELOCITY_SENSOR]
    agent = AgentDefinition("uav0", agents.UavAgent, sensors)
    env = HolodeckEnvironment(agent, start_world=False)
    env.agents["uav0"].set_control_scheme(1)
    # [roll, pitch, altitude, yaw-rate]-style UAV command — presumably;
    # confirm against the UavAgent control-scheme docs.
    command = [0, 0, 10, 50]

    for _episode in range(10):
        env.reset()
        for _tick in range(1000):
            state, reward, terminal, _ = env.step(command)
This editor example shows how to interact with holodeck worlds while they are being built in the Unreal Engine. Most people that use holodeck will not need this.
def disconnect(self):
    """Disconnect from the Graphite server if connected."""
    if self.sock is None:
        return
    try:
        self.sock.close()
    except socket.error:
        # Best effort: the socket may already be dead.
        pass
    finally:
        self.sock = None
Disconnect from the Graphite server if connected.
def set_up_log(filename, verbose=True):
    """Set up a basic DEBUG-level log writing to ``filename + '.log'``.

    Parameters
    ----------
    filename : str
        Log file name (the ``.log`` extension is appended)
    verbose : bool, optional
        Print the log file name when True

    Returns
    -------
    logging.Logger instance
    """
    filename += '.log'
    if verbose:
        print('Preparing log file:', filename)

    logging.captureWarnings(True)

    file_handler = logging.FileHandler(filename=filename, mode='w')
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(
        logging.Formatter(fmt='%(asctime)s %(message)s',
                          datefmt='%d/%m/%Y %H:%M:%S'))

    # Keyed by file name so repeated calls with distinct names get
    # distinct loggers.
    log = logging.getLogger(filename)
    log.setLevel(logging.DEBUG)
    log.addHandler(file_handler)
    log.info('The log file has been set-up.')
    return log
Set up log This method sets up a basic log. Parameters ---------- filename : str Log file name Returns ------- logging.Logger instance
def generate_search_subparser(subparsers):
    """Add a sub-command parser to `subparsers` to generate search results
    for a set of n-grams."""
    parser = subparsers.add_parser(
        'search', description=constants.SEARCH_DESCRIPTION,
        epilog=constants.SEARCH_EPILOG, formatter_class=ParagraphFormatter,
        help=constants.SEARCH_HELP)
    parser.set_defaults(func=search_texts)
    # Shared argument groups used by every sub-command.
    for add_arguments in (utils.add_common_arguments, utils.add_db_arguments,
                          utils.add_corpus_arguments,
                          utils.add_query_arguments):
        add_arguments(parser)
    parser.add_argument('ngrams', help=constants.SEARCH_NGRAMS_HELP,
                        nargs='*', metavar='NGRAMS')
Adds a sub-command parser to `subparsers` to generate search results for a set of n-grams.
def can_create_assets(self):
    """Tests if this user can create ``Assets``.

    A return of true does not guarantee successful authorization; false
    indicates creating an ``Asset`` would result in ``PermissionDenied``.
    Intended as a hint so applications can hide create operations.

    :return: ``false`` if ``Asset`` creation is not authorized,
        ``true`` otherwise
    :rtype: ``boolean``
    """
    url_path = construct_url('authorization', bank_id=self._catalog_idstr)
    response = self._get_request(url_path)
    return response['assetHints']['canCreate']
Tests if this user can create ``Assets``. A return of true does not guarantee successful authorization. A return of false indicates that it is known creating an ``Asset`` will result in a ``PermissionDenied``. This is intended as a hint to an application that may opt not to offer create operations to an unauthorized user. :return: ``false`` if ``Asset`` creation is not authorized, ``true`` otherwise :rtype: ``boolean`` *compliance: mandatory -- This method must be implemented.*
def Parse(self, stat, file_object, knowledge_base):
    """Parse an ntp config file object into an NtpConfig rdfvalue."""
    # stat and knowledge_base are unused but required by the interface.
    _, _ = stat, knowledge_base
    parser = NtpdFieldParser()
    contents = utils.ReadFileBytesAsUnicode(file_object)
    for line in parser.ParseEntries(contents):
        parser.ParseLine(line)

    keyed = parser.keyed
    yield rdf_config_file.NtpConfig(
        config=parser.config,
        server=keyed.get("server"),
        restrict=keyed.get("restrict"),
        fudge=keyed.get("fudge"),
        trap=keyed.get("trap"),
        peer=keyed.get("peer"),
        broadcast=keyed.get("broadcast"),
        manycastclient=keyed.get("manycastclient"))
Parse an ntp config into rdf.
def expected_part_size(self, part_number):
    """Get expected part size for a particular part number.

    The final part may be smaller than the configured chunk size; every
    other valid part is exactly one chunk.

    :raises MultipartInvalidPartNumber: for out-of-range part numbers.
    """
    last_part = self.multipart.last_part_number
    if part_number == last_part:
        return self.multipart.last_part_size
    if 0 <= part_number < last_part:
        return self.multipart.chunk_size
    raise MultipartInvalidPartNumber()
Get expected part size for a particular part number.
def _parse_odata_timestamp(in_date): timestamp = int(in_date.replace('/Date(', '').replace(')/', '')) seconds = timestamp // 1000 ms = timestamp % 1000 return datetime.utcfromtimestamp(seconds) + timedelta(milliseconds=ms)
Convert the timestamp received from OData JSON API to a datetime object.
def _run_and_measure(self, quil_program, qubits, trials, random_seed) -> np.ndarray:
    """Run a Forest ``run_and_measure`` job against the QVM endpoint.

    Users should use :py:func:`WavefunctionSimulator.run_and_measure`
    instead of calling this directly.
    """
    payload = run_and_measure_payload(
        quil_program, qubits, trials, random_seed)
    endpoint = self.sync_endpoint + "/qvm"
    response = post_json(self.session, endpoint, payload)
    return np.asarray(response.json())
Run a Forest ``run_and_measure`` job. Users should use :py:func:`WavefunctionSimulator.run_and_measure` instead of calling this directly.
def mount(self, prefix, adapter):
    """Register a connection adapter to a prefix.

    Adapters are kept in descending order of prefix length so the most
    specific prefix matches first.
    """
    self.adapters[prefix] = adapter
    # Re-append every shorter prefix; insertion order then stays
    # longest-prefix-first.
    shorter_keys = [key for key in self.adapters if len(key) < len(prefix)]
    for key in shorter_keys:
        self.adapters[key] = self.adapters.pop(key)
Registers a connection adapter to a prefix. Adapters are sorted in descending order by prefix length.
def hincrbyfloat(self, hashkey, attribute, increment=1.0):
    """Emulate hincrbyfloat by delegating to the shared increment helper."""
    return self._hincrby(hashkey, attribute, 'HINCRBYFLOAT', float,
                         increment)
Emulate hincrbyfloat.
def add_route(self, handler, uri, methods=frozenset({'GET'}), host=None,
              strict_slashes=False):
    """Create a blueprint route from a function.

    :param handler: function for handling uri requests. Accepts function,
        or class instance with a view_class method.
    :param uri: endpoint at which the route will be accessible.
    :param methods: list of acceptable HTTP methods.
    :return: function or class instance
    """
    if hasattr(handler, 'view_class'):
        # HTTPMethodView subclass: expose exactly the verbs it implements.
        methods = {
            verb for verb in ('GET', 'POST', 'PUT', 'HEAD', 'OPTIONS',
                              'PATCH', 'DELETE')
            if getattr(handler.view_class, verb.lower(), None)
        }
    if isinstance(handler, self._composition_view_class):
        methods = handler.handlers.keys()
    self.route(uri=uri, methods=methods, host=host,
               strict_slashes=strict_slashes)(handler)
    return handler
Create a blueprint route from a function. :param handler: function for handling uri requests. Accepts function, or class instance with a view_class method. :param uri: endpoint at which the route will be accessible. :param methods: list of acceptable HTTP methods. :return: function or class instance
def check_perms(perms, user, slug, raise_exception=False):
    """Check whether ``user`` holds ``perms`` for the page ``slug``.

    Precedence: an explicit per-slug ACL wins outright; otherwise fall
    back to anonymous defaults, logged-in defaults, then Django model
    permissions.

    :param perms: a permission codename or an iterable of codenames.
    :param user: the user to test.
    :param slug: page slug the permissions apply to.
    :param raise_exception: if True, raise ``PermissionDenied`` instead
        of returning ``False``.
    """
    if isinstance(perms, string_types):
        perms = {perms}
    else:
        perms = set(perms)
    allowed_users = ACLRule.get_users_for(perms, slug)
    if allowed_users:
        # An explicit ACL exists for this slug: it is authoritative.
        return user in allowed_users
    if perms.issubset(set(WALIKI_ANONYMOUS_USER_PERMISSIONS)):
        # Permissions granted to everyone, even anonymous visitors.
        return True
    if is_authenticated(user) and perms.issubset(set(WALIKI_LOGGED_USER_PERMISSIONS)):
        return True
    # Fall back to standard Django permissions in the waliki namespace.
    if user.has_perms(['waliki.%s' % p for p in perms]):
        return True
    if raise_exception:
        raise PermissionDenied
    return False
A helper to check whether a user has the given permissions for a slug.
def set_permutation_symmetry(force_constants):
    """Enforce permutation symmetry on force constants in place:
    Phi_ij_ab = Phi_ji_ba, where i, j are atom indices and a, b are
    Cartesian axis indices.

    This is not necessary for harmonic phonon calculation because this
    condition is imposed when making the dynamical matrix Hermitian in
    dynamical_matrix.py.

    Parameters
    ----------
    force_constants : ndarray
        Array of shape (N, N, 3, 3), modified in place.
    """
    # Average the array with its (i, j, a, b) <-> (j, i, b, a) transpose in
    # one vectorized step instead of the original O(N^2) Python loop. The
    # RHS is fully evaluated before the in-place assignment, so no copy of
    # the input is needed.
    force_constants[:] = (force_constants
                          + force_constants.transpose(1, 0, 3, 2)) / 2
Enforce permutation symmetry on force constants by Phi_ij_ab = Phi_ji_ba i, j: atom index a, b: Cartesian axis index This is not necessary for harmonic phonon calculation because this condition is imposed when making the dynamical matrix Hermitian in dynamical_matrix.py.
def export_gpg_key(key):
    """Export a GPG key and return it base64 encoded.

    Raises CryptoritoError if the gpg invocation fails.
    """
    cmd = flatten([gnupg_bin(), gnupg_verbose(), gnupg_home(),
                   "--export", key])
    handle, gpg_stderr = stderr_handle()
    try:
        gpg_proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                    stderr=gpg_stderr)
        output, _err = gpg_proc.communicate()
        if handle:
            handle.close()
        # Popen.communicate() never raises CalledProcessError, so the
        # original except clause was dead code and non-zero gpg exits were
        # silently ignored; check the exit status explicitly.
        if gpg_proc.returncode != 0:
            LOGGER.debug("GPG Command %s", ' '.join(cmd))
            LOGGER.debug("GPG Output %s", output)
            raise CryptoritoError('GPG encryption error')
        return portable_b64encode(output)
    except subprocess.CalledProcessError as exception:
        # Kept for safety in case a helper wraps gpg with check_* calls.
        LOGGER.debug("GPG Command %s", ' '.join(exception.cmd))
        LOGGER.debug("GPG Output %s", exception.output)
        raise CryptoritoError('GPG encryption error')
Exports a GPG key and returns it
def _check_dedup(data):
    """Check configuration for de-duplication.

    Defaults to no de-duplication for RNA-seq and small RNA (the back
    compatible default) and for runs without alignment; otherwise marks
    duplicates. An explicit string setting is coerced to True since
    choosing the algorithm is no longer supported.
    """
    analysis = dd.get_analysis(data).lower()
    skip_by_default = (analysis in ["rna-seq", "smallrna-seq"]
                       or not dd.get_aligner(data))
    dup_param = utils.get_in(data, ("config", "algorithm", "mark_duplicates"),
                             not skip_by_default)
    if dup_param and isinstance(dup_param, six.string_types):
        logger.info("Warning: bcbio no longer support explicit setting of mark_duplicate algorithm. "
                    "Using best-practice choice based on input data.")
        dup_param = True
    return dup_param
Check configuration for de-duplication. Defaults to no de-duplication for RNA-seq and small RNA, the back compatible default. Allow overwriting with explicit `mark_duplicates: true` setting. Also defaults to false for no alignment inputs.
def taylor(fun, z0=0, n=1, r=0.0061, num_extrap=3, step_ratio=1.6, **kwds):
    """Return Taylor coefficients of a complex analytic function using FFT.

    Thin convenience wrapper: builds a ``Taylor`` differentiator with the
    given options and evaluates it at ``z0``. See ``Taylor`` for the full
    description of the parameters and the returned values.
    """
    differentiator = Taylor(fun, n=n, r=r, num_extrap=num_extrap,
                            step_ratio=step_ratio, **kwds)
    return differentiator(z0)
Return Taylor coefficients of complex analytic function using FFT Parameters ---------- fun : callable function to differentiate z0 : real or complex scalar at which to evaluate the derivatives n : scalar integer, default 1 Number of taylor coefficents to compute. Maximum number is 100. r : real scalar, default 0.0061 Initial radius at which to evaluate. For well-behaved functions, the computation should be insensitive to the initial radius to within about four orders of magnitude. num_extrap : scalar integer, default 3 number of extrapolation steps used in the calculation step_ratio : real scalar, default 1.6 Initial grow/shrinking factor for finding the best radius. max_iter : scalar integer, default 30 Maximum number of iterations min_iter : scalar integer, default max_iter // 2 Minimum number of iterations before the solution may be deemed degenerate. A larger number allows the algorithm to correct a bad initial radius. full_output : bool, optional If `full_output` is False, only the coefficents is returned (default). If `full_output` is True, then (coefs, status) is returned Returns ------- coefs : ndarray array of taylor coefficents status: Optional object into which output information is written: degenerate: True if the algorithm was unable to bound the error iterations: Number of iterations executed function_count: Number of function calls final_radius: Ending radius of the algorithm failed: True if the maximum number of iterations was reached error_estimate: approximate bounds of the rounding error. Notes ----- This module uses the method of Fornberg to compute the Taylor series coefficents of a complex analytic function along with error bounds. The method uses a Fast Fourier Transform to invert function evaluations around a circle into Taylor series coefficients and uses Richardson Extrapolation to improve and bound the estimate. 
Unlike real-valued finite differences, the method searches for a desirable radius and so is reasonably insensitive to the initial radius-to within a number of orders of magnitude at least. For most cases, the default configuration is likely to succeed. Restrictions The method uses the coefficients themselves to control the truncation error, so the error will not be properly bounded for functions like low-order polynomials whose Taylor series coefficients are nearly zero. If the error cannot be bounded, degenerate flag will be set to true, and an answer will still be computed and returned but should be used with caution. Examples -------- Compute the first 6 taylor coefficients 1 / (1 - z) expanded round z0 = 0: >>> import numdifftools.fornberg as ndf >>> import numpy as np >>> c, info = ndf.taylor(lambda x: 1./(1-x), z0=0, n=6, full_output=True) >>> np.allclose(c, np.ones(8)) True >>> np.all(info.error_estimate < 1e-9) True >>> (info.function_count, info.iterations, info.failed) == (144, 18, False) True References ---------- [1] Fornberg, B. (1981). Numerical Differentiation of Analytic Functions. ACM Transactions on Mathematical Software (TOMS), 7(4), 512-526. http://doi.org/10.1145/355972.355979
def cli(env, identifier):
    """Credential limits for this IBM Cloud Object Storage account."""
    manager = SoftLayer.ObjectStorageManager(env.client)
    limit = manager.limit_credential(identifier)

    table = formatting.Table(['limit'])
    table.add_row([limit])
    env.fout(table)
Credential limits for this IBM Cloud Object Storage account.
def _chunks(iterable, n): iterable = iter(iterable) while True: yield chain([next(iterable)], islice(iterable, n-1))
Splits an iterable into chunks of size n.
def transform(self, trans):
    """Return a copy of this neuron with a 3D transformation applied."""
    data = deepcopy(self._data)
    # Apply the transform to the XYZ columns only; the remaining columns
    # are carried over untouched.
    data.data_block[:, 0:3] = trans(data.data_block[:, 0:3])
    return FstNeuron(data, self.name)
Return a copy of this neuron with a 3D transformation applied
def get_bar_data_from_undetermined(self, flowcells):
    """Get plot data for undetermined barcodes.

    Returns an OrderedDict mapping barcode -> {lane_id: count}, sorted by
    total count descending and truncated to the top 20 barcodes (at most
    20 barcodes are also read per lane).
    """
    counts_by_barcode = defaultdict(dict)
    for lane_id, lane in flowcells.items():
        try:
            top_unknown = islice(lane['unknown_barcodes'].items(), 20)
            for barcode, count in top_unknown:
                counts_by_barcode[barcode][lane_id] = count
        except AttributeError:
            # Lane has no parsed unknown_barcodes mapping; skip it.
            pass

    ranked = sorted(counts_by_barcode.items(),
                    key=lambda item: sum(item[1].values()), reverse=True)
    return OrderedDict(islice(ranked, 20))
Get data to plot for undetermined barcodes.
def _get_input_args(bam_file, data, out_base, background):
    """Retrieve input args, depending on genome build.

    VerifyBamID2 only handles GRCh37 (1, 2, 3) not hg19, so generate a
    pileup for hg19 to fix chromosome naming; otherwise pass the BAM
    directly.
    """
    if dd.get_genome_build(data) in ["hg19"]:
        pileup = _create_pileup(bam_file, data, out_base, background)
        return ["--PileupFile", pileup]
    return ["--BamFile", bam_file]
Retrieve input args, depending on genome build. VerifyBamID2 only handles GRCh37 (1, 2, 3) not hg19, so need to generate a pileup for hg19 and fix chromosome naming.
def update_ostree_summary(self, release):
    """Update the ostree summary file for a release and return its path."""
    summary_path = os.path.join(release['output_dir'], 'summary')
    self.log.info('Updating the ostree summary for %s', release['name'])
    self.mock_chroot(release, release['ostree_summary'])
    return summary_path
Update the ostree summary file and return a path to it
def reset(self):
    """Reset the attempt counter and restore the initial delay."""
    self._cur_stoptime = None
    self._cur_delay = self.delay
    self._attempts = 0
Reset the attempt counter
def value_from_person(self, array, role, default = 0):
    """Get the value of ``array`` for the person with the unique role ``role``.

    ``array`` must have the dimension of the number of persons in the
    simulation. If such a person does not exist, return ``default``
    instead. The result is a vector whose dimension is the number of
    entities.
    """
    self.entity.check_role_validity(role)
    if role.max != 1:
        raise Exception(
            'You can only use value_from_person with a role that is unique in {}. Role {} is not unique.'
            .format(self.key, role.key)
        )
    self.members.check_array_compatible_with_entity(array)
    members_map = self.ordered_members_map
    # Start from an entity-length vector filled with the default value.
    result = self.filled_array(default, dtype = array.dtype)
    if isinstance(array, EnumArray):
        # Preserve the enum metadata on the output vector.
        result = EnumArray(result, array.possible_values)
    role_filter = self.members.has_role(role)
    # Entities that actually contain a member with the requested role.
    entity_filter = self.any(role_filter)
    result[entity_filter] = array[members_map][role_filter[members_map]]
    return result
Get the value of ``array`` for the person with the unique role ``role``. ``array`` must have the dimension of the number of persons in the simulation If such a person does not exist, return ``default`` instead The result is a vector which dimension is the number of entities
def write_json(self, chunk, code=None, headers=None):
    """Write ``chunk`` as a JSON response, optionally setting status/headers.

    ``chunk`` may be a str, dict or list; containers are JSON-encoded
    first, then everything is utf-8 encoded before writing.
    """
    assert chunk is not None, 'None cound not be written in write_json'
    self.set_header("Content-Type", "application/json; charset=UTF-8")
    if isinstance(chunk, (dict, list)):
        chunk = self.json_encode(chunk)

    try:
        payload = utf8(chunk)
    except Exception:
        app_log.error('chunk encoding error, repr: %s' % repr(chunk))
        # Re-raise the original exception with its traceback intact.
        raise_exc_info(sys.exc_info())
    self.write(payload)

    if code:
        self.set_status(code)
    if headers:
        for name, value in headers.items():
            self.set_header(name, value)
A convenient method that binds `chunk`, `code`, `headers` together chunk could be any type of (str, dict, list)