code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def fontsize(count, maxsize, minsize, maxcount):
    """Scale a font size linearly with ``count``, clamped below at ``minsize``."""
    scaled = int(maxsize - maxsize * (float(maxcount - count) / maxcount))
    return scaled if scaled >= minsize else minsize
A formula for determining font sizes.
def qr(self, text):
    """Print a QR code encoding ``text`` on the printer."""
    code = qrcode.QRCode(version=4, box_size=4, border=1)
    code.add_data(text)
    code.make(fit=True)
    image = code.make_image()
    # qrcode returns a wrapper object; grab the underlying PIL image as RGB.
    rgb_image = image._img.convert("RGB")
    self._convert_image(rgb_image)
Print QR Code for the provided string
def parse_url_rules(urls_fp):
    """Read URL regex rules, one per line, from a file-like object."""
    rules = []
    for raw_line in urls_fp:
        pattern = raw_line.strip()
        if not pattern:
            continue  # skip blank lines
        rules.append({'str': pattern, 're': re.compile(pattern)})
    return rules
Parse URL rules, one regex per line, from the given file object.
def _on_process_finished(self):
    """Write the process-finished message and emit ``process_finished``."""
    code = self._process.exitCode()
    # A crash (non-normal exit) is reported with the conventional code 139.
    if self._process.exitStatus() != self._process.NormalExit:
        code = 139
    self._formatter.append_message(
        '\x1b[0m\nProcess finished with exit code %d' % code,
        output_format=OutputFormat.CustomFormat)
    self.setReadOnly(True)
    self.process_finished.emit()
Write the process finished message and emit the `finished` signal.
def add_page_if_missing(request):
    """Context processor returning the best-match page for ``request``."""
    try:
        page = Page.objects.for_request(request, best_match=True)
    except Page.DoesNotExist:
        return {}
    return {'leonardo_page': page, 'feincms_page': page}
Returns ``feincms_page`` for request.
async def expand(self, request: Request, layer: BaseLayer):
    """Expand a layer into layers interleaved with reading pauses."""
    if isinstance(layer, lyr.RawText):
        pause = self.reading_time(layer.text)
        yield layer
        yield lyr.Sleep(pause)
    elif isinstance(layer, lyr.MultiText):
        for text in await render(layer.text, request, True):
            pause = self.reading_time(text)
            yield lyr.RawText(text)
            yield lyr.Sleep(pause)
    elif isinstance(layer, lyr.Text):
        text = await render(layer.text, request)
        pause = self.reading_time(text)
        yield lyr.RawText(text)
        yield lyr.Sleep(pause)
    else:
        # Unknown layer types pass through untouched.
        yield layer
Expand a layer into a list of layers including the pauses.
def db_exec_and_commit(self, sql: str, *args) -> int:
    """Execute ``sql`` with ``args``, commit, and return the row count."""
    affected = self.db_exec(sql, *args)
    self.commit()
    return affected
Execute SQL and commit.
def http_reply(self):
    """Build a Flask JSON response describing this error."""
    payload = {
        'status': self.status,
        'error': self.code.upper(),
        'error_description': str(self),
    }
    # Optional diagnostic fields, included only when set.
    if self.error_caught:
        payload['error_caught'] = pformat(self.error_caught)
    if self.error_id:
        payload['error_id'] = self.error_id
    if self.user_message:
        payload['user_message'] = self.user_message
    reply = jsonify(payload)
    reply.status_code = self.status
    if str(self.status) != "200":
        log.warn("ERROR: caught error %s %s [%s]"
                 % (self.status, self.code, str(self)))
    return reply
Return a Flask reply object describing this error
def handle(cls, vm, args):
    """Forward the local docker CLI to ``vm``'s docker socket over SSH."""
    info = Iaas.info(vm)
    if not info:
        raise Exception('docker vm %s not found' % vm)
    if info['state'] != 'running':
        Iaas.start(vm)
    remote_addr = info['ifaces'][0]['ips'][0]['ip']
    port = unixpipe.setup(remote_addr, 'root', '/var/run/docker.sock')
    os.environ['DOCKER_HOST'] = 'tcp://localhost:%d' % port
    cls.echo('using DOCKER_HOST=%s' % os.environ['DOCKER_HOST'])
    subprocess.call(['docker'] + list(args))
Setup forwarding connection to given VM and pipe docker cmds over SSH.
def port_type(arg):
    """argparse type validating a TCP/UDP port number (0-65535).

    Raises argparse.ArgumentTypeError for non-literal or out-of-range input.
    """
    error_msg = "{0} is not a valid port".format(repr(arg))
    try:
        arg = ast.literal_eval(arg)
    except (ValueError, SyntaxError):
        # literal_eval raises SyntaxError (not ValueError) for unparsable
        # input such as "1+"; catching only ValueError let it escape.
        raise argparse.ArgumentTypeError(error_msg)
    if arg < 0 or arg > 65535:
        raise argparse.ArgumentTypeError(error_msg)
    return arg
An argparse type representing a tcp or udp port number.
def run(self):
    """Run the whole git-up workflow."""
    try:
        if self.should_fetch:
            self.fetch()
        self.rebase_all_branches()
        if self.with_bundler():
            self.check_bundler()
        if self.settings['push.auto']:
            self.push()
    except GitError as error:
        self.print_error(error)
        # Under test, re-raise so the failure surfaces; otherwise exit 1.
        if self.testing:
            raise
        sys.exit(1)
Run all the git-up stuff.
def start(context, mip_config, email, priority, dryrun, command, start_with,
          family):
    """Start a new analysis."""
    mip_cli = MipCli(context.obj['script'])
    mip_config = mip_config or context.obj['mip_config']
    email = email or environ_email()
    kwargs = dict(config=mip_config, family=family, priority=priority,
                  email=email, dryrun=dryrun, start_with=start_with)
    if command:
        # Only print the command that would be run.
        click.echo(' '.join(mip_cli.build_command(**kwargs)))
        return
    try:
        mip_cli(**kwargs)
        if not dryrun:
            context.obj['store'].add_pending(family, email=email)
    except MipStartError as error:
        click.echo(click.style(error.message, fg='red'))
Start a new analysis.
def delete_all(self):
    """Delete every document from the index via a bulk scan."""
    def actions():
        hits = scan(self.es, index=self.index_name,
                    query={'query': {'match_all': {}}})
        for hit in hits:
            yield {
                '_op_type': 'delete',
                '_index': self.index_name,
                '_type': hit['_type'],
                '_id': hit['_id'],
            }
    bulk(self.es, actions())
Delete all books from the index
def find_destination_type(self, resource_url):
    """Ask each converter what ``resource_url`` would resolve into."""
    for resolver in self.converters.values():
        getter = getattr(resolver, 'get_destination_type', None)
        if getter is None:
            continue  # this converter cannot resolve destinations
        destination_type = getter(resource_url)
        if destination_type:
            return destination_type
Given a resource_url, figure out what it would resolve into
def assumed_state(self):
    """Return True when data is stale: car offline and not recently woken."""
    if self._controller.car_online[self.id()]:
        return False
    elapsed = (self._controller._last_update_time[self.id()]
               - self._controller._last_wake_up_time[self.id()])
    return elapsed > self._controller.update_interval
Return whether the data is from an online vehicle.
def begin_script(self):
    """Prepare the bridge for loading a script; return [1] if busy, else [0]."""
    busy_states = (BRIDGE_STATUS.RECEIVED, BRIDGE_STATUS.VALIDATED,
                   BRIDGE_STATUS.EXECUTING)
    if self.remote_bridge.status in busy_states:
        return [1]  # a script is already in flight
    self.remote_bridge.status = BRIDGE_STATUS.WAITING
    self.remote_bridge.error = 0
    self.remote_bridge.script_error = None
    self.remote_bridge.parsed_script = None
    self._device.script = bytearray()
    return [0]
Indicate we are going to start loading a script.
def leaves(tree):
    """Return the scalar leaves of a binary tuple-tree, left to right."""
    if np.isscalar(tree):
        return [tree]
    if isinstance(tree, tuple) and len(tree) == 2:
        return leaves(tree[0]) + leaves(tree[1])
    raise Exception("Not a tree!")
Return the leaves in this subtree.
def _get_arg_names(func): args, varargs, keywords, defaults = inspect.getargspec(func) return(tuple(args))
This returns the argument names, since dictionaries don't guarantee order.
def fit_all(xy, uv, mode='rscale', center=None, verbose=True):
    """Fit matched pixel lists xy/uv with a 'shift', 'general' or 'rscale' model."""
    if mode not in ('general', 'shift', 'rscale'):
        mode = 'rscale'  # fall back to the default fit
    if not isinstance(xy, np.ndarray):
        xy = np.array(xy)
    if not isinstance(uv, np.ndarray):
        uv = np.array(uv)

    def announce(kind):
        # Mirror output: console when verbose, module log otherwise.
        msg = 'Performing "%s" fit' % kind
        if verbose:
            print(msg)
        else:
            log.info(msg)

    announce(mode)
    if mode == 'shift':
        return fit_shifts(xy, uv)
    if mode == 'general':
        return fit_general(xy, uv)
    return geomap_rscale(xy, uv, center=center)
Performs an 'rscale' fit between matched lists of pixel positions xy and uv
def auth_from_hass_config(path=None, **kwargs):
    """Create an Auth object from a Home Assistant config directory."""
    hass_path = config.find_hass_config() if path is None else path
    return Auth(os.path.join(hass_path, ".storage/auth"), **kwargs)
Initialize auth from HASS config.
def open(self):
    """Open a Netmiko connection to the device and enter enable mode."""
    device_type = ('cisco_ios_telnet' if self.transport == 'telnet'
                   else 'cisco_ios')
    self.device = ConnectHandler(device_type=device_type,
                                 host=self.hostname,
                                 username=self.username,
                                 password=self.password,
                                 **self.netmiko_optional_args)
    self.device.enable()
Open a connection to the device.
def sZ(qubit: Qubit, coefficient: complex = 1.0) -> Pauli:
    """Return the Pauli sigma-Z operator acting on ``qubit``."""
    # Delegate to the generic sigma constructor with the 'Z' axis.
    return Pauli.sigma(qubit, 'Z', coefficient)
Return the Pauli sigma_Z operator acting on the given qubit
def filter(self, *LayoutClasses, **kwargs):
    """Return a LayoutSlice over layout objects of the given classes."""
    self._check_layout()
    matches = self.layout.get_layout_objects(
        LayoutClasses,
        max_level=kwargs.pop('max_level', 0),
        greedy=kwargs.pop('greedy', False))
    return LayoutSlice(self.layout, matches)
Returns a LayoutSlice pointing to layout objects of type `LayoutClass`
def _save_params(self):
    """Save current parameters and prune older parameter files."""
    self.model.save_params_to_file(self.current_params_fname)
    # Keep disk usage bounded, but never delete checkpoints still needed.
    utils.cleanup_params_files(self.model.output_dir,
                               self.max_params_files_to_keep,
                               self.state.checkpoint,
                               self.state.best_checkpoint,
                               self.keep_initializations)
Saves model parameters at current checkpoint and optionally cleans up older parameter files to save disk space.
def acquire_restore(lock, state):
    """Acquire ``lock``, restoring ``state`` when the lock supports it."""
    restore = getattr(lock, '_acquire_restore', None)
    if restore is not None:
        restore(state)
    elif hasattr(lock, 'acquire'):
        lock.acquire()
    else:
        raise TypeError('expecting Lock/RLock')
Acquire a lock and restore its state.
def fetch_pkg_list(self):
    """Fetch the master package-name list from PyPI and cache it to disk."""
    self.logger.debug("DEBUG: Fetching package name list from PyPI")
    package_list = self.list_packages()
    # Pickle data is binary, so the cache file must be opened in "wb"
    # (text mode corrupts pickles on Windows); the with-block also closes
    # the handle deterministically, which the original never did.
    with open(self.pkg_cache_file, "wb") as cache:
        cPickle.dump(package_list, cache)
    self.pkg_list = package_list
Fetch and cache master list of package names from PYPI
async def ctcp_reply(self, target, query, response):
    """Send a CTCP reply to ``target`` via NOTICE."""
    # Replying to a channel we have not joined is an error.
    if self.is_channel(target) and not self.in_channel(target):
        raise client.NotInChannel(target)
    await self.notice(target, construct_ctcp(query, response))
Send a CTCP reply to a target.
def escape_md_section(text, snob=False):
    """Escape markdown-sensitive characters throughout a document section."""
    text = md_backslash_matcher.sub(r"\\\1", text)
    if snob:
        text = md_chars_matcher_all.sub(r"\\\1", text)
    # List/heading markers keep their leading context (group 1) intact.
    for matcher in (md_dot_matcher, md_plus_matcher, md_dash_matcher):
        text = matcher.sub(r"\1\\\2", text)
    return text
Escapes markdown-sensitive characters across whole document sections.
def zip_dir(directory):
    """Zip a directory tree into an in-memory BytesIO buffer."""
    buf = io.BytesIO()
    prefix_len = len(directory)
    with ZipFile(buf, "w") as archive:
        for root, _dirs, files in os.walk(directory):
            # Store entries relative to the directory being zipped.
            rel = root[prefix_len:]
            for name in files:
                archive.write(os.path.join(root, name),
                              os.path.join(rel, name))
    return buf
zip a directory tree into a BytesIO object
def arp_packet(opcode, src_mac, src_ip, dst_mac, dst_ip):
    """Build a serialized ARP packet inside an Ethernet frame."""
    pkt = packet.Packet()
    pkt.add_protocol(ethernet.ethernet(dst_mac, src_mac, ETH_TYPE_ARP))
    pkt.add_protocol(arp.arp_ip(opcode, src_mac, src_ip, dst_mac, dst_ip))
    pkt.serialize()
    return pkt.data
Generate ARP packet with ethernet encapsulated.
def all_issues(issues):
    """Yield issues with duplicate titles filtered out (first occurrence wins)."""
    logging.info('finding issues...')
    seen_titles = set()
    for issue in issues:
        title = issue['title']
        if title in seen_titles:
            continue
        seen_titles.add(title)
        yield issue
Yields unique set of issues given a list of issues.
def _CreateAllTypes(self, enumTypes, dataTypes, managedTypes):
    """Register versions and create pyVmomi enum/data/managed types."""
    for typeInfo in managedTypes:
        name, version = typeInfo[0], typeInfo[3]
        VmomiSupport.AddVersion(version, '', '1.0', 0, name)
        VmomiSupport.AddVersionParent(version, 'vmodl.version.version0')
        VmomiSupport.AddVersionParent(version, 'vmodl.version.version1')
        VmomiSupport.AddVersionParent(version, version)
    creators = ((VmomiSupport.CreateEnumType, enumTypes),
                (VmomiSupport.CreateDataType, dataTypes),
                (VmomiSupport.CreateManagedType, managedTypes))
    for create, infos in creators:
        for typeInfo in infos:
            try:
                create(*typeInfo)
            except Exception:
                # Best effort: skip definitions that fail to register
                # rather than aborting the whole load.
                pass
Create pyVmomi types from pyVmomi type definitions
def paren_split(sep, string):
    """Split ``string`` on ``sep`` occurring outside () and [] nesting."""
    if len(sep) != 1:
        raise Exception("Separation string must be one character long")
    pieces = []
    paren_depth = 0
    bracket_depth = 0
    start = 0
    for idx, ch in enumerate(string):
        if ch == "(":
            paren_depth += 1
        elif ch == ")":
            paren_depth -= 1
        elif ch == "[":
            bracket_depth += 1
        elif ch == "]":
            bracket_depth -= 1
        elif ch == sep and paren_depth == 0 and bracket_depth == 0:
            pieces.append(string[start:idx])
            start = idx + 1
    pieces.append(string[start:])
    return pieces
Splits the string into pieces divided by sep, when sep is outside of parentheses.
def transition(trname='', field='', check=None, before=None, after=None):
    """Decorator declaring a function as a transition implementation."""
    # Guard against the decorator being used without parentheses.
    if is_callable(trname):
        raise ValueError(
            "The @transition decorator should be called as "
            "@transition(['transition_name'], **kwargs)")
    if check or before or after:
        warnings.warn(
            "The use of check=, before= and after= in @transition decorators is "
            "deprecated in favor of @transition_check, @before_transition and "
            "@after_transition decorators.",
            DeprecationWarning,
            stacklevel=2)
    return TransitionWrapper(trname, field=field, check=check,
                             before=before, after=after)
Decorator to declare a function as a transition implementation.
def convert(self, amount: Number, currency: str, to: str,
            reverse: bool = False) -> Number:
    """Convert ``amount`` from ``currency`` into ``to``."""
    rate = self.get_rate_for(currency, to, reverse)
    if self.return_decimal:
        # Promote to Decimal so the multiplication stays exact.
        amount = Decimal(amount)
    return amount * rate
Convert amount to another currency
def _convert(self, val):
    """Wrap plain dicts/lists in Dot* types; report whether we converted."""
    if isinstance(val, dict) and not isinstance(val, DotDict):
        return DotDict(val), True
    if isinstance(val, list) and not isinstance(val, DotList):
        return DotList(val), True
    return val, False
Convert the type if necessary and return if a conversion happened.
def _get_err_msg(row, col, fld, val, prt_flds): import traceback traceback.print_exc() err_msg = ( "ROW({R}) COL({C}) FIELD({F}) VAL({V})\n".format(R=row, C=col, F=fld, V=val), "PRINT FIELDS({N}): {F}".format(N=len(prt_flds), F=" ".join(prt_flds))) return "\n".join(err_msg)
Return an informative message with details of xlsx write attempt.
def signed_ge(a, b):
    """Single-bit result of a signed >= comparison of two wire vectors."""
    a, b = match_bitwidth(as_wires(a), as_wires(b), signed=True)
    diff = b - a
    # Combine the sign bit of (b - a) with both operand sign bits to
    # account for overflow, then OR in the equality case.
    return (diff[-1] ^ (~a[-1]) ^ (~b[-1])) | (a == b)
Return a single bit result of signed greater than or equal comparison.
def _cell_output(cell):
    """Return the combined stdout and text/plain outputs of an ipynb cell."""
    outputs = cell.get('outputs', [])
    stdout = '\n'.join(
        _ensure_string(output.get('text', '')) for output in outputs
    ).rstrip()
    texts = []
    for output in outputs:
        text = _ensure_string(output.get('data', {}).get('text/plain', []))
        # Skip matplotlib repr noise like "<matplotlib.axes...>".
        if text.startswith('<matplotlib'):
            continue
        texts.append(text)
    return stdout + '\n'.join(texts).rstrip()
Return the output of an ipynb cell.
def _compute_mfcc_c_extension(self):
    """Compute MFCCs via the cmfcc C extension; return (success, None)."""
    self.log(u"Computing MFCCs using C extension...")
    try:
        self.log(u"Importing cmfcc...")
        import aeneas.cmfcc.cmfcc
        self.log(u"Importing cmfcc... done")
        rconf = self.rconf
        mfcc_matrix = aeneas.cmfcc.cmfcc.compute_from_data(
            self.audio_file.audio_samples,
            self.audio_file.audio_sample_rate,
            rconf[RuntimeConfiguration.MFCC_FILTERS],
            rconf[RuntimeConfiguration.MFCC_SIZE],
            rconf[RuntimeConfiguration.MFCC_FFT_ORDER],
            rconf[RuntimeConfiguration.MFCC_LOWER_FREQUENCY],
            rconf[RuntimeConfiguration.MFCC_UPPER_FREQUENCY],
            rconf[RuntimeConfiguration.MFCC_EMPHASIS_FACTOR],
            rconf[RuntimeConfiguration.MFCC_WINDOW_LENGTH],
            rconf[RuntimeConfiguration.MFCC_WINDOW_SHIFT]
        )[0]
        self.__mfcc = mfcc_matrix.transpose()
        self.log(u"Computing MFCCs using C extension... done")
        return (True, None)
    except Exception as exc:
        self.log_exc(u"An unexpected error occurred while running cmfcc",
                     exc, False, None)
        return (False, None)
Compute MFCCs using the Python C extension cmfcc.
def UpdateManifestResourcesFromXML(dstpath, xmlstr, names=None,
                                   languages=None):
    """Update or add the manifest XML resource in ``dstpath``."""
    logger.info("Updating manifest in %s", dstpath)
    # Resource id 1 for executables, 2 otherwise (per the original logic).
    default_name = 1 if dstpath.lower().endswith(".exe") else 2
    winresource.UpdateResources(dstpath, xmlstr, RT_MANIFEST,
                                names or [default_name],
                                languages or [0, "*"])
Update or add manifest XML as resource in dstpath
def branches(directory=None, verbose=False):
    """Show current branch points."""
    config = current_app.extensions['migrate'].migrate.get_config(directory)
    # Alembic grew the ``verbose`` flag in 0.7.0.
    if alembic_version >= (0, 7, 0):
        command.branches(config, verbose=verbose)
    else:
        command.branches(config)
Show current branch points
def ack(self, msg):
    """Log the received message body; no STOMP response is required."""
    self.log.info("receiverId <%s> Received: <%s> "
                  % (self.receiverId, msg['body']))
    return stomper.NO_REPONSE_NEEDED
Process the message and determine what to do with it.
def __parse_domain_to_employer_line(self, raw_domain, raw_org):
    """Parse a 'domain to employer' line into (organization, domain)."""
    dom_match = re.match(self.DOMAIN_REGEX, raw_domain, re.UNICODE)
    if not dom_match:
        raise InvalidFormatError(
            cause="invalid domain format: '%s'" % raw_domain)
    dom = dom_match.group('domain').strip()
    org_match = re.match(self.ORGANIZATION_REGEX, raw_org, re.UNICODE)
    if not org_match:
        raise InvalidFormatError(
            cause="invalid organization format: '%s'" % raw_org)
    org = self.__encode(org_match.group('organization').strip())
    dom = self.__encode(dom)
    return org, dom
Parse domain to employer lines
def _export_corpus(self):
    """Run MALLET's ``import-file`` command to build the input file."""
    if not os.path.exists(self.mallet_bin):
        raise IOError("MALLET path invalid or non-existent.")
    self.input_path = os.path.join(self.temp, "input.mallet")
    # Renamed from ``exit`` to avoid shadowing the builtin.
    status = subprocess.call([
        self.mallet_bin, 'import-file',
        '--input', self.corpus_path,
        '--output', self.input_path,
        '--keep-sequence',
        '--remove-stopwords'])
    if status != 0:
        raise RuntimeError(
            "MALLET import-file failed with exit code {0}.".format(status))
Calls MALLET's `import-file` method.
def sync_one(self, aws_syncr, amazon, gateway):
    """Create the gateway if missing, otherwise reconcile its attributes."""
    existing = amazon.apigateway.gateway_info(gateway.name, gateway.location)
    common = (gateway.name, gateway.location, gateway.stages,
              gateway.resources, gateway.api_keys, gateway.domain_names)
    if not existing:
        amazon.apigateway.create_gateway(*common)
    else:
        amazon.apigateway.modify_gateway(existing, *common)
Make sure this gateway exists and has only attributes we want it to have
def closed(self):
    """True if connection is closed or closing."""
    is_closed = self._closing or self._closed
    if not is_closed and self._reader and self._reader.at_eof():
        # Reader hit EOF: mark as closing and schedule the real teardown.
        self._closing = is_closed = True
        self._loop.call_soon(self._do_close, None)
    return is_closed
True if connection is closed.
def SetPlatformArchContext():
    """Add platform and architecture contexts to the config system."""
    _CONFIG.AddContext("Platform:%s" % platform.system().title())
    machine = platform.uname()[4]
    if machine in ("x86_64", "AMD64", "i686"):
        # 64-bit capable CPU, but honor a 32-bit interpreter/userland.
        arch = "i386" if platform.architecture()[0] == "32bit" else "amd64"
    elif machine == "x86":
        arch = "i386"
    else:
        arch = machine
    _CONFIG.AddContext("Arch:%s" % arch)
Add the running contexts to the config system.
def from_json(self, data):
    """Initialise this message from a JSON string representation."""
    try:
        parsed = json.loads(data)
    except ValueError:
        # json.JSONDecodeError is a ValueError subclass.
        raise InvalidMessageException()
    self.from_dict(parsed)
Initialise an API message from a JSON representation.
def use_plenary_agent_view(self):
    """Pass through to each provider session's use_plenary_agent_view."""
    self._object_views['agent'] = PLENARY
    for session in self._get_provider_sessions():
        try:
            session.use_plenary_agent_view()
        except AttributeError:
            # Not all provider sessions support agent views; skip those.
            pass
Pass through to provider ResourceAgentSession.use_plenary_agent_view
def _from_dict(cls, _dict):
    """Initialize a Log object from a json dictionary."""
    # Nested objects need their own _from_dict; everything else is copied.
    converters = {
        'request': MessageRequest._from_dict,
        'response': MessageResponse._from_dict,
    }
    args = {}
    for key in ('request', 'response', 'log_id', 'request_timestamp',
                'response_timestamp', 'workspace_id', 'language'):
        if key not in _dict:
            raise ValueError(
                "Required property '{0}' not present in Log JSON".format(key))
        convert = converters.get(key)
        value = _dict.get(key)
        args[key] = convert(value) if convert else value
    return cls(**args)
Initialize a Log object from a json dictionary.
def rnumlistwithoutreplacement(min, max):
    """Return a randomly ordered list of the integers between min and max."""
    if checkquota() < 1:
        raise Exception("Your www.random.org quota has already run out.")
    request = urllib.request.Request(build_request_parameterNR(min, max))
    request.add_header('User-Agent', 'randomwrapy/0.1 very alpha')
    response = urllib.request.build_opener().open(request)
    return response.read().split()
Returns a randomly ordered list of the integers between min and max
def transform(self, x, warn=True):
    """Spline-encode each feature of ``x`` along its last axis."""
    encoded = []
    for i in range(x.shape[-1]):
        flat = x[..., i].reshape((-1, 1))
        enc = encodeSplines(flat,
                            n_bases=self.n_bases,
                            spline_order=self.degree,
                            warn=warn,
                            start=self.data_min_[i],
                            end=self.data_max_[i])
        # Restore the original feature shape plus the bases axis.
        encoded.append(enc.reshape(x[..., i].shape + (self.n_bases,)))
    return np.stack(encoded, axis=-2)
Obtain the transformed values
def example():
    """Some example usage of different twilio resources."""
    client = Client(ACCOUNT_SID, AUTH_TOKEN)

    # Listing messages.
    all_messages = client.messages.list()
    print('There are {} messages in your account.'.format(len(all_messages)))

    some_messages = client.messages.list(limit=10)
    print('Here are the last 10 messages in your account:')
    for message in some_messages:
        print(message)

    all_messages = client.messages.list(page_size=10)
    print('There are {} messages in your account.'.format(len(all_messages)))

    # Sending a message and placing a call.
    print('Sending a message...')
    new_message = client.messages.create(to='XXXX', from_='YYYY',
                                         body='Twilio rocks!')
    print('Making a call...')
    new_call = client.calls.create(to='XXXX', from_='YYYY', method='GET')

    # Building a TwiML response.
    print('Serving TwiML')
    twiml_response = VoiceResponse()
    twiml_response.say('Hello!')
    twiml_response.hangup()
    twiml_xml = twiml_response.to_xml()
    print('Generated twiml: {}'.format(twiml_xml))
Some example usage of different twilio resources.
def recipe_list(backend, kitchen):
    """List the Recipes in a Kitchen."""
    err_str, use_kitchen = Backend.get_kitchen_from_user(kitchen)
    if use_kitchen is None:
        raise click.ClickException(err_str)
    click.secho("%s - Getting the list of Recipes for Kitchen '%s'"
                % (get_datetime(), use_kitchen), fg='green')
    check_and_print(DKCloudCommandRunner.list_recipe(backend.dki, use_kitchen))
List the Recipes in a Kitchen
def append_tier(self, coro, **kwargs):
    """Add a tier sourcing from the current tail tier, like a pipe."""
    tail = self.tiers[-1] if self.tiers else None
    return self.add_tier(coro, source=tail, **kwargs)
Implicitly source from the tail tier like a pipe.
def insert_option_group(self, idx, *args, **kwargs):
    """Insert an OptionGroup at position ``idx``."""
    # add_option_group always appends; move the new group to the slot.
    group = self.add_option_group(*args, **kwargs)
    self.option_groups.pop()
    self.option_groups.insert(idx, group)
    return group
Insert an OptionGroup at a given position.
def _compute_quads(self, element, data, mapping): quad_mapping = {'left': 'x0', 'right': 'x1', 'bottom': 'y0', 'top': 'y1'} quad_data = dict(data['scatter_1']) quad_data.update({'x0': [], 'x1': [], 'y0': [], 'y1': []}) for node in element._sankey['nodes']: quad_data['x0'].append(node['x0']) quad_data['y0'].append(node['y0']) quad_data['x1'].append(node['x1']) quad_data['y1'].append(node['y1']) data['scatter_1'].update(quad_data) data['quad_1'] = data['scatter_1'] mapping['quad_1'] = quad_mapping
Computes the node quad glyph data.
def format(self, record):
    """Format ``record`` with the formatter matching its level."""
    formatter_attr = {
        DEBUG: 'debug_formatter',
        INFO: 'info_formatter',
        ERROR: 'error_formatter',
        WARNING: 'warning_formatter',
        CRITICAL: 'critical_formatter',
    }.get(record.levelno)
    # Unknown levels fall through and return None, as before.
    if formatter_attr is not None:
        return getattr(self, formatter_attr).format(record)
Format the record using the corresponding formatter.
def handle_events(self):
    """Process all pending pymlgame events."""
    for event in pymlgame.get_events():
        if event.type == E_NEWCTLR:
            self.players[event.uid] = {
                'name': 'alien_{}'.format(event.uid), 'score': 0}
        elif event.type == E_DISCONNECT:
            self.players.pop(event.uid)
        elif event.type in (E_KEYDOWN, E_KEYUP):
            # Any key press/release rotates the color palette.
            self.colors.append(self.colors.pop(0))
        elif event.type == E_PING:
            pass  # keep-alive only
Loop through all events.
def available_backends(): print 'The following LiveSync agents are available:' for name, backend in current_plugin.backend_classes.iteritems(): print cformat(' - %{white!}{}%{reset}: {} ({})').format(name, backend.title, backend.description)
Lists the currently available backend types
def rollforward(self, date):
    """Roll ``date`` forward to the nearest end of quarter."""
    if self.onOffset(date):
        return date  # already on a quarter end
    return date + QuarterEnd(month=self.month)
Roll date forward to nearest end of quarter
def swap_buffers(self):
    """Swap buffers, reset the viewport, pump events, count the frame."""
    self.widget.swapBuffers()
    self.set_default_viewport()
    # Let Qt process queued events between frames.
    self.app.processEvents()
    self.frames += 1
Swap buffers, set viewport, trigger events and increment frame counter
def mark_featured(self, request, queryset):
    """Mark the selected entries as featured."""
    queryset.update(featured=True)
    self.message_user(
        request, _('Selected entries are now marked as featured.'))
Mark selected as featured post.
def safe_load_sensors(self):
    """Load sensors from file, falling back to the backup on corruption."""
    try:
        loaded = self._load_sensors()
    except (EOFError, ValueError):
        _LOGGER.error('Bad file contents: %s', self.persistence_file)
        loaded = False
    if loaded:
        return
    _LOGGER.warning('Trying backup file: %s', self.persistence_bak)
    try:
        if not self._load_sensors(self.persistence_bak):
            _LOGGER.warning('Failed to load sensors from file: %s',
                            self.persistence_file)
    except (EOFError, ValueError):
        # Backup is corrupt too; remove the primary file so the next
        # run starts clean.
        _LOGGER.error('Bad file contents: %s', self.persistence_file)
        _LOGGER.warning('Removing file: %s', self.persistence_file)
        os.remove(self.persistence_file)
Load sensors safely from file.
def reduce_mean(attrs, inputs, proto_obj):
    """Reduce the array along a given axis by mean value."""
    # ONNX calls the attribute 'axes'; MXNet expects 'axis'.
    new_attrs = translation_utils._fix_attribute_names(attrs, {'axes': 'axis'})
    return 'mean', new_attrs, inputs
Reduce the array along a given axis by mean value
def run_check(self, data):
    """Score ``data`` for uncommon and difficult words; return the stats."""
    if not data:
        sys.exit(1)
    data, sentences, chars, num_words = self.pre_check(data)
    word_counts = Counter(data)
    uniq_len, uncommon, uncom_len = self.gsl(word_counts)
    # Words outside the Dale-Chall easy-word list count as "difficult".
    difficult = Counter({word: count for word, count in word_counts.items()
                         if word and word not in self.dale_chall_words})
    diff_count = sum(difficult.values())
    dc_score = round(self.dale_chall(diff_count, num_words, sentences), 1)
    cli_score = round(self.coleman_liau(chars, num_words, sentences), 1)
    return uncommon, uncom_len, uniq_len, dc_score, cli_score
Check for uncommon words and difficult words in file.
def find_pyqt5(python):
    """Locate PyQt5: cached state, env var, or probe the interpreter."""
    pyqt5 = _state.get("pyqt5") or os.getenv("PYBLISH_QML_PYQT5")
    if pyqt5:
        return pyqt5
    try:
        path = subprocess.check_output([
            python, "-c",
            "import PyQt5, sys;"
            "sys.stdout.write(PyQt5.__file__)"
        ], universal_newlines=True)
    except subprocess.CalledProcessError:
        # Probe failed; return the (falsy) value we already have.
        return pyqt5
    return os.path.dirname(os.path.dirname(path))
Search for PyQt5 automatically
def timing(function):
    """Decorator logging execution time of ``function`` for profiling."""
    @wraps(function)
    def wrapped(*args, **kwargs):
        start_time = time.time()
        ret = function(*args, **salt.utils.args.clean_kwargs(**kwargs))
        end_time = time.time()
        mod_name = function.__module__
        if mod_name.startswith('salt.loaded.int.'):
            mod_name = mod_name[16:]  # strip the loader prefix
        fstr = 'Function %s.%s took %.{0}f seconds to execute'.format(
            sys.float_info.dig)
        log.profile(fstr, mod_name, function.__name__, end_time - start_time)
        return ret
    return wrapped
Decorator wrapper to log execution time, for profiling purposes
def filter_304_headers(headers):
    """Drop headers that must not appear in a 304 Not Modified response."""
    return [(name, value) for name, value in headers
            if name.lower() not in _filter_from_304]
Filter a list of headers to include in a "304 Not Modified" response.
def logged_insert(self, user):
    """Insert the document and record an ADDED change-log entry."""
    self.insert()
    entry = ChangeLogEntry({
        'type': 'ADDED',
        'documents': [self],
        'user': user,
    })
    entry.insert()
    return entry
Create and insert the document and log the event in the change log
def save(self, *args, **kwargs):
    """Populate slugs/rendered content, save, and signal first publication."""
    self._create_slug()
    self._create_date_slug()
    self._render_content()
    # Fire entry_published only when transitioning into the published
    # state, and only if _set_published reports success.
    send_signal = False
    if self.published and self.published_on is None:
        send_signal = self._set_published()
    super(Entry, self).save(*args, **kwargs)
    if send_signal:
        entry_published.send(sender=self, entry=self)
Auto-generate a slug from the name.
def save_info(self, dirn):
    """Save information about the distribution in its dist_dir."""
    with current_directory(dirn):
        info('Saving distribution info')
        dist_info = {
            'dist_name': self.ctx.dist_name,
            'bootstrap': self.ctx.bootstrap.name,
            'archs': [arch.arch for arch in self.ctx.archs],
            'ndk_api': self.ctx.ndk_api,
            'use_setup_py': self.ctx.use_setup_py,
            'recipes': (self.ctx.recipe_build_order
                        + self.ctx.python_modules),
            'hostpython': self.ctx.hostpython,
            'python_version':
                self.ctx.python_recipe.major_minor_version_string,
        }
        with open('dist_info.json', 'w') as fileh:
            json.dump(dist_info, fileh)
Save information about the distribution in its dist_dir.
def setdefault(self, name: str, default: Any = None) -> Any:
    """Like dict.setdefault, but operating on this object's attributes."""
    return self.__dict__.setdefault(name, default)
Set an attribute with a default value.
def _getPayload(self, record):
    """Build the loggly payload, adding the configured tags."""
    payload = super(LogglyHandler, self)._getPayload(record)
    payload['tags'] = self._implodeTags()
    return payload
The data that will be sent to loggly.
def unblockall(self):
    """Remove every block from this queue and all sub-queues."""
    for queue in self.queues.values():
        queue.unblockall()
    self.blockEvents.clear()
Remove all blocks from the queue and all sub-queues
def create_instance(self, parent):
    """Instantiate the placeholder's class with ``parent`` attached."""
    self.kwargs['instantiate'] = True
    self.kwargs['parent'] = parent
    instance = self.cls(*self.args, **self.kwargs)
    # Propagate the field ordering onto the new instance.
    instance._field_seqno = self._field_seqno
    return instance
Create an instance based off this placeholder with some parent
def memorized_timedelta(seconds):
    """Return a cached timedelta so equal offsets share one instance."""
    delta = _timedelta_cache.get(seconds)
    if delta is None:
        delta = timedelta(seconds=seconds)
        _timedelta_cache[seconds] = delta
    return delta
Create only one instance of each distinct timedelta
def _assign_work_unit(self, node): assert self.workqueue scope, work_unit = self.workqueue.popitem(last=False) assigned_to_node = self.assigned_work.setdefault(node, default=OrderedDict()) assigned_to_node[scope] = work_unit worker_collection = self.registered_collections[node] nodeids_indexes = [ worker_collection.index(nodeid) for nodeid, completed in work_unit.items() if not completed ] node.send_runtest_some(nodeids_indexes)
Assign a work unit to a node.
def _iter_path_collection(paths, path_transforms, offsets, styles):
    """Iterate the collection's elements, cycling each out to the max length."""
    N = max(len(paths), len(offsets))
    if not path_transforms:
        path_transforms = [np.eye(3)]
    edgecolor = styles['edgecolor']
    if np.size(edgecolor) == 0:
        edgecolor = ['none']
    facecolor = styles['facecolor']
    if np.size(facecolor) == 0:
        facecolor = ['none']
    elements = [paths, path_transforms, offsets,
                edgecolor, styles['linewidth'], facecolor]
    # Cycle every element stream, zip them, and stop after N tuples.
    return itertools.islice(
        py3k.zip(*py3k.map(itertools.cycle, elements)), N)
Build an iterator over the elements of the path collection
def zoom_out(self):
    """Zoom the focused viewer out one step, if it supports zooming."""
    viewer = self.getfocus_viewer()
    if not hasattr(viewer, 'zoom_out'):
        return  # viewer cannot zoom; implicit None as before
    viewer.zoom_out()
    return True
Zoom the view out one zoom step.
def complete_event(self, event_id: str):
    """Move ``event_id`` from the processed list to the completed list."""
    processed = DB.get_list(self._processed_key)
    if event_id not in processed:
        raise KeyError('Unable to complete event. Event {} has not been '
                       'processed (ie. it is not in the processed '
                       'list).'.format(event_id))
    # Both list operations run in one pipelined transaction.
    DB.remove_from_list(self._processed_key, event_id, pipeline=True)
    completed_key = _keys.completed_events(self._object_type,
                                           self._subscriber)
    DB.append_to_list(completed_key, event_id, pipeline=True)
    DB.execute()
Complete the specified event.
def ginga_to_matplotlib_cmap(cm, name=None):
    """Convert a Ginga colormap to a matplotlib ListedColormap."""
    if name is None:
        name = cm.name
    from matplotlib.colors import ListedColormap
    colors = np.asarray(cm.clst)
    return ListedColormap(colors, name=name, N=len(colors))
Convert Ginga colormap to matplotlib's.
def _getFromDate(l, date):
    """Return the entry at or before ``date``, else the first entry."""
    try:
        date = _toDate(date)
        idx = _insertDateIndex(date, l) - 1
        return l[0] if idx == -1 else l[idx]
    except (ValueError, TypeError):
        # Unparseable date: fall back to the earliest entry.
        return l[0]
returns the index of given or best fitting date
def build(self, shutit):
    """Sets up the target ready for building."""
    target_child = self.start_container(shutit, 'target_child')
    self.setup_host_child(shutit)
    self.setup_target_child(shutit, target_child)
    glob = shutit_global.shutit_global_object
    shutit.send('chmod -R 777 ' + glob.shutit_state_dir
                + ' && mkdir -p ' + glob.shutit_state_dir_build_db_dir
                + '/' + glob.build_id,
                shutit_pexpect_child=target_child,
                echo=False)
    return True
Sets up the target ready for building.
def nodes_to_object(self, node, object):
    """Map all child nodes to one object's attributes."""
    for child in list(node):
        self.node_to_object(child, object)
Map all child nodes to one object's attributes
def ensure_dir(path):
    """Ensure ``path`` exists as a directory, creating parents as needed.

    Generalizes the original (single-level os.mkdir) to nested paths:
    makedirs(exist_ok=True) also covers the exists-and-isdir check.
    """
    os.makedirs(path, exist_ok=True)
Ensures a directory exists
def format_cffi_externs(cls):
    """Generate 'extern "Python"' stubs from the @_extern_decl methods."""
    decls = '\n'.join(f.extern_signature.pretty_print()
                      for f in cls._extern_fields.values())
    return 'extern "Python" {\n' + decls + '\n}\n'
Generate stubs for the cffi bindings from @_extern_decl methods.
def log_leave(event, nick, channel):
    """Log a quit or part event for channels we are logging."""
    if channel not in pmxbot.config.log_channels:
        return
    ParticipantLogger.store.log(nick, channel, event.type)
Log a quit or part event.
def move_to(self, n):
    """Move the terminal cursor back up ``n`` lines."""
    sequence = self.term.move_up * n
    self.term.stream.write(sequence)
Move back N lines in terminal.
def parse_statement(self):
    """Parse a single statement.

    Dispatch order: built-in statement keywords, then the special
    'call'/'filter' blocks, then registered extensions; anything else
    is an unknown tag.
    """
    token = self.stream.current
    if token.type != 'name':
        self.fail('tag name expected', token.lineno)
    # Track the tag on the stack so error messages can report context;
    # pop_tag controls whether the finally-block undoes the push.
    self._tag_stack.append(token.value)
    pop_tag = True
    try:
        if token.value in _statement_keywords:
            return getattr(self, 'parse_' + self.stream.current.value)()
        if token.value == 'call':
            return self.parse_call_block()
        if token.value == 'filter':
            return self.parse_filter_block()
        ext = self.extensions.get(token.value)
        if ext is not None:
            return ext(self)
        # Unknown tag: pop the stack entry ourselves (and tell the
        # finally-block not to) so fail_unknown_tag sees the enclosing
        # tag context rather than the failing tag itself.
        self._tag_stack.pop()
        pop_tag = False
        self.fail_unknown_tag(token.value, token.lineno)
    finally:
        if pop_tag:
            self._tag_stack.pop()
Parse a single statement.
def update_settings(self):
    """Recreate the whole image after a settings change."""
    self.display()
    self.display_markers()
    # Redraw annotations only when an annotation file is loaded.
    if self.parent.notes.annot is not None:
        self.parent.notes.display_notes()
After changing the settings, we need to recreate the whole image.
def training_job_analytics(self):
    """Return a ``TrainingJobAnalytics`` object for the current training job."""
    if self._current_job_name is None:
        raise ValueError('Estimator is not associated with a TrainingJob')
    return TrainingJobAnalytics(self._current_job_name,
                                sagemaker_session=self.sagemaker_session)
Return a ``TrainingJobAnalytics`` object for the current training job.
def bigger_version(version_string_a, version_string_b):
    """Return whichever of two version strings is larger."""
    major_a, minor_a, patch_a = parse_version_string(version_string_a)
    major_b, minor_b, patch_b = parse_version_string(version_string_b)
    # Tuple comparison is lexicographic: major, then minor, then patch.
    # Ties (equal versions) return version_string_b, as before.
    if (major_a, minor_a, patch_a) > (major_b, minor_b, patch_b):
        return version_string_a
    return version_string_b
Returns the bigger version of two version strings.
def issue(self, test, err):
    """Record a failing/erroring test by updating the step progress."""
    total = len(self.failures) + len(self.errors)
    self.step.setProgress('tests failed', total)
An issue - failing, erroring etc test.
def init(scope, app, settings):
    """Plugin for serving static files in development mode."""
    cfg = settings.get('rw.static', {})
    static = Static()
    scope['static'] = static
    scope['template_env'].globals['static'] = static
    for base_uri, sources in cfg.items():
        full_paths = []
        for source in sources:
            if isinstance(source, dict):
                # Explicit path entry, with env-var interpolation.
                full_paths.append(source['path'].format(**os.environ))
                continue
            if ',' in source:
                module_name, path = [part.strip()
                                     for part in source.split(',')]
            else:
                module_name, path = source, 'static'
            full_paths.append(
                pkg_resources.resource_filename(module_name, path))
        app.root.mount('/' + base_uri + '/<h>/<path:path>',
                       StaticHandler, {'path': full_paths},
                       name='static_' + base_uri.replace('.', '_'))
        static.handlers.append((base_uri, StaticHandler, full_paths))
    static.setup()
Plugin for serving static files in development mode
def _convert_connected_app(self):
    """Migrate legacy Connected App config into a keychain service."""
    if self.services and "connected_app" in self.services:
        return  # already converted
    connected_app = self.get_connected_app()
    if not connected_app:
        return  # nothing to migrate
    self.logger.warning(
        "Reading Connected App info from deprecated config."
        " Connected App should be changed to a service."
        " If using environment keychain, update the environment variable."
        " Otherwise, it has been handled automatically and you should not"
        " see this message again."
    )
    ca_config = ServiceConfig(
        {
            "callback_url": connected_app.callback_url,
            "client_id": connected_app.client_id,
            "client_secret": connected_app.client_secret,
        }
    )
    self.set_service("connected_app", ca_config)
Convert Connected App to service
def create_node_rating_counts_settings(sender, **kwargs):
    """Create rating-count and participation-settings rows for new nodes."""
    if not kwargs['created']:
        return  # only act on newly created nodes
    node = kwargs['instance']
    create_related_object.delay(NodeRatingCount, {'node': node})
    create_related_object.delay(NodeParticipationSettings, {'node': node})
create node rating count and settings
def _get_v_angle_guess(self, case): v_angle = array([bus.v_angle * (pi / 180.0) for bus in case.connected_buses]) return v_angle
Make the vector of voltage phase guesses.
def add_view(self, path: str, handler: AbstractView,
             **kwargs: Any) -> AbstractRoute:
    """Register ``handler`` for every HTTP method on ``path``."""
    return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)
Shortcut for add_route with ANY methods for a class-based view