code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def request(self, *args, **kwargs):
    """Issue the HTTP request, wrapping any failure in RequestException."""
    try:
        return self._http.request(*args, timeout=TIMEOUT, **kwargs)
    except Exception as error:
        raise RequestException(error, args, kwargs)
def delayed_close(closable):
    """Delay ``close`` on *closable* until this contextmanager body exits.

    While the ``yield`` is active, ``closable.close`` is replaced with a
    no-op; on exit the original ``close`` is restored and invoked once.
    (Generator body; typically wrapped with ``contextlib.contextmanager``.)
    """
    original_close = getattr(closable, "close", None)
    if original_close:
        def _noop_close(*args, **kw):
            pass
        setattr(closable, "close", _noop_close)
    try:
        yield closable
    finally:
        if original_close:
            setattr(closable, "close", original_close)
            closable.close()
def factors(self, rank):
    """Return KTensor factors for every fitted model with this *rank*."""
    self._check_rank(rank)
    return [fit.factors for fit in self.results[rank]]
def _input_file_as_html_links(cls, session: AppSession):
    """Parse the input file as HTML and yield the links found in it."""
    scrape_result = session.factory['HTMLScraper'].scrape_file(
        session.args.input_file,
        # Fall back to UTF-8 when no local encoding was given.
        encoding=session.args.local_encoding or 'utf-8'
    )
    for link_context in scrape_result.link_contexts:
        yield link_context.link
def json_serial(obj):
    """JSON serializer for objects the default encoder cannot handle.

    Dates and datetimes become ISO-8601 strings; anything else raises
    TypeError, matching ``json.dumps`` default= contract.
    """
    if isinstance(obj, (datetime.datetime, datetime.date)):
        return obj.isoformat()
    raise TypeError('Type {} not serializable.'.format(type(obj)))
def save_gradebook_column(self, gradebook_column_form, *args, **kwargs):
    """Dispatch to update_gradebook_column or create_gradebook_column
    depending on whether the form is marked for update."""
    if gradebook_column_form.is_for_update():
        return self.update_gradebook_column(gradebook_column_form, *args, **kwargs)
    return self.create_gradebook_column(gradebook_column_form, *args, **kwargs)
def _flatterm_iter(cls, expression: Expression) -> Iterator[TermAtom]:
    """Yield the atoms of *expression* in prefix order.

    Operations contribute their type, their operands (recursively), and an
    OPERATION_END marker; wildcards/symbols yield themselves or their type.
    """
    if isinstance(expression, Operation):
        yield type(expression)
        for operand in op_iter(expression):
            yield from cls._flatterm_iter(operand)
        yield OPERATION_END
    elif isinstance(expression, SymbolWildcard):
        yield expression.symbol_type
    elif isinstance(expression, (Symbol, Wildcard)):
        yield expression
    else:
        assert False, "Unreachable unless a new unsupported expression type is added."
def repo_id(self, repo: str) -> str:
    """Return a unique folder identifier derived from a repo URL or path.

    The readable prefix is the sanitised location; the sha256 of the raw
    *repo* string is appended to guarantee uniqueness.
    """
    if repo.startswith("http"):
        # Strip the scheme and an optional leading "www.".  The original
        # pattern "(.www)?" put the dot before "www" and therefore never
        # matched, leaving "www." in the identifier.
        repo_id = re.sub(r"https?://(www\.)?", "", repo)
        repo_id = re.sub(r"\.git/?$", "", repo_id)
    else:
        repo_id = repo.replace("file://", "")
        repo_id = re.sub(r"\.git/?$", "", repo_id)
        if repo_id.startswith("~"):
            # NOTE(review): resolve() does not expand "~"; expanduser() was
            # probably intended, but changing it would change existing ids.
            repo_id = str(Path(repo_id).resolve())
    repo_id = re.sub(r"[^a-zA-Z0-9._]+", "_", repo_id)
    return repo_id + hashlib.sha256(repo.encode("utf-8")).hexdigest()
def sscan(self, key, cursor=0, match=None, count=None):
    """Incrementally iterate Set elements (redis SSCAN)."""
    tokens = [key, cursor]
    if match is not None:
        tokens.extend([b'MATCH', match])
    if count is not None:
        tokens.extend([b'COUNT', count])
    fut = self.execute(b'SSCAN', *tokens)
    # Normalise the reply to (next_cursor, members).
    return wait_convert(fut, lambda obj: (int(obj[0]), obj[1]))
def getAttributeUri(self, index):
    """Return the numeric namespace-URI ID of the attribute at *index*."""
    offset = self._get_attribute_offset(index)
    return self.m_attributes[offset + ATTRIBUTE_IX_NAMESPACE_URI]
def py_to_go_cookie(py_cookie):
    """Convert a python cookie to the JSON-marshalable Go-style cookie form."""
    go_cookie = {
        'Name': py_cookie.name,
        'Value': py_cookie.value,
        'Domain': py_cookie.domain,
        'HostOnly': not py_cookie.domain_specified,
        'Persistent': not py_cookie.discard,
        'Secure': py_cookie.secure,
        'CanonicalHost': py_cookie.domain,
    }
    if py_cookie.path_specified:
        go_cookie['Path'] = py_cookie.path
    if py_cookie.expires is not None:
        # pyrfc3339 accepts the naive local datetime via accept_naive=True.
        expiry = datetime.datetime.fromtimestamp(py_cookie.expires)
        go_cookie['Expires'] = pyrfc3339.generate(expiry, accept_naive=True)
    return go_cookie
def print_scoreboard(self):
    """Render the line score (innings, away, home) as a text scoreboard."""
    innings = [frame['inning'] for frame in self]
    away = [frame['away'] for frame in self]
    home = [frame['home'] for frame in self]
    parts = ['Inning\t']
    parts.extend(str(num) + ' ' for num in innings)
    parts.append('\n')
    parts.extend('---' for _ in innings)
    parts.append('\nAway\t' + self.__enumerate_scoreboard(away))
    parts.append('\nHome\t' + self.__enumerate_scoreboard(home))
    return ''.join(parts)
def package_files(directory):
    """Return the data files under *directory* as '..'-relative paths."""
    return [
        join('..', path, filename)
        for path, _, file_names in walk(directory)
        for filename in file_names
    ]
def save_img(object, handle, **kwargs):
    """Save a numpy array or PIL image to *handle* as an image file.

    (Parameter name ``object`` shadows the builtin but is kept for
    backward compatibility with keyword callers.)
    """
    if isinstance(object, np.ndarray):
        # Arrays are normalised before conversion to a PIL image.
        normalized = _normalize_array(object)
        object = PIL.Image.fromarray(normalized)
    if isinstance(object, PIL.Image.Image):
        object.save(handle, **kwargs)
    else:
        raise ValueError("Can only save_img for numpy arrays or PIL.Images!")
def _writeSuperLinks(self, superLinks, fileObject):
    """Write SLINK/NODE/PIPE cards for each super link to *fileObject*."""
    for slink in superLinks:
        fileObject.write('SLINK %s %s\n' % (slink.slinkNumber, slink.numPipes))
        for node in slink.superNodes:
            fileObject.write(
                'NODE %s %.2f %.2f %.6f %s %s %s %.6f %.6f\n' % (
                    node.nodeNumber, node.groundSurfaceElev, node.invertElev,
                    node.manholeSA, node.nodeInletCode, node.cellI, node.cellJ,
                    node.weirSideLength, node.orificeDiameter))
        for pipe in slink.pipes:
            fileObject.write(
                'PIPE %s %s %.6f %.6f %.6f %.6f %.2f %.6f %.6f\n' % (
                    pipe.pipeNumber, pipe.xSecType, pipe.diameterOrHeight,
                    pipe.width, pipe.slope, pipe.roughness, pipe.length,
                    pipe.conductance, pipe.drainSpacing))
def tryOrder(self, commit: Commit):
    """Order the request if *commit* is ready; return whether it was ordered."""
    can_order, reason = self.canOrder(commit)
    if can_order:
        self.logger.trace("{} returning request to node".format(self))
        self.doOrder(commit)
    else:
        self.logger.debug("{} cannot return request to node: {}".format(self, reason))
    return can_order
def register(g):
    """Registers new generator instance 'g' in the module-level tables."""
    assert isinstance(g, Generator)
    gen_id = g.id()
    __generators[gen_id] = g
    for target_type in sequence.unique(g.target_types()):
        __type_to_generators.setdefault(target_type, []).append(g)
    # A generator id looks like "toolset.name"; index by its toolset part.
    toolset = gen_id.split('.', 100)[0]
    __generators_for_toolset.setdefault(toolset, []).append(g)
    invalidate_extendable_viable_source_target_type_cache()
def escape_quotes(self, val):
    """Escape any embedded quote characters in a quoted string value."""
    if self.is_string(val) and self._in_quotes(val, self.quote):
        # Unescape first so already-escaped quotes are not doubled.
        inner = self.remove_quotes(val).replace("\\" + self.quote, self.quote)
        inner = inner.replace(self.quote, "\\" + self.quote)
        val = self.add_quotes(inner)
    return val
def init_progress_bar(self):
    """Initialize and return a tqdm progress bar for this computation."""
    disable = MapReduce._forked or not config.PROGRESS_BARS
    if disable:
        total = None
    else:
        # Materialise the iterable so tqdm can display a total.
        self.iterable = list(self.iterable)
        total = len(self.iterable)
    return tqdm(total=total, disable=disable, leave=False,
                desc=self.description)
def calculate_mypypath() -> List[str]:
    """Return MYPYPATH entries so typeshed stubs take precedence over local sources."""
    typeshed_root = None
    checked = 0
    started = time.time()
    # Walk up from this file, the mypy package and the stdlib looking for a
    # typeshed checkout in either of its two conventional locations.
    for parent in itertools.chain(
        Path(__file__).parents,
        Path(mypy.api.__file__).parents,
        Path(os.__file__).parents,
    ):
        checked += 1
        for candidate in (parent / 'lib' / 'mypy' / 'typeshed',
                          parent / 'typeshed'):
            if candidate.is_dir():
                typeshed_root = candidate
                break
        if typeshed_root:
            break
    LOG.debug(
        'Checked %d paths in %.2fs looking for typeshed. Found %s',
        checked,
        time.time() - started,
        typeshed_root,
    )
    if not typeshed_root:
        return []
    stdlib_dirs = ('3.7', '3.6', '3.5', '3.4', '3.3', '3.2', '3', '2and3')
    third_party_dirs = ('3.7', '3.6', '3', '2and3')
    stdlib_stubs = [typeshed_root / 'stdlib' / d for d in stdlib_dirs]
    third_party_stubs = [typeshed_root / 'third_party' / d
                         for d in third_party_dirs]
    return [str(p) for p in stdlib_stubs + third_party_stubs]
def distribute(build):
    """Build sdist/wheel for the uranium package and upload it via twine."""
    for package in ("wheel", "twine"):
        build.packages.install(package)
    build.executables.run([
        "python", "setup.py",
        "sdist", "bdist_wheel", "--universal", "upload",
    ])
    build.executables.run(["twine", "upload", "dist/*"])
async def handle_adapter_event(self, adapter_id, conn_string, conn_id, name, event):
    """Handle an event received from an adapter, translating its conn string."""
    if name == 'device_seen':
        self._track_device_seen(adapter_id, conn_string, event)
        event = self._translate_device_seen(adapter_id, conn_string, event)
        conn_string = self._translate_conn_string(adapter_id, conn_string)
    elif conn_id is not None and self._get_property(conn_id, 'translate'):
        conn_string = self._translate_conn_string(adapter_id, conn_string)
    else:
        # Untranslated connections are namespaced by their adapter id.
        conn_string = "adapter/%d/%s" % (adapter_id, conn_string)
    await self.notify_event(conn_string, name, event)
def _start_ssh_agent(cls):
    """Start ssh-agent and return its environment variables as a dict.

    Parses the ``VAR=value; export VAR;`` lines that ``ssh-agent -s``
    prints, skipping blanks and the trailing ``echo Agent pid`` line.
    """
    env = {}
    stdout = ClHelper.run_command('ssh-agent -s')
    for line in stdout.split('\n'):
        if not line or line.startswith('echo '):
            continue
        assignment = line.split(';')[0]
        # partition keeps values containing '=' intact; the previous
        # split('=') with a len == 2 check silently dropped them.
        key, sep, value = assignment.partition('=')
        if sep:
            env[key] = value
    return env
def _backspace(self):
    """Erase the last character of the snippet command (never the ':')."""
    if self.command == ':':
        return
    logger.log(5, "Snippet keystroke `Backspace`.")
    self.command = self.command[:-1]
def array_bytes(shape, dtype):
    """Estimate the memory in bytes required for an array of *shape* and *dtype*."""
    # np.product was deprecated and removed in NumPy 2.0; np.prod is the
    # supported spelling and behaves identically here.
    return np.prod(shape) * np.dtype(dtype).itemsize
def image_task(self):
    """Return the json-schema document that represents a task entity."""
    uri = "/%s/task" % self.uri_base
    _resp, resp_body = self.api.method_get(uri)
    return resp_body
def parse_file(self, sourcepath):
    """Parse an object-per-line JSON file into a log data dict."""
    with open(sourcepath, 'r') as logfile:
        raw_lines = logfile.readlines()
    entries = [self.parse_line(line) for line in raw_lines]
    if self.tzone:
        # Stamp every entry with the configured timezone.
        for entry in entries:
            entry['tzone'] = self.tzone
    return {'entries': entries}
def find_by_typename(self, typename):
    """List all objects whose concrete type has the given name."""
    return self.find_by(lambda obj: type(obj).__name__ == typename)
def user(val, **kwargs):
    """Validate a username (string) or uid (non-negative integer)."""
    if not isinstance(val, six.integer_types):
        try:
            val = int(val)
        except (TypeError, ValueError):
            # Non-numeric strings are treated as usernames.
            pass
    if not isinstance(val, (six.integer_types, six.string_types)):
        raise SaltInvocationError('Value must be a username or uid')
    elif isinstance(val, six.integer_types) and val < 0:
        raise SaltInvocationError('\'{0}\' is an invalid uid'.format(val))
    return val
def retry_failed(FailAdmin, request, queryset):
    """Submit selected failed tasks back to the queue, then delete them."""
    for task in queryset:
        async_task(task.func, *(task.args or ()),
                   hook=task.hook, **(task.kwargs or {}))
        task.delete()
def wsp(word):
    """Return the number of unstressed heavy syllables (WSP violations).

    Dots delimit stressed spans; positions between an even delimiter and
    the next delimiter are unstressed, and heavy syllables starting there
    count as violations.
    """
    HEAVY = r'[ieaAoO]{1}[\.]*(u|y)[^ieaAoO]+(\.|$)'
    delimiters = [pos for pos, ch in enumerate(word) if ch == '.']
    if len(delimiters) % 2 != 0:
        # Odd count: the final span runs to the end of the word.
        delimiters.append(len(word))
    unstressed = []
    for idx, pos in enumerate(delimiters):
        if idx % 2 == 0:
            unstressed.extend(range(pos + 1, delimiters[idx + 1]))
    return sum(1 for m in re.finditer(HEAVY, word) if m.start(0) in unstressed)
def show_metadata_converter(self):
    """Show the InaSAFE Metadata Converter dialog (modal)."""
    from safe.gui.tools.metadata_converter_dialog import (
        MetadataConverterDialog)
    dialog = MetadataConverterDialog(
        parent=self.iface.mainWindow(),
        iface=self.iface,
    )
    dialog.exec_()
def _cnvkit_fix(cnns, background_cnn, items, ckouts):
    """Normalize samples, correcting sources of bias; wrap the result in a list."""
    return [_cnvkit_fix_base(cnns, background_cnn, items, ckouts)]
def _get_cookies_as_dict():
    """Read the [cookies] section of the config file as a dict.

    Returns an empty dict when the section is missing; previously the
    function fell off the end and implicitly returned None in that case.
    """
    config = ConfigParser.SafeConfigParser()
    config.read(_config)
    cookie_dict = {}
    if config.has_section('cookies'):
        for option in config.options('cookies'):
            # ConfigParser lower-cases option names; JSESSIONID must be
            # restored to upper case.
            option_key = option.upper() if option == 'jsessionid' else option
            cookie_dict[option_key] = config.get('cookies', option)
    return cookie_dict
def principal_unit():
    """Returns the principal unit of this unit, otherwise None."""
    principal = os.environ.get('JUJU_PRINCIPAL_UNIT', None)
    # Empty string means "this unit is itself the principal".
    if principal == '':
        return os.environ['JUJU_UNIT_NAME']
    if principal is not None:
        return principal
    # Fallback for older Juju: scan relations for a non-subordinate unit.
    for reltype in relation_types():
        for rid in relation_ids(reltype):
            for unit in related_units(rid):
                md = _metadata_unit(unit)
                if not md:
                    continue
                if not md.pop('subordinate', None):
                    return unit
    return None
def find_deck(provider: Provider, key: str, version: int, prod: bool=True) -> Optional[Deck]:
    """Find a specific deck by its deck id (*key*)."""
    pa_params = param_query(provider.network)
    # Production and test networks register decks under different P2TH addresses.
    p2th = pa_params.P2TH_addr if prod else pa_params.test_P2TH_addr
    rawtx = provider.getrawtransaction(key, 1)
    return deck_parser((provider, rawtx, 1, p2th))
def _reset_changes(self):
    """Snapshot current values so later changes can be detected."""
    self._original = {}
    if self.last_updated is not None:
        self._original['last_updated'] = self.last_updated
def cached_generator(function):
    """Method decorator caching a generator's yielded items on the instance.

    The first call stores yielded items in ``_cached_<name>``; later calls
    replay the cached items without invoking the generator again.
    """
    cache_attr = '_cached_' + function.__name__

    @wraps(function)
    def wrapper(obj, *args, **kwargs):
        try:
            yield from getattr(obj, cache_attr)
        except AttributeError:
            # First call: populate the cache while yielding.
            setattr(obj, cache_attr, [])
            cache = getattr(obj, cache_attr)
            for item in function(obj, *args, **kwargs):
                cache.append(item)
                yield item
    return wrapper
def summarize_sec2hdrgos(self, sec2d_hdrgos):
    """Get counts of header GO IDs and sections.

    Returns {'G': grouped header GO IDs, 'S': grouped section names,
    'U': header GO IDs seen only in the default (ungrouped) section}.
    """
    hdrgos_all = set()
    hdrgos_grouped = set()
    sections_grouped = set()
    for sectionname, hdrgos in sec2d_hdrgos:
        self._chk_hdrgoids(hdrgos)
        hdrgos_all.update(hdrgos)
        if sectionname != HdrgosSections.secdflt:
            hdrgos_grouped.update(hdrgos)
            sections_grouped.add(sectionname)
        # The previous 'hdrgos_ungrouped' accumulator was dead code: the
        # ungrouped set is derived below as all - grouped.
    return {'G': hdrgos_grouped,
            'S': sections_grouped,
            'U': hdrgos_all.difference(hdrgos_grouped)}
def initialize_path(self, path_num=None):
    """Reset consumer state to a copy of the initial state for the next path."""
    self.state = copy(self.initial_state)
    return self.state
def log(array, cutoff):
    """Compute the logarithm of *array* with small values clipped up to *cutoff*."""
    clipped = numpy.copy(array)
    clipped[clipped < cutoff] = cutoff
    return numpy.log(clipped)
def OnLabelSizeIntCtrl(self, event):
    """Label size IntCtrl event handler: store the value and redraw the chart."""
    self.attrs["labelsize"] = event.GetValue()
    post_command_event(self, self.DrawChartMsg)
def validate_meta_object(meta: Dict[str, Any], allow_extra_meta_fields: bool) -> None:
    """Validate that every key in *meta* is a ``META_FIELDS`` key with a value
    of the expected type.

    Extra keys are allowed only when *allow_extra_meta_fields* is True and
    they are prefixed with ``"x-"``; anything else raises ValidationError.
    """
    for key, value in meta.items():
        if key in META_FIELDS:
            # Exact type match is required (no subclasses).
            if type(value) is not META_FIELDS[key]:
                raise ValidationError(
                    f"Values for {key} are expected to have the type {META_FIELDS[key]}, "
                    f"instead got {type(value)}."
                )
        elif allow_extra_meta_fields:
            if key[:2] != "x-":
                raise ValidationError(
                    "Undefined meta fields need to begin with 'x-', "
                    f"{key} is not a valid undefined meta field."
                )
        else:
            raise ValidationError(
                f"{key} is not a permitted meta field. To allow undefined fields, "
                "set `allow_extra_meta_fields` to True."
            )
def to_json(self):
    """Convert the EPW to a JSON-serializable dictionary."""
    if not self.is_data_loaded:
        self._import_data()

    def _jsonify(mapping):
        # Serialise every value of a dict of to_json-able objects.
        return {key: val.to_json() for key, val in mapping.items()}

    hot_wks = _jsonify(self.extreme_hot_weeks)
    cold_wks = _jsonify(self.extreme_cold_weeks)
    typ_wks = _jsonify(self.typical_weeks)
    grnd_temps = _jsonify(self.monthly_ground_temperature)
    return {
        'location': self.location.to_json(),
        'data_collections': [dc.to_json() for dc in self._data],
        'metadata': self.metadata,
        'heating_dict': self.heating_design_condition_dictionary,
        'cooling_dict': self.cooling_design_condition_dictionary,
        'extremes_dict': self.extreme_design_condition_dictionary,
        'extreme_hot_weeks': hot_wks,
        'extreme_cold_weeks': cold_wks,
        'typical_weeks': typ_wks,
        "monthly_ground_temps": grnd_temps,
        "is_ip": self._is_ip,
        "is_leap_year": self.is_leap_year,
        "daylight_savings_start": self.daylight_savings_start,
        "daylight_savings_end": self.daylight_savings_end,
        "comments_1": self.comments_1,
        "comments_2": self.comments_2,
    }
def exists(self, filename):
    """Determine whether an S3 path exists (as an object or a "directory" prefix)."""
    client = boto3.client("s3")
    bucket, path = self.bucket_and_path(filename)
    r = client.list_objects(Bucket=bucket, Prefix=path, Delimiter="/")
    # Either a matching object or a common prefix counts as existing.
    return bool(r.get("Contents") or r.get("CommonPrefixes"))
def asyncImap(asyncCallable, *iterables):
    """itertools.imap for deferred callables: gather all resulting Deferreds."""
    deferreds = imap(asyncCallable, *iterables)
    return gatherResults(deferreds, consumeErrors=True)
def _is_dataset(uri, config_path):
    """Helper for determining whether *uri* points at a dtool dataset."""
    sanitised_uri = dtoolcore.utils.sanitise_uri(uri)
    broker = _get_storage_broker(sanitised_uri, config_path)
    return broker.has_admin_metadata()
def touch(filename, timestamp):
    """Set the atime/mtime of *filename* to *timestamp* (None means now)."""
    from os import utime
    if timestamp is not None:
        # utime expects an (atime, mtime) pair.
        timestamp = (timestamp, timestamp)
    utime(filename, timestamp)
def _handle_wikilink_separator(self):
    """Handle the separator between a wikilink's title and its text."""
    # Leave TITLE context and enter TEXT context.
    self._context ^= contexts.WIKILINK_TITLE
    self._context |= contexts.WIKILINK_TEXT
    self._emit(tokens.WikilinkSeparator())
def write_dot(build_context, conf: Config, out_f):
    """Write the build graph in Graphviz dot format to *out_f*."""
    not_buildenv_targets = get_not_buildenv_targets(build_context)
    prebuilt_targets = get_prebuilt_targets(build_context)
    out_f.write('strict digraph {\n')
    for node in build_context.target_graph.nodes:
        if not (conf.show_buildenv_deps or node in not_buildenv_targets):
            continue
        # Prebuilt (cached) targets are drawn filled grey.
        fillcolor = ('fillcolor="grey",style=filled'
                     if node in prebuilt_targets else '')
        color = TARGETS_COLORS.get(
            build_context.targets[node].builder_name, 'black')
        out_f.write(' "{}" [color="{}",{}];\n'.format(node, color, fillcolor))
    out_f.writelines(
        ' "{}" -> "{}";\n'.format(u, v)
        for u, v in build_context.target_graph.edges
        if conf.show_buildenv_deps
        or (u in not_buildenv_targets and v in not_buildenv_targets))
    out_f.write('}\n\n')
def generate_split_tsv_lines(fn, header):
    """Yield dicts mapping *header* keys to PSM statistic values."""
    for line in generate_tsv_psms_line(fn):
        yield dict(zip(header, line.strip().split('\t')))
def __resize_surface_extents(self):
    """Recompute origin and source extents after a scale or rotation of the image."""
    # The origin is stored normalised; rescale it to the new pixel size.
    self.__origin.X = self.image.get_width() * self.__untransformed_nor_origin.X
    self.__origin.Y = self.image.get_height() * self.__untransformed_nor_origin.Y
    self.source.width = self.image.get_width()
    self.source.height = self.image.get_height()
def create_consumer(self):
    """Context manager body yielding a ``Consumer`` bound to a pooled connection."""
    with self.connection_pool.acquire(block=True) as conn:
        yield self.consumer(conn)
def parse(self, stream):
    """Parse the given stream line by line, normalising CR/LF runs first."""
    normalised = re.sub("[\r\n]+", "\n", stream.read())
    for line in normalised.split("\n"):
        self.parseline(line)
def pull(self):
    """Pull this repository from its origin."""
    repo_root = settings.REPO_ROOT
    pull_from_origin(join(repo_root, self.name))
def header_footer_exists(filepath):
    """Return the match (truthy) if ``Utils.exp`` matches the file's contents."""
    with open(filepath) as f:
        return re.search(Utils.exp, f.read())
def load(self, name, location='local'):
    """Load saved data from the cache directory, JSON first then pickle.

    Returns an empty dict when neither file exists.
    """
    json_path = self._get_path(name, location, file_ext='.json')
    if op.exists(json_path):
        return _load_json(json_path)
    pkl_path = self._get_path(name, location, file_ext='.pkl')
    if op.exists(pkl_path):
        return _load_pickle(pkl_path)
    logger.debug("The file `%s` doesn't exist.", pkl_path)
    return {}
def show(self, imgs, ax=None):
    """Visualize one or more persistence images on a matplotlib axis."""
    ax = ax or plt.gca()
    # Exact list type check kept for behavioural parity.
    if type(imgs) is not list:
        imgs = [imgs]
    for img in imgs:
        ax.imshow(img, cmap=plt.get_cmap("plasma"))
        ax.axis("off")
def pre_save(self, model_instance, add):
    """Resize and commit the image to storage just before saving; return the field value."""
    file = getattr(model_instance, self.attname)
    if file and not file._committed:
        # Clean the name, resize, then commit to storage without re-saving
        # the model itself.
        file.name = self._clean_file_name(model_instance, file.name)
        file.file = self._resize_image(model_instance, file)
        file.save(file.name, file, save=False)
    return file
def make_rawr_zip_payload(rawr_tile, date_time=None):
    """Make a zip archive (as a byte string) from the rawr tile's formatted data."""
    if date_time is None:
        # ZipInfo wants a (Y, M, D, h, m, s) tuple.
        date_time = gmtime()[0:6]
    buf = StringIO()
    with zipfile.ZipFile(buf, mode='w') as z:
        for fmt_data in rawr_tile.all_formatted_data:
            zip_info = zipfile.ZipInfo(fmt_data.name, date_time)
            z.writestr(zip_info, fmt_data.data, zipfile.ZIP_DEFLATED)
    return buf.getvalue()
def rand_unicode(min_char=MIN_UNICHR, max_char=MAX_UNICHR, min_len=MIN_STRLEN,
                 max_len=MAX_STRLEN, **kwargs):
    """Random string over the unicode range, regardless of Python version."""
    from syn.five import unichr
    return unicode(rand_str(min_char, max_char, min_len, max_len, unichr))
def valid_project(self):
    """Warn about — and close — the active project if it is no longer valid."""
    try:
        path = self.projects.get_active_project_path()
    except AttributeError:
        return
    if bool(path):
        if not self.projects.is_valid_project(path):
            if path:
                QMessageBox.critical(
                    self, _('Error'),
                    _("<b>{}</b> is no longer a valid Spyder project! "
                      "Since it is the current active project, it will "
                      "be closed automatically.").format(path))
            self.projects.close_project()
def build_system_error(cls, errors=None):
    """Utility method to build an HTTP 500 System Error response.

    *errors* may be a single error or a list; the default now produces an
    empty list (the previous code wrapped the None default into [None]).
    """
    if errors is None:
        errors = []
    elif not isinstance(errors, list):
        errors = [errors]
    return cls(Status.SYSTEM_ERROR, errors)
def create_reader(self, name, *args, **kwargs):
    """Create a new reader instance for the format called *name*."""
    self._check_format(name)
    reader_cls = self._formats[name]['reader']
    return reader_cls(*args, **kwargs)
def to_index_variable(self):
    """Return this variable as an xarray.IndexVariable (fast path, no copy)."""
    return IndexVariable(self.dims, self._data, self._attrs,
                         encoding=self._encoding, fastpath=True)
def max_pv_count(self):
    """Return the maximum allowed physical volume count for this VG.

    The VG handle is opened for the query and always closed again, even
    when the liblvm call raises (the previous code leaked the open handle
    on error).
    """
    self.open()
    try:
        return lvm_vg_get_max_pv(self.handle)
    finally:
        self.close()
def postalCodeLookup(self, countryCode, postalCode):
    """Look up locations for this country and postal code; returns a Deferred."""
    params = {"country": countryCode, "postalcode": postalCode}
    d = self._call("postalCodeLookupJSON", params)
    # Unwrap the "postalcodes" list from the JSON reply.
    d.addCallback(operator.itemgetter("postalcodes"))
    return d
def write(file_name, data):
    """Encode *data* and write it to *file_name* as a Hip file."""
    with open(file_name, 'w') as f:
        f.write(encode(data))
def create_from_flux(cls, params, emin, emax, flux, scale=1.0):
    """Create a spectral function instance whose integral matches *flux*."""
    params = params.copy()
    # Evaluate with unit amplitude, then rescale the amplitude so the
    # integral over [emin, emax] equals the requested flux.
    params[0] = 1.0
    params[0] = flux / cls.eval_flux(emin, emax, params, scale=scale)
    return cls(params, scale)
def clear_canvas(self):
    """Clear the painted figure and repaint the widget."""
    self.fig = None
    self.fmt = None
    self._qpix_buffer = []
    self.repaint()
async def _process_lines(self, pattern: Optional[str] = None) -> None:
    """Read lines from the ffmpeg output pipe, queueing those matching *pattern*.

    When *pattern* is None every line is queued.  After the pipe is
    exhausted (or errors), the ffmpeg process is reaped off-loop and a
    trailing ``None`` sentinel is queued to signal end of stream.
    """
    if pattern is not None:
        cmp = re.compile(pattern)
        _LOGGER.debug("Start working with pattern '%s'.", pattern)
    while self.is_running:
        try:
            line = await self._input.readline()
            if not line:
                break  # EOF on the pipe
            line = line.decode()
        except Exception:
            # NOTE(review): any read/decode error silently ends the loop —
            # confirm this best-effort behaviour is intended.
            break
        match = True if pattern is None else cmp.search(line)
        if match:
            _LOGGER.debug("Process: %s", line)
            await self._que.put(line)
    try:
        # proc.wait blocks; run it in an executor to keep the loop free.
        await self._loop.run_in_executor(None, self._proc.wait)
    finally:
        await self._que.put(None)
        _LOGGER.debug("Close read ffmpeg output.")
def _find_colour(particle, start_index, screen_data):
    """Return the palette index matching *screen_data*'s colours, else *start_index*."""
    _, fg, attr, bg = screen_data
    index = start_index
    for i, colours in enumerate(particle.colours):
        if colours == (fg, attr, bg):
            index = i
            break
    return index
def ec2_instances():
    """Use the EC2 API to get a list of all machines."""
    region = boto.ec2.get_region(REGION)
    reservations = region.connect().get_all_instances()
    instances = []
    for reservation in reservations:
        instances.extend(reservation.instances)
    return instances
def extendMarkdown(self, md, md_globals):
    """Add an instance of TableProcessor to the Markdown BlockParser."""
    md.parser.blockprocessors.add(
        'table', TableProcessor(md.parser), '<hashheader')
def _process(self, metric):
    """Process *metric* under the handler lock, logging any exception.

    NOTE(review): ``acquire()`` happens inside the inner ``try`` and the
    ``finally`` releases only when ``locked()`` reports the lock held;
    ``locked()`` checks lock state rather than ownership — confirm this
    is safe if other threads use the same lock.
    """
    if not self.enabled:
        return
    try:
        try:
            self.lock.acquire()
            self.process(metric)
        except Exception:
            # Handler errors are logged, never propagated.
            self.log.error(traceback.format_exc())
    finally:
        if self.lock.locked():
            self.lock.release()
def operations(self):
    """A list of all operations done by the IRSB, as libVEX enum names."""
    return [expr.op for expr in self.expressions if hasattr(expr, 'op')]
def try_except_handler(self, node):
    """Visit a try/except node while ignoring the exceptions it catches.

    The handler's excepted types are merged into ``self.ignore_exceptions``
    for the duration of visiting the ``try`` body; a bare ``except:``
    (``handler.type is None``) makes the ignore list unlimited (None).
    The previous list is restored afterwards.
    """
    excepted_types = []
    for handler in node.handlers:
        if handler.type is None:
            # Bare except: everything is caught.
            excepted_types = None
            break
        if isinstance(handler.type, ast.Tuple):
            excepted_types.extend([exception_type
                                   for exception_type in handler.type.elts])
        else:
            excepted_types.append(handler.type)
    new_exception_list = self.ignore_exceptions
    if self.ignore_exceptions is not None:
        if excepted_types is None:
            new_exception_list = None
        else:
            # Deduplicate while combining with the currently ignored set.
            new_exception_list = list(set(excepted_types + self.ignore_exceptions))
    old_exception_handlers, self.ignore_exceptions = \
        self.ignore_exceptions, new_exception_list
    node.body = [self.visit(node_item) for node_item in node.body]
    self.ignore_exceptions = old_exception_handlers
def _get_f2rx(self, C, r_x, r_1, r_2):
    """f2 scaling coefficient defined in equation 10 (quadratic in the
    normalised distance)."""
    drx = (r_x - r_1) / (r_2 - r_1)
    return self.CONSTS["h4"] + C["h5"] * drx + C["h6"] * drx ** 2.
def run_pylint(self):
    """Run pylint code analysis on the current editor file."""
    # Optionally save first; abort if the save was cancelled/failed.
    if (self.get_option('save_before', True)
            and not self.main.editor.save()):
        return
    self.switch_to_plugin()
    self.analyze(self.main.editor.get_current_filename())
def docstring_with_summary(docstring, pairs, key_header, summary_type):
    """Return *docstring* joined with a rendered summary table of *pairs*."""
    lines = [docstring, "Summary of {}:".format(summary_type), ""]
    lines += summary_table(pairs, key_header)
    lines.append("")
    return "\n".join(lines)
def dict_partial_cmp(target_dict, dict_list, ducktype):
    """Whether *target_dict* partially matches any dict in *dict_list*.

    Each value is wrapped with *ducktype*; wrappers exposing ``mtest`` are
    used as matchers, otherwise plain equality applies.
    """
    for called_dict in dict_list:
        if len(target_dict) > len(called_dict):
            continue
        matched = {}
        for key in target_dict:
            dtype = ducktype(target_dict[key])
            if hasattr(dtype, "mtest"):
                if key in called_dict and dtype.mtest(called_dict[key]):
                    matched[key] = target_dict[key]
            elif key in called_dict and dtype == called_dict[key]:
                matched[key] = target_dict[key]
        if matched == target_dict:
            return True
    return False
def upload_file(request):
    """Upload a zip file containing media and report the copied files."""
    if request.method != 'POST':
        # GET: show an empty upload form.
        form = MediaForm()
        return render(request, 'django_admin/upload_media.html', {'form': form})
    form = MediaForm(request.POST, request.FILES)
    if form.is_valid():
        context_dict = {}
        try:
            context_dict['copied_files'] = update_media_file(
                request.FILES['zip_file'])
        except Exception as e:
            context_dict['error_message'] = e.message
        return render(request, 'django_admin/transfer_media_message.html',
                      context_dict)
    # POST with an invalid form: redisplay it with errors.
    return render(request, 'django_admin/upload_media.html', {'form': form})
def parse_changelog(args: Any) -> Tuple[str, str]:
    """Return an updated changelog and the list of changes.

    The "Unreleased" section of CHANGELOG.rst is re-titled with
    ``args.tag`` and today's date.
    """
    with open("CHANGELOG.rst", "r") as fh:
        m = re.match(
            pattern=r"(.*?Unreleased\n---+\n)(.+?)(\n*[^\n]+\n---+\n.*)",
            string=fh.read(),
            flags=re.DOTALL,
        )
    assert m
    header, changes, tail = m.groups()
    tag = "%s - %s" % (args.tag, datetime.date.today().isoformat())
    tagged = "\n%s\n%s\n%s" % (tag, "-" * len(tag), changes)
    if args.verbose:
        print(tagged)
    return "".join((header, tagged, tail)), changes
def _mutect2_filter(broad_runner, in_file, out_file, ref_file):
    """Filter MuTect2 calls — a separate FilterMutectCalls step in GATK4."""
    params = ["-T", "FilterMutectCalls",
              "--reference", ref_file,
              "--variant", in_file,
              "--output", out_file]
    return broad_runner.cl_gatk(params, os.path.dirname(out_file))
def show_busy(self):
    """Hide the question group and enable the busy cursor while working."""
    self.progress_bar.show()
    self.question_group.setEnabled(False)
    self.question_group.setVisible(False)
    enable_busy_cursor()
    self.repaint()
    # Flush pending paint events so the busy state is visible immediately.
    qApp.processEvents()
    self.busy = True
def do_gh(self, arg):
    """gh - go with exception handled.

    Marks the last debug event as handled, then continues execution.
    Takes no arguments and cannot be prefixed.
    """
    if self.cmdprefix:
        raise CmdError("prefix not allowed")
    if arg:
        raise CmdError("too many arguments")
    if self.lastEvent:
        self.lastEvent.continueStatus = win32.DBG_EXCEPTION_HANDLED
    return self.do_go(arg)
def start(self):
    """Publish device and node properties, then service MQTT forever.

    NOTE(review): after a KeyboardInterrupt the handler disconnects but the
    ``while True`` loop continues — confirm whether a break/return was
    intended after ``mqtt.disconnect()``.
    """
    self.publish_properties()
    self.subscribe_topics()
    gc.collect()
    self.set_state("ready")
    while True:
        try:
            # Reconnect WiFi if the link dropped, then pump MQTT.
            if not utils.wlan.isconnected():
                utils.wifi_connect()
            self.publish_data()
            self.mqtt.check_msg()
            idle()
            sleep(1)
        except KeyboardInterrupt:
            self.set_state("disconnected")
            self.mqtt.disconnect()
def process_m2m_through_save(self, obj, created=False, **kwargs):
    """Process M2M post-save for a custom through model.

    Only newly created through rows trigger a 'post_add' notification.
    """
    if not created:
        return
    return self._process_m2m_through(obj, 'post_add')
def component_doi(soup):
    """Collect all object-id tags with pub-id-type="doi" as component DOI records."""
    component_doi = []
    object_id_tags = raw_parser.object_id(soup, pub_id_type="doi")
    component_list = components(soup)
    for position, tag in enumerate(object_id_tags, start=1):
        component_object = {}
        component_object["doi"] = doi_uri_to_doi(tag.text)
        component_object["position"] = position
        # Attach the component type when a component shares this DOI.
        for component in component_list:
            if "doi" in component and component["doi"] == component_object["doi"]:
                component_object["type"] = component["type"]
        component_doi.append(component_object)
    return component_doi
def generate(self, step, params):
    """Fill in the file field with content built from *params*."""
    params = step.recurse(base.DictFactory, params,
                          force_sequence=step.sequence)
    filename, content = self._make_content(params)
    return django_files.File(content.file, filename)
def on_max_tab_name_length_changed(self, spin):
    """Store the new max-tab-name-length in dconf and refresh the widgets."""
    value = int(spin.get_value())
    self.settings.general.set_int('max-tab-name-length', value)
    self.prefDlg.update_vte_subwidgets_states()
def load_entry_point(self, group, name):
    """Return the *name* entry point of *group*, or raise ImportError."""
    ep = self.get_entry_info(group, name)
    if ep is None:
        raise ImportError("Entry point %r not found" % ((group, name),))
    return ep.load()
def oplog_thread_join(self):
    """Stop (join) all the OplogThreads."""
    LOG.info("MongoConnector: Stopping all OplogThreads")
    for thread in self.shard_set.values():
        thread.join()
def composite_decorator(func):
    """Decorator wrapping a method so it returns the isochrone-weighted sum.

    Each isochrone's method of the same name is called with the given
    arguments; the results are weighted and summed along axis 0.
    """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        parts = [
            weight * getattr(iso, func.__name__)(*args, **kwargs)
            for weight, iso in zip(self.weights, self.isochrones)
        ]
        return np.sum(parts, axis=0)
    return wrapper
def s_demand(self, bus):
    """Return the total complex power demand at *bus*.

    Fixed bus demand minus the contribution of dispatchable-load
    generators attached to the bus.
    """
    vload = array([complex(g.p, g.q) for g in self.generators
                   if (g.bus == bus) and g.is_load], dtype=complex64)
    fixed = complex(bus.p_demand, bus.q_demand)
    return -sum(vload) + fixed
def generate_megaman_manifold(sampling=2, nfolds=2, rotate=True,
                              random_state=None):
    """Generate a manifold of the megaman data via repeated S-curve folding."""
    X, c = generate_megaman_data(sampling)
    for _ in range(nfolds):
        X = np.hstack([_make_S_curve(col) for col in X.T])
    if rotate:
        # Apply a random orthogonal rotation from the SVD of a Gaussian matrix.
        rng = check_random_state(random_state)
        R = rng.randn(X.shape[1], X.shape[1])
        U, _s, _VT = np.linalg.svd(R)
        X = np.dot(X, U)
    return X, c
def focus_prev_sibling(self):
    """Focus the previous sibling of the currently focussed message in the thread tree."""
    mid = self.get_selected_mid()
    localroot = self._sanitize_position((mid,))
    if localroot == self.get_focus()[1]:
        newpos = self._tree.prev_sibling_position(mid)
        if newpos is not None:
            newpos = self._sanitize_position((newpos,))
    else:
        # Focus is inside the subtree: jump back to its local root first.
        newpos = localroot
    if newpos is not None:
        self.body.set_focus(newpos)
def _nested_lookup(document, references, operation):
    """Recursively yield values in *document* for which *operation*(key, value) holds.

    Matching (container, key) pairs are also appended to *references* so
    callers can mutate the document afterwards.
    """
    if isinstance(document, list):
        for item in document:
            yield from NestedLookup._nested_lookup(item, references, operation)
    if isinstance(document, dict):
        for key, value in document.items():
            if operation(key, value):
                references.append((document, key))
                yield value
            elif isinstance(value, dict):
                yield from NestedLookup._nested_lookup(value, references, operation)
            elif isinstance(value, list):
                for item in value:
                    yield from NestedLookup._nested_lookup(item, references, operation)
def translate_escaped_str(cls, str_val):
    """Return *str_val* escaped and wrapped in double quotes (reusable by
    most interpreters)."""
    if isinstance(str_val, string_types):
        str_val = str_val.encode('unicode_escape')
        if sys.version_info >= (3, 0):
            str_val = str_val.decode('utf-8')
        str_val = str_val.replace('"', r'\"')
    return '"{}"'.format(str_val)
def _get_backends():
    """Instantiate and return the list of configured service backends."""
    backend_paths = getattr(
        settings,
        'MAMA_CAS_SERVICE_BACKENDS',
        ['mama_cas.services.backends.SettingsBackend']
    )
    return [import_string(path)() for path in backend_paths]