code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def shell(ctx):
    """Open an interactive engineer shell.

    Exposes the parent command's ``widget`` (if any) as the ``engineer``
    object inside an InteractiveConsole, and greets the user with the
    host taken from the parent command's parameters.
    """
    # NOTE(review): assumes `code` is the stdlib `code` module — confirm import.
    shell = code.InteractiveConsole({"engineer": getattr(ctx.parent, "widget", None)})
    shell.interact("\n".join([
        "Engineer connected to %s" % ctx.parent.params["host"],
        "Dispatch available through the 'engineer' object"
    ]))
open an engineer shell
def getOverlayFlag(self, ulOverlayHandle, eOverlayFlag):
    """Return the current setting of a flag on the given overlay.

    Despite the upstream description, this *reads* the flag: it calls the
    native getOverlayFlag entry point and returns a (result, pbEnabled)
    pair, where pbEnabled is the out-parameter bool filled in by the call.
    """
    fn = self.function_table.getOverlayFlag
    pbEnabled = openvr_bool()  # out-parameter, passed by reference below
    result = fn(ulOverlayHandle, eOverlayFlag, byref(pbEnabled))
    return result, pbEnabled
Gets the current flag setting for a given overlay
def draw(self):
    """Render the virtual keyboard onto the delegate surface, if enabled."""
    if self.state <= 0:
        # Keyboard hidden/disabled: nothing to render.
        return
    renderer, surface, layout = self.renderer, self.surface, self.layout
    renderer.draw_background(surface, layout.position, layout.size)
    for key in (k for row in layout.rows for k in row.keys):
        renderer.draw_key(surface, key)
Draw the virtual keyboard into the delegate surface object if enabled.
def surviors_are_inconsistent(survivor_mapping: Mapping[BaseEntity, Set[BaseEntity]]) -> Set[BaseEntity]:
    """Return every victim that also appears as a survivor key.

    A non-empty result means the mapping is inconsistent: some entity is
    marked both as a survivor and as somebody else's victim.
    """
    all_victims = itt.chain.from_iterable(survivor_mapping.values())
    return {victim for victim in all_victims if victim in survivor_mapping}
Check that no entity appears both as a survivor key and as another survivor's victim (no transitive mappings).
def const(const): try: return getattr(_c, const) except AttributeError: raise FSQEnvError(errno.EINVAL, u'No such constant:'\ u' {0}'.format(const)) except TypeError: raise TypeError(errno.EINVAL, u'const name must be a string or'\ ...
Convenience wrapper to yield the value of a constant
def install_plugin(username, repo):
    """Install a Blended plugin from GitHub via pip.

    Builds a ``git+git://github.com/<username>/<repo>.git`` URL and runs an
    upgrade install (``-U``) through pip's in-process entry point.
    NOTE(review): ``pip.main`` was removed in pip 10 and GitHub no longer
    serves the git:// protocol — confirm against the pinned pip version.
    """
    print("Installing plugin from " + username + "/" + repo)
    pip.main(['install', '-U', "git+git://github.com/" + username + "/" + repo + ".git"])
Installs a Blended plugin from GitHub
def _convert_number(self, number): number = float(number) return int(number) if number.is_integer() else float(number)
Converts a number to float or int as appropriate
def write(self, s):
    """Forward *s* to the logger, one log record per non-empty line."""
    for chunk in re.split(r'\n+', s):
        if chunk:
            self._logger.log(self._level, chunk)
Write message to logger.
def _preloading_env(self): ctx = self.env.globals try: ctx['random_model'] = lambda *a, **kw: None ctx['random_models'] = lambda *a, **kw: None yield self.env finally: ctx['random_model'] = jinja2.contextfunction(random_model) ctx['rand...
A "stripped" jinja environment.
def _calc_footprint(self): corners = [self.corner(corner) for corner in self.corner_types()] coords = [] for corner in corners: shape = corner.get_shape(corner.crs) coords.append([shape.x, shape.y]) shp = Polygon(coords) self._footprint = GeoVector(shp, se...
Return rectangle in world coordinates, as GeoVector.
def _get_filepaths(self): self._printer(str(self.__len__()) + " file paths have been parsed in " + str(self.timer.end)) if self._hash_files: return pool_hash(self.filepaths) else: return self.filepaths
Filters list of file paths to remove non-included, remove excluded files and concatenate full paths.
def sha256sum(filename):
    """Return the hex SHA256 digest of the file at *filename*.

    Reads in 128 KiB chunks through a reusable buffer so arbitrarily large
    files are hashed without loading them fully into memory.
    """
    digest = hashlib.sha256()
    view = memoryview(bytearray(128 * 1024))
    with open(filename, 'rb', buffering=0) as stream:
        read_into = stream.readinto
        n = read_into(view)
        while n:  # readinto returns 0 at EOF
            digest.update(view[:n])
            n = read_into(view)
    return digest.hexdigest()
Return SHA256 hash of file.
def _reduce(self, op, name, axis=0, skipna=True, numeric_only=None, filter_type=None, **kwds): func = getattr(self, name, None) if func is None: raise TypeError("{klass} cannot perform the operation {op}".format( klass=self.__class__.__name__, op=n...
perform the reduction type operation if we can
def increase_last(self, k):
    """Add *k* to the most recent result, if there is one."""
    last = self._last_idx
    if last is None:
        return
    self.results[last] += k
Increase the last result by k.
def parameters(self):
    """Return a dict of every parameter in PARAMETERS that is currently set.

    Parameters whose getter raises AttributeError are silently omitted.
    """
    found = {}
    for param_name in self.PARAMETERS:
        try:
            value = self.get_parameter(param_name)
        except AttributeError:
            continue  # unset parameter: skip it
        found[param_name] = value
    return found
Get dict with all set parameters.
def exists_query(expression): def _exists(index, expression=expression): ev = expression() if callable(expression) else expression if ev: return [ store_key for store_keys in index.get_index().values() for store_key in store...
Check that documents have a key that satisfies expression.
def _active_cli(self): cli = self.cli while cli._sub_cli: cli = cli._sub_cli return cli
Return the active `CommandLineInterface`.
def _new_from_cdata(cls, cdata: Any) -> "Color": return cls(cdata.r, cdata.g, cdata.b)
new in libtcod-cffi
def unpad(cls, data):
    """Strip PKCS#7-style padding: the last byte gives the pad length.

    On Python 3 the unpadded bytes are additionally decoded to str.
    """
    pad_len = ord(data[-1:])
    stripped = data[:-pad_len]
    if sys.version_info > (3, 0):
        return stripped.decode()
    return stripped
Unpads data that has been padded
def usage(title, message, tutorial_message, tutorial, css_path=CSS_PATH): env = Environment() env.loader = FileSystemLoader(osp.join(CONFDIR_PATH, 'templates')) usage = env.get_template("usage.html") return usage.render(css_path=css_path, title=title, intro_message=message, tutor...
Print a usage message on the rich text view
def json(self): if self.search_results is None: return None result = [] for row in self.search_results['rows']: obj = {} for index in range(0, len(self.search_results['fields'])): obj[self.search_results['fields'][index]] = row[index] ...
Returns the search results as a list of JSON objects.
def _make_read_func(file_obj): @ffi.callback("cairo_read_func_t", error=constants.STATUS_READ_ERROR) def read_func(_closure, data, length): string = file_obj.read(length) if len(string) < length: return constants.STATUS_READ_ERROR ffi.buffer(data, length)[:len(string)] = stri...
Return a CFFI callback that reads from a file-like object.
def cli(env, identifier, enable):
    """Toggle the IPMI management interface on a hardware server.

    Resolves *identifier* to a hardware id, issues
    ``Hardware_Server.toggleManagementInterface(enable)`` against the
    SoftLayer API, and prints the raw API result.
    """
    mgr = SoftLayer.HardwareManager(env.client)
    hw_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'hardware')
    result = env.client['Hardware_Server'].toggleManagementInterface(enable, id=hw_id)
    env.fout(result)
Toggle the IPMI interface on and off
def draw_image(self, image, item, source): top, left = item['top'], item['left'] width, height = item['width'], item['height'] image_file = item['image'] img = Image.open(source) img = self.rotate(img) iwidth, iheight = img.size wratio = width / iwidth hra...
Add an image to the image
def should_reschedule(self, iteration, bracket_iteration):
    """Decide whether another iteration must be scheduled.

    Reschedule only once the current bracket is exhausted
    (bracket_iteration has reached the bracket size) and a next bracket
    exists (non-negative bracket value for iteration + 1).
    """
    current_bracket = self.get_bracket(iteration=iteration)
    finished_bracket = bracket_iteration >= current_bracket
    return finished_bracket and self.get_bracket(iteration=iteration + 1) >= 0
Return a boolean to indicate if we need to reschedule another iteration.
def db_for_write(self, model, **hints):
    """Route writes for apps with their own database to that database.

    Returns the model's explicit ``_db_alias`` when set, the app label
    otherwise, or None when this router does not manage the app.
    """
    app_label = model._meta.app_label
    if app_label not in self._apps:
        return None
    return getattr(model, '_db_alias', app_label)
If the app has its own database, use it for writes
def _valid_numpy_subdtype(x, numpy_types): for t in numpy_types: assert not np.issubdtype(np.generic, t) return any(np.issubdtype(x.dtype, t) for t in numpy_types)
Return True if x's dtype is a subtype of any dtype in numpy_types.
def section(title, bar=OVERLINE, strm=sys.stdout):
    """Print a centered, bold section heading for demo routines.

    The title is centered to the terminal width and underlined with *bar*
    repeated and truncated to the same width.
    NOTE(review): *strm* is accepted but never used — output goes through
    printy(); confirm whether it should be honored.
    """
    width = utils.term.width
    printy(bold(title.center(width)))
    printy(bold((bar * width)[:width]))
Helper function for testing demo routines
def validate_response(self): if self.response is None: logger.error("Failed to submit") return False if not self.response: logger.error( "HTTP status %d: failed to submit to %s", self.response.status_code, self.response....
Checks that the response is valid and import succeeded.
def import_eit_fzj(self, filename, configfile, correction_file=None, timestep=None, **kwargs): df_emd, dummy1, dummy2 = eit_fzj.read_3p_data( filename, configfile, **kwargs ) if correction_file is not None: eit_fzj_utils.appl...
EIT data import for FZJ Medusa systems
def _setup_piddir(self):
    """Create (and chown) the directory for the PID file, if configured.

    No-op when no pidfile is set. The directory is created with mode 0o777
    masked by the daemon's umask, then ownership is handed to the
    configured uid/gid so the daemon can still write its PID after
    dropping privileges.
    """
    if self.pidfile is None:
        return
    piddir = os.path.dirname(self.pidfile)
    if not os.path.isdir(piddir):
        # chown only happens when we created the directory ourselves.
        os.makedirs(piddir, 0o777 & ~self.umask)
        os.chown(piddir, self.uid, self.gid)
Create the directory for the PID file if necessary.
def split_pieces(piece_list, segments, num): piece_groups = [] pieces = list(piece_list) while pieces: for i in range(segments): p = pieces[i::segments][:num] if not p: break piece_groups.append(p) pieces = pieces[num * segments:] retur...
Prepare a list of all pieces grouped together
def neighborhood(self, node, degree=4): assert self.by_name[node.name] == node already_visited = frontier = set([node.name]) for _ in range(degree): neighbor_names = set() for node_name in frontier: outgoing = set(n.name for n in self.by_input[node_name]) incoming = set(self.by_n...
Am I really handcoding graph traversal please no
def generate_trajs(self, M, N, start=None, stop=None, dt=1): from msmtools.generation import generate_trajs return generate_trajs(self._P, M, N, start=start, stop=stop, dt=dt)
Generates M random trajectories of length N each with time step dt
def count(self):
    """Total number of array cells: the product of all per-bound counts."""
    multiply = lambda acc, n: acc * n
    per_bound_counts = (bound.count for bound in self.bounds)
    return functools.reduce(multiply, per_bound_counts)
Total number of array cells
def hijack_require_http_methods(fn): required_methods = ['POST'] if hijack_settings.HIJACK_ALLOW_GET_REQUESTS: required_methods.append('GET') return require_http_methods(required_methods)(fn)
Wrapper for "require_http_methods" decorator. POST required by default, GET can optionally be allowed
def to_dict(self): attributes = dict(self.attributes.items()) if self.style: attributes.update({"style": dict(self.style.items())}) vdom_dict = {'tagName': self.tag_name, 'attributes': attributes} if self.event_handlers: event_handlers = dict(self.event_handlers.i...
Converts VDOM object to a dictionary that passes our schema
def _check_frames(self, frames, fill_value): if self.seekable(): remaining_frames = self.frames - self.tell() if frames < 0 or (frames > remaining_frames and fill_value is None): frames = remaining_frames elif frames < 0: ...
Reduce frames to no more than are available in the file.
def partial_trace(self, qubits: Qubits) -> 'QubitVector': N = self.qubit_nb R = self.rank if R == 1: raise ValueError('Cannot take trace of vector') new_qubits: List[Qubit] = list(self.qubits) for q in qubits: new_qubits.remove(q) if not new_qubits...
Return the partial trace over some subset of qubits
def manage_file_analysis(args: argparse.Namespace, filename: str, data: object) -> None: key = DataStore.hashfile(filename) print('Analyzing {} --> {}'.format(filename, key)) if data.check_key(key): fit = LineFit(filename, data=data.get_data(key)) else: fit = LineFit(filename) if arg...
Take care of the analysis of a datafile
def match_file(apikey, path, metadata=None): import audioread with audioread.audio_open(path) as f: return match(apikey, iter(f), f.samplerate, int(f.duration), f.channels, metadata)
Uses the audioread library to decode an audio file and match it.
def dict_to_xml(xml_dict): import lxml.etree as etree root_tag = list(xml_dict.keys())[0] root = etree.Element(root_tag) _dict_to_xml_recurse(root, xml_dict[root_tag]) return root
Converts a dictionary to an XML ElementTree Element
def _mirbase_stats(data, out_dir): utils.safe_makedir(out_dir) out_file = os.path.join(out_dir, "%s_bcbio_mirbase.txt" % dd.get_sample_name(data)) out_file_novel = os.path.join(out_dir, "%s_bcbio_mirdeeep2.txt" % dd.get_sample_name(data)) mirbase_fn = data.get("seqbuster", None) if mirbase_fn: ...
Create stats from miraligner
def _glob_events_files(self, paths, recursive): event_files = [] for path in paths: dirs = tf.gfile.Glob(path) dirs = filter(lambda x: tf.gfile.IsDirectory(x), dirs) for dir in dirs: if recursive: dir_files_pair = [(root, filenames) for root, _, filenames in tf.gfile.Walk(dir...
Find all tf events files under a list of paths recursively.
def guess_python_env():
    """Guess the tox-style python env name for the running interpreter.

    Returns 'pypy'/'pypy3' on PyPy, otherwise e.g. 'py38'.
    """
    version, major, minor = get_version_info()
    if 'PyPy' in version:
        return 'pypy3' if major == 3 else 'pypy'
    return 'py{}{}'.format(major, minor)
Guess the default python env to use.
def edit_line(self, line):
    """Apply every registered code expression to *line*, in order."""
    edited = line
    for code, code_obj in self.code_objs.items():
        edited = self.__edit_line(edited, code, code_obj)
    return edited
Edit a single line using the code expression.
def _LogInvalidRunLevels(states, valid): invalid = set() for state in states: if state not in valid: invalid.add(state) if invalid: logging.warning("Invalid init runlevel(s) encountered: %s", ", ".join(invalid))
Log any invalid run states found.
def allows_simple_recursion(self): rec_level = self.aggregate.config["recursionlevel"] if rec_level >= 0 and self.recursion_level >= rec_level: log.debug(LOG_CHECK, "... no, maximum recursion level reached.") return False if self.extern[0]: log.debug(LOG_CHECK...
Check recursion level and extern status.
def extract_fields(d, fields, delimiter='|'):
    """Extract *fields* from mapping *d* into a new dict suitable for CSV.

    Missing fields map to None; text values are UTF-8 encoded and list
    values are joined with *delimiter*.
    NOTE(review): the bare ``unicode`` builtin makes this Python 2-only.
    """
    rd = {}
    for f in fields:
        v = d.get(f, None)
        if isinstance(v, (str, unicode)):
            # Python 2: normalize text to UTF-8 bytes for csv writers.
            v = v.encode('utf8')
        elif isinstance(v, list):
            v = delimiter.join(v)
        rd[f] = v
    return rd
get values out of an object ``d`` for saving to a csv
def middleware_class(api=None): def decorator(middleware_class): apply_to_api = hug.API(api) if api else hug.api.from_object(middleware_class) apply_to_api.http.add_middleware(middleware_class()) return middleware_class return decorator
Registers a middleware class
def dinfflowdir(np, filleddem, flowangle, slope, workingdir=None, mpiexedir=None, exedir=None, log_file=None, runtime_file=None, hostfile=None): fname = TauDEM.func_name('dinfflowdir') return TauDEM.run(FileClass.get_executable_fullpath(fname, exedir), {'-fe...
Run Dinf flow direction
def stick_perm(presenter, egg, dist_dict, strategy): np.random.seed() egg_pres, egg_rec, egg_features, egg_dist_funcs = parse_egg(egg) regg = order_stick(presenter, egg, dist_dict, strategy) regg_pres, regg_rec, regg_features, regg_dist_funcs = parse_egg(regg) regg_pres = list(regg_pres) egg_pre...
Computes weights for one reordering using stick-breaking method
def create(model_config, batch_size, normalize=True, num_workers=0, augmentations=None): path = model_config.data_dir('mnist') train_dataset = datasets.MNIST(path, train=True, download=True) test_dataset = datasets.MNIST(path, train=False, download=True) augmentations = [ToArray()] + (augmentations if a...
Create a MNIST dataset, normalized
def __we_c(cls, calib, tc, temp, we_v): offset_v = calib.pid_elc_mv / 1000.0 response_v = we_v - offset_v response_c = tc.correct(temp, response_v) if response_c is None: return None we_c = response_c + offset_v return we_c
Compute weC from sensor temperature compensation of weV
def repackage_var(h):
    """Detach *h* (a tensor/Variable or nested tuple of them) from its
    autograd history, recursively.

    Uses ``torch.Tensor.detach()`` on torch >= 0.4 (module-level
    IS_TORCH_04 flag); wraps ``.data`` in a fresh Variable on older
    versions. Non-tensor inputs are assumed iterable and recursed into.
    """
    if IS_TORCH_04:
        return h.detach() if type(h) == torch.Tensor else tuple(repackage_var(v) for v in h)
    else:
        return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
Wraps h in new Variables, to detach them from their history.
def finalize(self, params, rep): if params.get("saveNet", True): saveDir = os.path.join(params["path"], params["name"], "model_{}.pt".format(rep)) torch.save(self.model, saveDir)
Save the full model once we are done.
def _edit_config(xpath, element): query = {'type': 'config', 'action': 'edit', 'xpath': xpath, 'element': element} response = __proxy__['panos.call'](query) return _validate_response(response)
Sends an edit request to the device.
def multiply(a, col): a = a.reshape(4, 4, 4) col = col.reshape(4, 8) return fcat( rowxcol(a[0], col), rowxcol(a[1], col), rowxcol(a[2], col), rowxcol(a[3], col), )
Multiply a matrix by one column.
def run_get_clusters_from_file(self, clusters_infile, all_ref_seqs, rename_dict=None): if rename_dict is None: rename_dict = {} seq_reader = pyfastaq.sequences.file_reader(self.infile) names_list_from_fasta_file = [seq.id for seq in seq_reader] names_set_from_fasta_file = set...
Instead of running cdhit, gets the clusters info from the input file.
def image_load_time(self): load_times = self.get_load_times('image') return round(mean(load_times), self.decimal_precision)
Returns aggregate image load time for all pages.
def run(self, stdscr): self.win = stdscr curses.curs_set(0) stdscr.timeout(0) curses.init_pair(1, curses.COLOR_CYAN, curses.COLOR_BLACK) curses.init_pair(2, curses.COLOR_GREEN, curses.COLOR_BLACK) curses.init_pair(3, curses.COLOR_YELLOW, curses.COLOR_BLACK) curses...
Initialize curses and refresh in a loop
def configure(self, address): global nanoconfig_started if len(self._endpoints): raise ValueError("Nanoconfig address must be sole endpoint") endpoint_id = _nn_check_positive_rtn( wrapper.nc_configure(self.fd, address) ) if not nanoconfig_started: ...
Configure socket's addresses with nanoconfig
def tree_iter_nexson_proxy(nexson_proxy): nexml_el = nexson_proxy._nexml_el tg_order = nexml_el['^ot:treesElementOrder'] tgd = nexml_el['treesById'] for tg_id in tg_order: tg = tgd[tg_id] tree_order = tg['^ot:treeElementOrder'] tbid = tg['treeById'] otus = tg['@otus'] ...
Iterates over NexsonTreeProxy objects in order determined by the nexson blob
def cmd_wrapper(cmd_name, **kwds): cmd = si.Commands(cmd_name) if not cmd: raise Exception(cmd_name + " doesnt found!") for arg in cmd.Arguments: value = kwds.get(arg.Name) if value: arg.Value = value return cmd.Execute()
Wrap and execute a softimage command accepting named arguments
def verify_enroll(self, response): seed = session.pop('_u2f_enroll_') try: new_device, cert = complete_register(seed, response, self.__facets_list) except Exception as e: if self.__call_fail_enroll: self.__call_fail_enroll(e) return { ...
Verifies and saves U2F enroll
def info(args): session = c.Session(args) if "all" in args["names"]: feeds = session.list_feeds() else: feeds = args["names"] for feed in feeds: aux.pretty_print(session, feed)
Provide information of a number of feeds
def _raw_weights(self): if self._debug: return np.array([[],[],[],[]]) if not self._running: raise ValueError('Weight sensor is not running!') if len(self._weight_buffers) == 0: time.sleep(0.3) if len(self._weight_buffers) == 0: rai...
Create a numpy array containing the raw sensor weights.
def binary_to_term(data): if not isinstance(data, bytes): raise ParseException('not bytes input') size = len(data) if size <= 1: raise ParseException('null input') if b_ord(data[0]) != _TAG_VERSION: raise ParseException('invalid version') try: i, term = _binary_to_ter...
Decode Erlang terms within binary data into Python types
def geo_to_pixel(geo, level): lat, lon = float(geo[0]), float(geo[1]) lat = TileSystem.clip(lat, TileSystem.LATITUDE_RANGE) lon = TileSystem.clip(lon, TileSystem.LONGITUDE_RANGE) x = (lon + 180) / 360 sin_lat = sin(lat * pi / 180) y = 0.5 - log((1 + sin_lat) / (1 - sin_la...
Transform from geo coordinates to pixel coordinates
def check(self, feature): mapper = feature.as_dataframe_mapper() mapper.fit_transform(self.X, y=self.y)
Check that fit_transform can be called on reference data
def run_cli( executable, mets_url=None, resolver=None, workspace=None, page_id=None, log_level=None, input_file_grp=None, output_file_grp=None, parameter=None, working_dir=None, ): workspace = _get_workspace(workspace, resolver, mets_ur...
Create a workspace for mets_url and run MP CLI through it
def addStampAnnot(self, rect, stamp=0):
    """Add a 'rubber stamp' annotation inside *rect* on this page.

    *stamp* selects the stamp icon (backend-defined numeric code).
    Returns the new annotation object, or None when the backend call
    yields a falsy value.
    """
    CheckParent(self)  # raises if the page was orphaned from its document
    val = _fitz.Page_addStampAnnot(self, rect, stamp)
    if not val: return
    val.thisown = True
    # Weak-reference the parent page to avoid a reference cycle, and
    # register the annot so the page can track/invalidate it later.
    val.parent = weakref.proxy(self)
    self._annot_refs[id(val)] = val
    return val
Add a 'rubber stamp' in a rectangle.
def grace_period(msg='', seconds=10): import time print(msg) override = util_arg.get_argflag(('--yes', '--y', '-y')) print('starting grace period') if override: print('ending based on command line flag') return True for count in reversed(range(1, seconds + 1)): time.sleep...
Gives user a window to stop a process before it happens
def left_button_down(self, obj, event_type): click_pos = self.iren.GetEventPosition() picker = vtk.vtkWorldPointPicker() picker.Pick(click_pos[0], click_pos[1], 0, self.renderer) self.pickpoint = np.asarray(picker.GetPickPosition()).reshape((-1, 3)) if np.any(np.isnan(self.pickpo...
Register the event for a left button down click
def list_conditions(self, service_id, version_number): content = self._fetch("/service/%s/version/%d/condition" % (service_id, version_number)) return map(lambda x: FastlyCondition(self, x), content)
Gets all conditions for a particular service and version.
def entries(self): Tags = Query() tag = self.table.get(Tags.name == self.name) posts = tag['post_ids'] for id in posts: post = self.db.posts.get(doc_id=id) if not post: raise ValueError("No post found for doc_id %s" % id) yield Entry(os...
return the actual lists of entries tagged with
def kill(self, block=False, reason="unknown"): current_greenletid = id(gevent.getcurrent()) trace = "Job killed: %s" % reason for greenlet, job in context._GLOBAL_CONTEXT["greenlets"].values(): greenletid = id(greenlet) if job and job.id == self.id and greenletid != curre...
Forcefully kill all greenlets associated with this job
def svg_html_from_pyplot_figure(fig: "Figure") -> str:
    """Render a pyplot figure as an SVG document string ('' when fig is None)."""
    if fig is None:
        return ""
    buffer = io.BytesIO()
    fig.savefig(buffer, format="svg")
    svg_bytes = buffer.getvalue()
    return svg_bytes.decode("utf-8")
Converts a ``pyplot`` figure to an SVG tag.
def load(path): proto = _parse_saved_model(path) _merge_assets_key_collection(proto, path) handler = SavedModelHandler() handler._proto = proto return handler
Creates a SavedModelHandler from a SavedModel in `path`.
def runlogs_policy(log_group_ref): p = Policy( Statement=[ Statement( Effect=Allow, Resource=[ Join('', [ 'arn:aws:logs:*:*:log-group:', log_group_ref, ':log-stream:*'])], ...
Policy needed for Empire -> Cloudwatch logs to record run output.
def show_vpnservice(self, vpnservice, **_params): return self.get(self.vpnservice_path % (vpnservice), params=_params)
Fetches information of a specific VPN service.
def json(self):
    """Return this run as a JSON-serializable dict."""
    return dict(
        id=self.ID,
        steps=self.steps,
        graph_source=self.source,
        errored=self.errored,
    )
Return the JSON representation of this run
def build_valid_keywords_grammar(keywords=None): from invenio_query_parser.parser import KeywordQuery, KeywordRule, \ NotKeywordValue, SimpleQuery, ValueQuery if keywords: KeywordRule.grammar = attr('value', re.compile( r"(\d\d\d\w{{0,3}}|{0})\b".format("|".join(keywords), re.I))) ...
Update parser grammar to add a list of allowed keywords.
def prepare_fields_attribute(attribute_name, attributes, class_name): attribute = attributes.get(attribute_name) if not attribute: attribute = tuple() elif isinstance(attribute, std_collections.Iterable): attribute = tuple(attribute) else: raise errors...
Prepare model fields attribute.
def put_info(self, key, value): return self.instance.put_task_info(self.name, key, value)
Put associated information of the task.
def css(self, *props, **kwprops): self._stable = False styles = {} if props: if len(props) == 1 and isinstance(props[0], Mapping): styles = props[0] else: raise WrongContentError(self, props, "Arguments not valid") elif kwprops: ...
Adds css properties to this element.
def request(self, type, command_list): req = self._build_request(type, command_list) if self.nxargs['connect_over_uds']: self.connection.request('POST', req['url'], req['payload'], req['headers']) response = self.connection.getresponse() else: response = self....
Send NX-API JSON request to the NX-OS device.
def send_message( self, title=None, body=None, icon=None, data=None, sound=None, badge=None, api_key=None, **kwargs): from .fcm import fcm_send_message result = fcm_send_message( regis...
Send single notification message.
def _authorization_code_flow(self): options = { 'scope': getattr(self, 'scope', 'non-expiring'), 'client_id': self.options.get('client_id'), 'response_type': 'code', 'redirect_uri': self._redirect_uri() } url = '%s%s/connect' % (self.scheme, self.h...
Build the the auth URL so the user can authorize the app.
def do_list(self, args): try: resources = self.resource_manager.list_resources_info() except Exception as e: print(e) else: self.resources = [] for ndx, (resource_name, value) in enumerate(resources.items()): if not args: ...
List all connected resources.
def functions(context): config_file = "./lambder.json" if os.path.isfile(config_file): context.obj = FunctionConfig(config_file) pass
Manage AWS Lambda functions
def _check_unique_together(cls): if cls._meta.unique_together is None: return if not isinstance(cls._meta.unique_together, (tuple, list)): raise ConfigurationError( "'{}.unique_together' must be a list or tuple.".format(cls.__name__) ) elif any...
Check the value of "unique_together" option.
def personastate(self):
    """Return the persona state of the user's profile, or None.

    Maps the raw _personastate value through the PersonaState table;
    returns None when the raw value is unset or unknown.
    """
    state = self._personastate
    # Bug fix: `is None` instead of `== None` for the unset check; the
    # two redundant None-returning branches are collapsed into one.
    if state is None or state not in self.PersonaState:
        return None
    return self.PersonaState[state]
Return the Persona State of the Users Profile
def _run_default_moderator(comment, content_object, request): if not default_moderator.allow(comment, content_object, request): return False if default_moderator.moderate(comment, content_object, request): comment.is_public = False
Run the default moderator
def expire_file(filepath): load_message.cache_clear() orm.delete(pa for pa in model.PathAlias if pa.entry.file_path == filepath) orm.delete(item for item in model.Entry if item.file_path == filepath) orm.commit()
Expire a record for a missing file
def processed_file(self, new_text, filename, old_text=None, write=False, encoding=None): self.files.append(filename) if old_text is None: old_text = self._read_python_source(filename)[0] if old_text is None: return equal = old_text =...
Called when a file has been refactored and there may be changes.
def pycomplex(v_str): assert isinstance(v_str, str) if v_str[0] == '(' and v_str[-1] == ')' and len(v_str.split(',')) == 2: v_re, v_im = v_str[1:-1].split(',', 1) return complex(pyfloat(v_re), pyfloat(v_im)) else: raise ValueError('{0} must be in complex number form (x, y).' ...
Convert string repr of Fortran complex to Python complex.
def read_text(self, encoding='utf-8') -> str:
    """Read the whole file into memory and return it as a string."""
    handle = self.open('r', encoding=encoding)
    with handle as fp:
        content = fp.read()
    return content
read all text into memory.
def _encode_batched_write_command( namespace, operation, command, docs, check_keys, opts, ctx): buf = StringIO() to_send, _ = _batched_write_command_impl( namespace, operation, command, docs, check_keys, opts, ctx, buf) return buf.getvalue(), to_send
Encode the next batched insert, update, or delete command.
def Jobs(self): url = self._url + "/jobs" return Jobs(url=url, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port, initialize=True)
get the Jobs object