code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def increment(self, amount=1): self._primaryProgressBar.setValue(self.value() + amount) QApplication.instance().processEvents()
Increments the main progress bar by amount.
def _readbin(self, length, start): if not length: return '' startbyte, startoffset = divmod(start + self._offset, 8) endbyte = (start + self._offset + length - 1) // 8 b = self._datastore.getbyteslice(startbyte, endbyte + 1) try: c = "{:0{}b}".format(int(binascii.hexlify(b), 16), 8*len(b)) except TypeError: c = "{0:0{1}b}".format(int(binascii.hexlify(str(b)), 16), 8*len(b)) return c[startoffset:startoffset + length]
Read bits and interpret as a binary string.
async def _retrieve_messages_after_strategy(self, retrieve): after = self.after.id if self.after else None data = await self.logs_from(self.channel.id, retrieve, after=after) if len(data): if self.limit is not None: self.limit -= retrieve self.after = Object(id=int(data[0]['id'])) return data
Retrieve messages using after parameter.
def get(cls): app = AndroidApplication.instance() f = app.create_future() if cls._instance: f.set_result(cls._instance) return f def on_service(obj_id): if not cls.instance(): m = cls(__id__=obj_id) else: m = cls.instance() f.set_result(m) cls.from_(app).then(on_service) return f
Acquires the NotificationManager service async.
async def _port_poll(is_old_bootloader, ports_before_switch=None): new_port = '' while not new_port: if is_old_bootloader: new_port = await _port_on_mode_switch(ports_before_switch) else: ports = await _discover_ports() if ports: discovered_ports = list(filter( lambda x: x.endswith('bootloader'), ports)) if len(discovered_ports) == 1: new_port = '/dev/modules/{}'.format(discovered_ports[0]) await asyncio.sleep(0.05) return new_port
Checks for the bootloader port
def tick(self, filename): if self.connection_attempts < 10: self.setup(True, False) self.connection_attempts += 1 self.unqueue_and_display(filename)
Try to connect and display messages in queue.
def remove_file_no_raise(file_name, config): if config.keep_xml: return True try: if os.path.exists(file_name): os.remove(file_name) except IOError as error: loggers.root.error( "Error occurred while removing temporary created file('%s'): %s", file_name, str(error))
Removes file from disk if exception is raised.
def do_gui_update(self): with self.update_lock: changed_widget_dict = {} self.root.repr(changed_widget_dict) for widget in changed_widget_dict.keys(): html = changed_widget_dict[widget] __id = str(widget.identifier) self._send_spontaneous_websocket_message(_MSG_UPDATE + __id + ',' + to_websocket(html)) self._need_update_flag = False
This method gets called also by Timer, a new thread, and so needs to lock the update
def _call_event(self, event_name, cmd, pargs, kwargs, **kws): def get_result_params(res): if not isinstance(res, (list, tuple)): return res, pargs, kwargs elif len(res) == 2: return res, pargs, kwargs return res[0], (pargs[0], ) + tuple(res[1]), kwargs if hasattr(cmd, event_name): return get_result_params( getattr(cmd, event_name)(pargs[0], *pargs[1:], **kwargs)) elif hasattr(cmd.__cls__, event_name): return get_result_params( getattr(cmd.__cls__, event_name)( pargs[0], cmd.__cmd_name__ or cmd.__name__, *pargs[1:], **kwargs)) return None, pargs, kwargs
Try to call events for cmd.
def camel_case_from_underscores(string): components = string.split('_') string = '' for component in components: if component in abbreviations: string += component else: string += component[0].upper() + component[1:].lower() return string
Generate a CamelCase string from an underscore_string
def makedirs(p): try: os.makedirs(p, settings.FILE_UPLOAD_PERMISSIONS) except OSError: if not os.path.isdir(p): raise
A makedirs that avoids a race conditions for multiple processes attempting to create the same directory.
def _initialise_classifier(self, comparison_vectors): self.kernel.init = numpy.array( [[0.05] * len(list(comparison_vectors)), [0.95] * len(list(comparison_vectors))])
Set the centers of the clusters.
def solve(self): if self.solve_begun: raise ResolveError("cannot run solve() on a solve that has " "already been started") t1 = time.time() pt1 = package_repo_stats.package_load_time while self.status == SolverStatus.unsolved: self.solve_step() if self.status == SolverStatus.unsolved and not self._do_callback(): break self.load_time = package_repo_stats.package_load_time - pt1 self.solve_time = time.time() - t1 if self.pr.verbosity > 2: from pprint import pformat self.pr.subheader("SOLVE STATS:") self.pr(pformat(self.solve_stats)) elif self.print_stats: from pprint import pformat data = {"solve_stats": self.solve_stats} print >> (self.buf or sys.stdout), pformat(data)
Attempt to solve the request.
def output(self, name='',dest=''): "Output PDF to some destination" if(self.state<3): self.close() dest=dest.upper() if(dest==''): if(name==''): name='doc.pdf' dest='I' else: dest='F' if dest=='I': print(self.buffer) elif dest=='D': print(self.buffer) elif dest=='F': f=open(name,'wb') if(not f): self.error('Unable to create output file: '+name) if PY3K: f.write(self.buffer.encode("latin1")) else: f.write(self.buffer) f.close() elif dest=='S': return self.buffer else: self.error('Incorrect output destination: '+dest) return ''
Output PDF to some destination
def gcs_files(prefix_filter=None): top_level_xml_str = download_gcs_file("", prefix_filter=prefix_filter) xml_root = ElementTree.fromstring(top_level_xml_str) filenames = [el[0].text for el in xml_root if el.tag.endswith("Contents")] return filenames
List all files in GCS bucket.
def redirectSTDOUTtoDebugFile(): import sys kwargs = {} if sys.version_info >= (3,): kwargs["encoding"] = "utf8" sys.stdout = open( os.path.join( getDebugDirectory(), "{0}.stdout".format(getDebugIdentifier()), ), "w", 1, **kwargs ) sys.stderr = open( os.path.join( getDebugDirectory(), "{0}.stderr".format(getDebugIdentifier()), ), "w", 1, **kwargs )
Redirects the stdout and stderr of the current process to a file.
def view_help(self): def format_desc(desc): return "%s (Type: %s, Default: %s)\n%s" % ( desc.name, desc.validator.__name__.replace('validate_', ''), desc.default, desc.help or '') def format_namespace(key, desc_list): return "\nNamespace: %s\n%s" % ( key, '\n'.join(sorted(format_desc(desc) for desc in desc_list))) def namespace_cmp(item): name, _ = item return chr(0) if name == DEFAULT else name return '\n'.join(format_namespace(*desc) for desc in sorted(six.iteritems(self.descriptions), key=namespace_cmp))
Return a help message describing all the statically configured keys.
def create (self, name, *args): tab = None defn = self.get(name, None) if defn: tab = FSWTab(defn, *args) return tab
Creates a new command with the given arguments.
def triple_in_shape(expr: ShExJ.shapeExpr, label: ShExJ.tripleExprLabel, cntxt: Context) \ -> Optional[ShExJ.tripleExpr]: te = None if isinstance(expr, (ShExJ.ShapeOr, ShExJ.ShapeAnd)): for expr2 in expr.shapeExprs: te = triple_in_shape(expr2, label, cntxt) if te is not None: break elif isinstance(expr, ShExJ.ShapeNot): te = triple_in_shape(expr.shapeExpr, label, cntxt) elif isinstance(expr, ShExJ.shapeExprLabel): se = reference_of(expr, cntxt) if se is not None: te = triple_in_shape(se, label, cntxt) return te
Search for the label in a shape expression
def instruction_LSR_memory(self, opcode, ea, m): r = self.LSR(m) return ea, r & 0xff
Logical shift right memory location
def adjust_for_flatpak(self): if self.user_params.flatpak.value: remove_plugins = [ ("prebuild_plugins", "resolve_composes"), ("prepublish_plugins", "squash"), ("postbuild_plugins", "pulp_push"), ("postbuild_plugins", "pulp_tag"), ("postbuild_plugins", "pulp_sync"), ("exit_plugins", "pulp_publish"), ("exit_plugins", "pulp_pull"), ("exit_plugins", "delete_from_registry"), ] for when, which in remove_plugins: self.pt.remove_plugin(when, which, 'not needed for flatpak build')
Remove plugins that don't work when building Flatpaks
def give_consent(): if not ('hitId' in request.args and 'assignmentId' in request.args and 'workerId' in request.args): raise ExperimentError('hit_assign_worker_id_not_set_in_consent') hit_id = request.args['hitId'] assignment_id = request.args['assignmentId'] worker_id = request.args['workerId'] mode = request.args['mode'] with open('templates/consent.html', 'r') as temp_file: consent_string = temp_file.read() consent_string = insert_mode(consent_string, mode) return render_template_string( consent_string, hitid=hit_id, assignmentid=assignment_id, workerid=worker_id )
Serves up the consent in the popup window.
def superkey(self): sorted_list = [] for header in self.header: if header in self._keys: sorted_list.append(header) return sorted_list
Returns a set of column names that together constitute the superkey.
def add_callback(self, fn, *args, **kwargs): if _debug: IOCB._debug("add_callback(%d) %r %r %r", self.ioID, fn, args, kwargs) self.ioCallback.append((fn, args, kwargs)) if self.ioComplete.isSet(): self.trigger()
Pass a function to be called when IO is complete.
def kill(self): for sock in self.workers: os.kill(sock.pid, signal.SIGKILL) return 'WorkerPool %s killed' % self.ctrl_url
Send a SIGKILL to all worker processes
def _in_polygon(points, polygon): from matplotlib.path import Path points = _as_array(points) polygon = _as_array(polygon) assert points.ndim == 2 assert polygon.ndim == 2 if len(polygon): polygon = np.vstack((polygon, polygon[0])) path = Path(polygon, closed=True) return path.contains_points(points)
Return the points that are inside a polygon.
def update(self, dt): self.group.update(dt) for sprite in self.group.sprites(): if sprite.feet.collidelist(self.walls) > -1: sprite.move_back(dt)
Tasks that occur over time should be handled here
def status(self, *args, priority=0): 'Add status message to be shown until next action.' k = (priority, args) self.statuses[k] = self.statuses.get(k, 0) + 1 if self.statusHistory: prevpri, prevargs, prevn = self.statusHistory[-1] if prevpri == priority and prevargs == args: self.statusHistory[-1][2] += 1 return True self.statusHistory.append([priority, args, 1]) return True
Add status message to be shown until next action.
def stash_calibration(self, attenuations, freqs, frange, calname): self.calibration_vector = attenuations self.calibration_freqs = freqs self.calibration_frange = frange self.calname = calname
Save it for later
def _get_deploy_options(self, options): user_data = None if options.user_data and options.b64_user_data: raise CommandError( "Cannot provide both --user-data and --b64-user-data.") if options.b64_user_data: user_data = options.b64_user_data if options.user_data: user_data = base64_file(options.user_data).decode("ascii") return utils.remove_None({ 'distro_series': options.image, 'hwe_kernel': options.hwe_kernel, 'user_data': user_data, 'comment': options.comment, 'wait': False, })
Return the deployment options based on command line.
def parse(self, s): return datetime.datetime.strptime(s, self.date_format).date()
Parses a date string formatted like ``YYYY-MM-DD``.
def _get_movie_raw_metadata(): path = _get_movielens_path() if not os.path.isfile(path): _download_movielens(path) with zipfile.ZipFile(path) as datafile: return datafile.read('ml-100k/u.item').decode(errors='ignore').split('\n')
Get raw lines of the genre file.
def reversed_blocks(handle, blocksize=4096): handle.seek(0, os.SEEK_END) here = handle.tell() while 0 < here: delta = min(blocksize, here) here -= delta handle.seek(here, os.SEEK_SET) yield handle.read(delta)
Generate blocks of file's contents in reverse order.
def add_file(self,fName,content) : if not self.isdir() : raise Exception("FSQuery tried to add a file in a node which is not a directory : %s" % self.abs) self.write_file("%s/%s"%(self.abs,fName),content)
If this FSNode is a directory, write a file called fName containing content inside it
def _get_composition_collection(self, composition_id): collection = JSONClientValidated('repository', collection='Composition', runtime=self._runtime) composition_map = collection.find_one({'_id': ObjectId(composition_id.get_identifier())}) if 'assetIds' not in composition_map: raise errors.NotFound('no Assets are assigned to this Composition') return composition_map, collection
Returns a Mongo Collection and Composition given a Composition Id
def states(self): state_list = [] for state in States: if state.value & self._states != 0: state_list.append(state) if (self._flashing_states & States.FILTER) != 0: state_list.append(States.FILTER_LOW_SPEED) return state_list
Returns a set containing the enabled states.
def power_off(env, identifier, hard): virtual_guest = env.client['Virtual_Guest'] vsi = SoftLayer.VSManager(env.client) vs_id = helpers.resolve_id(vsi.resolve_ids, identifier, 'VS') if not (env.skip_confirmations or formatting.confirm('This will power off the VS with id %s. ' 'Continue?' % vs_id)): raise exceptions.CLIAbort('Aborted.') if hard: virtual_guest.powerOff(id=vs_id) else: virtual_guest.powerOffSoft(id=vs_id)
Power off an active virtual server.
def _start_primary(self): self.em.start() self.em.set_secondary_state(_STATE_RUNNING) self._set_shared_instances()
Start as the primary
def correct_chrom_names(chroms): chrom_list = [] for chrom in chroms: chrom = str(chrom) chrom = chrom.replace('23', 'X') chrom = chrom.replace('24', 'Y') chrom = chrom.replace('25', 'Mt') if not chrom.startswith('chr'): chrom = 'chr' + chrom chrom_list.append(chrom) return chrom_list
Make sure chromosome names follow UCSC chr convention.
def prepare_percolator_output(self, fn): ns = xml.get_namespace(fn) static = readers.get_percolator_static_xml(fn, ns) return ns, static
Returns namespace and static xml from percolator output file
def extract_filezip(path_to_file, dest_path, target_zipfiles=None): target_zipfiles = ['.*'] if target_zipfiles is None else target_zipfiles files = [] _, ext = os.path.splitext(path_to_file) if ext == '.zip': file = open(path_to_file, 'rb') with zipfile.ZipFile(file) as zip_file: regexp = '|'.join(target_zipfiles) if target_zipfiles else '.*' search_regex = re.compile(regexp) lista = [m.group() for x in zip_file.namelist() for m in [search_regex.search(x)] if m] for zp_file in lista: try: zip_file.extract(zp_file, dest_path) files.append(os.path.join(dest_path, zp_file)) except Exception as ex: msg = 'Fail to extract {} in {} to {} - {}'.format( zp_file, path_to_file, dest_path, ex) logger.error(msg) file.close() else: logger.warning('Not zipfile passed in args') return files
Extract file zip to destiny path folder targeting only some kind of files.
def decode_safely(self, encoded_data): decoder = self.base_decoder result = settings.null try: result = pickle.loads(decoder(encoded_data)) except: warnings.warn("Could not load and deserialize the data.", RuntimeWarning) return result
Inverse for the `encode_safely` function.
def _compile_qt_resources(): if config.QT_RES_SRC(): epab.utils.ensure_exe('pyrcc5') LOGGER.info('compiling Qt resources') elib_run.run(f'pyrcc5 {config.QT_RES_SRC()} -o {config.QT_RES_TGT()}')
Compiles PyQT resources file
def end(self): with self.__lock: if self.__write: self.__write(compress_end(self.__ctx)) else: return compress_end(self.__ctx)
Finalise lz4 frame, outputting any remaining as return from this function or by writing to fp)
def hook_fn(self, module:nn.Module, input:Tensors, output:Tensors): "Applies `hook_func` to `module`, `input`, `output`." if self.detach: input = (o.detach() for o in input ) if is_listy(input ) else input.detach() output = (o.detach() for o in output) if is_listy(output) else output.detach() self.stored = self.hook_func(module, input, output)
Applies `hook_func` to `module`, `input`, `output`.
def _disable_rpcs(self, conn, services, timeout=1.0): success, result = self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceiveHeaderCharacteristic], False, timeout) if not success: return success, result return self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceivePayloadCharacteristic], False, timeout)
Prevent this device from receiving more RPCs
def nhill_a(self,x,threshold=0.1,power=2,ichild=2): x_pow = np.power(x,power) threshold_pow = np.power(threshold,power) return x_pow / (x_pow + threshold_pow) * (1 + threshold_pow)
Normalized activating hill function.
def unlink(self): if os.name == "posix": self.__linux_unlink__() elif os.name == "nt": self.__windows_unlink__() else: raise HolodeckException("Currently unsupported os: " + os.name)
unlinks the shared memory
def rebalance(self): norms = [sci.linalg.norm(f, axis=0) for f in self.factors] lam = sci.multiply.reduce(norms) ** (1/self.ndim) self.factors = [f * (lam / fn) for f, fn in zip(self.factors, norms)] return self
Rescales factors across modes so that all norms match.
def runGetContinuousSet(self, id_): compoundId = datamodel.ContinuousSetCompoundId.parse(id_) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) continuousSet = dataset.getContinuousSet(id_) return self.runGetRequest(continuousSet)
Runs a getContinuousSet request for the specified ID.
def all_off(self): if not self.leds: return for led in self.leds.values(): led.brightness = 0
Turn all LEDs off
def global_search(self, queryset): search = self.dt_data['sSearch'] if search: if self.dt_data['bRegex']: criterions = [ Q(**{'%s__iregex' % field: search}) for field in self.get_db_fields() if self.can_regex(field) ] if len(criterions) > 0: search = reduce(or_, criterions) queryset = queryset.filter(search) else: for term in search.split(): criterions = (Q(**{'%s__icontains' % field: term}) for field in self.get_db_fields()) search = reduce(or_, criterions) queryset = queryset.filter(search) return queryset
Filter a queryset with global search
def gtype(n): t = type(n).__name__ return str(t) if t != 'Literal' else 'Literal, {}'.format(n.language)
Return the a string with the data type of a value, for Graph data
def _slice_at(self, index, length=1): length_ = len(self) if -length <= index < 0: index += length_ return slice(index, index + length)
Create a slice for index and length.
def visit(self): for enrollment in self._get_all_enrollments(): if enrollment.experiment.is_displaying_alternatives(): if not enrollment.last_seen: self._experiment_goal(enrollment.experiment, enrollment.alternative, conf.VISIT_NOT_PRESENT_COUNT_GOAL, 1) self._set_last_seen(enrollment.experiment, now()) elif now() - enrollment.last_seen >= timedelta(hours=conf.SESSION_LENGTH): self._experiment_goal(enrollment.experiment, enrollment.alternative, conf.VISIT_NOT_PRESENT_COUNT_GOAL, 1) self._experiment_goal(enrollment.experiment, enrollment.alternative, conf.VISIT_PRESENT_COUNT_GOAL, 1) self._set_last_seen(enrollment.experiment, now())
Record that the user has visited the site for the purposes of retention tracking
def public_pairs_for_script(self, tx, tx_in_idx, generator): public_pairs = [] for sec in self.extract_secs(tx, tx_in_idx): try: public_pairs.append(sec_to_public_pair(sec, generator)) except EncodingError: pass return public_pairs
For a given script, iterate over and pull out public pairs encoded as sec values.
def ensure_exists(self): if not cache.exists(self.config): logger.debug("Wily cache not found, creating.") cache.create(self.config) logger.debug("Created wily cache") else: logger.debug(f"Cache {self.config.cache_path} exists")
Ensure that cache directory exists.
def on_mouse_wheel(self, event): if event.modifiers: return dx = np.sign(event.delta[1]) * self._wheel_coeff x0, y0 = self._normalize(event.pos) self.zoom_delta((dx, dx), (x0, y0))
Zoom with the mouse wheel.
def _uri_split(uri): scheme, netloc, path, query, fragment = _safe_urlsplit(uri) auth = None port = None if '@' in netloc: auth, netloc = netloc.split('@', 1) if netloc.startswith('['): host, port_part = netloc[1:].split(']', 1) if port_part.startswith(':'): port = port_part[1:] elif ':' in netloc: host, port = netloc.split(':', 1) else: host = netloc return scheme, auth, host, port, path, query, fragment
Splits up an URI or IRI.
def show_graph_summary(g): sample_data = [] print("list(g[RDFS.Class]) = " + str(len(list(g[RDFS.Class])))) num_subj = 0 for subj in g.subjects(RDF.type): num_subj += 1 if num_subj < 5: sample_data.append("subjects.subject: " + get_string_from_rdf(subj)) print("g.subjects(RDF.type) = " + str(num_subj)) num_subj = 0 for subj, pred, obj in g: num_subj += 1 if num_subj < 5: sample_data.append("g.subject : " + get_string_from_rdf(pred)) sample_data.append("g.predicate : " + get_string_from_rdf(subj)) sample_data.append("g.object : " + get_string_from_rdf(obj)) print("g.obj(RDF.type) = " + str(num_subj)) print ("------ Sample Data ------") for line in sample_data: print(line)
display sample data from a graph
def down(self, down_uid): down_url = MPost.get_by_uid(down_uid).extinfo.get('tag__file_download', '') print('=' * 40) print(down_url) str_down_url = str(down_url)[15:] if down_url: ment_id = MEntity.get_id_by_impath(str_down_url) if ment_id: MEntity2User.create_entity2user(ment_id, self.userinfo.uid) return True else: return False
Download the entity by UID.
def task_transaction(channel): with channel.lock: if channel.poll(0): task = channel.recv() channel.send(Acknowledgement(os.getpid(), task.id)) else: raise RuntimeError("Race condition between workers") return task
Ensures a task is fetched and acknowledged atomically.
def _parse_simple_fault_geometry_surface(self, node): spacing = node["spacing"] usd, lsd, dip = (~node.upperSeismoDepth, ~node.lowerSeismoDepth, ~node.dip) coords = split_coords_2d(~node.LineString.posList) trace = geo.Line([geo.Point(*p) for p in coords]) return trace, usd, lsd, dip, spacing
Parses a simple fault geometry surface
def _process_plan_lines(self, final_line_count): if not self._lines_seen["plan"]: self._add_error(_("Missing a plan.")) return if len(self._lines_seen["plan"]) > 1: self._add_error(_("Only one plan line is permitted per file.")) return plan, at_line = self._lines_seen["plan"][0] if not self._plan_on_valid_line(at_line, final_line_count): self._add_error( _("A plan must appear at the beginning or end of the file.") ) return if plan.expected_tests != self._lines_seen["test"]: self._add_error( _("Expected {expected_count} tests but only {seen_count} ran.").format( expected_count=plan.expected_tests, seen_count=self._lines_seen["test"], ) )
Process plan line rules.
def settings(instance): with open(instance.root_dir + '/Config/config.yml') as config: config = yaml.load(config) instance.name = config['name'] instance.port = config['web']['port'] instance.host = "http://localhost" if 'host' in config['web']: instance.host = config['web']['host'] instance.debug = config['debug'] return instance
Definition to set settings from config file to the app instance.
def to_dict(self): error_dict = {} for field in ('status', 'source', 'title', 'detail', 'id', 'code', 'links', 'meta'): if getattr(self, field, None): error_dict.update({field: getattr(self, field)}) return error_dict
Return values of each fields of an jsonapi error
def read_chron(fl): indata = pd.read_csv(fl, sep=r'\s*\,\s*', index_col=None, engine='python') outcore = ChronRecord(age=indata['age'], error=indata['error'], depth=indata['depth'], labid=indata['labID']) return outcore
Create ChronRecord instance from Bacon file
def ProcessFingerprint(self, responses): if not responses.success: raise flow.FlowError("Could not fingerprint file: %s" % responses.status) response = responses.First() if response.pathspec.path: pathspec = response.pathspec else: pathspec = self.args.pathspec self.state.urn = pathspec.AFF4Path(self.client_urn) hash_obj = response.hash if data_store.AFF4Enabled(): with aff4.FACTORY.Create( self.state.urn, aff4_grr.VFSFile, mode="w", token=self.token) as fd: fd.Set(fd.Schema.HASH, hash_obj) if data_store.RelationalDBEnabled(): path_info = rdf_objects.PathInfo.FromPathSpec(pathspec) path_info.hash_entry = response.hash data_store.REL_DB.WritePathInfos(self.client_id, [path_info]) self.ReceiveFileFingerprint( self.state.urn, hash_obj, request_data=responses.request_data)
Store the fingerprint response.
def setpassword(self, pwd): if pwd and not isinstance(pwd, bytes): raise TypeError("pwd: expected bytes, got %s" % type(pwd)) if pwd: self.pwd = pwd else: self.pwd = None
Set default password for encrypted files.
def tabLayout(self): self.childWindow.column += 1 if self.childWindow.column > Layout.BUTTONS_NUMBER: self.childWindow.column = 0 self.childWindow.row += 1
For all tabs, specify the number of buttons in a row
def get(identifier, namespace='cid', domain='compound', operation=None, output='JSON', searchtype=None, **kwargs): if (searchtype and searchtype != 'xref') or namespace in ['formula']: response = request(identifier, namespace, domain, None, 'JSON', searchtype, **kwargs).read() status = json.loads(response.decode()) if 'Waiting' in status and 'ListKey' in status['Waiting']: identifier = status['Waiting']['ListKey'] namespace = 'listkey' while 'Waiting' in status and 'ListKey' in status['Waiting']: time.sleep(2) response = request(identifier, namespace, domain, operation, 'JSON', **kwargs).read() status = json.loads(response.decode()) if not output == 'JSON': response = request(identifier, namespace, domain, operation, output, searchtype, **kwargs).read() else: response = request(identifier, namespace, domain, operation, output, searchtype, **kwargs).read() return response
Request wrapper that automatically handles async requests.
def search_gallery(self, q): url = self._base_url + "/3/gallery/search?q={0}".format(q) resp = self._send_request(url) return [_get_album_or_image(thing, self) for thing in resp]
Search the gallery with the given query string.
def delete_doc(self, doc_id, revision): d = defer.Deferred() self.increase_stat('delete_doc') try: doc = self._get_doc(doc_id) if doc['_rev'] != revision: raise ConflictError("Document update conflict.") if doc.get('_deleted', None): raise NotFoundError('%s deleted' % doc_id) doc['_deleted'] = True self._expire_cache(doc['_id']) for key in doc.keys(): if key in ['_rev', '_deleted', '_id']: continue del(doc[key]) self.log('Marking document %r as deleted', doc_id) del self._attachments[doc['_id']] self._update_rev(doc) self._analize_changes(doc) d.callback(Response(ok=True, id=doc_id, rev=doc['_rev'])) except (ConflictError, NotFoundError, ) as e: d.errback(e) return d
Imitates sending DELETE request to CouchDB server
def _download_datasets(): def filepath(*args): return abspath(join(dirname(__file__), '..', 'vega_datasets', *args)) dataset_listing = {} for name in DATASETS_TO_DOWNLOAD: data = Dataset(name) url = data.url filename = filepath('_data', data.filename) print("retrieving data {0} -> {1}".format(url, filename)) urlretrieve(url, filename) dataset_listing[name] = '_data/{0}'.format(data.filename) with open(filepath('local_datasets.json'), 'w') as f: json.dump(dataset_listing, f, indent=2, sort_keys=True)
Utility to download datasets into package source
def load(path=None, root=None, db=None, load_user=True): "Load all of the config files. " config = load_config(path, load_user=load_user) remotes = load_remotes(path, load_user=load_user) if remotes: if not 'remotes' in config: config.remotes = AttrDict() for k, v in remotes.remotes.items(): config.remotes[k] = v accounts = load_accounts(path, load_user=load_user) if accounts: if not 'accounts' in config: config.accounts = AttrDict() for k, v in accounts.accounts.items(): config.accounts[k] = v update_config(config) if root: config.library.filesystem_root = root if db: config.library.database = db return config
Load all of the config files.
def _get_first_part_id(self, assessment_id): if assessment_id not in self._first_part_index: self._first_part_index[assessment_id] = get_first_part_id_for_assessment( assessment_id, runtime=self._runtime, proxy=self._proxy, create=True, bank_id=self._catalog_id) return self._first_part_index[assessment_id]
This session implemenation assumes all items are assigned to the first assessment part
def _make_links_from(self, body): ld = utils.CurieDict(self._core.default_curie, {}) for rel, link in body.get('_links', {}).items(): if rel != 'curies': if isinstance(link, list): ld[rel] = utils.LinkList( (self._navigator_or_thunk(lnk), lnk) for lnk in link) else: ld[rel] = self._navigator_or_thunk(link) return ld
Creates linked navigators from a HAL response body
def write_fixture_file(context, path): print('CWD:', os.getcwd()) print('FIXTURE:', path) with open(path, 'w') as stream: stream.write(context.text)
Write fixture to disk.
def output(self,delimiter = '\t', addnormalised=False): for type, count in self: if isinstance(type,tuple) or isinstance(type,list): if addnormalised: yield " ".join((u(x) for x in type)) + delimiter + str(count) + delimiter + str(count/self.total) else: yield " ".join((u(x) for x in type)) + delimiter + str(count) elif isstring(type): if addnormalised: yield type + delimiter + str(count) + delimiter + str(count/self.total) else: yield type + delimiter + str(count) else: if addnormalised: yield str(type) + delimiter + str(count) + delimiter + str(count/self.total) else: yield str(type) + delimiter + str(count)
Print a representation of the frequency list
def _read_color_images(self, num_images): color_images = self._ros_read_images(self._color_image_buffer, num_images, self.staleness_limit) for i in range(0, num_images): if self._flip_images: color_images[i] = np.flipud(color_images[i].astype(np.uint8)) color_images[i] = np.fliplr(color_images[i].astype(np.uint8)) color_images[i] = ColorImage(color_images[i], frame=self._frame) return color_images
Reads color images from the device
def change_password(): form_class = _security.change_password_form if request.is_json: form = form_class(MultiDict(request.get_json())) else: form = form_class() if form.validate_on_submit(): after_this_request(_commit) change_user_password(current_user._get_current_object(), form.new_password.data) if not request.is_json: do_flash(*get_message('PASSWORD_CHANGE')) return redirect(get_url(_security.post_change_view) or get_url(_security.post_login_view)) if request.is_json: form.user = current_user return _render_json(form) return _security.render_template( config_value('CHANGE_PASSWORD_TEMPLATE'), change_password_form=form, **_ctx('change_password') )
View function which handles a change password request.
def _delete_service(self, service_only=False): logging.debug('_delete_service()') return self.service.delete_service(self.service_name)
Delete a Cloud Foundry service and any associations.
def lower_context_field_existence(ir_blocks, query_metadata_table): def regular_visitor_fn(expression): if not isinstance(expression, ContextFieldExistence): return expression location_type = query_metadata_table.get_location_info(expression.location).type return BinaryComposition( u'!=', ContextField(expression.location, location_type), NullLiteral) def construct_result_visitor_fn(expression): if not isinstance(expression, ContextFieldExistence): return expression location_type = query_metadata_table.get_location_info(expression.location).type return BinaryComposition( u'!=', OutputContextVertex(expression.location, location_type), NullLiteral) new_ir_blocks = [] for block in ir_blocks: new_block = None if isinstance(block, ConstructResult): new_block = block.visit_and_update_expressions(construct_result_visitor_fn) else: new_block = block.visit_and_update_expressions(regular_visitor_fn) new_ir_blocks.append(new_block) return new_ir_blocks
Lower ContextFieldExistence expressions into lower-level expressions.
def generate(self): x, y, z = self.point1 return (x + self.size_x * random(), y + self.size_y * random(), z + self.size_z * random())
Return a random point inside the box
def _xy_locs(mask): y, x = mask.nonzero() return list(zip(x, y))
Mask should be a set of bools from comparison with a feature layer.
def _collapse_to_cwl_record(samples, want_attrs, input_files):
    """Collapse nested per-sample values into a single CWL record.

    For each wanted key (a "__"-joined nested path), collects one value per
    sample and removes the consumed nested entry from that sample.
    """
    all_keys = set()
    for sample in samples:
        all_keys.update(sample["cwl_keys"])
    # Longer key names first, ties broken lexicographically by characters.
    ordered_keys = sorted(all_keys, key=lambda k: (-len(k), tuple(k)))

    record = {}
    for key in ordered_keys:
        if key not in want_attrs:
            continue
        parts = key.split("__")
        values = []
        remaining = []
        for sample in samples:
            values.append(_to_cwl(tz.get_in(parts, sample), input_files))
            # Only dissociate genuinely nested keys; flat keys leave the
            # sample untouched.
            remaining.append(_dissoc_in(sample, parts) if len(parts) > 1 else sample)
        samples = remaining
        record[key] = values
    return record
Convert nested samples from batches into a CWL record, based on input keys.
def _split_line(self, line_str): arr = line_str.rstrip().split("\t") if len(arr) != self.expected_fields: raise exceptions.InvalidRecordException( ( "The line contains an invalid number of fields. Was " "{} but expected {}\n{}".format(len(arr), 9 + len(self.samples.names), line_str) ) ) return arr
Split line and check number of columns
def create_unsigned_transaction(cls, *, nonce: int, gas_price: int, gas: int, to: Address, value: int, data: bytes) -> 'BaseUnsignedTransaction':
    """Create an unsigned transaction from the given fields.

    Abstract hook: concrete transaction classes must override this.
    The base implementation always raises ``NotImplementedError``.
    All fields are keyword-only to keep call sites self-describing.
    """
    raise NotImplementedError("Must be implemented by subclasses")
Create an unsigned transaction.
def add_robot(self, controller):
    """Register a robot controller and subscribe to its mode changes."""
    # Subscribe first, then track the controller in our robot list.
    mode_callback = self._on_robot_mode_change
    controller.on_mode_change(mode_callback)
    self.robots.append(controller)
Add a robot controller
def state_create_is_valid( nameop ):
    """Sanity-check that a nameop carries all @state_create metadata.

    Verifies the bookkeeping fields the @state_create decorator is
    expected to attach; raises AssertionError (with a specific message)
    on the first missing field.  Returns True when every invariant holds.
    """
    # tag written by the @state_create decorator itself
    assert '__state_create__' in nameop, "Not tagged with @state_create"
    assert nameop['__state_create__'], "BUG: tagged False by @state_create"
    # the matching preorder record plus the DB bookkeeping fields
    assert '__preorder__' in nameop, "No preorder"
    assert '__table__' in nameop, "No table given"
    assert '__history_id_key__' in nameop, "No history ID key given"
    # the history ID key must itself resolve to a value in the nameop
    assert nameop['__history_id_key__'] in nameop, "No history ID given"
    assert '__always_set__' in nameop, "No always-set fields given"
    return True
Is a nameop a valid state-create operation?
def compile_with_symbol(self, func, theano_args=None, owner=None):
    """Compile ``func`` applied to theano symbols into a theano function."""
    if theano_args is None:
        theano_args = []
    # Collector must exist before func runs so updates made during the
    # symbolic build are captured.
    upc = UpdateCollector()
    if owner is None:
        symbolic_outputs = func(*theano_args)
    else:
        symbolic_outputs = func(owner, *theano_args)
    options = copy.copy(self.default_options)
    options['outputs'] = symbolic_outputs
    options['updates'] = upc.extract_updates()
    return theano.function(theano_args, **options)
Compile the function with theano symbols
def StringIO(*args, **kwargs):
    """Construct a synchronous StringIO and wrap it for async use."""
    return AsyncStringIOWrapper(sync_io.StringIO(*args, **kwargs))
StringIO constructor shim for the async wrapper.
def split_into_sentences(s):
    """Split text into a list of sentences.

    Whitespace runs are collapsed, then the text is split on the sentence
    terminators '.', '?' and '!'.  Sentences are returned stripped of
    surrounding whitespace, with empty fragments dropped.
    """
    s = re.sub(r"\s+", " ", s)
    # Fix: the previous class r"[\\.\\?\\!]" contained a literal backslash
    # (raw string doubles it), so backslashes in the text were wrongly
    # treated as sentence terminators.
    s = re.sub(r"[.?!]", "\n", s)
    return [part.strip() for part in s.split("\n") if part.strip()]
Split text into list of sentences.
def _compose_style(self): valid_styles = ( 'color', 'width', 'filter', 'strokeWidth', 'strokeColor', ) self.styling = '\n'.join( '{prop}: {style}'.format(prop=s, style=getattr(self, s)) for s in valid_styles if getattr(self, s) is not None )
Compose `self.styling` as one `prop: style` line per style property that is set.
def purge_service_by_key(self, service_id, key):
    """POST a purge request for ``key`` on the given service; return its status."""
    url = "/service/%s/purge/%s" % (service_id, key)
    response = self._fetch(url, method="POST")
    return self._status(response)
Purge a particular service by a key.
def dst_addr(self):
    """The packet destination address, or None if it cannot be decoded."""
    try:
        raw_bytes = self.raw[self._dst_addr].tobytes()
        return socket.inet_ntop(self._af, raw_bytes)
    except (ValueError, socket.error):
        # Best effort: undecodable/truncated addresses yield None.
        return None
The packet destination address.
def check_str(obj):
    """Coerce ``obj`` to a string; floats are truncated to ints first."""
    if isinstance(obj, str):
        return obj
    # Floats are rendered without a fractional part (truncating toward zero).
    return str(int(obj)) if isinstance(obj, float) else str(obj)
Returns a string for various input types
def common_bootsrap_payload(self):
    """Common data always sent to the client.

    NOTE(review): the "bootsrap" typo in the name is part of the public
    interface and is deliberately preserved.
    """
    flash_messages = get_flashed_messages(with_categories=True)
    current_locale = str(get_locale())
    return {
        'flash_messages': flash_messages,
        'conf': {key: conf.get(key) for key in FRONTEND_CONF_KEYS},
        'locale': current_locale,
        'language_pack': get_language_pack(current_locale),
        'feature_flags': get_feature_flags(),
    }
Common data always sent to the client
def init_library(database_dsn, accounts_password, limited_run=False):
    """Pool-worker initializer: export library settings via the environment.

    Used as the child initializer for Library.process_pool.  SIGINT is
    ignored in the child so Ctrl-C is handled only by the parent process.
    """
    import os
    import signal

    signal.signal(signal.SIGINT, signal.SIG_IGN)

    env = {
        'AMBRY_DB': database_dsn,
        'AMBRY_LIMITED_RUN': '1' if limited_run else '0',
    }
    if accounts_password:
        env['AMBRY_PASSWORD'] = accounts_password
    os.environ.update(env)
Child initializer, setup in Library.process_pool
def _from_deprecated_son(cls, id_dict, run):
    """Build an instance from a deprecated-format ``id_dict`` plus ``run``."""
    deprecated_course = CourseLocator(id_dict['org'], id_dict['course'], run,
                                      id_dict['revision'], deprecated=True)
    return cls(deprecated_course, id_dict['category'], id_dict['name'],
               deprecated=True)
Return the Location decoding this id_dict and run