code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def _RemoveRegistryKeys(self, metadata_value_pairs):
    """Filter out registry keys so that only file entries remain."""
    registry = rdf_paths.PathSpec.PathType.REGISTRY
    return [(metadata, stat_entry)
            for metadata, stat_entry in metadata_value_pairs
            if stat_entry.pathspec.pathtype != registry]
Filter out registry keys to operate on files.
def rgba(self, val): rgba = _user_to_rgba(val, expand=False) if self._rgba is None: self._rgba = rgba else: self._rgba[:, :rgba.shape[1]] = rgba
Set the color using an Nx4 array of RGBA floats
def main(reactor): control_ep = UNIXClientEndpoint(reactor, '/var/run/tor/control') tor = yield txtorcon.connect(reactor, control_ep) state = yield tor.create_state() print("Closing all circuits:") for circuit in list(state.circuits.values()): path = '->'.join(map(lambda r: r.id_hex, circuit...
Close all open streams and circuits in the Tor we connect to
def new_expiry(days=DEFAULT_PASTE_LIFETIME_DAYS):
    """Return an expiration timestamp `days` in the future."""
    lifetime = datetime.timedelta(days=days)
    return delorean.Delorean() + lifetime
Return an expiration `days` in the future
def path_list(self, sep=os.pathsep):
    """Return a list of ``pathlib.Path`` objects obtained by splitting on ``sep``."""
    from pathlib import Path
    return list(map(Path, self.split(sep)))
Return list of Path objects.
def delete_keyring(service): keyring = _keyring_path(service) if not os.path.exists(keyring): log('Keyring does not exist at %s' % keyring, level=WARNING) return os.remove(keyring) log('Deleted ring at %s.' % keyring, level=INFO)
Delete an existing Ceph keyring.
def load_config(config_file_override=False): supernova_config = get_config_file(config_file_override) supernova_config_dir = get_config_directory(config_file_override) if not supernova_config and not supernova_config_dir: raise Exception("Couldn't find a valid configuration file to parse") nova_...
Pulls the supernova configuration file and reads it
def items(self): content_type = ContentType.objects.get_for_model(Entry) return comments.get_model().objects.filter( content_type=content_type, is_public=True).order_by( '-submit_date')[:self.limit]
Items are the discussions on the entries.
def check_predefined_conditions(): try: node_info = current_k8s_corev1_api_client.list_node() for node in node_info.items: for condition in node.status.conditions: if not condition.status: return False except ApiException as e: log.error('S...
Check k8s predefined conditions for the nodes.
def job(request): job_id = request.GET.get("job_id") recent_jobs = JobRecord.objects.order_by("-start_time")[0:100] recent_trials = TrialRecord.objects \ .filter(job_id=job_id) \ .order_by("-start_time") trial_records = [] for recent_trial in recent_trials: trial_records.appe...
View for a single job.
def commit_fw_db(self):
    """Calls routine to update the FW DB with the current firewall dict."""
    fw_dict = self.get_fw_dict()
    fw_id = fw_dict.get('fw_id')
    self.update_fw_db(fw_id, fw_dict)
Calls routine to update the FW DB.
def compute_near_isotropic_downsampling_scales(size, voxel_size, dimensions_to_downsample, max_scales=DEFAULT_MAX_DOWNSAMPLING_SCALES, ...
Compute a list of successive downsampling factors.
def render_title_tag(context, is_og=False): request = context['request'] content = '' if context.get('object'): try: content = context['object'].get_meta_title() except AttributeError: pass elif context.get('meta_tagger'): content = context['meta_tagger']....
Returns the title as string or a complete open graph meta tag.
def string(self):
    """Return the first string term in the conjunction, or `None`."""
    strings = (str(term) for term in self._terms if isinstance(term, String))
    return next(strings, None)
Return the first string term in the conjunction, or `None`.
def show_geonode_uploader(self): from safe.gui.tools.geonode_uploader import GeonodeUploaderDialog dialog = GeonodeUploaderDialog(self.iface.mainWindow()) dialog.show()
Show the Geonode uploader dialog.
def save_report_to_html(self): html = self.page().mainFrame().toHtml() if self.report_path is not None: html_to_file(html, self.report_path) else: msg = self.tr('report_path is not set') raise InvalidParameterError(msg)
Save report in the dock to html.
def add_package(package_name, package_path='templates', encoding='utf-8'): if not _has_jinja: raise RuntimeError(_except_text) _jload.add_loader(PackageLoader(package_name, package_path, encoding))
Adds the given package to the template search routine
def _start_vnc(self): self._display = self._get_free_display_port() if shutil.which("Xvfb") is None or shutil.which("x11vnc") is None: raise DockerError("Please install Xvfb and x11vnc before using the VNC support") self._xvfb_process = yield from asyncio.create_subprocess_exec("Xvfb...
Start a VNC server for this container
def solve_lp(clusters, quota, work_dir="work", Nmax=0, self_match=False, solver="SCIP", verbose=False): qb, qa = quota nodes, constraints_x, constraints_y = get_constraints( clusters, (qa, qb), Nmax=Nmax) if self_match: constraints_x = constraints_y = constraints_x | constraints...
Solve the formatted LP instance
def _make_bright_pixel_mask(intensity_mean, mask_factor=5.0): mask = np.zeros((intensity_mean.data.shape), bool) nebins = len(intensity_mean.data) sum_intensity = intensity_mean.data.sum(0) mean_intensity = sum_intensity.mean() for i in range(nebins): mask[i, 0:] = su...
Make a mask of all the brightest pixels.
def ResolveHostnameToIP(host, port): ip_addrs = socket.getaddrinfo(host, port, socket.AF_UNSPEC, 0, socket.IPPROTO_TCP) result = ip_addrs[0][4][0] if compatibility.PY2: result = result.decode("ascii") return result
Resolves a hostname to an IP address.
def _time_show(self): if not self._time_visible: self._time_visible = True self._time_window = tk.Toplevel(self) self._time_window.attributes("-topmost", True) self._time_window.overrideredirect(True) self._time_label = ttk.Label(self._time_window) ...
Show the time marker window
def cancelAllPendingResults( self ):
    """Cancel all pending results, both here and in the underlying notebook."""
    pending = self.pendingResults()
    if pending:
        self._abortJobs(pending)
    self.notebook().cancelAllPendingResults()
Cancel all pending results.
def mk_set_headers(self, data, columns): columns = tuple(columns) lens = [] for key in columns: value_len = max(len(str(each.get(key, ''))) for each in data) lens.append(max(value_len, len(self._get_name(key)))) fmt = self.mk_fmt(*lens) return fmt
Figure out column sizes and create the header format string.
def connectExec(connection, protocol, commandLine): deferred = connectSession(connection, protocol) @deferred.addCallback def requestSubsystem(session): return session.requestExec(commandLine) return deferred
Connect a Protocol to a ssh exec session
def secure_password(length=20, use_random=True): try: length = int(length) pw = '' while len(pw) < length: if HAS_RANDOM and use_random: while True: try: char = salt.utils.stringutils.to_str(get_random_bytes(1)) ...
Generate a secure password.
def cleanup(self):
    """Remove the directory containing the clone and virtual environment."""
    # NOTE(review): ``self._tempdir`` looks like a TemporaryDirectory-style
    # object (``.name`` path, recursive ``.cleanup()``) and ``log`` a
    # module-level logger — confirm against the rest of the file.
    log.info('Removing temp dir %s', self._tempdir.name)
    self._tempdir.cleanup()
Remove the directory containing the clone and virtual environment.
def ok(self, *args, cb=None):
    """Clear dialog widgets, run ``cb`` if one was given, then advance the
    dialog queue. Extra positional arguments are accepted and ignored."""
    self.clear_widgets()
    if cb:
        cb()
    self.idx += 1
    self.advance_dialog()
Clear dialog widgets, call ``cb`` if provided, and advance the dialog queue
def _serialize_date(value): if not isinstance(value, date): raise ValueError(u'The received object was not a date: ' u'{} {}'.format(type(value), value)) return value.isoformat()
Serialize a Date object to its proper ISO-8601 representation.
def publishToOther(self, roomId, name, data):
    """Publish only to the other people in the room, excluding myself."""
    members = self.getRoom(roomId)
    others = [member for member in members if member is not self]
    self.publishToRoom(roomId, name, data, others)
Publish to only other people than myself
def log_likelihood(self): ll = GP.log_likelihood(self) jacobian = self.warping_function.fgrad_y(self.Y_untransformed) return ll + np.log(jacobian).sum()
Notice we add the jacobian of the warping function here.
def wild_card_logs(): file_name = 'GLWC.TXT' z = get_zip_file(wild_card_url) data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"') data.columns = gamelog_columns return data
Pull Retrosheet Wild Card Game Logs
def new_file(self, path: str, checksum: str=None, to_archive: bool=False, tags: List[models.Tag]=None) -> models.File: new_file = self.File(path=path, checksum=checksum, to_archive=to_archive, tags=tags) return new_file
Create a new file.
def setup_environment(): try: from gi.repository import GLib user_data_folder = GLib.get_user_data_dir() except ImportError: user_data_folder = join(os.path.expanduser("~"), ".local", "share") rafcon_root_path = dirname(realpath(rafcon.__file__)) user_library_folder = join(user_d...
Ensures that the environmental variable RAFCON_LIB_PATH is existent
def _attach(cls, disk_id, vm_id, options=None): options = options or {} oper = cls.call('hosting.vm.disk_attach', vm_id, disk_id, options) return oper
Attach a disk to a vm.
def from_jsons(graph_json_str: str, check_version: bool = True) -> BELGraph:
    """Read a BEL graph from a Node-Link JSON string."""
    graph_dict = json.loads(graph_json_str)
    return from_json(graph_dict, check_version=check_version)
Read a BEL graph from a Node-Link JSON string.
def _assert_valid_key(self, name):
    """Raise NotAMeasurementError if `name` is not a known measurement."""
    if name in self._measurements:
        return
    raise NotAMeasurementError('Not a measurement', name, self._measurements)
Raises if name is not a valid measurement.
def std(self, bias=False, *args, **kwargs): nv.validate_window_func('std', args, kwargs) return _zsqrt(self.var(bias=bias, **kwargs))
Exponential weighted moving stddev.
def lookup_id_action(self, text, loc, var): exshared.setpos(loc, text) if DEBUG > 0: print("EXP_VAR:",var) if DEBUG == 2: self.symtab.display() if DEBUG > 2: return var_index = self.symtab.lookup_symbol(var.name, [SharedData.KINDS.GLOBAL_VAR, SharedData....
Code executed after recognising an identificator in expression
def upload_nginx_site_conf(site_name, template_name=None, context=None, enable=True): template_name = template_name or [u'nginx/%s.conf' % site_name, u'nginx/site.conf'] site_available = u'/etc/nginx/sites-available/%s' % site_name upload_template(template_name, site_available, context=context, use_sudo=Tru...
Upload Nginx site configuration from a template.
def _prepare_ogc_request_params(self): self.ogc_request.image_format = MimeType.TIFF_d32f if self.ogc_request.custom_url_params is None: self.ogc_request.custom_url_params = {} self.ogc_request.custom_url_params.update({ CustomUrlParam.SHOWLOGO: False, CustomU...
Method makes sure that correct parameters will be used for download of S-2 bands.
def to_text_format(self): return '\n'.join(itertools.chain( (self.fetch_date.strftime('%Y%m%d%H%M%S'), ), (rr.to_text() for rr in self.resource_records), (), ))
Format as detached DNS information as text.
def _POInitBuilder(env, **kw): import SCons.Action from SCons.Tool.GettextCommon import _init_po_files, _POFileBuilder action = SCons.Action.Action(_init_po_files, None) return _POFileBuilder(env, action=action, target_alias='$POCREATE_ALIAS')
Create builder object for `POInit` builder.
def run(files, temp_folder): "Check flake8 errors in the code base." try: import flake8 except ImportError: return NO_FLAKE_MSG try: from flake8.engine import get_style_guide except ImportError: from flake8.api.legacy import get_style_guide py_files = filter_pytho...
Check flake8 errors in the code base.
def delete(self, event): super(CeleryReceiver, self).delete(event) AsyncResult(event.id).revoke(terminate=True)
Abort running task if it exists.
def row_value(self, row):
    """Get the track's value at ``row``, interpolating between neighbouring keys."""
    key_index = self._get_key_index(int(row))
    if key_index == -1:
        return 0.0
    if key_index == len(self.keys) - 1:
        return self.keys[-1].value
    return TrackKey.interpolate(self.keys[key_index], self.keys[key_index + 1], row)
Get the tracks value at row
def text_assert_any(self, anchors, byte=False): found = False for anchor in anchors: if self.text_search(anchor, byte=byte): found = True break if not found: raise DataNotFound(u'Substrings not found: %s' % ',...
If no `anchors` were found then raise `DataNotFound` exception.
def _prepare_document(self): self._xml = ET.Element("Document") self._xml.set("xmlns", "urn:iso:std:iso:20022:tech:xsd:" + self.schema) self._xml.set("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance") ET.register_namespace("", ...
Build the main document node and set xml namespaces.
def node_validate(node_dict, node_num, cmd_name): req_lu = {"run": ["stopped", "Already Running"], "stop": ["running", "Already Stopped"], "connect": ["running", "Can't Connect, Node Not Running"], "details": [node_dict[node_num].state, ""]} tm = {True: ("Node {1}{2}{0}...
Validate that command can be performed on target node.
def emitFragment(fw, fragID, libID, shredded_seq, clr=None, qvchar='l', fasta=False): if fasta: s = SeqRecord(shredded_seq, id=fragID, description="") SeqIO.write([s], fw, "fasta") return seq = str(shredded_seq) slen = len(seq) qvs = qvchar * slen if clr is None: clr_...
Print out the shredded sequence.
def _add_request_parameters(func): async def decorated_func(*args, handle_ratelimit=None, max_tries=None, request_timeout=None, **kwargs): return await func(*args, handle_ratelimit=handle_ratelimit, max_tries=max_tries, request_timeout=request_timeout, **kwargs) ...
Adds the ratelimit and request timeout parameters to a function.
def xy_positions(self): if self._xy_positions is None or len(self._xy_positions) == 0: xy_pos = [] for dom_id, pos in self.dom_positions.items(): if self.domid2floor(dom_id) == 1: xy_pos.append(np.array([pos[0], pos[1]])) self._xy_positions...
XY positions of the DUs, given by the DOMs on floor 1.
def refund(self): from longclaw.utils import GATEWAY now = datetime.strftime(datetime.now(), "%b %d %Y %H:%M:%S") if GATEWAY.issue_refund(self.transaction_id, self.total): self.status = self.REFUNDED self.status_note = "Refunded on {}".format(now) else: ...
Issue a full refund for this order
def spawn_daemon(fork=None, pgrpfile=None, outfile='out.txt'): 'causes run to be executed in a newly spawned daemon process' global LAST_PGRP_PATH fork = fork or os.fork open(outfile, 'a').close() if pgrpfile and os.path.exists(pgrpfile): try: cur_pid = int(open(pgrpfile).read()....
causes run to be executed in a newly spawned daemon process
def match_length(self):
    """Find the total length of all words that match between the two sequences."""
    total = 0
    for a_start, _b_start, size in self.get_matching_blocks():
        total += self._text_length(self.a[a_start:a_start + size])
    return total
Find the total length of all words that match between the two sequences.
def rstjinja(app, docname, source):
    """Render our pages as a jinja template for fancy templating goodness.

    Mutates ``source[0]`` in place; does nothing for non-HTML builders.
    """
    if app.builder.format != 'html':
        return
    source[0] = app.builder.templates.render_string(
        source[0], app.config.html_context)
Render our pages as a jinja template for fancy templating goodness.
def _createGsshaPyObjects(self, cell): gridCell = GridPipeCell(cellI=cell['i'], cellJ=cell['j'], numPipes=cell['numPipes']) gridCell.gridPipeFile = self for spipe in cell['spipes']: gridNode = GridPipeNode(linkNumber=spi...
Create GSSHAPY GridPipeCell and GridPipeNode Objects Method
def _update_tree_feature_weights(X, feature_names, clf, feature_weights): tree_value = clf.tree_.value if tree_value.shape[1] == 1: squeeze_axis = 1 else: assert tree_value.shape[2] == 1 squeeze_axis = 2 tree_value = np.squeeze(tree_value, axis=squeeze_axis) tree_feature = cl...
Update tree feature weights using decision path method.
def parent(self): if self._has_parent is None: _parent = self._ctx.backend.get_parent(self._ctx.dev) self._has_parent = _parent is not None if self._has_parent: self._parent = Device(_parent, self._ctx.backend) else: self._parent = ...
Return the parent device.
def connect(self, address, **kws): return yield_(Connect(self, address, timeout=self._timeout, **kws))
Connect to a remote socket at _address_.
def groups_get_integrations(self, room_id, **kwargs): return self.__call_api_get('groups.getIntegrations', roomId=room_id, kwargs=kwargs)
Retrieves the integrations which the group has
def classify(self, encoding, num=1):
    """Classify with basic one-hot local encoding.

    Returns the indices of the ``num`` highest-probability entries
    (softmax over ``encoding``), in ascending order of probability.
    """
    exps = numpy.exp(encoding)
    prob_dist = exps / exps.sum()
    ranked = numpy.argsort(prob_dist)
    return ranked[-num:].tolist()
Classify with basic one-hot local encoding.
def clear(self): for ax in self.flat_grid: for im_h in ax.findobj(AxesImage): im_h.remove()
Clears all the axes to start fresh.
def resize(image, width=None, height=None, crop=False, namespace="resized"): return resize_lazy(image=image, width=width, height=height, crop=crop, namespace=namespace, as_url=True)
Returns the url of the resized image
def ShowNotifications(self, reset=True): shown_notifications = self.Schema.SHOWN_NOTIFICATIONS() pending = self.Get(self.Schema.PENDING_NOTIFICATIONS, []) for notification in pending: shown_notifications.Append(notification) notifications = self.Get(self.Schema.SHOWN_NOTIFICATIONS, []) for not...
A generator of current notifications.
def fetchText(cls, url, data, textSearch, optional): if textSearch: match = textSearch.search(data[0]) if match: text = match.group(1) out.debug(u'matched text %r with pattern %s' % (text, textSearch.pattern)) return unescape(text).strip() ...
Search text entry for given text pattern in a HTML page.
def parent_organisations(self): class ParentOrg: def __init__(self, sdo_id, org_id): self.sdo_id = sdo_id self.org_id = org_id with self._mutex: if not self._parent_orgs: for sdo in self._obj.get_organizations(): ...
The organisations this RTC belongs to.
def _check_error(response): if 'error' in response: raise InfluxDBError(response['error']) elif 'results' in response: for statement in response['results']: if 'error' in statement: msg = '{d[error]} (statement {d[statement_id]})' ...
Checks for JSON error messages and raises Python exception
def clean(self): if self.lookup == '?': return else: lookups = self.lookup.split(LOOKUP_SEP) opts = self.model_def.model_class()._meta valid = True while len(lookups): lookup = lookups.pop(0) try: ...
Make sure the lookup makes sense
def check_header_comment(filename): name = os.path.basename( filename ) sourcefile = open( filename, "rU" ) content = sourcefile.read() sourcefile.close() match = re.search(r'\$Id\$', content) if match == None: match = re.search(r'\$Id: ' + name + r'\s+[^$]+\$', content) if match...
Checks if the header-comment of the given file needs fixing.
def _required_attr(self, attr, key): assert isinstance(attr, dict) if key not in attr: raise AttributeError("Required attribute {} not found.".format(key)) return attr[key]
Wrapper for getting required attributes.
def _normalize_file_paths(self, *args): paths = [] for arg in args: if arg is None: continue elif self._is_valid_file(arg): paths.append(arg) elif isinstance(arg, list) and all(self._is_valid_file(_) for _ in arg): paths...
Returns all given configuration file paths as one list.
def page_not_found(request, template_name="errors/404.html"): context = { "STATIC_URL": settings.STATIC_URL, "request_path": request.path, } t = get_template(template_name) return HttpResponseNotFound(t.render(context, request))
Mimics Django's 404 handler but with a different template path.
def check_ipv4(ip_str):
    """Return True if ``ip_str`` is a valid IPv4 address, else False."""
    try:
        socket.inet_pton(socket.AF_INET, ip_str)
        return True
    except AttributeError:
        # inet_pton unavailable on this platform; fall back to inet_aton,
        # requiring the full dotted-quad form (three dots).
        try:
            socket.inet_aton(ip_str)
        except socket.error:
            return False
        return ip_str.count('.') == 3
    except socket.error:
        return False
Return True if is a valid IP v4
def node_to_object(self, node, object): "Map a single node to one object's attributes" attribute = self.to_lower(node.tag) attribute = "_yield" if attribute == "yield" else attribute try: valueString = node.text or "" value = float(valueString) except Valu...
Map a single node to one object's attributes
def watch(cams, path=None, delay=10): while True: for c in cams: c.snap(path) time.sleep(delay)
Get screenshots from all cams at defined intervall.
def hydrate_bundles(bundles_field, glob_match_error_behavior): path_globs_with_match_errors = [ pg.copy(glob_match_error_behavior=glob_match_error_behavior) for pg in bundles_field.path_globs_list ] snapshot_list = yield [Get(Snapshot, PathGlobs, pg) for pg in path_globs_with_match_errors] spec_path = b...
Given a BundlesField, request Snapshots for each of its filesets and create BundleAdaptors.
def _notify_fn(self): self._notifyrunning = True while self._notifyrunning: try: with IHCController._mutex: if self._newnotifyids: self.client.enable_runtime_notifications( self._newnotifyids) ...
The notify thread function.
def save_uca(self, rootpath, raw=False, as_int=False): self.save_array(self.uca, None, 'uca', rootpath, raw, as_int=as_int)
Saves the upstream contributing area to a file
def discovery_zookeeper(self): self.context.install_bundle("pelix.remote.discovery.zookeeper").start() with use_waiting_list(self.context) as ipopo: ipopo.add( rs.FACTORY_DISCOVERY_ZOOKEEPER, "pelix-discovery-zookeeper", { "...
Installs the ZooKeeper discovery bundles and instantiates components
def _category_slugs(self, category): key = self._category_key(category) slugs = self.r.smembers(key) return slugs
Returns a set of the metric slugs for the given category
def affected_files(self):
    """Return every file touched in this changeset (added, modified or deleted)."""
    added, modified, deleted = self._changes_cache
    return list(added | modified | deleted)
Gets fast-accessible file changes for the given changeset.
def subscribe(hub, callback_url, topic_url, lease_seconds, secret, endpoint_hook_data): for validate in hub.validators: error = validate(callback_url, topic_url, lease_seconds, secret, endpoint_hook_data) if error: send_denied(hub, callback_url, top...
5.2 Subscription Validation
def validate(self): if not isinstance(self.fold_scope_location, FoldScopeLocation): raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format( type(self.fold_scope_location), self.fold_scope_location)) allowed_block_types = (GremlinFoldedFilter, Gre...
Validate that the GremlinFoldedContextField is correctly representable.
def save_params(step_num, model, trainer, ckpt_dir): param_path = os.path.join(ckpt_dir, '%07d.params'%step_num) trainer_path = os.path.join(ckpt_dir, '%07d.states'%step_num) logging.info('[step %d] Saving checkpoints to %s, %s.', step_num, param_path, trainer_path) model.save_parameter...
Save the model parameter, marked by step_num.
def cache(self, private=False, max_age=31536000, s_maxage=None, no_cache=False, no_store=False, must_revalidate=False, **overrides): parts = ('private' if private else 'public', 'max-age={0}'.format(max_age), 's-maxage={0}'.format(s_maxage) if s_maxage is not None else None, no_ca...
Convenience method for quickly adding cache header to route
def render_config(config: Config, indent: str = "") -> str: new_indent = indent + " " return "".join([ "{\n", f'{new_indent}"type": "{config.typ3}",\n' if config.typ3 else '', "".join(_render(item, new_indent) for item in config.items), indent, "}\n...
Pretty-print a config in sort-of-JSON+comments.
def _compute_one_step(self, t, fields, pars): fields, pars = self._hook(t, fields, pars) self.dt = (self.tmax - t if self.tmax and (t + self.dt >= self.tmax) else self.dt) before_compute = time.process_time() t, fields = self._scheme(t, fields, self....
Compute one step of the simulation, then update the timers.
def send(self, **kwargs): return_full_object = kwargs.get('return_full_object', False) _verbose = kwargs.get('_verbose', False) traversal = kwargs.get('traversal', None) timeout = kwargs.get('_timeout', 5) self.output['url'] = self.render_url() with VerboseContextManager(...
Send the request defined by the data stored in the object.
def _valid_date(self): date = self._parse_date(self.date) if not date: exit_after_echo(INVALID_DATE) try: date = datetime.strptime(date, '%Y%m%d') except ValueError: exit_after_echo(INVALID_DATE) offset = date - datetime.today() if offs...
Check and return a valid query date.
def find_by_any(self, identifier, how): if "i" in how: match = self.find_by_id(identifier) if match: return match if "l" in how: match = self.find_by_localpath(identifier) if match: return match if "c" in how: ...
how should be a string with any or all of the characters "ilc"
def _check_valid(key, val, valid): if val not in valid: raise ValueError('%s must be one of %s, not "%s"' % (key, valid, val))
Helper to check valid options
def chunker(f, n):
    """Utility function to split iterable `f` into chunks of up to `n` items.

    Yields successive tuples of at most ``n`` consecutive items; the final
    chunk may be shorter.  Nothing is yielded for an empty input.
    """
    it = iter(f)
    chunk = []
    while True:
        if len(chunk) < n:
            try:
                # ``next(it)`` replaces the Python-2-only ``it.next()`` method
                # call, which raises AttributeError on Python 3 iterators.
                chunk.append(next(it))
            except StopIteration:
                if chunk:
                    yield tuple(chunk)
                break
        else:
            yield tuple(chunk)
            chunk = []
Utility function to split iterable `f` into `n` chunks
def _getEventsByWeek(self, request, year, month): return getAllEventsByWeek(request, year, month, home=self)
Return my child events for the given month grouped by week.
def draw_image(pixelmap, img): for item in pixelmap: color = item[2] pixelbox = (item[0][0], item[0][1], item[1][0], item[1][1]) draw = ImageDraw.Draw(img) draw.rectangle(pixelbox, fill=color)
Draws the image based on the given pixelmap.
def save_csv(p, sheet): 'Save as single CSV file, handling column names as first line.' with p.open_text(mode='w') as fp: cw = csv.writer(fp, **csvoptions()) colnames = [col.name for col in sheet.visibleCols] if ''.join(colnames): cw.writerow(colnames) for r in Progre...
Save as single CSV file, handling column names as first line.
def build_and_run(self, images): from harpoon.ship.builder import Builder Builder().make_image(self, images) try: Runner().run_container(self, images) except DockerAPIError as error: raise BadImage("Failed to start the container", error=error)
Make this image and run it
def total(self):
    """Returns sum of all counts in all features that are multisets.

    Sums the absolute value of every count in every feature multiset named
    by ``self._counters()``.
    """
    # Generator expressions replace ``imap``/``chain``: ``imap`` exists only
    # in Python 2's itertools, so the original body fails on Python 3.
    feature_multisets = (self[name] for name in self._counters())
    return sum(abs(count)
               for multiset in feature_multisets
               for count in multiset.values())
Returns sum of all counts in all features that are multisets.
def isEmpty(self):
    """Check if rectangle area is empty (zero width or zero height)."""
    has_width = self.x0 != self.x1
    has_height = self.y0 != self.y1
    return not (has_width and has_height)
Check if rectangle area is empty.
def addNode(self, node): self.msg(4, "addNode", node) try: self.graph.restore_node(node.graphident) except GraphError: self.graph.add_node(node.graphident, node)
Add a node to the graph referenced by the root