code
stringlengths
51
2.34k
docstring
stringlengths
11
171
async def send(self, *args, **kwargs):
    """Dispatch the given arguments to every registered receiver, in order."""
    if self.frozen:
        for callback in self:
            await callback(*args, **kwargs)
    else:
        # The signal must be frozen before it may be fired.
        raise RuntimeError("Cannot send non-frozen signal.")
Sends data to all registered receivers.
def main(directories): msg = 'Checking module "{}" from directory "{}" for coding errors.' api_checker = ApiChecker() resource_checker = ResourceChecker() errors = [] modules = [] for loader, mname, _ in pkgutil.walk_packages(directories): sys.path.append(os.path.abspath(loader.path)) ...
Perform all checks on the APIs contained in `directory`.
def _from_string(cls, serialized): course_key = CourseLocator._from_string(serialized) parsed_parts = cls.parse_url(serialized) block_id = parsed_parts.get('block_id', None) if block_id is None: raise InvalidKeyError(cls, serialized) return cls(course_key, parsed_part...
Requests CourseLocator to deserialize its part and then adds the local deserialization of block
def load_data_to_net(net, inst_net):
    """Copy 'nodes' and 'mat' from inst_net into net.dat, then convert mat to a numpy array."""
    for key in ('nodes', 'mat'):
        net.dat[key] = inst_net[key]
    # Delegate the mat -> numpy conversion to the shared helper.
    data_formats.mat_to_numpy_arr(net)
load data into nodes and mat, also convert mat to numpy array
def close(self, code: int = None, reason: str = None) -> None: if not self.server_terminated: if not self.stream.closed(): if code is None and reason is not None: code = 1000 if code is None: close_data = b"" els...
Closes the WebSocket connection.
def remove_checksum(path):
    """Remove the cached md5 checksum file for *path*, if one exists."""
    checksum_path = '{}.md5sum'.format(path)
    if not os.path.exists(checksum_path):
        return
    os.remove(checksum_path)
Remove the checksum of an image from cache if exists
def grow(self):
    """Add another worker to the pool and start it."""
    worker = self.worker_factory(self)
    worker.start()
    self._size += 1
Add another worker to the pool.
async def set_config(self, on=None, long=None, lat=None, sunriseoffset=None, sunsetoffset=None): data = { key: value for key, value in { 'on': on, 'long': long, 'lat': lat, 'sunriseoffset': sunriseoffset, ...
Change config of a Daylight sensor.
def _earth_orientation(date): ttt = date.change_scale('TT').julian_century s_prime = - 0.000047 * ttt return date.eop.x / 3600., date.eop.y / 3600., s_prime / 3600
Earth orientation parameters in degrees
def expanded_transform(self): segments = self._expand_transform(self.transform) if segments: segments[0]['datatype'] = self.valuetype_class for s in segments: s['column'] = self else: segments = [self.make_xform_seg(datatype=self.valuetype_clas...
Expands the transform string into segments
def callback(self, provider):
    """Handle a 3rd-party OAuth callback and process its data."""
    provider = self.get_provider(provider)
    try:
        handler = provider.authorized_handler(self.login)
        return handler(provider=provider)
    except OAuthException as ex:
        # Log the payload that triggered the failure, then propagate.
        logging.error("Data: %s", ex.data)
        raise
Handles 3rd party callback and processes its data
async def home_z(self, mount: top_types.Mount = None):
    """Home the two z-axes, or only the axis for *mount* when one is given."""
    if mount:
        axes = [Axis.by_mount(mount)]
    else:
        # No mount specified: home both z-axes.
        axes = [Axis.Z, Axis.A]
    await self.home(axes)
Home the two z-axes
def open(self, **kwargs):
    """Append an opening tag; reject deprecated tags in strict HTML mode."""
    parent = self.parent
    if self.tag in parent.twotags or self.tag in parent.onetags:
        self.render(self.tag, False, None, kwargs)
    elif self.mode == 'strict_html' and self.tag in parent.deptags:
        # Deprecated tags are hard errors when strict HTML was requested.
        raise DeprecationError(self.tag)
Append an opening tag.
def render_label(content, label_for=None, label_class=None, label_title=""):
    """Render an HTML <label> tag wrapping *content*, with optional attributes."""
    attrs = {}
    for attr_name, attr_value in (("for", label_for),
                                  ("class", label_class),
                                  ("title", label_title)):
        if attr_value:
            attrs[attr_name] = attr_value
    return render_tag("label", attrs=attrs, content=content)
Render a label with content
def draw_commands(self, surf): past_abilities = {act.ability for act in self._past_actions if act.ability} for y, cmd in enumerate(sorted(self._abilities( lambda c: c.name != "Smart"), key=lambda c: c.name), start=2): if self._queued_action and cmd == self._queued_action: color = colors.gr...
Draw the list of available commands.
def _update_clipper(self): if self.clip_children and self._clipper is None: self._clipper = Clipper() elif not self.clip_children: self._clipper = None if self._clipper is None: return self._clipper.rect = self.inner_rect self._clipper.transfor...
Called whenever the clipper for this widget may need to be updated.
def render_url(self):
    """Build the final URL from the template, replacements and query params."""
    rendered = self.url.format(**self.replacements)
    if not self.params:
        return rendered
    return rendered + '?' + urlencode(self.params)
Render the final URL based on available variables
def add_bgp_error_metadata(code, sub_code, def_desc='unknown'): if _EXCEPTION_REGISTRY.get((code, sub_code)) is not None: raise ValueError('BGPSException with code %d and sub-code %d ' 'already defined.' % (code, sub_code)) def decorator(subclass): if issubclass(subclass...
Decorator for all exceptions that want to set exception class meta-data.
def _combined_wildcards_iter(flatterm: Iterator[TermAtom]) -> Iterator[TermAtom]: last_wildcard = None for term in flatterm: if isinstance(term, Wildcard) and not isinstance(term, SymbolWildcard): if last_wildcard is not None: new_min_count = last_wildcard...
Combine consecutive wildcards in a flatterm into a single one.
def ConsultarTipoRetencion(self, sep="||"): "Consulta de tipos de Retenciones." ret = self.client.tipoRetencionConsultar( auth={ 'token': self.Token, 'sign': self.Sign, 'cuit': self.Cuit, }, )['ti...
Consulta de tipos de Retenciones.
def add_package(self, check_name, package): self._package_set.add(package) package_data = self._packages[package.name] self._checks_deps[check_name].append(package) if package.version: versions = package_data['versions'] versions[package.version].append(check_name...
Add a Package to the catalog for the given check
def _increment(self, n=1): if self._cur_position >= self.num_tokens-1: self._cur_positon = self.num_tokens - 1 self._finished = True else: self._cur_position += n
Move forward n tokens in the stream.
def search_browser(self, text): self.impl.get(self.base_url) search_div = self.impl.find_element_by_id("search") search_term = search_div.find_element_by_id("term") search_term.send_keys(text) search_div.find_element_by_id("submit").click() e = self.impl.find_element_by_css_selector("table.list ...
do a slow search via the website and return the first match
def pcap_name(self, devname):
    """Return the pcap device name for the given Windows device name.

    Raises ValueError when *devname* is not a known interface.
    """
    try:
        name = self.data[devname].pcap_name
    except KeyError:
        raise ValueError("Unknown network interface %r" % devname)
    return name
Return pcap device name for given Windows device name.
def op_symbol(op_node): ops = { ast.UAdd: '+', ast.USub: '-', ast.Add: '+', ast.Sub: '-', ast.Mult: '*', ast.Div: '/', ast.Mod: '%', ast.Eq: '=', ast.NotEq: '!=', ast.Lt: '<', ast.LtE: '<=', ast.Gt: '>', ast.GtE:...
Get the GLSL symbol for a Python operator.
def clear_caches(self):
    """Delete the memoised `cache_val` attribute on every model element."""
    for attr_name in dir(self.components):
        component = getattr(self.components, attr_name)
        if hasattr(component, 'cache_val'):
            delattr(component, 'cache_val')
Clears the Caches for all model elements
def revnet_base(): hparams = common_hparams.basic_params1() hparams.add_hparam('num_channels', [64, 128, 256, 416]) hparams.add_hparam('num_layers_per_block', [1, 1, 10, 1]) hparams.add_hparam('bottleneck', True) hparams.add_hparam('first_batch_norm', [False, True, True, True]) hparams.add_hparam('init_stri...
Default hparams for Revnet.
def to_unix_ts(start_time): if isinstance(start_time, datetime): if is_timezone_aware(start_time): start_time = start_time.astimezone(pytz.utc) else: log.warning( "Non timezone-aware datetime object passed to IncrementalEndpoint. " "The Zendesk...
Given a datetime object, returns its value as a unix timestamp
def setOutputObject(self, newOutput=None):
    """Set the object that receives all calcpkg output for this repository.

    Args:
        newOutput: destination output object; when omitted, a fresh
            ``output.CalcpkgOutput(True, True)`` is created for this call.
    """
    # BUG FIX: the default was `output.CalcpkgOutput(True, True)` evaluated
    # once at definition time, so all callers shared (and could mutate) the
    # same default output object. Use a None sentinel instead.
    if newOutput is None:
        newOutput = output.CalcpkgOutput(True, True)
    self.output = newOutput
Set an object where all output from calcpkg will be redirected to for this repository
def print_region_quota(access_token, sub_id, region): print(region + ':') quota = azurerm.get_compute_usage(access_token, sub_id, region) if SUMMARY is False: print(json.dumps(quota, sort_keys=False, indent=2, separators=(',', ': '))) try: for resource in quota['value']: if r...
Print the Compute usage quota for a specific region
def get(self, namespace, key):
    """Return one configuration item wrapped in the standard response shape."""
    config_item = self.dbconfig.get(key, namespace, as_object=True)
    payload = {'message': None, 'config': config_item}
    return self.make_response(payload)
Get a specific configuration item
def onchain_exchange(self, withdraw_crypto, withdraw_address, value, unit='satoshi'): self.onchain_rate = get_onchain_exchange_rates( self.crypto, withdraw_crypto, best=True, verbose=self.verbose ) exchange_rate = float(self.onchain_rate['rate']) result = self.onchain_rate['s...
This method is like `add_output` but it sends to another
def spec(self):
    """Build a SourceSpec describing this source (dict fields plus url)."""
    # Imported lazily to avoid a hard dependency at module import time.
    from ambry_sources.sources import SourceSpec

    kwargs = self.dict
    kwargs['url'] = self.url
    return SourceSpec(**kwargs)
Return a SourceSpec to describe this source
def bethe_fermi(energy, quasipart, shift, hopping, beta):
    """Bethe-lattice DOS weighted by the Fermi occupation factor."""
    occupation = fermi_dist(quasipart * energy - shift, beta)
    return occupation * bethe_lattice(energy, hopping)
product of the bethe lattice dos, fermi distribution
def _get_gcloud_sdk_credentials(): from google.auth import _cloud_sdk credentials_filename = ( _cloud_sdk.get_application_default_credentials_path()) if not os.path.isfile(credentials_filename): return None, None credentials, project_id = _load_credentials_from_file( credentials_...
Gets the credentials and project ID from the Cloud SDK.
def _postprocess(valuestr): intpattern = re.compile(r'^\-?\d+$') floatpattern = re.compile(r'^\-?\d+\.\d+(E[+-]?\d\d+)?$') datedtpattern = '%Y-%m-%d' datedttimepattern = '%Y-%m-%dT%H:%M:%SZ' timedtpattern = '%H:%M:%S.%f' timepattern = re.compile(r'^\d{2}:\d{2}:\d{2}(\.\d{6})?') if valuestr.s...
Takes value as str, returns str, int, float, date, datetime, or time
def _get_metadata(self): self.metadata = self.fetch_header() self.variables = {g.name for g in self.metadata.grids}
Get header information and store as metadata for the endpoint.
def read(self, _file):
    """Return the local file's contents as text, optionally logging the size."""
    with open(_file) as handle:
        contents = handle.read()
        if self.verbose:
            sys.stdout.write("read %d bytes from %s\n" % (handle.tell(), _file))
    return contents
return local file contents as endpoint.
def start_logger(self):
    """Enable the root logger and configure any extra loggers."""
    numeric_level = self.real_level(self.level)
    logging.basicConfig(level=numeric_level)
    self.set_logger(self.name, self.level)
    # Apply the dict-based configuration for additional loggers.
    config.dictConfig(self.config)
    self.logger = logging.getLogger(self.name)
Enables the root logger and configures extra loggers.
def _approximate_unkown_bond_lengths(self): dataset = self.lengths[BOND_SINGLE] for n1 in periodic.iter_numbers(): for n2 in periodic.iter_numbers(): if n1 <= n2: pair = frozenset([n1, n2]) atom1 = periodic[n1] atom2...
Completes the bond length database with approximations based on VDW radii
def status(self):
    """Return modified/added/removed/deleted files for the current changeset."""
    repo = self.repository._repo
    parent_node = self._ctx.p1().node()
    return repo.status(parent_node, self._ctx.node())
Returns modified, added, removed, deleted files for current changeset
def escape(self):
    """Escape self.obj for HTML interpolation using the instance table.

    '&' is substituted first so later replacements do not re-escape the
    ampersands introduced by their own entities.
    """
    table = self._html_escape_table
    self.obj = self.obj.replace("&", table["&"])
    for char, entity in table.items():
        # BUG FIX: the original used `k is not "&"` — identity comparison
        # against a string literal is implementation-dependent (and raises
        # a SyntaxWarning on modern CPython); compare by value instead.
        if char != "&":
            self.obj = self.obj.replace(char, entity)
    return self._wrap(self.obj)
Escape a string for HTML interpolation.
def write_long(self, n, pack=Struct('>I').pack):
    """Write *n* as an unsigned big-endian 32-bit value.

    Returns self so calls can be chained; raises ValueError when *n* is
    outside 0..0xFFFFFFFF.
    """
    if not 0 <= n <= 0xFFFFFFFF:
        # BUG FIX: the message and `n` were passed as two ValueError
        # arguments, so the %d placeholder was never interpolated.
        raise ValueError('Long %d out of range 0..0xFFFFFFFF' % n)
    self._output_buffer.extend(pack(n))
    return self
Write an integer as an unsigned 32-bit value.
def _get_default_annual_spacing(nyears): if nyears < 11: (min_spacing, maj_spacing) = (1, 1) elif nyears < 20: (min_spacing, maj_spacing) = (1, 2) elif nyears < 50: (min_spacing, maj_spacing) = (1, 5) elif nyears < 100: (min_spacing, maj_spacing) = (5, 10) elif nyears...
Returns a default spacing between consecutive ticks for annual data.
def split_by_idx(self, valid_idx:Collection[int])->'ItemLists':
    "Split the data according to the indexes in `valid_idx`."
    all_idx = arange_of(self.items)
    # Everything not selected for validation goes to the training split.
    train_idx = np.setdiff1d(all_idx, valid_idx)
    return self.split_by_idxs(train_idx, valid_idx)
Split the data according to the indexes in `valid_idx`.
def as_dict(self):
    """Return the public (non-underscore) instance attributes as a dict."""
    return {name: value
            for name, value in vars(self).items()
            if not name.startswith('_')}
Package up the public attributes as a dict.
def on_unicode_checkbox(self, w=None, state=False): logging.debug("unicode State is %s", state) self.controller.smooth_graph_mode = state if state: self.hline = urwid.AttrWrap( urwid.SolidFill(u'\N{LOWER ONE QUARTER BLOCK}'), 'line') else: self.hli...
Enable smooth edges if utf-8 is supported
def _get_num_interval(config, num_pre, num_post): post = int(num_post) if num_post else 0 pre = int(num_pre) if num_pre is not None else _get_last_snapshot(config)['id'] return pre, post
Returns numerical interval based on optionals num_pre, num_post values
def create( self, request, parent_lookup_seedteam=None, parent_lookup_seedteam__organization=None): team = self.check_team_permissions( request, parent_lookup_seedteam, parent_lookup_seedteam__organization) serializer = self.get_serializer(data=request.dat...
Add a permission to a team.
def _poll_trigger(self): try: os.write(self._poll_send, '\0'.encode('utf-8')) except Exception as e: log = self._getparam('log', self._discard) log.debug("Ignoring self-pipe write error -- %s", e)
Trigger activity for the caller by writing a NUL to the self-pipe.
def as_dict(self):
    """Return this node's children as a nested dict keyed by 'name value'."""
    child_dicts = [child.as_dict for child in self.children]
    key = '{0} {1}'.format(self.name, self.value)
    return {key: child_dicts}
Return all child objects in nested dict.
def from_name(cls, name):
    """Return the id of the vlan called *name*, or None when not found."""
    name_to_id = {vlan['name']: vlan['id'] for vlan in cls.list()}
    return name_to_id.get(name)
Retrieve vlan id associated to a name.
def change_column_length(table: Table, column: Column, length: int, engine: Engine) -> None: if column.type.length < length: print("Changing length of {} from {} to {}".format(column, column.type.length, length)) column.type.length = length column_name = column.name column_type = col...
Change the column length in the supplied table
def read_event(suppress=False):
    """Block until a keyboard event happens, then return that event."""
    queue = _queue.Queue(maxsize=1)
    hooked = hook(queue.put, suppress=suppress)
    # queue.get() blocks until the hook delivers exactly one event; the
    # original wrapped this in a `while True` that always returned on the
    # first iteration, so a single pass is equivalent.
    event = queue.get()
    unhook(hooked)
    return event
Blocks until a keyboard event happens, then returns that event.
def fetch_deposits(self, limit: int) -> List[Deposit]:
    """Return the latest deposits, capped at *limit* entries."""
    source = self._deposits
    return self._transactions(source, 'deposits', limit)
Fetch latest deposits, must provide a limit.
def createNew(cls, store, pathSegments): if isinstance(pathSegments, basestring): raise ValueError( 'Received %r instead of a sequence' % (pathSegments,)) if store.dbdir is None: self = cls(store=store, storepath=None) else: storepath = store.n...
Create a new SubStore, allocating a new file space for it.
def rdf_source(self, aformat="turtle"):
    """Serialize the graph in the requested format.

    Unsupported formats return an apology string; 'dot' is routed to the
    internal dot serializer.
    """
    if aformat and aformat not in self.SUPPORTED_FORMATS:
        return "Sorry. Allowed formats are %s" % str(self.SUPPORTED_FORMATS)
    if aformat == "dot":
        return self.__serializedDot()
    return self.rdflib_graph.serialize(format=aformat)
Serialize graph using the format required
def from_description(cls, description, attrs): hash_key = None range_key = None index_type = description["Projection"]["ProjectionType"] includes = description["Projection"].get("NonKeyAttributes") for data in description["KeySchema"]: name = data["AttributeName"] ...
Create an object from a dynamo3 response
def format_hsl(hsl_color):
    """Format an (hue, saturation, lightness) triple as a CSS hsl() string."""
    hue, saturation, lightness = hsl_color
    template = 'hsl({}, {:.2%}, {:.2%})'
    return template.format(hue, saturation, lightness)
Format hsl color as css color string.
def toggle_value(request, name): obj = service.system.namespace.get(name, None) if not obj or service.read_only: raise Http404 new_status = obj.status = not obj.status if service.redirect_from_setters: return HttpResponseRedirect(reverse('set_ready', args=(name, new_status))) else: ...
For manual shortcut links to perform toggle actions
def by_current_session(cls):
    """Return the user bound to the current session, or None without one."""
    active = Session.current_session()
    return None if active is None else cls.where_id(active.user_id)
Returns current user session
def disconnect(self):
    """Cut every wire established on this link."""
    for wire_name, _wire in self.wires():
        self.cut(wire_name, disconnect=True)
Cut all wires and disconnect all connections established on this link
def _walk(self, root_path='', root_id=''): title = '%s._walk' % self.__class__.__name__ if root_id: pass elif root_path: root_id, root_parent = self._get_id(root_path) for file_id, name, mimetype in self._list_directory(root_id): file_path = os.path.jo...
a generator method which walks the file structure of the dropbox collection
def _complete_exit(self, cmd, args, text): if args: return return [ x for x in { 'root', 'all', } \ if x.startswith(text) ]
Find candidates for the 'exit' command.
def numpyview(arr, datatype, shape, raw=False):
    """View a multiprocessing shared array as a numpy array of *shape*.

    With raw=True, *arr* is treated as a raw buffer; otherwise its
    underlying ctypes object (``get_obj()``) is used.
    """
    dtype = n.dtype(datatype)
    buffer = arr if raw else arr.get_obj()
    return n.frombuffer(buffer, dtype=dtype).view(dtype).reshape(shape)
Takes mp shared array and returns numpy array with given shape.
def main(ctx, opts, version):
    """Handle the CLI entrypoint: show the version, or help when no subcommand."""
    if version:
        print_version()
        return
    if ctx.invoked_subcommand is None:
        click.echo(ctx.get_help())
Handle entrypoint to CLI.
def __create_output_dir(base_dir): root, tail = os.path.split(base_dir) dir = None if tail: if base_dir.endswith('/'): dir = base_dir else: dir = root else: if base_dir.endswith('/'): dir = base_dir if dir and not os.path.isdir(dir): ...
Ensure that the output directory base_dir exists.
def check_spinning_children(self): child_jobs = self.task_master.get_child_work_units(self.worker_id) now = time.time() + self.stop_jobs_early for child, wul in child_jobs.iteritems(): if not isinstance(wul, (list, tuple)): wul = [wul] if not wul: ...
Stop children that are working on overdue jobs.
def vdp_vlan_change_cb(self, port_uuid, lvid, vdp_vlan, fail_reason): LOG.info("Vlan change CB lvid %(lvid)s VDP %(vdp)s", {'lvid': lvid, 'vdp': vdp_vlan}) self.update_vm_result(port_uuid, constants.RESULT_SUCCESS, lvid=lvid, vdp_vlan=vdp_vlan, ...
Callback function for updating the VDP VLAN in DB.
def write_batch_json(self, content):
    """Dump *content* as JSON into a uniquely named batch file in temp space."""
    # A dot-free timestamp keeps file names unique per call.
    stamp = str(time.time()).replace('.', '')
    target = os.path.join(
        self.tcex.args.tc_temp_path, 'batch-{}.json'.format(stamp)
    )
    with open(target, 'w') as handle:
        json.dump(content, handle, indent=2)
Write batch json data to a file.
def rd_files(self, study_fn, pop_fn):
    """Read the study and population gene sets and return them."""
    study, population = self._read_geneset(study_fn, pop_fn)
    print("Study: {0} vs. Population {1}\n".format(len(study), len(population)))
    return study, population
Read files and return study and population.
def journals(self): try: target = self._item_path json_data = self._redmine.get(target % str(self.id), parms={'include': 'journals'}) data = self._redmine.unwrap_json(None, json_data) journals = [Journal(redmine=self._redm...
Retrieve journals attribute for this very Issue
def _split_tidy(self, string, maxsplit=None): if maxsplit is None: return string.rstrip("\n").split("\t") else: return string.rstrip("\n").split("\t", maxsplit)
Rstrips string for \n and splits string for \t
def allReadGroups(self):
    """Iterate over every read group in every read-group set of every dataset."""
    for dataset in self.getDatasets():
        for group_set in dataset.getReadGroupSets():
            yield from group_set.getReadGroups()
Return an iterator over all read groups in the data repo
def load(self, value): if self.property_type is None: return value elif not isinstance(self.property_type, BaseType): raise TypeError('property_type must be schematics BaseType') else: native_value = self.property_type.to_native(value) self.propert...
Load a value, converting it to the proper type if validation_type exists.
def current_index(self):
    """Map the parent view's current index back to the source model."""
    parent = self._parent
    return parent.proxy_model.mapToSource(parent.currentIndex())
Get the currently selected index in the parent table view.
def console(cls, id): oper = cls.call('paas.update', cls.usable_id(id), {'console': 1}) cls.echo('Activation of the console on your PaaS instance') cls.display_progress(oper) console_url = Paas.info(cls.usable_id(id))['console'] access = 'ssh %s' % console_url cls.execute...
Open a console to a PaaS instance.
def union(*args):
    """Return the union of the given lists, ordered by first appearance.

    BUG FIX: the original extended ``args[0]`` in place, mutating the
    caller's first list as a side effect; a fresh list is built instead.
    """
    merged = []
    for seq in args:
        merged.extend(seq)
    # OrderedDict.fromkeys de-duplicates while preserving first-seen order.
    return list(OrderedDict.fromkeys(merged))
Return the union of lists, ordering by first seen in any list
def main(host, port, timeout, itimeout, qsize, backlog, maxtry, bsize, verbose, logfile=None, logcfgfile=None, cfgfile=None): dict_cfg = {} if cfgfile: dict_cfg = app_config.get_config_by_file(cfgfile) def get_param(key, param, default): return param or dict_cfg.get(key, None) or default ...
Simple python implementation of a socks5 proxy server.
def _get_magnitude_term(self, C, mag): if mag >= self.CONSTS["Mh"]: return C["e1"] + C["b3"] * (mag - self.CONSTS["Mh"]) else: return C["e1"] + (C["b1"] * (mag - self.CONSTS["Mh"])) +\ (C["b2"] * (mag - self.CONSTS["Mh"]) ** 2.)
Returns the magnitude scaling term - equation 3
def freeze_js(html): matches = js_src_pattern.finditer(html) if not matches: return html for match in reversed(tuple(matches)): file_name = match.group(1) file_path = os.path.join(js_files_path, file_name) with open(file_path, "r", encoding="utf-8") as f: file_con...
Freeze all JS assets to the rendered html itself.
def send(term, stream):
    """Write an Erlang term to *stream*, prefixed by a big-endian u32 length."""
    payload = erlang.term_to_binary(term)
    stream.write(struct.pack('!I', len(payload)))
    stream.write(payload)
    stream.flush()
Write an Erlang term to an output stream.
def extract_full_summary_from_signature(operation): lines = inspect.getdoc(operation) regex = r'\s*(:param)\s+(.+?)\s*:(.*)' summary = '' if lines: match = re.search(regex, lines) summary = lines[:match.regs[0][0]] if match else lines summary = summary.replace('\n', ' ').replace('\r'...
Extract the summary from the docstring of the command.
def getParameters(self):
    """Return all declared parameters as an EntityMap of Parameter objects."""
    # The underlying call is guarded by the instance lock.
    raw = lock_and_call(lambda: self._impl.getParameters(), self._lock)
    return EntityMap(raw, Parameter)
Get all the parameters declared.
def _color_attr(self, ground, attr):
    """Update the cursor's fore- or background colour from the colour table."""
    resolved = colors[ground][attr]
    current = self.cursor_attributes
    if ground == "foreground":
        self.cursor_attributes = (current[0], resolved, current[2])
    elif ground == "background":
        self.cursor_attributes = (current[0], current[1], resolved)
Given a color attribute, set the current cursor appropriately.
def window_specialization(typename):
    """Build the Cython Extension for one AdjustedArrayWindow specialization."""
    module = 'zipline.lib._{name}window'.format(name=typename)
    source = 'zipline/lib/_{name}window.pyx'.format(name=typename)
    return Extension(
        module,
        [source],
        depends=['zipline/lib/_windowtemplate.pxi'],
    )
Make an extension for an AdjustedArrayWindow specialization.
def send_mfg_inspector_data(inspector_proto, credentials, destination_url): envelope = guzzle_pb2.TestRunEnvelope() envelope.payload = zlib.compress(inspector_proto.SerializeToString()) envelope.payload_type = guzzle_pb2.COMPRESSED_MFG_EVENT envelope_data = envelope.SerializeToString() for _ in xrange(5): ...
Upload MfgEvent to steam_engine.
async def fetch_status(self, request): response = await self._query_validator( Message.CLIENT_STATUS_GET_REQUEST, client_status_pb2.ClientStatusGetResponse, client_status_pb2.ClientStatusGetRequest()) return self._wrap_response( request, data={...
Fetches information pertaining to the validator's status.
def _build_models_query(self, query): registered_models_ct = self.build_models_list() if registered_models_ct: restrictions = [xapian.Query('%s%s' % (TERM_PREFIXES[DJANGO_CT], model_ct)) for model_ct in registered_models_ct] limit_query = xapian.Query(...
Builds a query from `query` that filters to documents only from registered models.
def calculate_response(self, challenge, password):
    """Compute the '<challenge>-<md5>' response for challenge-response auth."""
    # The device hashes the UTF-16LE encoding of "<challenge>-<password>".
    raw = (challenge + "-" + password).encode("UTF-16LE")
    digest = hashlib.md5(raw).hexdigest()
    return "{0}-{1}".format(challenge, digest)
Calculate response for the challenge-response authentication
def offset(self, offset):
    """Return a clone configured to fetch results after *offset*.

    Non-int values are silently ignored (the clone keeps its defaults).
    """
    duplicate = self._clone()
    if isinstance(offset, int):
        duplicate._offset = offset
    return duplicate
Fetch results after `offset` value
def on_backward_end(self, **kwargs):
    """Clip the gradient norm before the optimizer step, when clipping is set."""
    if not self.clip:
        return
    nn.utils.clip_grad_norm_(self.learn.model.parameters(), self.clip)
Clip the gradient before the optimizer step.
def evalsha(self, digest, keys=None, args=None):
    """Execute a cached Lua script server side by its SHA1 digest (EVALSHA).

    Args:
        digest: SHA1 of a script previously loaded with SCRIPT LOAD.
        keys: key names the script accesses (KEYS[...]); default none.
        args: additional script arguments (ARGV[...]); default none.
    """
    # FIX (anti-pattern): mutable default arguments ([]) are created once
    # at definition time and shared across calls; use None sentinels. As a
    # bonus, any iterable is now accepted for keys/args.
    keys = list(keys) if keys else []
    args = list(args) if args else []
    return self.execute(b'EVALSHA', digest, len(keys), *(keys + args))
Execute a Lua script server side by its SHA1 digest.
def _put_bucket_policy(self): if self.s3props['bucket_policy']: policy_str = json.dumps(self.s3props['bucket_policy']) _response = self.s3client.put_bucket_policy(Bucket=self.bucket, Policy=policy_str) else: _response = self.s3client.delete_bucket_policy(Bucket=self.b...
Attach a bucket policy to app bucket.
def _raise_error_if_column_exists(dataset, column_name = 'dataset', dataset_variable_name = 'dataset', column_name_error_message_name = 'column_name'): err_msg = 'The SFrame {0} must contain the column {1}.'.format( ...
Check if a column exists in an SFrame with error message.
def run(command, **kw): if sys.platform == 'win32': _cwd = kw.get('cwd', None) if _cwd is not None: kw['cwd'] = _cwd.decode() try: return check_output(command, **kw).splitlines() except CalledProcessError: return () except FileNotFoundError: print("The...
Run `command`, catch any exception, and return lines of output.
def clear_db():
    """Delete every key in the database, scanning in batches of 5000."""
    # Redis SCAN returns cursor 0 when the iteration is complete; the
    # string sentinel '0' never equals the integer 0, so the loop always
    # runs at least once.
    cursor = '0'
    while cursor != 0:
        cursor, keys = DB.scan(cursor, match='*', count=5000)
        if keys:
            DB.delete(*keys)
Clear the entire db.
def update(self, device_json=None, info_json=None, settings_json=None, avatar_json=None): if device_json: UTILS.update(self._device_json, device_json) if avatar_json: UTILS.update(self._avatar_json, avatar_json) if info_json: UTILS.update(self._...
Update the internal device json data.
def add(self, value):
    """Insert *value* (a path-like) into the tree, one node per path part."""
    path = value if isinstance(value, Path) else Path(str(value))
    # Skip empty paths and bare roots (a root is its own parent).
    if not path or path == path.parent:
        return
    node = self
    for part in path.parts:
        node = node.setdefault(part, DirectoryTree())
Create a safe directory from a value.
def report(self, req_handler): "Send a response corresponding to this error to the client" if self.exc: req_handler.send_exception(self.code, self.exc, self.headers) return text = (self.text or BaseHTTPRequestHandler.responses[self.code][1] ...
Send a response corresponding to this error to the client