code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def _update_access_key_pair(self, access_key_id, key, val):
    """Set a single field on an access key and push the full payload back (DRY helper)."""
    existing = self.get_access_key(access_key_id)
    payload = KeenApi._build_access_key_dict(existing)
    payload[key] = val
    return self.update_access_key_full(access_key_id, **payload)
Helper for updating access keys in a DRY fashion.
def fields(self) -> GraphQLInputFieldMap:
    """Get provided fields, wrapping plain input types as GraphQLInputField."""
    try:
        fields = resolve_thunk(self._fields)
    except GraphQLError:
        # GraphQL errors pass through untouched.
        raise
    except Exception as error:
        raise TypeError(f"{self.name} fields cannot be resolved: {error}")
    if not isinstance(fields, dict) or not all(
        isinstance(key, str) for key in fields
    ):
        raise TypeError(
            f"{self.name} fields must be a dict with field names as keys"
            " or a function which returns such an object."
        )
    if not all(
        isinstance(value, GraphQLInputField) or is_input_type(value)
        for value in fields.values()
    ):
        raise TypeError(
            f"{self.name} fields must be"
            " GraphQLInputField or input type objects."
        )
    # Normalize: bare input types get wrapped so callers always see fields.
    return {
        name: value
        if isinstance(value, GraphQLInputField)
        else GraphQLInputField(value)
        for name, value in fields.items()
    }
Get provided fields, wrap them as GraphQLInputField if needed.
def _on_scan(_loop, adapter, _adapter_id, info, expiration_time): info['validity_period'] = expiration_time adapter.notify_event_nowait(info.get('connection_string'), 'device_seen', info)
Callback when a new device is seen.
def cleanup_on_delete(self, sender, document, **kwargs):
    """Drop slug redirections pointing at *document* once it is deleted."""
    if not self.follow:
        return
    if sender is not self.owner_document:
        return
    slug = getattr(document, self.db_field)
    SlugFollow.objects(namespace=self.owner_document.__name__, new_slug=slug).delete()
Clean up slug redirections on object deletion
def _translate_timeperiod(self, timeperiod):
    """Translate *timeperiod* to its grouped timeperiod.

    Depending on the time qualifier, the hour/day/month component is replaced
    by the group "stem" computed by _do_stem_grouping.
    """
    if self.time_grouping == 1:
        # A grouping of 1 means "no grouping": pass the timeperiod through.
        return timeperiod
    year, month, day, hour = time_helper.tokenize_timeperiod(timeperiod)
    if self.time_qualifier == QUALIFIER_HOURLY:
        stem = self._do_stem_grouping(timeperiod, int(hour))
        result = '{0}{1}{2}{3:02d}'.format(year, month, day, stem)
    elif self.time_qualifier == QUALIFIER_DAILY:
        stem = self._do_stem_grouping(timeperiod, int(day))
        result = '{0}{1}{2:02d}{3}'.format(year, month, stem, hour)
    else:
        # Presumably the monthly qualifier - TODO confirm no other qualifiers reach here.
        stem = self._do_stem_grouping(timeperiod, int(month))
        result = '{0}{1:02d}{2}{3}'.format(year, stem, day, hour)
    return result
method translates given timeperiod to the grouped timeperiod
def _init_date_range(self, start_date=None, end_date=None):
    """Set date-range defaults when no dates are passed.

    The end date defaults to the event's end (capped at today, UTC); the start
    date defaults to ``default_report_interval`` days before the end date.
    """
    self.end_date = end_date
    self.start_date = start_date
    if self.end_date is None:
        today = now_utc().date()
        end_date = self.event.end_dt.date()
        # Never report past today, even if the event ends in the future.
        self.end_date = end_date if end_date < today else today
    if self.start_date is None:
        self.start_date = self.end_date - timedelta(days=ReportBase.default_report_interval)
Set date range defaults if no dates are passed
def execute(self, method, args, ref):
    """Execute *method* with *args*, returning a result/error/ref envelope."""
    response = {'result': None, 'error': None, 'ref': ref}
    fun = self.methods.get(method)
    if not fun:
        response['error'] = 'Method `{}` not found'.format(method)
        return response
    try:
        response['result'] = fun(*args)
    except Exception as exception:
        # Errors are reported in the envelope, never raised to the caller.
        logging.error(exception, exc_info=1)
        response['error'] = str(exception)
    return response
Execute the method with args
def cublasZtrsm(handle, side, uplo, transa, diag, m, n, alpha, A, lda, B, ldb):
    """Solve a complex triangular system with multiple right-hand sides.

    Thin ctypes wrapper over cublasZtrsm_v2; raises via cublasCheckStatus on
    a non-success status.
    """
    status = _libcublas.cublasZtrsm_v2(handle,
                                       _CUBLAS_SIDE_MODE[side],
                                       _CUBLAS_FILL_MODE[uplo],
                                       # BUG FIX: was `trans` (undefined name,
                                       # NameError at call time); the parameter
                                       # is `transa`.
                                       _CUBLAS_OP[transa],
                                       _CUBLAS_DIAG[diag], m, n,
                                       ctypes.byref(cuda.cuDoubleComplex(alpha.real,
                                                                         alpha.imag)),
                                       int(A), lda, int(B), ldb)
    cublasCheckStatus(status)
Solve complex triangular system with multiple right-hand sides.
def drawCurve(self, p1, p2, p3):
    """Draw a curve through three points, deriving bezier handles from the middle control point."""
    kappa = 0.55228474983
    start, control, end = Point(p1), Point(p2), Point(p3)
    handle1 = start + (control - start) * kappa
    handle2 = end + (control - end) * kappa
    return self.drawBezier(start, handle1, handle2, end)
Draw a curve between points using one control point.
def init_report(self, reporter=None):
    """Initialize and return the report instance, defaulting to the configured reporter."""
    factory = reporter or self.options.reporter
    self.options.report = factory(self.options)
    return self.options.report
Initialize the report instance.
def url_join(base, *args):
    """Join an arbitrary number of URL segments onto *base*, preserving query/fragment."""
    scheme, netloc, path, query, fragment = urlsplit(base)
    if not path:
        path = "/"
    segments = ['%s' % segment for segment in args]
    path = posixpath.join(path, *segments)
    return urlunsplit([scheme, netloc, path, query, fragment])
Helper function to join an arbitrary number of url segments together.
def emailComment(comment, obj, request):
    """Email the author of *obj* about a new comment, honouring their prefs."""
    # Respect the author's opt-out preference.
    if not obj.author.frog_prefs.get().json()['emailComments']:
        return
    # Don't notify authors about their own comments.
    if obj.author == request.user:
        return
    html = render_to_string('frog/comment_email.html', {
        'user': comment.user,
        'comment': comment.comment,
        'object': obj,
        'action_type': 'commented on',
        'image': isinstance(obj, Image),
        'SITE_URL': FROG_SITE_URL,
    })
    subject = '{}: Comment from {}'.format(getSiteConfig()['name'], comment.user_name)
    fromemail = comment.user_email
    to = obj.author.email
    text_content = 'This is an important message.'
    html_content = html
    send_mail(subject, text_content, fromemail, [to], html_message=html_content)
Send an email to the author about a new comment
def excel_key(index):
    """Create an Excel-style column key for *index* (0 -> 'A', 26 -> 'AA'), base-26 with A-Z."""
    n = int(index)
    letters = []
    while n >= 0:
        letters.append(chr(65 + n % 26))
        n = n // 26 - 1
    return ''.join(reversed(letters))
create a key for index by converting index into a base-26 number, using A-Z as the characters.
def OnTextFont(self, event):
    """Text font choice event handler: relay the chosen font via FontMsg."""
    fontchoice_combobox = event.GetEventObject()
    idx = event.GetInt()
    try:
        font_string = fontchoice_combobox.GetString(idx)
    except AttributeError:
        # Event source is not a combobox; fall back to the event's own string.
        font_string = event.GetString()
    post_command_event(self, self.FontMsg, font=font_string)
Text font choice event handler
def _GetMetric(self, metric_name): if metric_name in self._counter_metrics: return self._counter_metrics[metric_name] elif metric_name in self._event_metrics: return self._event_metrics[metric_name] elif metric_name in self._gauge_metrics: return self._gauge_metrics[metric_name] else: raise ValueError("Metric %s is not registered." % metric_name)
Fetches the metric object corresponding to the given name.
def SaveData(self, raw_data):
    """Store *raw_data* as our configuration file.

    Raises IOError when no filename is configured; write failures are only
    logged as warnings (best-effort persistence).
    """
    if self.filename is None:
        raise IOError("Unknown filename")
    logging.info("Writing back configuration to file %s", self.filename)
    # Ensure the parent directory exists; failures here will surface via open().
    try:
        os.makedirs(os.path.dirname(self.filename))
    except (IOError, OSError):
        pass
    try:
        # 0o600: configuration may hold secrets, keep it owner-readable only.
        mode = os.O_WRONLY | os.O_CREAT | os.O_TRUNC
        fd = os.open(self.filename, mode, 0o600)
        with os.fdopen(fd, "wb") as config_file:
            self.SaveDataToFD(raw_data, config_file)
    except OSError as e:
        logging.warning("Unable to write config file %s: %s.", self.filename, e)
Store the raw data as our configuration.
def focus_last_reply(self):
    """Move focus to the last reply of the currently focused message."""
    mid = self.get_selected_mid()
    newpos = self._tree.last_child_position(mid)
    if newpos is not None:
        # Body positions are tuples; sanitize before handing to the widget.
        newpos = self._sanitize_position((newpos,))
        self.body.set_focus(newpos)
Move focus to the last reply to the currently focused message.
def check_dependencies():
    # Make sure virtualenv is in the path, installing it via easy_install
    # when possible. NOTE: Python 2 code (print statements) - kept as-is.
    print 'Checking dependencies...'
    if not HAS_VIRTUALENV:
        print 'Virtual environment not found.'
        # virtualenv: list it as a dependency in setup.py?
        if HAS_EASY_INSTALL:
            print 'Installing virtualenv via easy_install...',
            run_command(['easy_install', 'virtualenv'],
                        die_message='easy_install failed to install virtualenv'
                                    '\ndevelopment requires virtualenv, please'
                                    ' install it using your favorite tool')
            if not run_command(['which', 'virtualenv']):
                die('ERROR: virtualenv not found in path.\n\ndevelopment '
                    ' requires virtualenv, please install it using your'
                    ' favorite package management tool and ensure'
                    ' virtualenv is in your path')
            print 'virtualenv installation done.'
        else:
            die('easy_install not found.\n\nInstall easy_install'
                ' (python-setuptools in ubuntu) or virtualenv by hand,'
                ' then rerun.')
    print 'dependency check done.'
Make sure virtualenv is in the path.
def get(cls, filter=None, **kwargs):
    """Return a Document when the filter matches one, otherwise None."""
    document = cls(cls.find_one(filter, **kwargs))
    if document.document:
        return document
    return None
Returns a Document if any document is filtered, returns None otherwise
def update_hparams_from_hparams(target_hparams, source_hparams, prefix):
    """Copy hparams whose names start with *prefix* into target (prefix stripped)."""
    for (param_name, param_value) in six.iteritems(source_hparams.values()):
        if param_name.startswith(prefix):
            target_hparams.set_hparam(param_name[len(prefix):], param_value)
Copy a subset of hparams to target_hparams.
def _initialize_trunk_interfaces_to_none(self, switch_ip, replay=True):
    """Initialize all nexus interfaces on *switch_ip* to trunk-allowed none."""
    try:
        switch_ifs = self._mdriver._get_switch_interfaces(
            switch_ip, cfg_only=(False if replay else True))
        if not switch_ifs:
            LOG.debug("Skipping switch %s which has no configured "
                      "interfaces", switch_ip)
            return
        self._driver.initialize_all_switch_interfaces(
            switch_ifs, switch_ip)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.warning("Unable to initialize interfaces to "
                        "switch %(switch_ip)s", {'switch_ip': switch_ip})
            self._mdriver.register_switch_as_inactive(switch_ip,
                                                      'replay init_interface')
            # NOTE(review): the early return inside save_and_reraise_exception
            # still triggers the re-raise on context exit - confirm intent.
            if self._mdriver.is_replay_enabled():
                return
Initialize all nexus interfaces to trunk allowed none.
def release_subnet(self, cidr, direc):
    """Release *cidr* back to the in- or out-direction subnet pool."""
    pool = self.service_in_ip if direc == 'in' else self.service_out_ip
    pool.release_subnet(cidr)
Routine to release a subnet from the DB.
def _check_asset_node_def(node_def):
    """Raise TypeError unless *node_def* is a scalar string Const node."""
    if node_def.op != "Const":
        raise TypeError("Asset node must be of type constant.")
    if tf.as_dtype(node_def.attr["dtype"].type) != tf.string:
        raise TypeError("Asset node must be of dtype string.")
    # A scalar string tensor carries exactly one string value.
    if len(node_def.attr["value"].tensor.string_val) != 1:
        raise TypeError("Asset node must be a scalar.")
Raises TypeError if `node_def` does not match the expectations.
def _readData(self, id3, data):
    """Parse frame specs from *data*; raises ID3JunkFrameError; returns leftover data."""
    # Required specs: every one must parse or the frame is junk.
    for reader in self._framespec:
        if len(data) or reader.handle_nodata:
            try:
                value, data = reader.read(id3, self, data)
            except SpecError as e:
                raise ID3JunkFrameError(e)
        else:
            raise ID3JunkFrameError("no data left")
        self._setattr(reader.name, value)
    # Optional specs: stop quietly at the first one with no data left.
    for reader in self._optionalspec:
        if len(data) or reader.handle_nodata:
            try:
                value, data = reader.read(id3, self, data)
            except SpecError as e:
                raise ID3JunkFrameError(e)
        else:
            break
        self._setattr(reader.name, value)
    return data
Raises ID3JunkFrameError; Returns leftover data
def ensure_directory(directory):
    """Create all missing directories along *directory* (tilde-expanded).

    An already-existing directory is not an error; any other OSError
    propagates to the caller.
    """
    directory = os.path.expanduser(directory)
    try:
        os.makedirs(directory)
    except OSError as e:
        if e.errno != errno.EEXIST:
            # Bare `raise` keeps the original traceback; `raise e` would reset it.
            raise
Create the directories along the provided directory path that do not exist.
def cli(env):
    """List routers and rules available for creating a placement group."""
    manager = PlacementManager(env.client)
    routers = manager.get_routers()
    env.fout(get_router_table(routers))
    rules = manager.get_all_rules()
    env.fout(get_rule_table(rules))
List options for creating a placement group.
def check_dependee_exists(self, depender, dependee, dependee_id):
    """Return an error message when a depended-on module is missing, else ''."""
    shutit_global.shutit_global_object.yield_to_draw()
    if dependee is None:
        return 'module: \n\n' + dependee_id + '\n\nnot found in paths: ' + str(self.host['shutit_module_path']) + ' but needed for ' + depender.module_id + '\nCheck your --shutit_module_path setting and ensure that all modules configured to be built are in that path setting, eg "--shutit_module_path /path/to/other/module/:."\n\nAlso check that the module is configured to be built with the correct module id in that module\'s configs/build.cnf file.\n\nSee also help.'
    return ''
Checks whether a depended-on module is available.
def _drop_ignored(gold, pred, ignore_in_gold, ignore_in_pred): keepers = np.ones_like(gold).astype(bool) for x in ignore_in_gold: keepers *= np.where(gold != x, 1, 0).astype(bool) for x in ignore_in_pred: keepers *= np.where(pred != x, 1, 0).astype(bool) gold = gold[keepers] pred = pred[keepers] return gold, pred
Remove from gold and pred all items with labels designated to ignore.
def parse_compounds(self):
    """Yield CompoundEntries for compounds defined in the model, if any."""
    if 'compounds' not in self._model:
        return
    yield from parse_compound_list(self._context, self._model['compounds'])
Yield CompoundEntries for defined compounds
def configured_class(cls):
    """Return the currently configured implementation class, resolving it lazily."""
    base = cls.configurable_base()
    # The name-mangled attribute lives on the base class so every subclass
    # shares the same configured implementation.
    if base.__dict__.get('_Configurable__impl_class') is None:
        base.__impl_class = cls.configurable_default()
    return base.__impl_class
Returns the currently configured class.
def format_item(self, item):
    """Construct the result dictionary (id/value/label) for the match item."""
    result = {
        'id': self.get_item_id(item),
        'value': self.get_item_value(item),
        'label': self.get_item_label(item),
    }
    # HTML-escape the configured keys so results are safe to render.
    for key in settings.SELECTABLE_ESCAPED_KEYS:
        if key in result:
            result[key] = conditional_escape(result[key])
    return result
Construct result dictionary for the match item.
def hex_to_rgb(self, h):
    """Convert a valid hex color string into an (r, g, b) tuple."""
    return (self.hex_to_red(h), self.hex_to_green(h), self.hex_to_blue(h))
Converts a valid hex color string to an RGB array.
def authentications_spec(self):
    """Spec for a named group of authentication options (reading/writing)."""
    # Comma-first layout matches the surrounding option_spec style.
    return container_spec(authentication_objs.Authentication
        , dictof(string_spec(), set_options(
              reading = optional_spec(authentication_spec())
            , writing = optional_spec(authentication_spec())
            )
          )
        )
Spec for a group of authentication options
def cmd_ppp(self, args):
    """Set ppp parameters and start/stop the link (also supports 'status')."""
    usage = "ppp <command|start|stop>"
    if len(args) == 0:
        print(usage)
        return
    if args[0] == "command":
        if len(args) == 1:
            # No value given: show the current command.
            print("ppp.command=%s" % " ".join(self.command))
        else:
            self.command = args[1:]
    elif args[0] == "start":
        self.start_ppp_link()
    elif args[0] == "stop":
        self.stop_ppp_link()
    elif args[0] == "status":
        self.console.writeln("%u packets %u bytes" % (self.packet_count, self.byte_count))
set ppp parameters and start link
def normalize_role(role, config):
    """Normalize a role name by stripping SCM/galaxy prefixes or dashes."""
    scm_prefix = config["scm_repo_prefix"]
    if role.startswith(scm_prefix):
        return role.replace(scm_prefix, "")
    if "." in role:
        return role.replace("{0}.".format(config["scm_user"]), "")
    if "-" in role:
        return role.replace("-", "_")
    return role
Normalize a role name.
def require_isis_version(major, minor=0, patch=0):
    """Decorator ensuring a minimum ISIS version before each wrapped call."""
    def decorator(fn):
        @wraps(fn)
        def wrapper(*args, **kwargs):
            # Version is checked at call time, not at decoration time.
            check_isis_version(major, minor, patch)
            return fn(*args, **kwargs)
        return wrapper
    return decorator
Decorator that ensures a function is called with a minimum isis version.
def cancel_pending_tasks(self):
    """Cancel all pending tasks, draining them when the loop is not running."""
    for task in self._pending_tasks:
        task.cancel()
        # NOTE(review): nesting reconstructed - the drain appears to run per
        # task when the loop is stopped; confirm against the original layout.
        if not self._loop.is_running():
            try:
                self._loop.run_until_complete(task)
            except asyncio.CancelledError:
                pass
            except Exception:
                _LOGGER.error("Unhandled exception from async task", exc_info=True)
Cancel all pending tasks.
def update_domain_smarthost(self, domainid, serverid, data):
    """Update a smarthost attached to a domain via the API."""
    params = dict(domainid=domainid, serverid=serverid)
    return self.api_call(ENDPOINTS['domainsmarthosts']['update'], params, body=data)
Update a domain smarthost
def unpack_zipfile(filename):
    """Unpack a zipfile into the current directory, using the archived names."""
    # Let ZipFile own and close the underlying file handle; the original
    # opened the file manually and never closed the ZipFile object itself.
    with zipfile.ZipFile(filename) as z:
        for name in z.namelist():
            print((" extracting {}".format(name)))
            ensure_dirs(name)
            z.extract(name)
Unpack a zipfile, using the names in the zip.
def uint32_to_uint8(cls, img):
    """Reinterpret a uint32 RGB image as 4 uint8 channels, flipped vertically."""
    channels = img.view(dtype=np.uint8).reshape(img.shape + (4,))
    return np.flipud(channels)
Cast uint32 RGB image to 4 uint8 channels.
def to_dict(self):
    """Extract the content's data and return it as a dictionary."""
    data = self.extract_fields()
    for key, attr in self.attributes.iteritems():
        if key in self.ignore:
            continue
        value = getattr(self.context, attr, None)
        if value is None:
            # Fall back to an accessor defined on this adapter itself.
            value = getattr(self, attr, None)
        if callable(value):
            value = value()
        data[key] = api.to_json_value(self.context, key, value)
    return data
extract the data of the content and return it as a dictionary
def fromimporterror(cls, bundle, importerid, rsid, exception, endpoint):
    """Create a RemoteServiceAdminEvent describing an import error."""
    return RemoteServiceAdminEvent(
        RemoteServiceAdminEvent.IMPORT_ERROR,
        bundle,
        importerid,
        rsid,
        None,
        None,
        exception,
        endpoint,
    )
Creates a RemoteServiceAdminEvent object from an import error
def _add_assert(self, **kwargs):
    """Record an assert step; when screenshot is None, only failures capture one."""
    screenshot = kwargs.get('screenshot')
    is_success = kwargs.get('success')
    screenshot = (not is_success) if screenshot is None else screenshot
    kwargs['screenshot'] = self._take_screenshot(screenshot=screenshot, name_prefix='assert')
    action = kwargs.pop('action', 'assert')
    self.add_step(action, **kwargs)
    if not is_success:
        message = kwargs.get('message')
        # stack()[2]: the frame of whoever called the public assert helper.
        frame, filename, line_number, function_name, lines, index = inspect.stack()[2]
        print('Assert [%s: %d] WARN: %s' % (filename, line_number, message))
        if not kwargs.get('safe', False):
            raise AssertionError(message)
If screenshot is None, a screenshot is taken only when the assertion fails.
def read(self, filehandle):
    """Read JSON from *filehandle* and convert it via the private __import hook."""
    return self.__import(json.load(filehandle, **self.kwargs))
Read JSON from `filehandle`.
def report_role(self, role):
    """Return the gathered report fields for *role*."""
    # Reset the per-role file list before gathering.
    self.yaml_files = []
    fields = {
        "state": "skipped",
        "total_files": self.gather_files(),
        "total_lines": self.gather_lines(),
        "total_facts": self.gather_facts(),
        "total_defaults": self.gather_defaults(),
        "facts": self.facts,
        "defaults": self.defaults,
        "meta": self.gather_meta(),
        "readme": self.gather_readme(),
        "dependencies": self.dependencies,
        "total_dependencies": len(self.dependencies)
    }
    return fields
Return the fields gathered.
def drain(self, p):
    """Read the named pipe *p*, feeding complete lines to self.consume."""
    self.logging.info('Started.')
    fd = os.open(p, os.O_RDWR | os.O_NONBLOCK)
    gevent_os.make_nonblocking(fd)
    while self.loop():
        try:
            lines = gevent_os.nb_read(fd, 4096).splitlines()
            if len(lines) == 0:
                # Nothing available yet; back off briefly.
                sleep(0.5)
            else:
                self.consume(lines)
        except OSError:
            pass
Reads the named pipe.
def put_cache_results(self, key, func_akw, set_cache_cb):
    """Run the wrapped function and cache its result when the callback approves."""
    args, kwargs = func_akw
    result = self.func(*args, **kwargs)
    if set_cache_cb(result):
        self[key] = result
    return result
Put function results into cache.
def system(self):
    """The system of units used by this instance ('NIST' for base 2, 'SI' for base 10)."""
    if self._base == 2:
        return "NIST"
    if self._base == 10:
        return "SI"
    raise ValueError("Instances mathematical base is an unsupported value: %s" % (
        str(self._base)))
The system of units used to measure an instance
def publish(func):
    """Publish the wrapped method's return value as a message from this endpoint.

    The payload dict returned by *func* has any 'self' entry dropped before
    being sent; the wrapper itself always returns None.
    """
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        payload = func(self, *args, **kwargs)
        payload.pop('self', None)
        self._publish(func.__name__, payload)
        return None
    # Marker so endpoint machinery can discover publishing methods.
    wrapper.is_publish = True
    return wrapper
publish the return value of this function as a message from this endpoint
def tag_audio(filename, tracklisting):
    """Tag the matching M4A or MP3 file; fall back to writing a text file.

    Returns True when the audio was tagged successfully, False otherwise.
    """
    tagged = (tag_audio_file(filename + '.m4a', tracklisting)
              or tag_audio_file(filename + '.mp3', tracklisting))
    if tagged:
        return True
    print("Cannot find or access any relevant M4A or MP3 audio file.")
    print("Trying to save a text file instead.")
    write_text(filename, tracklisting)
    return False
Return True if audio tagged successfully; handle tagging audio.
def _gather_active_forms(self):
    """Collect all the active forms of each Agent in the Statements."""
    for stmt in self.statements:
        if isinstance(stmt, ActiveForm):
            base_agent = self.agent_set.get_create_base_agent(stmt.agent)
            agent_to_add = stmt.agent
            if stmt.agent.activity:
                # Strip the activity condition on a copy so the stored form
                # does not itself carry an activity.
                new_agent = fast_deepcopy(stmt.agent)
                new_agent.activity = None
                agent_to_add = new_agent
            base_agent.add_activity_form(agent_to_add, stmt.is_active)
Collect all the active forms of each Agent in the Statements.
def Uptime():
    """Return the output of the ``uptime`` command (leading character stripped), or '' on failure."""
    try:
        raw = check_output(['uptime'], close_fds=True)
        return raw.decode('utf-8')[1:]
    except Exception as e:
        logger.error('Could not get current uptime ' + str(e))
        return ''
Get the current uptime information
def _expand_path(path): path = os.path.expandvars(path) path = os.path.expanduser(path) return path
Expand both environment variables and user home in the given path.
def close(self):
    """Close the underlying hdf5 file, flushing parent and file first."""
    # NOTE(review): the empty tuple () is used as the "unset" sentinel here.
    if self.parent != ():
        self.parent.flush()
        self.parent.close()
    if self.hdf5:
        self.hdf5.flush()
        self.hdf5.close()
    self.hdf5 = ()
Close the underlying hdf5 file
def delete(self):
    """Remove the current index from the remote database."""
    if not self._ddoc_id:
        raise CloudantArgumentError(125)
    if not self._name:
        raise CloudantArgumentError(126)
    ddoc_id = self._ddoc_id
    # Accept both bare ids and the '_design/<id>' form.
    if ddoc_id.startswith('_design/'):
        ddoc_id = ddoc_id[8:]
    url = '/'.join((self.index_url, ddoc_id, self._type, self._name))
    resp = self._r_session.delete(url)
    resp.raise_for_status()
Removes the current index from the remote database.
def build_reference_wcs(inputs, sciname='sci'):
    """Create the reference WCS spanning all *sciname* extensions of the inputs."""
    wcslist = []
    for img in inputs:
        nsci = countExtn(img)
        for num in range(nsci):
            extname = (sciname, num + 1)
            if sciname == 'sci':
                extwcs = wcsutil.HSTWCS(img, ext=extname)
            else:
                # Non-'sci' extensions go through the headerlet reader.
                extwcs = read_hlet_wcs(img, ext=extname)
            wcslist.append(extwcs)
    outwcs = utils.output_wcs(wcslist)
    return outwcs
Create the reference WCS based on all the inputs for a field
def connect(self):
    """Create a socket connection to the server using the best address found."""
    family, stype, proto, cname, sockaddr = self.best_connection_params(
        self.host, self.port)
    self.sock = socket.socket(family, stype)
    self.sock.settimeout(self.timeout)
    self.sock.connect(sockaddr)
Create connection to server
def current_branch(self):
    """Return the name of the branch HEAD currently points at."""
    ref = git(self.gitdir, self.gitwd, "symbolic-ref", "HEAD")
    return ref.replace('refs/heads/', '').strip()
Return the current branch name
def _get_item_from_search_response(self, response, type_): sections = sorted(response['sections'], key=lambda sect: sect['type'] == type_, reverse=True) for section in sections: hits = [hit for hit in section['hits'] if hit['type'] == type_] if hits: return hits[0]['result']
Returns either a Song or Artist result from search_genius_web
def _format_type(cls):
    """Format a type name for printing, module-qualifying non-builtin types."""
    if cls.__module__ == _BUILTIN_MODULE:
        return cls.__name__
    return '%s.%s' % (cls.__module__, cls.__name__)
Format a type name for printing.
def from_path(filename):
    """Create a proguard mapping view from a file path."""
    filename = to_bytes(filename)
    # A NUL would silently truncate the C string handed over the FFI boundary.
    if NULL_BYTE in filename:
        raise ValueError('null byte in path')
    return ProguardView._from_ptr(rustcall(
        _lib.lsm_proguard_mapping_from_path, filename + b'\x00'))
Creates a sourcemap view from a file path.
def _fft_convolve_gpu(data_g, h_g, res_g = None, plan = None, inplace = False, kernel_is_fft = False):
    """FFT convolve for GPU buffers (complex64); data and kernel shapes must match."""
    assert_bufs_type(np.complex64,data_g,h_g)
    if data_g.shape != h_g.shape:
        raise ValueError("data and kernel must have same size! %s vs %s "%(str(data_g.shape),str(h_g.shape)))
    if plan is None:
        plan = fft_plan(data_g.shape)
    if inplace:
        res_g = data_g
    else:
        if res_g is None:
            res_g = OCLArray.empty(data_g.shape,data_g.dtype)
        res_g.copy_buffer(data_g)
    if not kernel_is_fft:
        # Work on a copy so the caller's kernel buffer stays untouched.
        kern_g = OCLArray.empty(h_g.shape,h_g.dtype)
        kern_g.copy_buffer(h_g)
        fft(kern_g,inplace=True, plan = plan)
    else:
        kern_g = h_g
    # Convolution theorem: multiply in frequency space, then invert.
    fft(res_g,inplace=True, plan = plan)
    _complex_multiply_kernel(res_g,kern_g)
    fft(res_g,inplace = True, inverse = True, plan = plan)
    return res_g
fft convolve for gpu buffer
def getkey(stype, site_id=None, key=None):
    'Returns the cache key depending on its type.'
    base = '{0}.feedjack'.format(settings.CACHE_MIDDLEWARE_KEY_PREFIX)
    if stype == T_HOST:
        return '{0}.hostcache'.format(base)
    elif stype == T_ITEM:
        return '{0}.{1}.item.{2}'.format(base, site_id, str2md5(key))
    elif stype == T_META:
        return '{0}.{1}.meta'.format(base, site_id)
    elif stype == T_INTERVAL:
        return '{0}.interval.{1}'.format(base, str2md5(key))
    # NOTE(review): an unknown stype falls through and returns None.
Returns the cache key depending on its type.
def _rewrite_q(self, q):
    """Rewrite field names inside a Q call, recursing into child nodes."""
    # Leaf condition: a (lookup, value) pair - rewrite the lookup key.
    if isinstance(q, tuple) and len(q) == 2:
        return rewrite_lookup_key(self.model, q[0]), q[1]
    if isinstance(q, Node):
        q.children = list(map(self._rewrite_q, q.children))
    return q
Rewrite field names inside Q call.
def GET_save_timegrid(self) -> None:
    """Save (a deep copy of) the current simulation period under this id."""
    state.timegrids[self._id] = copy.deepcopy(hydpy.pub.timegrids.sim)
Save the current simulation period.
def _set_init_vars_and_dims(self, data_vars, coords, compat):
    """Set the initial Dataset variables, coordinate names and dimensions."""
    # A name may not appear as both a data variable and a coordinate.
    both_data_and_coords = [k for k in data_vars if k in coords]
    if both_data_and_coords:
        raise ValueError('variables %r are found in both data_vars and '
                         'coords' % both_data_and_coords)
    if isinstance(coords, Dataset):
        coords = coords.variables
    variables, coord_names, dims = merge_data_and_coords(
        data_vars, coords, compat=compat)
    self._variables = variables
    self._coord_names = coord_names
    self._dims = dims
Set the initial value of Dataset variables and dimensions
def getPatches(self):
    """Return patches as a dict; pass the raw store through when validation is off."""
    if not self.mustValidate:
        return self.getStore()
    patches = dict(self.patchStore)
    for name, store in self.subStores.items():
        patches[name] = store.getPatches()
    return patches
get patches as a dictionary
def _keep_assembled_chrom(bam_file, genome, config):
    """Remove unassembled contigs (names containing '_') from the BAM file."""
    fai = "%s.fai" % genome
    chrom = []
    with open(fai) as inh:
        for line in inh:
            c = line.split("\t")[0]
            # Assembled chromosomes have plain names; contigs carry '_'.
            if c.find("_") < 0:
                chrom.append(c)
    chroms = " ".join(chrom)
    out_file = utils.append_stem(bam_file, '_chrom')
    samtools = config_utils.get_program("samtools", config)
    if not utils.file_exists(out_file):
        with file_transaction(out_file) as tx_out:
            cmd = "{samtools} view -b {bam_file} {chroms} > {tx_out}"
            do.run(cmd.format(**locals()), "Remove contigs from %s" % bam_file)
    bam.index(out_file, config)
    return out_file
Remove contigs from the BAM file
def _preprocess(self, data, train): if train: inputs, labels = data self.data_mean = inputs.mean(axis=0) self.data_std = inputs.std(axis=0) self.labels_mean = labels.mean(axis=0) self.labels_std = labels.std(axis=0) return ((inputs-self.data_mean)/self.data_std, (labels-self.labels_mean)/self.labels_std) else: return (data-self.data_mean)/self.data_std
Zero-mean, unit-variance normalization by default
def to_dict(self):
    """Convert the view config and its tracks to a JSON-serializable dict."""
    # Round-trip through JSON to deep-copy the stored config.
    viewconf = json.loads(json.dumps(self.viewconf))
    for track in self.tracks:
        if track.position is None:
            raise ValueError(
                "Track has no position: {}".format(track.viewconf["type"])
            )
        viewconf["tracks"][track.position] += [track.to_dict()]
    return viewconf
Convert the existing track to a JSON representation.
def decode_pc11_message(raw_string):
    """Decode a PC11 message (usually a DX spot) into a dict of fields."""
    data = {}
    spot = raw_string.split("^")
    data[const.FREQUENCY] = float(spot[1])
    data[const.DX] = spot[2]
    # spot[4] carries the HHMM time with one trailing character stripped.
    data[const.TIME] = datetime.fromtimestamp(mktime(strptime(spot[3]+" "+spot[4][:-1], "%d-%b-%Y %H%M")))
    data[const.COMMENT] = spot[5]
    data[const.SPOTTER] = spot[6]
    data["node"] = spot[7]
    data["raw_spot"] = raw_string
    return data
Decode PC11 message, which usually contains DX Spots
def cast_bytes(s, encoding='utf8', errors='strict'):
    """Cast str or bytes to bytes, encoding str with *encoding*."""
    if isinstance(s, str):
        return s.encode(encoding, errors)
    if isinstance(s, bytes):
        return s
    raise TypeError("Expected unicode or bytes, got %r" % s)
cast str or bytes to bytes
def brightness_to_hex(self, level):
    """Convert a 0-100 brightness percentage into a two-digit hex byte string."""
    scaled = int(int(level) * 255 / 100)
    hex_level = '%02X' % scaled
    self.logger.debug("brightness_to_hex: %s to %s", level, str(hex_level))
    return str(hex_level)
Convert numeric brightness percentage into hex for insteon
def add_rule_to_model(model, rule, annotations=None):
    """Add a Rule to a PySB model, skipping duplicates by component name."""
    try:
        model.add_component(rule)
        if annotations:
            model.annotations += annotations
    except ComponentDuplicateNameError:
        # Duplicate rule names are expected; skip quietly at debug level.
        msg = "Rule %s already in model! Skipping." % rule.name
        logger.debug(msg)
Add a Rule to a PySB model and handle duplicate component errors.
def insert(self, index, value):
    """Validate *value* against the field spec, then insert it into the list."""
    self.__field.validate_element(value)
    return list.insert(self, index, value)
Validate item insertion to list.
def error_info():
    """Return error messages for this driver plus driver-independent ones."""
    worker = global_worker
    worker.check_connected()
    return (global_state.error_messages(driver_id=worker.task_driver_id) +
            global_state.error_messages(driver_id=DriverID.nil()))
Return information about failed tasks.
def defaultFile(self):
    """Produce a reasonable default file name for the current frame."""
    filename = self.curframe.f_code.co_filename
    # Interactive code has no real file; fall back to the main script.
    if filename == '<string>' and self.mainpyfile:
        return self.mainpyfile
    return filename
Produce a reasonable default.
def _createtoken(self, type_, value, flags=None):
    """Create a token carrying the two most recent source positions."""
    assert len(self._positions) >= 2, (type_, value)
    end = self._positions.pop()
    start = self._positions.pop()
    return token(type_, value, [start, end], flags)
create a token with position information
def index_exists(self):
    """Check whether the search index exists; log a warning when it does not."""
    # DB-Method header tunnels a GET through the POST-only exchange endpoint.
    headers = {'Content-Type': 'application/json', 'DB-Method': 'GET'}
    url = '/v2/exchange/db/{}/{}/_search'.format(self.domain, self.data_type)
    r = self.tcex.session.post(url, headers=headers)
    if not r.ok:
        self.tcex.log.warning('The provided index was not found ({}).'.format(r.text))
        return False
    return True
Check to see if index exists.
def coerce(self, value):
    """Cast a string into this key's type (Style, list, dict or scalar)."""
    if self.type == Style:
        return value
    elif self.type == list:
        # Comma-separated items, each stripped and cast to the subtype.
        return self.type(
            map(self.subtype, map(lambda x: x.strip(), value.split(',')))
        )
    elif self.type == dict:
        rv = {}
        for pair in value.split(','):
            key, val = pair.split(':')
            key = key.strip()
            val = val.strip()
            try:
                rv[key] = self.subtype(val)
            except Exception:
                # Fall back to the raw string when the subtype cast fails.
                rv[key] = val
        return rv
    return self.type(value)
Cast a string into this key type
def _load_dataset(dsid, ds_info, file_handlers, dim='y'):
    """Load a dataset piecewise from *file_handlers*, concatenated along *dim*."""
    slice_list = []
    failure = True
    for fh in file_handlers:
        try:
            projectable = fh.get_dataset(dsid, ds_info)
            if projectable is not None:
                slice_list.append(projectable)
                failure = False
        except KeyError:
            logger.warning("Failed to load {} from {}".format(dsid, fh),
                           exc_info=True)
    if failure:
        raise KeyError(
            "Could not load {} from any provided files".format(dsid))
    if dim not in slice_list[0].dims:
        # Not segmented along *dim*: nothing to concatenate.
        return slice_list[0]
    res = xr.concat(slice_list, dim=dim)
    combined_info = file_handlers[0].combine_info(
        [p.attrs for p in slice_list])
    res.attrs = combined_info
    return res
Load only a piece of the dataset.
def process_task(self):
    """Send the foreign-device registration request to the BBMD."""
    request = RegisterForeignDevice(self.bbmdTimeToLive)
    request.pduDestination = self.bbmdAddress
    self.request(request)
Called when the registration request should be sent to the BBMD.
def _parse_value(self):
    """Parse the value of a key-value pair (nested key, object list or literal list)."""
    indent = 0
    while self._cur_token['type'] is TT.ws:
        indent = self._skip_whitespace()
        self._skip_newlines()
    if self._cur_token['type'] is TT.id:
        return self._parse_key(indent)
    elif self._cur_token['type'] is TT.hyphen:
        self._increment()
        if self._cur_token['type'] is TT.hyphen:
            # A double hyphen marks an empty object list.
            self._increment()
            return []
        else:
            return self._parse_object_list()
    else:
        return self._parse_literal_list(indent)
Parse the value of a key-value pair.
def _axes(self):
    """Render axes with the _force_vertical flag set (horizontal graph variant)."""
    self.view._force_vertical = True
    super(HorizontalGraph, self)._axes()
    self.view._force_vertical = False
Set the _force_vertical flag when rendering axes
def check(self):
    """Validate that all attributes are set with the right dtype, then accept."""
    for name, valItem, dtype in self.values:
        val = valItem.text()
        if dtype:
            try:
                val = dtype(val)
            except:
                # Show a modal warning; the raw string value is kept.
                msgBox = QtWidgets.QMessageBox()
                msgBox.setText(
                    'attribute %s has not the right dtype(%s)' % (name, str(dtype)))
                msgBox.exec_()
        self.args[name] = val
    self.accept()
Check whether all attributes are set and have the right dtype.
def write_stats (self):
    """Write check statistics (content types and URL lengths) as HTML."""
    self.writeln(u'<br/><i>%s</i><br/>' % _("Statistics"))
    if self.stats.number > 0:
        self.writeln(_(
            "Content types: %(image)d image, %(text)d text, %(video)d video, "
            "%(audio)d audio, %(application)d application, %(mail)d mail"
            " and %(other)d other.") % self.stats.link_types)
        self.writeln(u"<br/>")
        self.writeln(_("URL lengths: min=%(min)d, max=%(max)d, avg=%(avg)d.") %
                     dict(min=self.stats.min_url_length,
                          max=self.stats.max_url_length,
                          avg=self.stats.avg_url_length))
    else:
        self.writeln(_("No statistics available since no URLs were checked."))
    self.writeln(u"<br/>")
Write check statistic infos.
def _parse(yr, mo, day):
    """Basic parser for the Kp file's two-digit-year date format."""
    # Two-digit years are assumed to be 20xx.
    yr = '20'+yr
    yr = int(yr)
    mo = int(mo)
    day = int(day)
    return pds.datetime(yr, mo, day)
Basic parser to deal with date format of the Kp file.
def content(self):
    """Content of the panel when it's displayed in full screen."""
    toolbars = OrderedDict()
    for id, toolbar in DebugToolbar._store.items():
        content = {}
        for panel in toolbar.panels:
            panel_id = None
            nav_title = ''
            nav_subtitle = ''
            try:
                panel_id = panel.panel_id
                nav_title = panel.nav_title
                # nav_subtitle may be a plain attribute or a callable.
                nav_subtitle = panel.nav_subtitle() if isinstance(
                    panel.nav_subtitle, Callable) else panel.nav_subtitle
            except Exception:
                logger.debug('Error parsing panel info:', exc_info=True)
            if panel_id is not None:
                content.update({
                    panel_id: {
                        'panel_id': panel_id,
                        'nav_title': nav_title,
                        'nav_subtitle': nav_subtitle,
                    }
                })
        toolbars[id] = {
            'toolbar': toolbar,
            'content': content
        }
    # Newest toolbars first.
    return get_template().render(Context({
        'toolbars': OrderedDict(reversed(list(toolbars.items()))),
        'trunc_length': CONFIG.get('RH_POST_TRUNC_LENGTH', 0)
    }))
Content of the panel when it's displayed in full screen.
def scan_results(self):
    """Return the scan results, logging each found BSS at INFO level."""
    bsses = self._wifi_ctrl.scan_results(self._raw_obj)
    if self._logger.isEnabledFor(logging.INFO):
        for bss in bsses:
            self._logger.info("Find bss:")
            self._logger.info("\tbssid: %s", bss.bssid)
            self._logger.info("\tssid: %s", bss.ssid)
            self._logger.info("\tfreq: %d", bss.freq)
            self._logger.info("\tauth: %s", bss.auth)
            self._logger.info("\takm: %s", bss.akm)
            self._logger.info("\tsignal: %d", bss.signal)
    return bsses
Return the scan result.
def genome(self):
    """Build a pybedtools-style genome dict {ref: (0, length)} from the BAM header."""
    bam = self.adapter.fileobj
    return {ref: (0, length) for ref, length in zip(bam.references, bam.lengths)}
"genome" dictionary ready for pybedtools, based on the BAM header.
def price_humanized(value, inst, currency=None):
    """Return a humanized price, defaulting the currency to CZK."""
    return (natural_number_with_currency(value, ugettext('CZK') if currency is None else currency)
            if value is not None else ugettext('(None)'))
Return a humanized price
async def on_raw_notice(self, message):
    """Modify NOTICE handling to redirect CTCP replies to their handlers."""
    nick, metadata = self._parse_user(message.source)
    target, msg = message.params
    if is_ctcp(msg):
        self._sync_user(nick, metadata)
        ctcp_kind, response = parse_ctcp(msg)
        # BUG FIX: the original passed an undefined name `user` to the
        # handlers (NameError at runtime); the parsed sender is `nick`.
        attr = 'on_ctcp_' + pydle.protocol.identifierify(ctcp_kind) + '_reply'
        if hasattr(self, attr):
            await getattr(self, attr)(nick, target, response)
        await self.on_ctcp_reply(nick, target, ctcp_kind, response)
    else:
        await super().on_raw_notice(message)
Modify NOTICE to redirect CTCP messages.
def clonerepo(barerepo, userrepo):
    """Clone a bare base repo into a user working repo.

    Returns the ActiveGit handle for the clone (the original created it and
    silently discarded it), so callers can use the repo without reopening it.
    """
    git.clone(barerepo, userrepo)
    return activegit.ActiveGit(userrepo)
Clone a bare base repo to a user
def update_from_pypi(self):
    """Refresh licence/version/support metadata from PyPI, save and return self."""
    package = pypi.Package(self.package_name)
    self.licence = package.licence()
    # Version comparisons only make sense when the current version parses.
    if self.is_parseable:
        self.latest_version = package.latest_version()
        self.next_version = package.next_version(self.current_version)
        self.diff_status = pypi.version_diff(self.current_version, self.latest_version)
    self.python_support = package.python_support()
    self.django_support = package.django_support()
    self.supports_py3 = package.supports_py3()
    self.checked_pypi_at = tz_now()
    self.save()
    return self
Call get_latest_version and then save the object.
def _shouldOwn(self, param): if not (self.uid == param.parent and self.hasParam(param.name)): raise ValueError("Param %r does not belong to %r." % (param, self))
Validates that the input param belongs to this Params instance.
def nr_genes(self):
    """Return the number of genes, falling back to gene symbols when unset."""
    return len(self['genes'] or self['gene_symbols'])
Return the number of genes
def _reindex_multi(self, axes, copy, fill_value):
    """Reindex both axes at once; we are guaranteed non-Nones in *axes*."""
    new_index, row_indexer = self.index.reindex(axes['index'])
    new_columns, col_indexer = self.columns.reindex(axes['columns'])
    if row_indexer is not None and col_indexer is not None:
        # Both axes change: a single 2D take is cheaper than two passes.
        indexer = row_indexer, col_indexer
        new_values = algorithms.take_2d_multi(self.values, indexer,
                                              fill_value=fill_value)
        return self._constructor(new_values, index=new_index,
                                 columns=new_columns)
    else:
        return self._reindex_with_indexers({0: [new_index, row_indexer],
                                            1: [new_columns, col_indexer]},
                                           copy=copy,
                                           fill_value=fill_value)
We are guaranteed non-Nones in the axes.
def create_description(self, complib=None, complevel=None, fletcher32=False, expectedrows=None):
    """Create the PyTables description of the table from the axes and values."""
    if expectedrows is None:
        # Presumably a tuning floor for PyTables chunking - TODO confirm.
        expectedrows = max(self.nrows_expected, 10000)
    d = dict(name='table', expectedrows=expectedrows)
    d['description'] = {a.cname: a.typ for a in self.axes}
    if complib:
        if complevel is None:
            complevel = self._complevel or 9
        filters = _tables().Filters(
            complevel=complevel, complib=complib,
            fletcher32=fletcher32 or self._fletcher32)
        d['filters'] = filters
    elif self._filters is not None:
        d['filters'] = self._filters
    return d
create the description of the table from the axes & values
def reload(self):
    """Reload this app profile's metadata from the instance admin API."""
    app_profile_pb = self.instance_admin_client.get_app_profile(self.name)
    self._update_from_pb(app_profile_pb)
Reload the metadata for this app profile.
def connect(self):
    """Connect to the image host and read meta information (type, ext, length)."""
    self.urlobj = getImageObject(self.url, self.referrer, self.session)
    content_type = unquote(self.urlobj.headers.get('content-type', 'application/octet-stream'))
    # Drop any ';charset=...' parameters.
    content_type = content_type.split(';', 1)[0]
    if '/' in content_type:
        maintype, subtype = content_type.split('/', 1)
    else:
        maintype = content_type
        subtype = None
    if maintype != 'image' and content_type not in ('application/octet-stream', 'application/x-shockwave-flash'):
        raise IOError('content type %r is not an image at %s' % (content_type, self.url))
    if maintype == 'image':
        self.ext = '.' + subtype.replace('jpeg', 'jpg')
    self.contentLength = int(self.urlobj.headers.get('content-length', 0))
    out.debug(u'... filename = %r, ext = %r, contentLength = %d' % (self.filename, self.ext, self.contentLength))
Connect to host and get meta information.