code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def update(self, collection_name, instance):
    """Find the SiteStatistics record and update its DB representation.

    Returns the record's db_id (may be None if not yet assigned).
    """
    assert isinstance(instance, SiteStatistics)
    if instance.db_id:
        # existing record: address it by its stored ObjectId
        query = {'_id': ObjectId(instance.db_id)}
    else:
        # new record: match on the (domain_name, timeperiod) natural key
        query = {DOMAIN_NAME: instance.domain_name, TIMEPERIOD: instance.timeperiod}
    self.ds.update(collection_name, query, instance)
    return instance.db_id
Method that finds a Site Statistics record and updates its DB representation.
def _get_object(self, name):
    """Helper to retrieve the requested object from the container.

    Returns None when the object is missing, using whichever backend
    (pyrax or swiftclient) is configured; otherwise lets errors propagate.
    """
    if self.use_pyrax:
        try:
            return self.container.get_object(name)
        except pyrax.exceptions.NoSuchObject:
            return None
    elif swiftclient:
        try:
            return self.container.get_object(name)
        except swiftclient.exceptions.ClientException:
            return None
    else:
        # no known backend module: no error mapping possible
        return self.container.get_object(name)
Helper function to retrieve the requested Object.
def run(self):
    """Start the computation: fetch tasks, compute each, report results."""
    (task_id, tasks) = self.server.get_task()
    self.task_store.from_dict(tasks)
    for (index, task) in self.task_store:
        result = self.compute(index, task)
        self.results.append(result)
    # hand the accumulated results back under the original task id
    self.server.task_done((task_id, self.results))
This function needs to be called to start the computation.
def build_extension(self, ext):
    """Build clrmagic.dll from clrmagic.cs using csc (Windows) or mcs (Mono)."""
    if sys.platform == "win32":
        # path to the .NET 4 C# compiler
        _clr_compiler = "C:\\Windows\\Microsoft.NET\\Framework\\v4.0.30319\\csc.exe"
    else:
        _clr_compiler = "mcs"
    cmd = [
        _clr_compiler,
        "/target:library",
        "clrmagic.cs"
    ]
    # shell=True because the command is joined into a single string
    check_call(" ".join(cmd), shell=True)
build clrmagic.dll using csc or mcs
def _debug_info(self): self._msg('DEBUG') self._msg2('WorkDir: {0}'.format(self._curdir)) self._msg2('Cookies: {0}'.format(self._session.cookies)) self._msg2('Headers: {0}'.format(self._session.headers)) self._msg2('Configs: {0}'.format(self._config)) self._msg2('Customs: {0}'.format(self._custom)) self._msg2('Account: {0}'.format(self._account))
Show a list of recent variable info.
def delete_fabric_fw(self, tenant_id, fw_dict, is_fw_virt, result):
    """Top-level routine to unconfigure the fabric firewall.

    Returns the internal routine's result, or False on any exception.
    """
    try:
        # serialize fabric operations across threads
        with self.mutex_lock:
            ret = self.delete_fabric_fw_internal(tenant_id, fw_dict,
                                                 is_fw_virt, result)
    except Exception as exc:
        LOG.error("Exception raised in delete fabric %s", str(exc))
        return False
    return ret
Top level routine to unconfigure the fabric.
def _convert_errors(func):
    """Decorator converting thrown ``good`` errors to Voluptuous format."""
    # rebuild a Voluptuous Invalid from a good.Invalid, appending the
    # expected value unless it is the '-none-' placeholder
    cast_Invalid = lambda e: Invalid(
        u"{message}, expected {expected}".format(
            message=e.message, expected=e.expected)
        if e.expected != u'-none-' else e.message,
        e.path, six.text_type(e))

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except good.SchemaError as e:
            raise SchemaError(six.text_type(e))
        except good.MultipleInvalid as ee:
            raise MultipleInvalid([cast_Invalid(e) for e in ee])
        except good.Invalid as e:
            # a single Invalid is normalized into a MultipleInvalid
            raise MultipleInvalid([cast_Invalid(e)])
    return wrapper
Decorator to convert throws errors to Voluptuous format.
def cmpr(self, val1, name, val2):
    """Compare two values using the named type-specific comparator.

    Raises NoSuchCmpr when this type has no comparator called *name*.
    """
    ctor = self.getCmprCtor(name)
    if ctor is None:
        raise s_exc.NoSuchCmpr(cmpr=name, name=self.name)
    # compare the normalized forms, not the raw input values
    norm1 = self.norm(val1)[0]
    norm2 = self.norm(val2)[0]
    return ctor(norm2)(norm1)
Compare the two values using the given type specific comparator.
def hide_shortcuts(self, menu):
    """Hide action shortcuts in the named menu by clearing QAction shortcuts."""
    for element in getattr(self, menu + '_menu_actions'):
        if element and isinstance(element, QAction):
            # only clear shortcuts that were previously shown
            if element._shown_shortcut is not None:
                element.setShortcut(QKeySequence())
Hide action shortcuts in menu
def prepare_jochem(ctx, jochem, output, csoutput):
    """Process and filter a jochem file into dictionary name lists.

    Case-insensitive terms (@match=ci) go to *output*; the rest to *csoutput*.
    """
    click.echo('chemdataextractor.dict.prepare_jochem')
    for i, line in enumerate(jochem):
        print('JC%s' % i)
        if line.startswith('TM '):
            if line.endswith(' @match=ci\n'):
                # strip the 'TM ' prefix and the 11-char ' @match=ci\n' suffix
                for tokens in _make_tokens(line[3:-11]):
                    output.write(' '.join(tokens))
                    output.write('\n')
            else:
                # strip the 'TM ' prefix and the trailing newline
                for tokens in _make_tokens(line[3:-1]):
                    csoutput.write(' '.join(tokens))
                    csoutput.write('\n')
Process and filter jochem file to produce list of names for dictionary.
def from_hex(cls, hexval: str) -> 'ColorCode':
    """Build a ColorCode instance from a hex string."""
    instance = cls()
    instance._init_hex(hexval)
    return instance
Return a ColorCode from a hex string.
def _get_limit_and_offset(page, page_size): if page < 1: raise ValueError('page must be >= 1') limit = page_size offset = (page - 1) * page_size return limit, offset
Returns a limit and a 0-indexed offset, based on page and page_size, for a MySQL query.
def output_row(self, name):
    """Print one scoring row: name, gold count, precision, recall, f-score."""
    print("%10s %4d %0.3f %0.3f %0.3f"%(
        name, self.gold, self.precision(), self.recall(), self.fscore()))
Output a scoring row.
def add_dictionary(self, dictionary):
    """Supply a word-id dictionary to allow similarity queries."""
    if self.word_vectors is None:
        raise Exception('Model must be fit before adding a dictionary')
    if len(dictionary) > self.word_vectors.shape[0]:
        raise Exception('Dictionary length must be smaller '
                        'or equal to the number of word vectors')
    self.dictionary = dictionary
    # Python 2 dicts expose iteritems(); fall back to items() elsewhere.
    get_items = getattr(dictionary, 'iteritems', dictionary.items)
    self.inverse_dictionary = {word_id: word for word, word_id in get_items()}
Supply a word-id dictionary to allow similarity queries.
def _is_valid_options_weights_list(value): return ((isinstance(value, list)) and len(value) > 1 and (all(isinstance(opt, tuple) and len(opt) == 2 and isinstance(opt[1], (int, float)) for opt in value)))
Check whether ``values`` is a valid argument for ``weighted_choice``.
def factory(fileobject, jfs, parentpath):
    """Class method to get the correct file class instantiated.

    Completed files become JFSFile; otherwise the latest revision's state
    selects an incomplete or corrupt wrapper.
    """
    if hasattr(fileobject, 'currentRevision'):
        # has a completed revision -> plain file
        return JFSFile(fileobject, jfs, parentpath)
    elif str(fileobject.latestRevision.state) == ProtoFile.STATE_INCOMPLETE:
        return JFSIncompleteFile(fileobject, jfs, parentpath)
    elif str(fileobject.latestRevision.state) == ProtoFile.STATE_CORRUPT:
        return JFSCorruptFile(fileobject, jfs, parentpath)
    else:
        raise NotImplementedError('No JFS*File support for state %r. Please file a bug!' % fileobject.latestRevision.state)
Class method to get the correct file class instantiated.
def _newRemoteException(ErrorType):
    """Create a new RemoteException type wrapping the given error type.

    The generated class subclasses *ErrorType* so callers can catch it
    with their existing except clauses.
    """
    RemoteErrorBaseType = _RemoteExceptionMeta('', (ErrorType,), {})

    class RemoteException(RemoteErrorBaseType):
        # the original (remote-side) exception class
        BaseExceptionType = ErrorType

        def __init__(self, thrownError, tracebackString):
            self.thrownError = thrownError
            self.tracebackString = tracebackString
            RemoteErrorBaseType.__init__(self, *thrownError.args)

        loadError = staticmethod(_loadError)

        def __str__(self):
            return '\n%s\n%s' % (self.tracebackString, self.thrownError)

        def __reduce__(self):
            # make the wrapper picklable: rebuild via loadError
            args = (ErrorType, self.thrownError, self.tracebackString)
            return self.loadError, args

    RemoteException.__name__ = 'Remote' + ErrorType.__name__
    return RemoteException
create a new RemoteExceptionType from a given errortype
def copy(self):
    """Return a copy of this header, with lines and samples copied."""
    copied_lines = [entry.copy() for entry in self.lines]
    return Header(copied_lines, self.samples.copy())
Return a copy of this header
def unauthenticated_view(self):
    """Flash a sign-in message and redirect to USER_UNAUTHENTICATED_ENDPOINT."""
    url = request.url
    flash(_("You must be signed in to access '%(url)s'.", url=url), 'error')
    # quote the next= target so the original URL survives the round-trip
    safe_next_url = self.make_safe_url(url)
    return redirect(self._endpoint_url(self.USER_UNAUTHENTICATED_ENDPOINT)+'?next='+quote(safe_next_url))
Prepare a Flash message and redirect to USER_UNAUTHENTICATED_ENDPOINT
def distances(self, points):
    """Angular distances (radians) from this point to each point on the sphere."""
    result = []
    for other in points:
        result.append(math.acos(self._pos3d.dot(other.vector)))
    return result
Distance to other points on the sphere
def visit_if(self, node):
    """Return an astroid.If node as a source string."""
    ifs = ["if %s:\n%s" % (node.test.accept(self), self._stmt_list(node.body))]
    if node.has_elif_block():
        # the orelse is itself a single If: render it as 'elif'
        ifs.append("el%s" % self._stmt_list(node.orelse, indent=False))
    elif node.orelse:
        ifs.append("else:\n%s" % self._stmt_list(node.orelse))
    return "\n".join(ifs)
return an astroid.If node as string
def _transliterate (self, text, outFormat):
    """Transliterate the text to Unicode, one (possibly multi-char) symbol at a time."""
    result = []
    text = self._preprocess(text)
    i = 0
    while i < len(text):
        if text[i].isspace():
            # whitespace passes through unchanged
            result.append(text[i])
            i = i+1
        else:
            # symbols may span several source characters
            chr = self._getNextChar(text, i)
            try:
                result.append(self[chr].unichr)
            except KeyError:
                # unknown symbol: keep a marked placeholder
                result.append(_unrecognised(chr))
            i = i + len(chr)
    return result
Transliterate the text to Unicode.
def register_pubkey(self):
    """Rebuild the server's DH public key from the wire fields.

    XXX Check that the pubkey received is in the group.
    """
    # reconstruct DH parameters (p, g) and public value y from byte strings
    p = pkcs_os2ip(self.dh_p)
    g = pkcs_os2ip(self.dh_g)
    pn = dh.DHParameterNumbers(p, g)
    y = pkcs_os2ip(self.dh_Ys)
    public_numbers = dh.DHPublicNumbers(y, pn)
    s = self.tls_session
    s.server_kx_pubkey = public_numbers.public_key(default_backend())
    if not s.client_kx_ffdh_params:
        s.client_kx_ffdh_params = pn.parameters(default_backend())
XXX Check that the pubkey received is in the group.
def wif(self, is_compressed=None):
    """Return the WIF representation of this key, or None if unavailable."""
    secret_exponent = self.secret_exponent()
    if secret_exponent is None:
        return None
    if is_compressed is None:
        is_compressed = self.is_compressed()
    blob = to_bytes_32(secret_exponent)
    if is_compressed:
        # compressed keys carry a trailing 0x01 marker byte
        blob += b'\01'
    return self._network.wif_for_blob(blob)
Return the WIF representation of this key, if available.
def ensure_output(self):
    """Ensure the output directory exists.

    Fix: uses ``exist_ok=True`` instead of exists()+makedirs(), avoiding the
    check-then-create race where another process creates the directory
    between the two calls.
    """
    os.makedirs(self.output, exist_ok=True)
Ensure output's directory exists
def _reap_child(self):
    """Reap the child process during disconnection.

    Skips detached or already-reaped children; sends SIGTERM if the
    child is still alive and we are not profiling.
    """
    if self.detached and self.child_is_immediate_subprocess:
        LOG.debug('%r: immediate child is detached, won\'t reap it', self)
        return
    if self.profiling:
        # leave the child alive so the profiler can flush its data
        LOG.info('%r: wont kill child because profiling=True', self)
        return
    if self._reaped:
        # already reaped by a previous call
        return
    try:
        pid, status = os.waitpid(self.pid, os.WNOHANG)
    except OSError:
        e = sys.exc_info()[1]
        if e.args[0] == errno.ECHILD:
            # someone else (e.g. a SIGCHLD handler) reaped it first
            LOG.warn('%r: waitpid(%r) produced ECHILD', self, self.pid)
            return
        raise
    self._reaped = True
    if pid:
        LOG.debug('%r: PID %d %s', self, pid, wstatus_to_str(status))
        return
    if not self._router.profiling:
        # child still running: ask it to terminate
        LOG.debug('%r: child process still alive, sending SIGTERM', self)
        try:
            os.kill(self.pid, signal.SIGTERM)
        except OSError:
            e = sys.exc_info()[1]
            if e.args[0] != errno.EPERM:
                raise
Reap the child process during disconnection.
def __search_iterable(self, obj, item, parent="root", parents_ids=frozenset({})):
    """Search iterables (except dicts, sets and strings) for *item*."""
    for i, thing in enumerate(obj):
        new_parent = "%s[%s]" % (parent, i)
        if self.__skip_this(thing, parent=new_parent):
            continue
        # lowercase string elements when comparing case-insensitively
        if self.case_sensitive or not isinstance(thing, strings):
            thing_cased = thing
        else:
            thing_cased = thing.lower()
        if thing_cased == item:
            self.__report(
                report_key='matched_values', key=new_parent, value=thing)
        else:
            # recurse, guarding against reference cycles via object ids
            item_id = id(thing)
            if parents_ids and item_id in parents_ids:
                continue
            parents_ids_added = add_to_frozen_set(parents_ids, item_id)
            self.__search(thing, item, "%s[%s]" % (parent, i), parents_ids_added)
Search iterables except dictionaries, sets and strings.
def _logger_api(self):
    """Add the API logging handler (DEBUG level) to self.log."""
    from .tcex_logger import TcExLogHandler, TcExLogFormatter
    api = TcExLogHandler(self.session)
    api.set_name('api')
    api.setLevel(logging.DEBUG)
    api.setFormatter(TcExLogFormatter())
    self.log.addHandler(api)
Add API logging handler.
def politeShutdown(self):
    """Stop inventory on all connected readers; fires when all have stopped."""
    pending = [proto.stopPolitely(disconnect=True) for proto in self.protocols]
    return defer.DeferredList(pending)
Stop inventory on all connected readers.
def cli(env, identifier, out_file):
    """Print an SSH key to the screen, optionally writing it to a file."""
    mgr = SoftLayer.SshKeyManager(env.client)
    key_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'SshKey')
    key = mgr.get_key(key_id)
    if out_file:
        # dump the public key material to the requested path
        with open(path.expanduser(out_file), 'w') as pub_file:
            pub_file.write(key['key'])
    table = formatting.KeyValueTable(['name', 'value'])
    table.add_row(['id', key['id']])
    table.add_row(['label', key.get('label')])
    table.add_row(['notes', key.get('notes', '-')])
    env.fout(table)
Prints out an SSH key to the screen.
def _get_representation_doc(self): if not self.representation: return 'N/A' fields = {} for name, field in self.representation.fields.items(): fields[name] = self._get_field_doc(field) return fields
Return documentation for the representation of the resource.
def _is_detach_necessary(cls):
    """Check whether detaching the process is even necessary."""
    if os.getppid() == 1:
        # parent is init: already detached
        return False
    if cls._is_socket(sys.stdin):
        # stdin is a socket — presumably launched by a daemon; no detach needed
        return False
    return True
Check if detaching the process is even necessary.
def QueryValueEx(key, value_name):
    """Call the Windows RegQueryValueExW API in a Unicode-safe way.

    Grows the buffer while the call reports ERROR_MORE_DATA, capped at
    10 MiB; returns (converted value, data type).
    """
    regqueryvalueex = advapi32["RegQueryValueExW"]
    regqueryvalueex.restype = ctypes.c_long
    regqueryvalueex.argtypes = [
        ctypes.c_void_p, ctypes.c_wchar_p, LPDWORD, LPDWORD, LPBYTE, LPDWORD
    ]
    size = 256
    data_type = ctypes.wintypes.DWORD()
    while True:
        tmp_size = ctypes.wintypes.DWORD(size)
        buf = ctypes.create_string_buffer(size)
        rc = regqueryvalueex(key.handle, value_name, LPDWORD(),
                             ctypes.byref(data_type),
                             ctypes.cast(buf, LPBYTE), ctypes.byref(tmp_size))
        if rc != ERROR_MORE_DATA:
            break
        # buffer too small: double it, refusing to grow past 10 MiB
        if size > 10 * 1024 * 1024:
            raise OSError("Value too big to be read by GRR.")
        size *= 2
    if rc != ERROR_SUCCESS:
        raise ctypes.WinError(2)
    return _Reg2Py(buf, tmp_size.value, data_type.value), data_type.value
This calls the Windows QueryValueEx function in a Unicode safe way.
def project2module(project):
    """Convert a PyPI project name into an importable module name."""
    normalized = project.lower().replace("-", "_")
    if normalized.startswith("python_"):
        # strip the conventional "python_" prefix
        normalized = normalized[len("python_"):]
    return normalized
Convert project name into a module name.
def annotate(self, word):
    """Annotate *word* for syllabification, stress, weights and vowels.

    Returns a list of (syllabification, stresses, weights, vowels) tuples,
    one per alternative syllabification.
    """
    info = []
    for syllabification, _ in syllabify(self.normalize(word), stress=True):
        stresses = ''
        weights = ''
        vowels = ''
        for syll in syllable_split(syllabification):
            try:
                vowels += get_vowel(syll)
                weights += get_weight(syll)
                # leading ' = primary, ` = secondary, else unstressed
                stresses += {'\'': 'P', '`': 'S'}.get(syll[0], 'U')
            except AttributeError:
                # no vowel found: mark alphabetic chunks, blank out delimiters
                if syll[-1].isalpha():
                    stresses += '*'
                    weights += '*'
                    vowels += '*'
                else:
                    stresses += ' '
                    weights += ' '
                    vowels += ' '
        info.append((
            syllabification,
            stresses,
            weights,
            vowels,
        ))
    return info
Annotate 'word' for syllabification, stress, weights, and vowels.
def update_features(self, poly):
    """Re-evaluate each feature's wavelength from its x position via *poly*."""
    features = self.features
    for item in features:
        item.wavelength = poly(item.xpos)
Evaluate wavelength at xpos using the provided polynomial.
def getVideoStreamTextureGL(self, hTrackedCamera, eFrameType, nFrameHeaderSize):
    """Access a shared GL texture for the specified tracked camera stream."""
    fn = self.function_table.getVideoStreamTextureGL
    # out-params filled in by the native call
    pglTextureId = glUInt_t()
    pFrameHeader = CameraVideoStreamFrameHeader_t()
    result = fn(hTrackedCamera, eFrameType, byref(pglTextureId), byref(pFrameHeader), nFrameHeaderSize)
    return result, pglTextureId, pFrameHeader
Access a shared GL texture for the specified tracked camera stream
def clear(self):
    """Clear this breakpoint by removing it from all class-level registries."""
    del IKBreakpoint.breakpoints_by_file_and_line[self.file_name, self.line_number]
    # slot is set to None (not deleted) so breakpoint numbers stay stable
    IKBreakpoint.breakpoints_by_number[self.number] = None
    IKBreakpoint.breakpoints_files[self.file_name].remove(self.line_number)
    # drop the per-file entry once its last breakpoint is gone
    if len(IKBreakpoint.breakpoints_files[self.file_name]) == 0:
        del IKBreakpoint.breakpoints_files[self.file_name]
    IKBreakpoint.update_active_breakpoint_flag()
Clear a breakpoint by removing it from all lists.
def publish_api(self, ret, stage_variables):
    """Tie the stage to a deployment matching the associated swagger file.

    Reassociates an existing deployment when one is tracked, otherwise
    creates a new API deployment; failures abort and record the error in ret.
    """
    stage_desc = dict()
    stage_desc['current_deployment_label'] = self.deployment_label
    stage_desc_json = _dict_to_json_pretty(stage_desc)
    if self._deploymentId:
        # existing deployment: just reassociate the stage with it
        res = self._set_current_deployment(stage_desc_json, stage_variables)
        if not res.get('set'):
            ret['abort'] = True
            ret['result'] = False
            ret['comment'] = res.get('error')
        else:
            ret = _log_changes(ret,
                               'publish_api (reassociate deployment, set stage_variables)',
                               res.get('response'))
    else:
        # no deployment tracked: create a fresh one for this stage
        res = __salt__['boto_apigateway.create_api_deployment'](restApiId=self.restApiId,
                                                                stageName=self._stage_name,
                                                                stageDescription=stage_desc_json,
                                                                description=self.deployment_label_json,
                                                                variables=stage_variables,
                                                                **self._common_aws_args)
        if not res.get('created'):
            ret['abort'] = True
            ret['result'] = False
            ret['comment'] = res.get('error')
        else:
            ret = _log_changes(ret, 'publish_api (new deployment)', res.get('deployment'))
    return ret
This method ties the given stage_name to a deployment matching the given swagger_file.
def log_learning_rates(self, model: Model, optimizer: torch.optim.Optimizer):
    """Send current parameter-specific learning rates to tensorboard."""
    if self._should_log_learning_rate:
        # map parameter tensors back to their names for scalar labels
        names = {param: name for name, param in model.named_parameters()}
        for group in optimizer.param_groups:
            if 'lr' not in group:
                continue
            rate = group['lr']
            for param in group['params']:
                # frozen params (requires_grad=False) are logged as 0
                effective_rate = rate * float(param.requires_grad)
                self.add_train_scalar("learning_rate/" + names[param], effective_rate)
Send current parameter specific learning rates to tensorboard
def parse(self, xmp):
    """Parse XMP data and return {namespace: {tag: value}} metadata."""
    tree = etree.fromstring(xmp)
    rdf_tree = tree.find(RDF_NS + 'RDF')
    meta = defaultdict(dict)
    for desc in rdf_tree.findall(RDF_NS + 'Description'):
        # NOTE(review): getchildren() was removed from xml.etree in
        # Python 3.9 — presumably this targets lxml; confirm before upgrading.
        for el in desc.getchildren():
            ns, tag = self._parse_tag(el)
            value = self._parse_value(el)
            meta[ns][tag] = value
    return dict(meta)
Run parser and return a dictionary of all the parsed metadata.
def to_yaml(self, skip_nulls=True):
    """Convert this object to a YAML string (block style)."""
    return yaml.safe_dump(self.to_dict(skip_nulls=skip_nulls),
                          default_flow_style=False)
Convert object to a yaml string
def write_file(filename, content):
    """Create *filename* with the given (binary) *content*.

    Fix: the Python 2 ``print`` statement is a syntax error under Python 3;
    the single-argument call form works on both interpreters.
    """
    print('Generating {0}'.format(filename))
    # 'wb' mode: content must be bytes on Python 3
    with open(filename, 'wb') as out_f:
        out_f.write(content)
Create the file with the given content
async def get_data(self, resource):
    """Get details for *resource* from the Netdata data endpoint.

    Stores a label->value mapping in self.values; raises
    NetdataConnectionError on timeout or connection failure.
    """
    url = '{}{}'.format(
        self.base_url, self.endpoint.format(resource=resource))
    try:
        with async_timeout.timeout(5, loop=self._loop):
            response = await self._session.get(url)
            _LOGGER.info(
                "Response from Netdata: %s", response.status)
            data = await response.json()
            _LOGGER.debug(data)
            # pair column labels with the first (latest) data row
            self.values = {k: v for k, v in zip(
                data['labels'], data['data'][0])}
    except (asyncio.TimeoutError, aiohttp.ClientError, socket.gaierror):
        _LOGGER.error("Can not load data from Netdata")
        raise exceptions.NetdataConnectionError()
Get detail for a resource from the data endpoint.
def _temporary_filenames(total):
    """Context-manager generator yielding *total* temp filenames, removed on exit.

    Fix: cleanup now runs in a ``finally`` block so the files are removed
    even when the managed block raises; the original leaked them on error.
    """
    temp_files = [_get_temporary_filename('optimage-') for _ in range(total)]
    try:
        yield temp_files
    finally:
        for temp_file in temp_files:
            try:
                os.remove(temp_file)
            except OSError:
                # best-effort cleanup: the file may already be gone
                pass
Context manager to create temporary files and remove them after use.
def iter_duration(self, iter_trigger):
    """Yield the duration between two consecutive frame trigger positions.

    NOTE(review): the bare ``print`` statements make this Python 2 only.
    """
    print
    process_info = ProcessInfo(self.frame_count, use_last_rates=4)
    start_time = time.time()
    next_status = start_time + 0.25
    old_pos = next(iter_trigger)
    for pos in iter_trigger:
        duration = pos - old_pos
        yield duration
        old_pos = pos
        # print a status line at most once per second
        if time.time() > next_status:
            next_status = time.time() + 1
            self._print_status(process_info)
    self._print_status(process_info)
    print
Yield the duration between two consecutive frames.
def fetch(cls, obj, keys):
    """Fetch the value at the dotted *keys* path inside *obj*.

    Raises ``cls.Missing(key)`` when any path segment cannot be resolved.
    """
    current = obj
    for key in keys.split("."):
        if type(current) == list:
            try:
                key = int(key)
            # Fix: int('foo') raises ValueError, not TypeError, so a
            # non-numeric list index previously escaped as a raw ValueError.
            except (TypeError, ValueError):
                raise cls.Missing(key)
        try:
            current = current[key]
        except (IndexError, KeyError, TypeError):
            raise cls.Missing(key)
    return current
fetches the value corresponding to keys from obj
def unseen_videos_reset(self):
    """Reset the unseen-videos counter; returns the API 'success' flag."""
    url = RESET_CAM_ENDPOINT.format(self.unique_id)
    ret = self._session.query(url).get('success')
    return ret
Reset the unseen videos counter.
def pre_dissect(self, s):
    """Decrypt and verify a TLS record before dissection.

    Returns header + plaintext fragment + auth tag + remaining bytes;
    records under the NULL cipher are passed through untouched.
    """
    if len(s) < 5:
        raise Exception("Invalid record: header is too short.")
    if isinstance(self.tls_session.rcs.cipher, Cipher_NULL):
        # NULL cipher: nothing to decrypt
        self.deciphered_len = None
        return s
    else:
        # record length lives in header bytes 3..4 (big-endian)
        msglen = struct.unpack('!H', s[3:5])[0]
        hdr, efrag, r = s[:5], s[5:5 + msglen], s[msglen + 5:]
        frag, auth_tag = self._tls_auth_decrypt(efrag)
        self.deciphered_len = len(frag)
        return hdr + frag + auth_tag + r
Decrypt, verify and decompress the message.
def prettyfy(response, format='json'):
    """Pretty-print response.content as JSON or XML."""
    formatter = pretty_json if format == 'json' else pretty_xml
    return formatter(response.content)
A wrapper for pretty_json and pretty_xml
def add_string(self, string):
    """Append to the working string, grow its length, and clear the eos flag."""
    self.string = self.string + string
    self.length = self.length + len(string)
    self.eos = 0
Add to the working string and its length and reset eos.
def reload(self):
    """Re-read secrets from the configured vault path."""
    self._source = self._fetch_secrets(self._vault_url, self._path, self._token)
Reread secrets from the vault path
def interactivity(self, min_val=None, max_val=None, qt_app=None):
    """Interactive seed setting with the 3D Qt seed editor.

    Blocks in the Qt event loop until the editor window is closed.
    """
    from .seed_editor_qt import QTSeedEditor
    from PyQt4.QtGui import QApplication
    if min_val is None:
        min_val = np.min(self.img)
    if max_val is None:
        max_val = np.max(self.img)
    # derive display window center/width from the value range
    window_c = (max_val + min_val) / 2
    window_w = max_val - min_val
    if qt_app is None:
        qt_app = QApplication(sys.argv)
    pyed = QTSeedEditor(
        self.img,
        modeFun=self.interactivity_loop,
        voxelSize=self.voxelsize,
        seeds=self.seeds,
        volume_unit=self.volume_unit,
    )
    pyed.changeC(window_c)
    pyed.changeW(window_w)
    qt_app.exec_()
Interactive seed setting with 3d seed editor
def _strip_version_from_dependency(dep): usedmark = '' for mark in '< > ='.split(): split = dep.split(mark) if len(split) > 1: usedmark = mark break if usedmark: return split[0].strip() else: return dep.strip()
For given dependency string, return only the package name
def shellfilter(value):
    """Escape backslash, backtick and quote characters for shell usage.

    Fix: use an explicitly ordered sequence so the backslash is always
    escaped first. With a plain dict on Python < 3.7 iteration order was
    undefined, and escaping '\\' after the others would double the
    backslashes those replacements had just inserted.
    """
    replacements = (('\\', '\\\\'), ('`', '\\`'), ("'", "\\'"), ('"', '\\"'))
    for search, repl in replacements:
        value = value.replace(search, repl)
    return safestring.mark_safe(value)
Replace HTML chars for shell usage.
def execute(cls, instance, async=True, countdown=2, is_heavy_task=False, **kwargs):
    """Execute the high-level operation: pre-apply, apply, post-apply.

    NOTE(review): ``async`` became a reserved keyword in Python 3.7, so
    this signature only parses on Python <= 3.6; renaming it would break
    existing keyword callers.
    """
    cls.pre_apply(instance, async=async, **kwargs)
    result = cls.apply_signature(instance, async=async, countdown=countdown,
                                 is_heavy_task=is_heavy_task, **kwargs)
    cls.post_apply(instance, async=async, **kwargs)
    return result
Execute high level-operation
def __make_thumbnail(self, width, height):
    """Create the page's thumbnail, preserving aspect ratio within width x height."""
    (w, h) = self.size
    # scale down by whichever dimension overflows the most
    factor = max(
        (float(w) / width),
        (float(h) / height)
    )
    w /= factor
    h /= factor
    return self.get_image((round(w), round(h)))
Create the page's thumbnail
def project_ranges(cb, msg, attributes):
    """Project x/y ranges supplied by a callback into the plot's coordinate system."""
    if skip(cb, msg, attributes):
        return msg
    plot = get_cb_plot(cb)
    x0, x1 = msg.get('x_range', (0, 1000))
    y0, y1 = msg.get('y_range', (0, 1000))
    extents = x0, y0, x1, y1
    x0, y0, x1, y1 = project_extents(extents, plot.projection,
                                     plot.current_frame.crs)
    coords = {'x_range': (x0, x1), 'y_range': (y0, y1)}
    # only return the ranges the caller asked for
    return {k: v for k, v in coords.items() if k in attributes}
Projects ranges supplied by a callback.
def run(self):
    """Compute and store inflation-adjusted movie budgets using CPI data."""
    self.mark_incomplete()
    session = client.get_client().create_session()
    cpi = ConsumerPriceIndexFile().load()
    max_cpi_year = cpi['Year'].max()
    # index annual CPI values by year for the lookups below
    cpi = cpi.set_index('Year')['Annual']
    for movie in session.query(models.Movie).all():
        if movie.year is not None and movie.budget is not None:
            if movie.year > max_cpi_year:
                # no CPI data for that year yet: assume current dollars
                movie.budget_inflation_adjusted = movie.budget
            else:
                movie.budget_inflation_adjusted = movie.budget * cpi.loc[max_cpi_year] / cpi.loc[movie.year]
    session.commit()
    session.close()
    self.mark_complete()
Compute and store inflation-adjusted movie budgets
def rollback(self, date):
    """Roll *date* backward to the nearest quarter start (no-op if already on one)."""
    on_boundary = self.onOffset(date)
    return date if on_boundary else date - QuarterBegin(month=self.month)
Roll date backward to nearest start of quarter
def kwargs_to_spec(self, **kwargs):
    """Turn the provided kwargs into a spec ready for the toolchain."""
    prepared = self.create_spec(**kwargs)
    self.prepare_spec(prepared, **kwargs)
    return prepared
Turn the provided kwargs into arguments ready for toolchain.
def api_stop(server_state):
    """Stop the global API server thread and wait for it to join."""
    api_srv = server_state['api']
    if api_srv is not None:
        log.info("Shutting down API")
        api_srv.stop_server()
        api_srv.join()
        log.info("API server joined")
    else:
        log.info("API already joined")
    # always clear the slot, even when there was nothing to stop
    server_state['api'] = None
Stop the global API server thread
def _set_cached_value(self, xblock, value): if not hasattr(xblock, '_field_data_cache'): xblock._field_data_cache = {} xblock._field_data_cache[self.name] = value
Store a value in the xblock's cache, creating the cache if necessary.
def resume(self):
    """Resume all paused snippet clients (alive but disconnected)."""
    for client in self._snippet_clients.values():
        # host_port is None while the connection is torn down (paused)
        if client.is_alive and client.host_port is None:
            self._device.log.debug('Resuming SnippetClient<%s>.', client.package)
            client.restore_app_connection()
        else:
            self._device.log.debug('Not resuming SnippetClient<%s>.', client.package)
Resumes all paused snippet clients.
def merge(s, t): for k, v in t.items(): if isinstance(v, dict): if k not in s: s[k] = v continue s[k] = merge(s[k], v) continue s[k] = v return s
Merge dictionary t into s.
def register_prop(name, handler_get, handler_set):
    """Register getter/setter handlers for a property (falsy handlers skipped)."""
    global props_get, props_set
    for registry, handler in ((props_get, handler_get), (props_set, handler_set)):
        if handler:
            registry[name] = handler
register a property handler
def tag(self, path, name):
    """Change the label associated with *path*; returns the old label.

    Raises FileNotInConfig when the path is not present in the config.
    """
    # normalize to a trailing slash so lookups are consistent
    if not path[len(path) - 1] == '/':
        path += '/'
    config = self.get_config()
    folder = self.find_folder({ 'path' : path }, config)
    if not folder:
        raise custom_errors.FileNotInConfig(path)
    old_name = folder['label']
    folder['label'] = name
    # keep the per-directory config in sync with the global config
    dir_config = self.adapter.get_dir_config(path)
    dir_config['label'] = name
    self.adapter.set_dir_config(dir_config)
    self.set_config(config)
    return old_name
Change name associated with path
def mate_top(self):
    """Mate at the top of the stator: origin at +length/2 along the Z axis."""
    return Mate(self, CoordSystem(
        origin=(0, 0, self.length/2),
        xDir=(0, 1, 0),
        normal=(0, 0, 1)
    ))
top of the stator
def sha256_digest(instr):
    """Generate the hex sha256 digest of *instr*, returned as unicode text."""
    return salt.utils.stringutils.to_unicode(
        hashlib.sha256(salt.utils.stringutils.to_bytes(instr)).hexdigest()
    )
Generate a sha256 hash of a given string.
def error(self, message):
    """Report an error to the user and close the dialog box."""
    QMessageBox.critical(self, _("Array editor"), message)
    # ensure the widget is destroyed when closed
    self.setAttribute(Qt.WA_DeleteOnClose)
    self.reject()
An error occured, closing the dialog box
def number_of_pages(self, key, value):
    """Populate ``number_of_pages``; returns None unless a positive int is found."""
    raw = force_single_element(value.get('a', ''))
    pages = maybe_int(raw)
    return pages if pages and pages > 0 else None
Populate the ``number_of_pages`` key.
def upload_site(self):
    """Upload a previously-built site to LSST the Docs.

    Raises RuntimeError when the build directory does not exist.
    """
    if not os.path.isdir(self._config['build_dir']):
        message = 'Site not built at {0}'.format(self._config['build_dir'])
        self._logger.error(message)
        raise RuntimeError(message)
    ltdclient.upload(self._config)
Upload a previously-built site to LSST the Docs.
def append_to_circuit(self, circuit, simplify=True):
    """Append this term's Pauli gates to *circuit* (optionally simplified first)."""
    if simplify:
        term = self.simplify()
    else:
        term = self
    # ops are applied in reverse order; identity gates are skipped
    for op in term.ops[::-1]:
        gate = op.op.lower()
        if gate != "i":
            # circuit.<gate> is an index-applied gate accessor
            getattr(circuit, gate)[op.n]
Append Pauli gates to `Circuit`.
def parallactic_angles(self, context):
    """Parallactic-angle data source for the requested time/antenna extents."""
    # slice times and antenna positions to the context's dimension extents
    (lt, ut), (la, ua) = context.dim_extents('ntime', 'na')
    return (mbu.parallactic_angles(self._times[lt:ut],
                                   self._antenna_positions[la:ua],
                                   self._phase_dir)
            .reshape(context.shape)
            .astype(context.dtype))
parallactic angle data source
def reorder_view(self, request):
    """The 'reorder' admin view: persist the posted ordering of object PKs."""
    model = self.model
    if not self.has_change_permission(request):
        raise PermissionDenied
    if request.method == "POST":
        object_pks = request.POST.getlist('neworder[]')
        model.objects.set_orders(object_pks)
        # NOTE(review): non-POST requests fall through returning None —
        # confirm this view is only ever hit via POST.
        return HttpResponse("OK")
The 'reorder' admin view for this model.
def _initLayerCtors(self):
    """Register the built-in Layer constructors (lmdb, remote)."""
    ctors = {
        'lmdb': s_lmdblayer.LmdbLayer,
        'remote': s_remotelayer.RemoteLayer,
    }
    self.layrctors.update(**ctors)
Registration for built-in Layer ctors
def register_file(name, member, path, digest='', conn=None):
    """Register a file in the package database.

    Opens (and closes) its own connection when *conn* is not supplied.
    """
    close = False
    if conn is None:
        close = True
        conn = init()
    # parameterized insert: one row per archive member
    conn.execute('INSERT INTO files VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', (
        name,
        '{0}/{1}'.format(path, member.path),
        member.size,
        member.mode,
        digest,
        member.devmajor,
        member.devminor,
        member.linkname,
        member.linkpath,
        member.uname,
        member.gname,
        member.mtime
    ))
    if close:
        conn.close()
Register a file in the package database
def requires(self):
    """Additional object or objects required by this object.

    NOTE(review): ``basestring`` is Python 2 only — this raises NameError
    on Python 3 unless aliased elsewhere.
    """
    value = self._schema.get("requires", {})
    if not isinstance(value, (basestring, dict)):
        raise SchemaError(
            "requires value {0!r} is neither a string nor an"
            " object".format(value))
    return value
Additional object or objects required by this object.
def _manual_recv(self, method, body, headers={}): headers.setdefault('sent_at', time.time()) return self.recv(self._make_context(), {'method': method, 'body': body, 'headers': headers})
Used in the tests
def determine_rotation(rotation, mark):
    """Determine the number of degrees to rotate the watermark image.

    'r'/'R' picks a random angle; anything else is coerced to int.
    """
    if isinstance(rotation, six.string_types) and rotation.lower() == 'r':
        rotation = random.randint(0, 359)
    else:
        rotation = _int(rotation)
    return rotation
Determines the number of degrees to rotate the watermark image.
def grab_rdflib_graph_version(g: Graph) -> str:
    """Best-effort extraction of the ontology version IRI from the header.

    NOTE(review): when zero or multiple versionIRI triples exist this prints
    a warning and returns the unconverted list — confirm callers handle that.
    """
    version = g.subject_objects(predicate=URIRef(OWL.versionIRI))
    version = [o for s, o in version]
    if len(version) != 1:
        print('versioning isn\'t correct')
    else:
        version = str(version[0])
    return version
Best-effort grab of the ontology version IRI, if it's properly in the header and correctly formatted.
def windowed(self, size) -> None:
    """Switch the window into windowed mode at the given (width, height)."""
    w, h = size
    self.wnd.windowed(w, h)
Set the window to windowed mode.
def _hooks_apply_before_serialize( hooks, state, value ): if hooks and hooks.before_serialize: return hooks.before_serialize(ProcessorStateView(state), value) return value
Apply the before serialize hook.
def du(path):
    """Disk-usage summary: returns an error string, or '<size> <unit>'."""
    size, err = calc(path)
    if err:
        return err
    amount, unit = convert(size)
    return "{0} {1}".format(amount, unit)
Put it all together!
def state_fluents(self) -> Dict[str, PVariable]:
    """Return the state-fluent pvariables, keyed by their string form."""
    fluents = {}
    for pvar in self.pvariables:
        if pvar.is_state_fluent():
            fluents[str(pvar)] = pvar
    return fluents
Returns state-fluent pvariables.
def add_multizone(self, group_cast):
    """Start managing a cast group: track its chromecast, listener and members."""
    self._groups[str(group_cast.uuid)] = {
        'chromecast': group_cast,
        'listener': Listener(group_cast, self._casts),
        'members': set()}
Start managing a group
def add_user_role(self, user, role_name):
    """Associate a role name with a user.

    SQL adapters store Role objects (created on demand); other adapters
    store the plain role-name string.
    """
    if isinstance(self.db_adapter, SQLDbAdapter):
        role = self.db_adapter.find_first_object(self.RoleClass, name=role_name)
        if not role:
            # create the role on first use
            role = self.RoleClass(name=role_name)
            self.db_adapter.add_object(role)
        user.roles.append(role)
    else:
        user.roles.append(role_name)
Associate a role name with a user.
def delete_info(ctx, info):
    """Delete an INFO field from the header and from all variants in a VCF."""
    head = ctx.parent.head
    vcf_handle = ctx.parent.handle
    outfile = ctx.parent.outfile
    silent = ctx.parent.silent
    if not info:
        logger.error("No info provided")
        sys.exit("Please provide a info string")
    if not info in head.info_dict:
        logger.error("Info '{0}' is not specified in vcf header".format(info))
        sys.exit("Please provide a valid info field")
    head.remove_header(info)
    print_headers(head, outfile=outfile, silent=silent)
    for line in vcf_handle:
        line = line.rstrip()
        # strip the INFO entry from each variant line before printing
        new_line = remove_vcf_info(keyword=info, variant_line=line)
        print_variant(variant_line=new_line, outfile=outfile, silent=silent)
Delete a info field from all variants in a vcf
def url(self, host):
    """Generate the coaps:// URL for this resource path on *host*."""
    segments = [str(part) for part in self._path]
    return 'coaps://{}:5684/{}'.format(host, '/'.join(segments))
Generate url for coap client.
def u2s(self,u):
    """Return a UTF-8/ASCII byte representation of *u*, best effort.

    NOTE(review): Python 2 only — ``unicode`` raises NameError on Python 3.
    """
    try:
        return u.encode('utf-8',errors='ignore')
    except (UnicodeDecodeError,AttributeError) as e:
        # not a text object (or already bytes): fall back to str()
        try:
            return str(u)
        except UnicodeEncodeError:
            return unicode(u).encode('utf-8',errors='ignore')
Returns an ASCII representation of the Unicode string 'u'.
def _special_value_cols(em):
    """Handle the "cols" special attribute, which differs per tag.

    textarea clamps to an int (default 20, min 1); other tags pass the
    raw attribute value through.
    """
    if em.tagName == 'textarea':
        return convertToIntRange(em.getAttribute('cols', 20), minValue=1,
                                 maxValue=None, invalidDefault=20)
    else:
        return em.getAttribute('cols', '')
_special_value_cols - Handle "cols" special attribute, which differs if tagName is a textarea or frameset
def partition_asymmetries(neurites, neurite_type=NeuriteType.all):
    """Partition asymmetry at bifurcation points of a collection of neurites.

    Returns a lazy map over the filtered bifurcation sections.
    """
    return map(_bifurcationfunc.partition_asymmetry,
               iter_sections(neurites,
                             iterator_type=Tree.ibifurcation_point,
                             neurite_filter=is_type(neurite_type)))
Partition asymmetry at bifurcation points of a collection of neurites
def iter_languages(self):
    """Iterate over (code, display name) language pairs, default locale first."""
    default_lang = self.babel.default_locale.language
    default_title = self.babel.default_locale.get_display_name(
        default_lang)
    yield (default_lang, default_title)
    for l, title in current_app.config.get('I18N_LANGUAGES', []):
        yield l, title
Iterate over list of languages.
def add_hits_to_proteins(self, hmm_hit_list):
    """Attach HMMER SearchIO hit objects to their matching Protein objects.

    Each hit id is '<organism accession>,<protein accession>'.
    Fix: the Python 2 ``print`` statement was a syntax error on Python 3.
    """
    for org in self.organisms:
        print("adding SearchIO hit objects for", org.accession)
        for hit in hmm_hit_list:
            hit_org_id = hit.id.split(',')[0]
            hit_prot_id = hit.id.split(',')[1]
            if org.accession == hit_org_id:
                for prot in org.proteins:
                    if prot.accession == hit_prot_id:
                        prot.hmm_hit_list.append(hit)
Add HMMER results to Protein objects
def _request_status(self):
    """Poll the monitor endpoint for async-job progress.

    Returns True once a resource id is available, False on request failure.
    """
    if self.item_id:
        # already completed
        return True
    response = self.con.get(self.monitor_url)
    if not response:
        return False
    data = response.json()
    self.status = data.get('status', 'inProgress')
    self.completion_percentage = data.get(self._cc('percentageComplete'), 0)
    # resourceId appears only when the job has finished
    self.item_id = data.get(self._cc('resourceId'), None)
    return self.item_id is not None
Check the API endpoint for the async job's progress.
def _get_model_fields(self, model, prefix=_field_prefix):
    """Find all fields of *model* that are not default base fields.

    Returns (prefix, field name, solr type name, required marker) tuples.
    """
    fields = list()
    for field_name, field in model()._ordered_fields:
        if field_name not in getattr(model, '_DEFAULT_BASE_FIELDS', []):
            type_name = utils.to_camel(field.solr_type)
            required = self._marker_true if field.required is True else self._marker_false
            fields.append((prefix, field_name, type_name, required))
    return fields
Find all fields of given model that are not default models.
def by_ip(self, ip):
    """Find the smallest range containing the given IP.

    Raises IpRange.DoesNotExist for unparseable IPs or when no range matches.
    """
    try:
        # NOTE(review): presumably inet_aton here yields a comparable number;
        # stdlib socket.inet_aton returns packed bytes — confirm the import.
        number = inet_aton(ip)
    except Exception:
        raise IpRange.DoesNotExist
    try:
        # narrowest match: smallest end_ip, then largest start_ip
        return self.filter(start_ip__lte=number, end_ip__gte=number)\
            .order_by('end_ip', '-start_ip')[0]
    except IndexError:
        raise IpRange.DoesNotExist
Find the smallest range containing the given IP.
def ppf(q, df, loc=0.0, scale=1.0, gamma = 1.0):
    """Percent-point function (inverse CDF) for the skew-t distribution.

    NOTE(review): loc/scale are accepted but unused here — confirm callers
    apply them elsewhere.
    """
    result = np.zeros(q.shape[0])
    # probability mass below zero under the skewed distribution
    probzero = Skewt.cdf(x=np.zeros(1),loc=np.zeros(1),df=df,gamma=gamma)
    # separate inverse formulas for quantiles below/above the zero mass point
    result[q<probzero] = 1.0/gamma*ss.t.ppf(((np.power(gamma,2) + 1.0) * q[q<probzero])/2.0,df)
    result[q>=probzero] = gamma*ss.t.ppf((1.0 + 1.0/np.power(gamma,2))/2.0*(q[q >= probzero] - probzero) + 0.5, df)
    return result
PPF function for Skew t distribution
def subscriptions_unread(self, room_id, **kwargs):
    """Mark messages as unread by roomId or from a message."""
    return self.__call_api_post('subscriptions.unread', roomId=room_id, kwargs=kwargs)
Mark messages as unread by roomId or from a message
def _format_axes(self):
    """Validate that the DataFrame axes are unique enough for self.orient.

    Raises ValueError when the index/columns uniqueness requirements of
    the chosen orient are violated. (NOTE(review): despite the original
    docstring, no datelike formatting happens here — confirm intent.)
    """
    if not self.obj.index.is_unique and self.orient in (
            'index', 'columns'):
        raise ValueError("DataFrame index must be unique for orient="
                         "'{orient}'.".format(orient=self.orient))
    if not self.obj.columns.is_unique and self.orient in (
            'index', 'columns', 'records'):
        raise ValueError("DataFrame columns must be unique for orient="
                         "'{orient}'.".format(orient=self.orient))
Try to format axes if they are datelike.