code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def _linefeed(self):
    """Performs a line feed: move the cursor to the start of the next line,
    inserting a new line at the end of the document when needed."""
    # True when the cursor sits on the final block of the text document.
    last_line = self._cursor.blockNumber() == self._text_edit.blockCount() - 1
    if self._cursor.atEnd() or last_line:
        if last_line:
            # Jump to end of the block so the newline is appended after it.
            self._cursor.movePosition(self._cursor.EndOfBlock)
        self._cursor.insertText('\n')
    else:
        # A following line already exists: just move down to its start.
        self._cursor.movePosition(self._cursor.Down)
        self._cursor.movePosition(self._cursor.StartOfBlock)
    self._text_edit.setTextCursor(self._cursor)
Performs a line feed.
def append_domain():
    """Return a grain dict containing ``append_domain`` when configured."""
    if salt.utils.platform.is_proxy():
        # Proxy minions do not provide this grain.
        return {}
    if 'append_domain' in __opts__:
        return {'append_domain': __opts__['append_domain']}
    return {}
Return append_domain if set
def match_subgroup(sequence, pattern):
    """Yield the named-group dict of every element matching *pattern*."""
    compiled = re.compile(pattern)
    for item in sequence:
        hit = compiled.match(item)
        if hit is not None:
            yield hit.groupdict()
Yield the sub-group element dictionary that match a regex pattern.
def get(context, request, key=None):
    """Return all registry records when *key* is None, otherwise the records
    matching the registry key, wrapped in a paginated batch mapping."""
    registry_records = api.get_registry_records_by_keyword(key)
    # NOTE(review): `req` is presumably a request-helper module imported at
    # file level (the `request` parameter is unused here) — confirm.
    size = req.get_batch_size()
    start = req.get_batch_start()
    batch = api.make_batch(registry_records, size, start)
    return {
        "pagesize": batch.get_pagesize(),
        "next": batch.make_next_url(),
        "previous": batch.make_prev_url(),
        "page": batch.get_pagenumber(),
        "pages": batch.get_numpages(),
        "count": batch.get_sequence_length(),
        # NOTE(review): wrapping the full record list in another list looks
        # suspicious — verify whether the current batch page was intended.
        "items": [registry_records],
        "url": api.url_for("senaite.jsonapi.v1.registry", key=key),
    }
Return all registry items if key is None, otherwise try to fetch the registry key
def _find_bad_meta(self):
    """Fill ``self._badmeta`` with meta items failing datatype validation."""
    self._badmeta = dict()
    for datatype in self.meta:
        for item in self.meta[datatype]:
            if not Dap._meta_valid[datatype].match(item):
                # setdefault collapses the explicit key-initialisation dance.
                self._badmeta.setdefault(datatype, []).append(item)
Fill self._badmeta with meta datatypes that are invalid
def layer_post_save(instance, *args, **kwargs):
    """Run a full layer check (async when Celery is enabled) after save."""
    if not (instance.is_monitored and instance.service.is_monitored):
        # Unmonitored layers are only (re-)indexed.
        index_layer(instance.id)
        return
    if settings.REGISTRY_SKIP_CELERY:
        check_layer(instance.id)
    else:
        check_layer.delay(instance.id)
Used to do a layer full check when saving it.
def empty_bucket(outputs_file):
    """Empty the S3 bucket associated with the test deployment.

    The bucket name is read from the deployment outputs YAML file.
    """
    import subprocess  # local import keeps the module's top level unchanged
    with open(outputs_file, "r") as f:
        # safe_load: the outputs file is plain data, never Python objects.
        outputs = yaml.safe_load(f)
    bucket = outputs["storage"]["BucketName"]
    print("Emptying bucket {} ...".format(bucket))
    # Argument list avoids shell interpolation of the bucket name.
    subprocess.run(["aws", "s3", "rm", "s3://{}".format(bucket), "--recursive"])
    print("Bucket {} has been emptied".format(bucket))
Empty the bucket associated to the test deployment.
def save(self, filething=None, v2_version=4, v23_sep='/', padding=None):
    """Save ID3v2 tag data into the DSF file's metadata chunk.

    If the DSD header has no metadata offset yet, the tag is appended at
    EOF and the header is updated to point at it.
    """
    fileobj = filething.fileobj
    fileobj.seek(0)
    dsd_header = DSDChunk(fileobj)
    if dsd_header.offset_metdata_chunk == 0:
        # No metadata chunk yet: append at end of file, record the offset.
        fileobj.seek(0, 2)
        dsd_header.offset_metdata_chunk = fileobj.tell()
        dsd_header.write()
    try:
        data = self._prepare_data(
            fileobj, dsd_header.offset_metdata_chunk, self.size,
            v2_version, v23_sep, padding)
    except ID3Error as e:
        # NOTE(review): `error` looks like a module-level exception class;
        # confirm it is defined at file scope (otherwise NameError here).
        reraise(error, e, sys.exc_info()[2])
    fileobj.seek(dsd_header.offset_metdata_chunk)
    fileobj.write(data)
    fileobj.truncate()
    # Metadata is the last chunk, so the file size is the current position.
    dsd_header.total_size = fileobj.tell()
    dsd_header.write()
Save ID3v2 data to the DSF file
def to_cartopy(self):
    """Convert this projection description to a CartoPy projection object."""
    globe = self.cartopy_globe
    proj_name = self._attrs['grid_mapping_name']
    if proj_name not in self.projection_registry:
        raise ValueError('Unhandled projection: {}'.format(proj_name))
    handler = self.projection_registry[proj_name]
    return handler(self._attrs, globe)
Convert to a CartoPy projection.
def rescale_taps(taps):
    """Rescale *taps* so that their sum equals 1.

    Returns a new list; the input is not modified.
    """
    taps = np.asarray(taps, dtype=float)
    # Forcing float dtype avoids the silent integer truncation that the
    # element-wise in-place assignment suffered for integer input.
    return (taps / taps.sum()).tolist()
Rescale taps in that way that their sum equals 1
def _create_rpmmacros(runas='root'):
    """Create the ``.rpmmacros`` file in the home directory and ensure the
    rpmbuild/mock directories exist with the given ownership.

    NOTE(review): directories are created under ``~`` of the *current*
    process user while ownership uses *runas* — confirm this is intended.
    """
    home = os.path.expanduser('~')
    rpmbuilddir = os.path.join(home, 'rpmbuild')
    if not os.path.isdir(rpmbuilddir):
        __salt__['file.makedirs_perms'](name=rpmbuilddir, user=runas, group='mock')
    mockdir = os.path.join(home, 'mock')
    if not os.path.isdir(mockdir):
        __salt__['file.makedirs_perms'](name=mockdir, user=runas, group='mock')
    rpmmacros = os.path.join(home, '.rpmmacros')
    with salt.utils.files.fopen(rpmmacros, 'w') as afile:
        afile.write(
            salt.utils.stringutils.to_str('%_topdir {0}\n'.format(rpmbuilddir))
        )
        afile.write('%signature gpg\n')
        afile.write('%_source_filedigest_algorithm 8\n')
        afile.write('%_binary_filedigest_algorithm 8\n')
        afile.write('%_gpg_name packaging@saltstack.com\n')
Create the .rpmmacros file in user's home directory
def remove_watchpoint(self, addr, size, type):
    """Remove a hardware watchpoint by delegating to the DWT unit.

    `type` shadows the builtin but is kept for interface compatibility.
    """
    return self.dwt.remove_watchpoint(addr, size, type)
remove a hardware watchpoint
def _prepare_encryption_table():
    """Prepare the encryption table used by the MPQ hash function."""
    seed = 0x00100001
    table = {}
    for low_index in range(256):
        index = low_index
        # Five table rows per low index, spaced 0x100 apart.
        for _ in range(5):
            seed = (seed * 125 + 3) % 0x2AAAAB
            high_half = (seed & 0xFFFF) << 0x10
            seed = (seed * 125 + 3) % 0x2AAAAB
            table[index] = high_half | (seed & 0xFFFF)
            index += 0x100
    return table
Prepare encryption table for MPQ hash function.
def _deconstruct_url(self, url): url = url.split("://", 1)[-1] server, endpoint = url.split("/", 1) return (server, endpoint)
Breaks down URL and returns server and endpoint
def parameters(self, parameters, locations=None):
    """Endpoint parameters registration decorator.

    Wires webargs parsing, a 422 response declaration and API docs for the
    given schema; `locations` defaults to ('json',) for many-item schemas.
    """
    def decorator(func):
        if locations is None and parameters.many:
            _locations = ('json', )
        else:
            _locations = locations
        if _locations is not None:
            parameters.context['in'] = _locations
        # Compose: docs -> 422 response -> webargs argument parsing.
        return self.doc(params=parameters)(
            self.response(code=HTTPStatus.UNPROCESSABLE_ENTITY)(
                self.WEBARGS_PARSER.use_args(parameters, locations=_locations)(
                    func
                )
            )
        )
    return decorator
Endpoint parameters registration decorator.
def ipinfo_ip_check(ip):
    """Query ipinfo.io for basic WHOIS-type data on an IP address.

    Returns the parsed JSON dict, or None for a non-IPv4 input.
    """
    if not is_IPv4Address(ip):
        return None
    # Timeout prevents hanging forever on an unresponsive service.
    response = requests.get('http://ipinfo.io/%s/json' % ip, timeout=10)
    return response.json()
Checks ipinfo.io for basic WHOIS-type data on an IP address
def createTopicPage2():
    """Example: build a topic page directly, restrict results to the set
    concepts/keywords, set an article threshold and print the first page."""
    topic = TopicPage(er)
    topic.addCategory(er.getCategoryUri("renewable"), 50)
    topic.addKeyword("renewable energy", 30)
    topic.addConcept(er.getConceptUri("biofuel"), 50)
    topic.addConcept(er.getConceptUri("solar energy"), 50)
    # Only return articles mentioning the configured concepts/keywords.
    topic.restrictToSetConceptsAndKeywords(True)
    topic.setLanguages(["eng", "deu", "spa"])
    topic.setMaxDaysBack(3)
    topic.setArticleThreshold(30)
    arts1 = topic.getArticles(page=1, sortBy="date", returnInfo=ReturnInfo(
        articleInfo=ArticleInfoFlags(concepts=True, categories=True)
    ))
    for art in arts1.get("articles", {}).get("results", []):
        print(art)
create a topic page directly, set the article threshold, restrict results to set concepts and keywords
def __getStationName(name, id): name = name.replace("Meetstation", "") name = name.strip() name += " (%s)" % id return name
Construct a station name.
def apply_async(self, args=None, kwargs=None, **options):
    """Skip scheduling when the previous run of this task is still active.

    Returns an AsyncResult either way so callers can treat both paths alike.
    NOTE(review): `*args`/`**kwargs` unpacking assumes these are never left
    at their None defaults — confirm all callers pass both.
    """
    if self.is_previous_task_processing(*args, **kwargs):
        message = 'Background task %s was not scheduled, because its predecessor is not completed yet.' % self.name
        logger.info(message)
        # Hand back a result handle without actually queueing the task.
        return self.AsyncResult(options.get('task_id') or str(uuid4()))
    return super(BackgroundTask, self).apply_async(args=args, kwargs=kwargs, **options)
Do not run background task if previous task is uncompleted
def remove_reader(self, fd):
    """Remove a read file descriptor from the event loop."""
    fd = fd_to_int(fd)
    try:
        del self._read_fds[fd]
    except KeyError:
        return
    self.selector.unregister(fd)
Remove read file descriptor from the event loop.
def create_welcome_client(self):
    """Create a welcome client with usage instructions in the first tab.

    Only acts when no tabs exist yet; returns the new client or None.
    """
    if self.tabwidget.count() == 0:
        # Context manager closes the handle (the original leaked it).
        with open(WELCOME) as f:
            welcome = f.read()
        client = NotebookClient(self, WELCOME, ini_message=welcome)
        self.add_tab(client)
        return client
Create a welcome client with some instructions.
def removeMigrationRequest(self, migration_rqst):
    """Remove a pending or failed migration request from the queue."""
    conn = self.dbi.connection()
    try:
        tran = conn.begin()
        self.mgrremove.execute(conn, migration_rqst)
        tran.commit()
    finally:
        # Single cleanup path replaces the two duplicated except blocks
        # that each closed the connection and re-raised unchanged.
        if conn:
            conn.close()
Method to remove pending or failed migration request from the queue.
def download_file_insecure(url, target):
    """Download *url* to *target* using urllib, without connection auth."""
    source = urlopen(url)
    try:
        payload = source.read()
    finally:
        source.close()
    with open(target, "wb") as destination:
        destination.write(payload)
Use Python to download the file, without connection authentication.
def mean_absolute_error(pred:Tensor, targ:Tensor)->Rank0Tensor:
    "Mean absolute error between `pred` and `targ`."
    pred,targ = flatten_check(pred,targ)
    return (targ - pred).abs().mean()
Mean absolute error between `pred` and `targ`.
def enterEvent(self, event):
    """Reimplemented to cancel the hide timer when the mouse enters.

    In tooltip mode the widget hides immediately instead.
    """
    super(CallTipWidget, self).enterEvent(event)
    if self.as_tooltip:
        self.hide()
    # Keep the tip visible while the cursor is over this top-level widget.
    if (self._hide_timer.isActive() and
            self.app.topLevelAt(QCursor.pos()) == self):
        self._hide_timer.stop()
Reimplemented to cancel the hide timer.
def extract_table_names(query):
    """Extract table names from FROM/JOIN clauses of an SQL query."""
    blocks = re.findall(r'(?:FROM|JOIN)\s+(\w+(?:\s*,\s*\w+)*)',
                        query, re.IGNORECASE)
    names = set()
    for block in blocks:
        names.update(re.findall(r'\w+', block))
    return names
Extract table names from an SQL query.
def _kwargs_to_attributes(self, kwargs): for key, val in kwargs.iteritems(): if key not in self.__dict__: raise ValueError( "Can't set %s parameter - it is not defined here!" % key ) self.__dict__[key] = val
Put keys from `kwargs` to `self`, if the keys are already there.
def semantic_parent(self):
    """Return (and cache) the semantic parent cursor for this cursor."""
    try:
        return self._semantic_parent
    except AttributeError:
        self._semantic_parent = conf.lib.clang_getCursorSemanticParent(self)
        return self._semantic_parent
Return the semantic parent for this cursor.
def _read_response(self): result = self.buf.read_line().decode("utf-8") if not result: raise NoResponseError("No response received from server.") msg = self._read_message() if result != "ok": raise InvalidResponseError(msg) return msg
Reads a complete response packet from the server
def asDictionary(self):
    """Convert the object to its dictionary representation."""
    template = {"type": self._type, "mapLayerId": self._mapLayerId}
    # `is not None` replaces the unidiomatic `not x is None`.
    if self._gdbVersion is not None and self._gdbVersion != "":
        template['gdbVersion'] = self._gdbVersion
    return template
converts the object to a dictionary
def cli(ctx, obj):
    """Show Alerta server and client versions."""
    client = obj['client']
    versions = [
        ('alerta', client.mgmt_status()['version']),
        ('alerta client', client_version),
        ('requests', requests_version),
        ('click', click.__version__),
    ]
    for name, version in versions:
        click.echo('{} {}'.format(name, version))
    ctx.exit()
Show Alerta server and client versions.
def _read_words(filename):
    """Read whitespace-separated words from *filename*, replacing each
    newline with an EOS token."""
    with tf.gfile.GFile(filename, "r") as f:
        if sys.version_info[0] >= 3:
            return f.read().replace("\n", " %s " % EOS).split()
        else:
            # Python 2: file contents arrive as bytes and must be decoded.
            return f.read().decode("utf-8").replace("\n", " %s " % EOS).split()
Reads words from a file.
def plotgwsrc(gwb):
    """Plot a GWB source population as a Mollweide projection."""
    theta, phi, omega, polarization = gwb.gw_dist()
    # Convert spherical coordinates to Mollweide longitude/latitude.
    lon = phi - N.pi
    lat = 0.5 * N.pi - theta
    P.title("GWB source population")
    ax = P.axes(projection='mollweide')
    scatter = P.scatter(lon, lat, marker='.', s=1)
    return scatter
Plot a GWB source population as a mollweide projection.
def to_glyphs_family_user_data_from_designspace(self):
    """Copy family-wide designspace lib entries into GSFont userData."""
    user_data = self.font.userData
    for key, value in self.designspace.lib.items():
        if key == UFO2FT_FEATURE_WRITERS_KEY and value == DEFAULT_FEATURE_WRITERS:
            # Default feature writers carry no information; skip them.
            continue
        if _user_data_has_no_special_meaning(key):
            user_data[key] = value
Set the GSFont userData from the designspace family-wide lib data.
def to_ccw(geom):
    """Reorient a polygon to counter-clockwise winding; pass others through."""
    needs_flip = isinstance(geom, sgeom.Polygon) and not geom.exterior.is_ccw
    return sgeom.polygon.orient(geom) if needs_flip else geom
Reorients polygon to be wound counter-clockwise.
def start(cls, originator_id, quorum_size, network_uid):
    """Factory method that returns a new Paxos aggregate."""
    assert isinstance(quorum_size, int), "Not an integer: {}".format(quorum_size)
    return cls.__create__(
        event_class=cls.Started,
        originator_id=originator_id,
        quorum_size=quorum_size,
        network_uid=network_uid
    )
Factory method that returns a new Paxos aggregate.
def _norm(self, x):
    """Compute a numerically safe L2 norm along the last axis."""
    squared_sum = tf.reduce_sum(tf.square(x), keepdims=True, axis=-1)
    # Small epsilon keeps the gradient finite at zero vectors.
    return tf.sqrt(squared_sum + 1e-7)
Compute the safe norm.
def memoize(self, hasher=None):
    """Memoize the wrapped callable by caching results in a shared namespace.

    `hasher` maps the positional args to a cache key (identity by default);
    note that keyword arguments are NOT part of the key.
    """
    ns = self.Namespace()
    ns.memo = {}
    if hasher is None:
        hasher = lambda x: x
    def memoized(*args, **kwargs):
        key = hasher(*args)
        if key not in ns.memo:
            ns.memo[key] = self.obj(*args, **kwargs)
        return ns.memo[key]
    return self._wrap(memoized)
Memoize an expensive function by storing its results.
def _make_cmake(config_info):
    """Initialize a CMake builder (with compile-commands export) for the
    project described by *config_info*."""
    configure_args = ["-DCMAKE_EXPORT_COMPILE_COMMANDS=ON"]
    cmake_args = {}
    options, option_fns = _make_all_options()
    def _add_value(value, key):
        # Translate an "extra" option into its cmake_args key/value pair.
        args_key, args_value = _EX_ARG_FNS[key](value)
        cmake_args[args_key] = args_value
    devpipeline_core.toolsupport.args_builder(
        "cmake", config_info, options,
        lambda v, key: configure_args.extend(option_fns[key](v)),
    )
    devpipeline_core.toolsupport.args_builder(
        "cmake", config_info, _EX_ARGS, _add_value
    )
    cmake = CMake(cmake_args, config_info, configure_args)
    build_type = config_info.config.get("cmake.build_type")
    if build_type:
        cmake.set_build_type(build_type)
    return devpipeline_build.make_simple_builder(cmake, config_info)
This function initializes a CMake builder for building the project.
def usable_id(cls, id):
    """Resolve *id* via FQDN lookup or integer parsing; None when neither."""
    matches = cls.from_fqdn(id)
    if matches:
        return [entry['id'] for entry in matches]
    try:
        return int(id)
    except (TypeError, ValueError):
        return None
Retrieve id from single input.
def DeleteAttributes(self, subject, attributes, start=None, end=None, sync=True):
    """Remove the given attributes from *subject* within [start, end].

    `sync` is accepted for interface compatibility but ignored here.
    """
    _ = sync
    if not attributes:
        return
    if isinstance(attributes, string_types):
        raise ValueError(
            "String passed to DeleteAttributes (non string iterable expected).")
    for attribute in attributes:
        # NOTE(review): the timestamp is recomputed per attribute although
        # start/end never change in the loop — confirm _MakeTimestamp is
        # pure before hoisting it.
        timestamp = self._MakeTimestamp(start, end)
        attribute = utils.SmartUnicode(attribute)
        queries = self._BuildDelete(subject, attribute, timestamp)
        self._ExecuteQueries(queries)
Remove some attributes from a subject.
def _get_price(self, package): for price in package['prices']: if not price.get('locationGroupId'): return price['id'] raise SoftLayer.SoftLayerError("Could not find valid price")
Returns valid price for ordering a dedicated host.
def _compute_prediction(self,X): A_pred = np.dot(X,self._Bhat) Y_pred = self._basis_object.inverse_transform(A_pred) return Y_pred
compute predictions given a new X
def _format_structured_address(address):
    """Pretty-print the address and build a Location with a (lat, lon) pair."""
    meta = address['metadata']
    latitude = meta.get('latitude')
    longitude = meta.get('longitude')
    point = (latitude, longitude) if latitude and longitude else None
    label = ", ".join((address['delivery_line_1'], address['last_line']))
    return Location(label, point, address)
Pretty-print address and return lat, lon tuple.
def unit_get(attribute):
    """Fetch *attribute* of this unit via ``unit-get``; None on bad JSON."""
    cmd = ['unit-get', '--format=json', attribute]
    try:
        raw = subprocess.check_output(cmd).decode('UTF-8')
        return json.loads(raw)
    except ValueError:
        return None
Get the given attribute of this unit via the ``unit-get`` hook tool.
def squeeze_bits(arr: numpy.ndarray) -> numpy.ndarray:
    """Return a copy of an integer array using the smallest sufficient
    unsigned dtype.

    Signed input must be non-negative.  An empty array squeezes to uint8.
    """
    assert arr.dtype.kind in ("i", "u")
    if arr.size == 0:
        # max()/min() are undefined for empty arrays; uint8 suffices.
        return arr.astype(numpy.uint8)
    if arr.dtype.kind == "i":
        assert arr.min() >= 0
    mlbl = int(arr.max()).bit_length()
    if mlbl <= 8:
        dtype = numpy.uint8
    elif mlbl <= 16:
        dtype = numpy.uint16
    elif mlbl <= 32:
        dtype = numpy.uint32
    else:
        dtype = numpy.uint64
    return arr.astype(dtype)
Return a copy of an integer numpy array with the minimum bitness.
def _sort(self): self.versions = OrderedDict(sorted(self.versions.items(), key=lambda v: v[0]))
Sort versions by their version number
def draw(self):
    """Draw the ASCII canvas, via asciimatics when attached to a TTY."""
    if not sys.stdout.isatty():
        for line in self.canvas:
            print("".join(line))
        return
    from asciimatics.screen import Screen
    Screen.wrapper(self._do_draw)
Draws ASCII canvas on the screen.
def build_model_classes(metadata):
    """Generate a model class file for every model in the given spec module."""
    spec = importlib.import_module(metadata)
    template = get_jinja_env().get_template('model.py.jinja2')
    for model in spec.models:
        with open(model_path(model.name.lower()), 'w') as out:
            out.write(template.render(model_md=model))
Generate a model class for any models contained in the specified spec file.
def backward_char(event):
    """Move the cursor back ``event.arg`` characters."""
    buff = event.current_buffer
    buff.cursor_position += buff.document.get_cursor_left_position(count=event.arg)
Move back a character.
def wireshark(pktlist, *args):
    """Write the packets to a temporary pcap and open it in wireshark."""
    pcap_path = get_temp_file()
    wrpcap(pcap_path, pktlist)
    command = [conf.prog.wireshark, "-r", pcap_path] + list(args)
    subprocess.Popen(command)
Run wireshark on a list of packets
def _advanced_indexer_subspaces(key): if not isinstance(key, tuple): key = (key,) advanced_index_positions = [i for i, k in enumerate(key) if not isinstance(k, slice)] if (not advanced_index_positions or not _is_contiguous(advanced_index_positions)): return (), () non_slices = [k for k in key if not isinstance(k, slice)] ndim = len(np.broadcast(*non_slices).shape) mixed_positions = advanced_index_positions[0] + np.arange(ndim) vindex_positions = np.arange(ndim) return mixed_positions, vindex_positions
Indices of the advanced indexes subspaces for mixed indexing and vindex.
def attach(item, action, greedy=False, ignore_no_tokens=None, ignore_one_token=None):
    """Set the parse action for *item*, creating a computation-graph node
    when the computation graph is enabled.

    Unset flags default to the corresponding attributes on *action*.
    """
    if use_computation_graph:
        if ignore_no_tokens is None:
            ignore_no_tokens = getattr(action, "ignore_no_tokens", False)
        if ignore_one_token is None:
            ignore_one_token = getattr(action, "ignore_one_token", False)
        # Only forward flags that are actually set, keeping node defaults.
        kwargs = {}
        if greedy:
            kwargs["greedy"] = greedy
        if ignore_no_tokens:
            kwargs["ignore_no_tokens"] = ignore_no_tokens
        if ignore_one_token:
            kwargs["ignore_one_token"] = ignore_one_token
        action = partial(ComputationNode, action, **kwargs)
    return add_action(item, action)
Set the parse action for the given item to create a node in the computation graph.
def add_relations(self, relations):
    """Add multiple (source, destination) pairs to the bijection."""
    for source, destination in relations:
        self.add_relation(source, destination)
Add multiple relations to a bijection
def make_pilothole_cutter(self):
    """Build a solid to subtract from an interfacing part to bore a
    pilot hole."""
    radius = self.pilothole_radius
    if radius is None:
        # Default: interpolate between inner and outer radii by the ratio.
        (inner_radius, outer_radius) = self.get_radii()
        radius = inner_radius + self.pilothole_ratio * (outer_radius - inner_radius)
    workplane = cadquery.Workplane('XY')
    return workplane.circle(radius).extrude(self.length)
Make a solid to subtract from an interfacing solid to bore a pilot-hole.
def _get_model_without_dependencies(self, models_dict): next_model = None if not models_dict: return next_model for model, dependencies in six.iteritems(models_dict): if dependencies == []: next_model = model break if next_model is None: raise ValueError('incomplete model definitions, models in dependency ' 'list not defined: {0}'.format(models_dict)) models_dict.pop(next_model) for model, dep_list in six.iteritems(models_dict): if next_model in dep_list: dep_list.remove(next_model) return next_model
Helper function to find the next model that should be created
def object_path(self, key):
    """Return the absolute object path for *key* under the store root."""
    relative = self.relative_object_path(key)
    return os.path.join(self.root_path, relative)
return the object path for `key`.
def ask_list(question: str, default: list = None) -> list:
    """Ask the user for a comma-separated list of strings.

    Returns *default* on empty input.
    """
    # Build the hint once; the original interpolated the already-bracketed
    # hint into another "[{}]" wrapper, printing doubled brackets.
    hint = " [default: {0}]".format(",".join(default)) if default is not None else ""
    answer = input("{0}{1}: ".format(question, hint))
    if answer == "":
        return default
    return [ans.strip() for ans in answer.split(",")]
Asks for a comma-separated list of strings
def simplify_expression(txt):
    """Remove unnecessary whitespace around punctuation in an expression.

    Collapses space adjacent to non-word characters, then re-inserts a
    single space after reserved-word/comma-introduced parentheses.
    """
    # Strip spaces touching non-word chars, then normalize all whitespace.
    minimal = re.sub(r'\s', ' ',
                     re.sub(r'\s(?=\W)', '',
                            re.sub(r'(?<=\W)\s', '', txt.strip())))
    # Re-add a space after ')' before a word and before '(' after commas
    # or reserved words.
    return re.sub(r'\)(?=\w)', ') ',
                  re.sub(r'(,|\b(?:{}))\('.format('|'.join(RESERVED_WORDS)),
                         '\\1 (', minimal))
Remove all unnecessary whitespace and normalize spacing around punctuation
def register(self, cls):
    """Add a new PolymorphicIndexable class to the registry tables."""
    mapping = cls.search_objects.mapping
    doc_type = mapping.doc_type
    self.all_models[doc_type] = cls
    # Group the class with its siblings under the shared base class.
    family = self.families.setdefault(cls.get_base_class(), {})
    family[doc_type] = cls
    self.indexes.setdefault(mapping.index, []).append(cls)
Adds a new PolymorphicIndexable to the registry.
def com_google_fonts_check_varfont_weight_instances(ttFont):
    """Variable-font instance 'wght' coordinates must be multiples of 100."""
    failed = False
    for instance in ttFont["fvar"].instances:
        coords = instance.coordinates
        if 'wght' not in coords or coords['wght'] % 100 == 0:
            continue
        failed = True
        yield FAIL, ("Found an variable font instance with"
                     f" 'wght'={coords['wght']}."
                     " This should instead be a multiple of 100.")
    if not failed:
        yield PASS, "OK"
Variable font weight coordinates must be multiples of 100.
def _get_gae_credentials():
    """Get App Engine App Identity credentials and project ID.

    Returns (None, None) when App Engine APIs are unavailable.
    """
    try:
        import google.auth.app_engine as app_engine
    except ImportError:
        return None, None
    try:
        return app_engine.Credentials(), app_engine.get_project_id()
    except EnvironmentError:
        return None, None
Gets Google App Engine App Identity credentials and project ID.
def _patch():
    """Monkey-patch pyopengl to fix a Python-3 bug in glBufferSubData.

    Aliases ``long`` to ``int`` in the wrapper's module and restores the
    renamed shader-source-length constant.
    """
    import sys
    from OpenGL import GL
    if sys.version_info > (3,):
        buffersubdatafunc = GL.glBufferSubData
        if hasattr(buffersubdatafunc, 'wrapperFunction'):
            buffersubdatafunc = buffersubdatafunc.wrapperFunction
        _m = sys.modules[buffersubdatafunc.__module__]
        # The wrapper module still references the Python-2-only `long`.
        _m.long = int
    try:
        from OpenGL.GL.VERSION import GL_2_0
        GL_2_0.GL_OBJECT_SHADER_SOURCE_LENGTH = GL_2_0.GL_SHADER_SOURCE_LENGTH
    except Exception:
        pass
Monkey-patch pyopengl to fix a bug in glBufferSubData.
def Dirname(self):
    """Return a copy of this pathspec trimmed to its directory part.

    Pops trailing components until a parent directory (or root) remains.
    """
    result = self.Copy()
    while 1:
        last_directory = posixpath.dirname(result.last.path)
        if last_directory != "/" or len(result) <= 1:
            result.last.path = last_directory
            # Directories have no inode of their own here.
            result.last.inode = None
            break
        # The last component collapsed to "/": drop it and keep trimming.
        result.Pop(-1)
    return result
Get a new copied object with only the directory path.
def connection_lost(self, exc=None):
    """Fire the ``connection_lost`` event, logging first in debug mode."""
    if self._loop.get_debug():
        self.producer.logger.debug('connection lost %s', self)
    self.event('connection_lost').fire(exc=exc)
Fires the ``connection_lost`` event.
def task_path(cls, project, location, queue, task):
    """Return a fully-qualified Cloud Tasks task resource name."""
    return google.api_core.path_template.expand(
        "projects/{project}/locations/{location}/queues/{queue}/tasks/{task}",
        project=project,
        location=location,
        queue=queue,
        task=task,
    )
Return a fully-qualified task string.
def json_options_to_metadata(options, add_brackets=True):
    """Parse metadata from its JSON representation; {} on parse failure."""
    text = '{' + options + '}' if add_brackets else options
    try:
        return loads(text)
    except ValueError:
        return {}
Read metadata from its json representation
def _get_keywords(self, location, keywords): if 'xml' in keywords: keywords.pop('xml') self.xml = True else: keywords['file_type'] = 'json' if 'id' in keywords: if location != 'series': location = location.rstrip('s') key = '%s_id' % location value = keywords.pop('id') keywords[key] = value if 'start' in keywords: time = keywords.pop('start') keywords['realtime_start'] = time if 'end' in keywords: time = keywords.pop('end') keywords['realtime_end'] = time if 'sort' in keywords: order = keywords.pop('sort') keywords['sort_order'] = order keywords['api_key'] = self.api_key return keywords
Format GET request's parameters from keywords.
def log_tag(self, tag, code, multiline=False):
    """Log a tagged message when tracing is enabled.

    `code` may be a zero-argument callable evaluated lazily.
    """
    if not self.tracing:
        return
    if callable(code):
        code = code()
    tagstr = "[" + str(tag) + "]"
    if multiline:
        printerr(tagstr + "\n" + displayable(code))
    else:
        printerr(tagstr, ascii(code))
Logs a tagged message if tracing.
def seen_tasks(self):
    """Print the list of task types seen by the server, one per line."""
    print('\n'.join(self._stub.seen_tasks(clearly_pb2.Empty()).task_types))
Shows a list of seen task types.
def run_dot(dot):
    """Convert a DOT graph into an IPython-displayable object, choosing
    (and caching) an implementation on first use."""
    global impl
    if impl is None:
        impl = guess_impl()
    if impl == "dot":
        return run_dot_dot(dot)
    elif impl == "js":
        return run_dot_js(dot)
    else:
        raise ValueError("unknown implementation {}".format(impl))
Converts a graph in DOT format into an IPython displayable object.
def create_lambda_function(self):
    """Create the Lambda function and its alias, or update existing ones."""
    vpc_config = self._vpc_config()
    if self._check_lambda():
        self.update_function_configuration(vpc_config)
    else:
        self.create_function(vpc_config)
    if self._check_lambda_alias():
        self.update_alias()
    else:
        self.create_alias()
Create or update Lambda function.
def ignore_cec(self):
    """Return whether the CEC data from this device should be ignored."""
    if self.device is None:
        return False
    # Generator expression lets any() short-circuit (the list comp built
    # every match result before testing).
    return any(fnmatch.fnmatchcase(self.device.friendly_name, pattern)
               for pattern in IGNORE_CEC)
Returns whether the CEC data should be ignored.
def characters(self, numberOfCharacters):
    """Return the next *numberOfCharacters* characters from the current
    index of the source code."""
    start = self.index
    return self.code[start:start + numberOfCharacters]
Returns the given number of characters starting at the current index
def hash(mapping, bind, values):
    """Yield a sha1 hex digest for each non-None value.

    Note: the name shadows the builtin ``hash``; kept for interface
    compatibility.
    """
    for value in values:
        if value is None:
            continue
        if not isinstance(value, str):
            # str() replaces the Python-2-only six.text_type().
            value = str(value)
        yield sha1(value.encode('utf-8')).hexdigest()
Generate a sha1 for each of the given values.
def acquire_win(lock_file):
    """Try to acquire *lock_file* on Windows; return its fd or None.

    Uses a non-blocking msvcrt byte-range lock; on failure the fd is
    closed and None is returned implicitly.
    """
    try:
        fd = os.open(lock_file, OPEN_MODE)
    except OSError:
        pass
    else:
        try:
            msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)
        except (IOError, OSError):
            os.close(fd)
        else:
            return fd
Acquire a lock file on windows.
def decode(self, dataset_split=None, decode_from_file=False, checkpoint_path=None):
    """Decode predictions from a dataset or, optionally, from a file."""
    if decode_from_file:
        decoding.decode_from_file(self._estimator,
                                  self._decode_hparams.decode_from_file,
                                  self._hparams,
                                  self._decode_hparams,
                                  self._decode_hparams.decode_to_file)
    else:
        decoding.decode_from_dataset(
            self._estimator,
            self._hparams.problem.name,
            self._hparams,
            self._decode_hparams,
            dataset_split=dataset_split,
            checkpoint_path=checkpoint_path)
Decodes from dataset or file.
def packet_get_samples_per_frame(data, fs):
    """Return the samples-per-frame count of an Opus packet.

    Raises OpusError on a negative (error) return code.
    NOTE(review): the wrapped C call is `_packet_get_nb_frames`, while the
    name says samples-per-frame — confirm the binding is the intended one.
    """
    data_pointer = ctypes.c_char_p(data)
    result = _packet_get_nb_frames(data_pointer, ctypes.c_int(fs))
    if result < 0:
        raise OpusError(result)
    return result
Gets the number of samples per frame from an Opus packet
def __parseColumns(self):
    """Return the list of Column objects described by the dataset's column
    model (empty when no column model is present)."""
    columns = []
    # `in` replaces the Python-2-only dict.has_key().
    if 'columnModel' in self.__dataItem:
        for column in self.__dataItem['columnModel']:
            columns.append(Column(column))
    return columns
Returns the list of columns related to the dataset.
def _update(self, **kwargs):
    """Send `content` to the dedicated source-content URL, then delegate
    the remaining fields to the generic update."""
    if 'content' in kwargs:
        content = kwargs.pop('content')
        path = self._construct_path_to_source_content()
        self._http.put(path, json.dumps({'content': content}))
    super(Resource, self)._update(**kwargs)
Use separate URL for updating the source file.
def search_reddit_names(self, query):
    """Return subreddit objects whose display name contains *query*."""
    payload = {'query': query}
    results = self.request_json(self.config['search_reddit_names'], data=payload)
    return [self.get_subreddit(name) for name in results['names']]
Return subreddits whose display name contains the query.
def _build_exclusion_list(exclude):
    """Build the list of source-file names for modules to exclude from
    exception handling.

    Each dotted module path is resolved package by package; raises
    ValueError when a module cannot be located.
    NOTE(review): `imp` is deprecated (removed in Python 3.12) — migrate
    to importlib when the support matrix allows.
    """
    mod_files = []
    if exclude:
        for mod in exclude:
            mdir = None
            mod_file = None
            for token in mod.split("."):
                try:
                    mfile, mdir, _ = imp.find_module(token, mdir and [mdir])
                    if mfile:
                        mod_file = mfile.name
                        mfile.close()
                except ImportError:
                    msg = "Source for module {mod_name} could not be found"
                    raise ValueError(msg.format(mod_name=mod))
            if mod_file:
                # Map compiled files back to their source counterpart.
                mod_files.append(mod_file.replace(".pyc", ".py"))
    return mod_files
Build file names list of modules to exclude from exception handling.
def _log_multivariate_normal_density_tied(X, means, covars):
    """Gaussian log-density at X for a tied covariance model."""
    # Replicate the shared covariance once per component, then reuse the
    # full-covariance implementation.
    tiled = np.tile(covars, (means.shape[0], 1, 1))
    return _log_multivariate_normal_density_full(X, means, tiled)
Compute Gaussian log-density at X for a tied model.
def merge_conf_file(self, result, conf_file_path):
    """Merge the configuration in *conf_file_path* into *result*.

    Unless the file is top-level (by name or option), its dotted file name
    selects the nested sub-dict of *result* to merge into.
    """
    conf = parse_conf_file(conf_file_path)
    conf_file_name = os.path.splitext(os.path.basename(conf_file_path))[0]
    result_part = result
    if not conf_file_name in File.TOP_LEVEL_CONF_FILES \
        and ( not "top_level" in self._options or not self._options["top_level"]):
        # Descend (creating as needed) into result following the file name.
        for key_part in conf_file_name.split('.'):
            if not key_part in result_part:
                result_part[key_part] = {}
            result_part = result_part[key_part]
    return merge_conf(result_part, conf)
Merge a configuration in file with current configuration
def render(self, surf):
    """Draw the button (shadow, face and caption) onto *surf*."""
    if not self.flags & self.NO_SHADOW:
        # Shadow circle is drawn first so the face overlaps it.
        circle(surf, self.center + self._bg_delta, self.width / 2, LIGHT_GREY)
    circle(surf, self.center + self._front_delta, self.width / 2, self._get_color())
    self.text.center = self.center + self._front_delta
    self.text.render(surf)
Draw the button on the surface.
def ToMicroseconds(self):
    """Convert this Duration to integer microseconds, rounding the
    nanosecond remainder toward zero."""
    micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)
    return self.seconds * _MICROS_PER_SECOND + micros
Converts a Duration to microseconds.
def _write_critic_model_stats(self, iteration:int)->None:
    "Writes gradient statistics for critic to Tensorboard."
    critic = self.learn.gan_trainer.critic
    self.stats_writer.write(model=critic, iteration=iteration,
                            tbwriter=self.tbwriter, name='crit_model_stats')
    # Record that critic stats are fresh for this iteration.
    self.crit_stats_updated = True
Writes gradient statistics for critic to Tensorboard.
def _resolve_child(self, path):
    """Return (parent, member) generators for a dot-delimited path.

    Raises MessageNotFound when a component is missing or when traversal
    reaches a non-Permuter node.
    """
    obj = self
    for component in path.split('.'):
        ptr = obj
        if not isinstance(ptr, Permuter):
            raise self.MessageNotFound("Bad element path [wrong type]")
        # Lazily search the generators for one whose name matches.
        found_gen = (_ for _ in ptr._generators if _.name() == component)
        obj = next(found_gen, None)
        if not obj:
            raise self.MessageNotFound("Path '{}' unresolved to member."
                                       .format(path))
    return ptr, obj
Return a member generator by a dot-delimited path
def title(self, txt):
    """Print *txt* framed above and below by '=' rules sized to the text."""
    ruler = "=" * len(txt)
    print(ruler)
    print(txt)
    print(ruler)
Prints a title for pipelines
def create_route(self, item, routes):
    """Register *item* under every route in *routes*; return the item."""
    for route in routes:
        bucket = self._routes.setdefault(route, set())
        bucket.add(item)
    return item
Stores a new item in routing map
def _host_notification(self, context, method, payload, host):
    """Cast *method* with *payload* to the cfg agent running on *host*."""
    LOG.debug('Notify Cisco cfg agent at %(host)s the message '
              '%(method)s', {'host': host, 'method': method})
    cctxt = self.client.prepare(server=host)
    cctxt.cast(context, method, payload=payload)
Notify the cfg agent that is handling the hosting device.
def show_analysis_dialog(self):
    """Refresh the analysis dialog's choices and show it."""
    self.analysis_dialog.update_evt_types()
    self.analysis_dialog.update_groups()
    self.analysis_dialog.update_cycles()
    self.analysis_dialog.show()
Create the analysis dialog.
def add_item(self, item):
    """Append *item* to the batch; only JsonRpcResponse instances allowed."""
    if not isinstance(item, JsonRpcResponse):
        raise TypeError(
            "Expected JsonRpcResponse but got {} instead".format(type(item).__name__))
    self.items.append(item)
Adds an item to the batch.
def Reduce(self):
    """Reduce the token stack into a single AST node and return it.

    Repeatedly folds parentheses, binary AND/OR expressions and contexts
    until the stack stops shrinking; exactly one node must remain.
    """
    if self.state != 'INITIAL' and self.state != 'BINARY':
        self.Error('Premature end of expression')
    length = len(self.stack)
    while length > 1:
        self._CombineParenthesis()
        self._CombineBinaryExpressions('and')
        self._CombineBinaryExpressions('or')
        self._CombineContext()
        if len(self.stack) == length:
            # No rule applied this pass; the expression cannot be reduced.
            break
        length = len(self.stack)
    if length != 1:
        self.Error('Illegal query expression')
    return self.stack[0]
Reduce the token stack into an AST.
def _margtimephase_loglr(self, mf_snr, opt_snr): return special.logsumexp(numpy.log(special.i0(mf_snr)), b=self._deltat) - 0.5*opt_snr
Returns the log likelihood ratio marginalized over time and phase.
def transmogrify(l):
    """Fit a flat list into a nested single-key dict tree.

    transmogrify(['a', 'b']) -> {'a': {'b': {}}}; an empty list gives {}.
    """
    tree = {}
    node = tree
    # Starting from an empty dict also handles [] (the original seeded the
    # root with l[0] and crashed on an empty list).
    for key in l:
        node[key] = {}
        node = node[key]
    return tree
Fit a flat list into a treeable object.
def getTzid(tzid, smart=True):
    """Return the timezone registered for *tzid*, or None.

    With `smart`, unknown ids are looked up via pytz (when installed) and
    cached in the registry for next time.
    """
    tz = __tzidMap.get(toUnicode(tzid), None)
    if smart and tzid and not tz:
        try:
            from pytz import timezone, UnknownTimeZoneError
            try:
                tz = timezone(tzid)
                registerTzid(toUnicode(tzid), tz)
            except UnknownTimeZoneError:
                pass
        except ImportError:
            pass
    return tz
Return the tzid if it exists, or None.
def update_deps( self ):
    """Update dependencies for every field on this Block instance."""
    klass = self.__class__
    for name in klass._fields:
        self.update_deps_on_field( name )
    return
Update dependencies on all the fields on this Block instance.
def CreateByteVector(self, x):
    """Write *x* (a bytes-like object) as a byte vector and return its
    offset.  Must not be called while another object is being built."""
    self.assertNotNested()
    self.nested = True
    if not isinstance(x, compat.binary_types):
        raise TypeError("non-byte vector passed to CreateByteVector")
    # Reserve space for the length prefix plus the payload bytes.
    self.Prep(N.UOffsetTFlags.bytewidth, len(x)*N.Uint8Flags.bytewidth)
    l = UOffsetTFlags.py_type(len(x))
    self.head = UOffsetTFlags.py_type(self.Head() - l)
    # Copy the payload directly into the buffer at the new head.
    self.Bytes[self.Head():self.Head()+l] = x
    return self.EndVector(len(x))
CreateByteVector writes a byte vector.
def _compute_acq(self,x): means, stds = self.model.predict(x) f_acqu = 0 for m,s in zip(means, stds): f_acqu += -m + self.exploration_weight * s return f_acqu/(len(means))
Integrated GP-Lower Confidence Bound