code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def channels_set_topic(self, room_id, topic, **kwargs): return self.__call_api_post('channels.setTopic', roomId=room_id, topic=topic, kwargs=kwargs)
Sets the topic for the channel.
def tagfunc(nargs=None, ndefs=None, nouts=None): def wrapper(f): return wraps(f)(FunctionWithTag(f, nargs=nargs, nouts=nouts, ndefs=ndefs)) return wrapper
Decorator for tagged functions.
def find_data(folder):
    """Yield relative paths (prefixed with '..') for every file under *folder*."""
    for root, _dirs, names in os.walk(folder):
        for name in names:
            yield os.path.join('..', root, name)
Include everything in the folder
def _get_zone_id_from_name(self, name): results = self.client['Account'].getDomains( filter={"domains": {"name": utils.query_filter(name)}}) return [x['id'] for x in results]
Return zone ID based on a zone.
def path_dispatch_rename(rename_like_method):
    """Decorator for rename-like methods that dispatch on two path arguments.

    Both paths must live under the same contents-manager mountpoint
    (sentinel); cross-mountpoint moves are rejected with ValueError.
    """
    def _wrapper_method(self, old_path, new_path):
        old_path, _old_path, old_sentinel = _split_path(old_path)
        new_path, _new_path, new_sentinel = _split_path(new_path)

        if old_sentinel != new_sentinel:
            raise ValueError(
                'Does not know how to move things across '
                'contents manager mountpoints')
        else:
            sentinel = new_sentinel

        man = self.managers.get(sentinel, None)
        if man is not None:
            # Delegate to the sub-manager's method of the same name.
            rename_meth = getattr(man, rename_like_method.__name__)
            return rename_meth('/'.join(_old_path), '/'.join(_new_path))
        else:
            # BUG FIX: the original referenced `rename_meth` here, which is
            # unbound on this branch (NameError); fall back to the wrapped
            # method itself.
            return rename_like_method(self, old_path, new_path)
    return _wrapper_method
Decorator for rename-like functions that need dispatch on two path arguments.
def s3(ctx, bucket_name, data_file, region): if not ctx.data_file: ctx.data_file = data_file if not ctx.bucket_name: ctx.bucket_name = bucket_name if not ctx.region: ctx.region = region ctx.type = 's3'
Use the S3 SWAG backend.
def __enableProxy(self): os.environ[ "HTTP_PROXY"] = self.httpProxy() os.environ[ "HTTPS_PROXY"] = self.httpsProxy() os.environ["REQUESTS_CA_BUNDLE"] = os.path.join( os.path.dirname( os.path.abspath(__file__)), "cert.pem")
Set the required environment variables to enable the use of hoverfly as a proxy.
def as_nick(self, name): orig = self.get_nickname() self.nick(name) try: yield orig finally: self.nick(orig)
Set the nick for the duration of the context.
def create_parser(self, prog_name, subcommand): parser = optparse.OptionParser( prog=prog_name, usage=self.usage(subcommand), version=self.get_version(), option_list=self.get_option_list()) for name, description, option_list in self.get_option_groups(): group = optparse.OptionGroup(parser, name, description); list(map(group.add_option, option_list)) parser.add_option_group(group) return parser
Customize the parser to include option groups.
def filename(self, prefix='', suffix='', extension='.py'): return BASE_NAME.format(prefix, self.num, suffix, extension)
Returns filename padded with leading zeros
def parse_value(self, value):
    """Coerce *value* into a `datetime.datetime` instance.

    datetime objects pass through untouched; falsy values yield None;
    anything else is handed to the `parse` helper.
    """
    if isinstance(value, datetime.datetime):
        return value
    return parse(value) if value else None
Parse string into instance of `datetime`.
def _make_complex(self): rcomplex_coeffs = _shtools.SHrtoc(self.coeffs, convention=1, switchcs=0) complex_coeffs = _np.zeros((2, self.lmax+1, self.lmax+1), dtype='complex') complex_coeffs[0, :, :] = (rcomplex_coeffs[0, :, :] + 1j * rcomplex_coeffs[1, :, :]) complex_coeffs[1, :, :] = complex_coeffs[0, :, :].conjugate() for m in self.degrees(): if m % 2 == 1: complex_coeffs[1, :, m] = - complex_coeffs[1, :, m] return SHCoeffs.from_array(complex_coeffs, normalization=self.normalization, csphase=self.csphase, copy=False)
Convert the real SHCoeffs class to the complex class.
def present(self, path, timeout=0): ret, data = self.sendmess(MSG_PRESENCE, str2bytez(path), timeout=timeout) assert ret <= 0 and not data, (ret, data) if ret < 0: return False else: return True
returns True if there is an entity at path
def collect(self):
    """Drop cached results whose age exceeds the per-function timeout."""
    for func, entries in self._caches.items():
        ttl = self._timeouts[func]
        self._caches[func] = {
            key: entry
            for key, entry in entries.items()
            if (time.time() - entry[1]) < ttl
        }
Clear cache of results which have timed out
def rand(x_bounds, x_types):
    """Draw one random value per variable, respecting its declared type.

    Supported types: "discrete_int" (pick from a list), "range_int"
    (inclusive integer range), "range_continuous" (uniform float range).
    Returns None as soon as an unrecognized type is seen.
    """
    outputs = []
    for bounds, kind in zip(x_bounds, x_types):
        if kind == "discrete_int":
            outputs.append(bounds[random.randint(0, len(bounds) - 1)])
        elif kind == "range_int":
            outputs.append(random.randint(bounds[0], bounds[1]))
        elif kind == "range_continuous":
            outputs.append(random.uniform(bounds[0], bounds[1]))
        else:
            return None
    return outputs
Random generate variable value within their bounds
def add_properties_callbacks(cls): for name in cls._mutators: if not hasattr(cls, name): continue setattr(cls, name, properties_mutator(cls, name)) for name in cls._operators: if not hasattr(cls, name): continue setattr(cls, name, properties_operator(cls, name)) for name in cls._ioperators: if not hasattr(cls, name): continue setattr(cls, name, properties_mutator(cls, name, True)) return cls
Class decorator to add change notifications to builtin containers
def prepare_writeable_dir(tree):
    """Ensure *tree* exists (creating it if needed) and is writeable.

    Exits the process with an error message on failure.  '/' is accepted
    as-is without normalization.
    """
    if tree != '/':
        # Expand ~ and resolve symlinks so the access check hits the real dir.
        tree = os.path.realpath(os.path.expanduser(tree))
        if not os.path.exists(tree):
            try:
                os.makedirs(tree)
            except (IOError, OSError) as e:  # fixed: py2 "except (...), e" syntax
                exit("Could not make dir %s: %s" % (tree, e))
    if not os.access(tree, os.W_OK):
        exit("Cannot write to path %s" % tree)
make sure a directory exists and is writeable
def style(self, id): return self._serve_file(os.path.join(media_path, 'style', id))
Serve Pylons' stock stylesheets
def whichrestype(atom): atom = atom if not isinstance(atom, Atom) else atom.OBAtom return atom.GetResidue().GetName() if atom.GetResidue() is not None else None
Returns the residue name of an Pybel or OpenBabel atom.
def evaluate(tensor: BKTensor) -> TensorLike:
    """Return the value of a tensor as ordinary Python / numpy data.

    One-element backend tensors come back as Python scalars; otherwise the
    two real planes (tensor[0], tensor[1]) are recombined into a complex
    numpy array.  Non-backend objects are returned unchanged.
    """
    if isinstance(tensor, _DTYPE):
        if torch.numel(tensor) == 1:
            return tensor.item()
        # Recombine real and imaginary planes.  (The original guarded this
        # with a redundant `numel() == 2` check and then repeated the same
        # expression unconditionally.)
        return tensor[0].cpu().numpy() + 1.0j * tensor[1].cpu().numpy()
    return tensor
Return the value of a tensor
def update_transfer_encoding(self) -> None:
    """Analyze the transfer-encoding header.

    Validates that `self.chunked` and the Transfer-Encoding /
    Content-Length headers are mutually consistent, then fills in
    whichever header (Transfer-Encoding or Content-Length) is implied
    but missing.
    """
    te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower()
    if 'chunked' in te:
        # Header already says chunked; also setting self.chunked is an error.
        if self.chunked:
            raise ValueError(
                'chunked can not be set '
                'if "Transfer-Encoding: chunked" header is set')
    elif self.chunked:
        # Caller asked for chunked: incompatible with a fixed length.
        if hdrs.CONTENT_LENGTH in self.headers:
            raise ValueError(
                'chunked can not be set '
                'if Content-Length header is set')
        self.headers[hdrs.TRANSFER_ENCODING] = 'chunked'
    else:
        # Neither chunked: default to an explicit Content-Length.
        if hdrs.CONTENT_LENGTH not in self.headers:
            self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
Analyze transfer-encoding header.
def load_metafile(filepath): try: with open(filepath, 'r', encoding='utf-8') as file: return email.message_from_file(file) except FileNotFoundError: logger.warning("Category file %s not found", filepath) orm.delete(c for c in model.Category if c.file_path == filepath) orm.commit() return None
Load a metadata file from the filesystem
def count(self): Statement = self.get_model('statement') session = self.Session() statement_count = session.query(Statement).count() session.close() return statement_count
Return the number of entries in the database.
def send_message(self, message): with self._instance_lock: if message is None: Global.LOGGER.error("can't deliver a null messages") return if message.sender is None: Global.LOGGER.error(f"can't deliver anonymous messages with body {message.body}") return if message.receiver is None: Global.LOGGER.error( f"can't deliver message from {message.sender}: recipient not specified") return if message.message is None: Global.LOGGER.error(f"can't deliver message with no body from {message.sender}") return sender = "*" + message.sender + "*" self.socket.send_multipart( [bytes(sender, 'utf-8'), pickle.dumps(message)]) if Global.CONFIG_MANAGER.tracing_mode: Global.LOGGER.debug("dispatched : " + message.sender + "-" + message.message + "-" + message.receiver) self.dispatched = self.dispatched + 1
Dispatch a message using 0mq
def run_mace_smothr(x, y, bass_enhancement=0.0): N = len(x) weight = numpy.ones(N) results = numpy.zeros(N) flags = numpy.zeros((N, 7)) mace.smothr(1, x, y, weight, results, flags) return results
Run the FORTRAN SMOTHR.
def _set_bookmarks(self, bookmarks): storage = bookmark_xso.Storage() storage.bookmarks[:] = bookmarks yield from self._private_xml.set_private_xml(storage)
Set the bookmarks stored on the server.
def _normWidth(self, w, maxw): if type(w) == type(""): w = ((maxw / 100.0) * float(w[: - 1])) elif (w is None) or (w == "*"): w = maxw return min(w, maxw)
Helper for calculating percentages
def _serie_format(self, serie, value): kwargs = {'chart': self, 'serie': serie, 'index': None} formatter = (serie.formatter or self.formatter or self._value_format) kwargs = filter_kwargs(formatter, kwargs) return formatter(value, **kwargs)
Format an independent value for the serie
def start_fitting(self): self.queue = queue.Queue() self.peak_vals = [] self.fit_thread = QThread() self.fitobj = self.do_fit(str(self.data_filepath.text()), self.matplotlibwidget, self.queue, self.peak_vals, self.peak_locs) self.fitobj.moveToThread(self.fit_thread) self.fit_thread.started.connect(self.fitobj.run) self.fitobj.finished.connect(self.fit_thread.quit) self.fitobj.status.connect(self.update_status) self.fit_thread.start()
Launches the fitting routine on another thread
def _datalog(self, parameter, run, maxrun, det_id): "Extract data from database" values = { 'parameter_name': parameter, 'minrun': run, 'maxrun': maxrun, 'detid': det_id, } data = urlencode(values) content = self._get_content('streamds/datalognumbers.txt?' + data) if content.startswith('ERROR'): log.error(content) return None try: dataframe = read_csv(content) except ValueError: log.warning( "Empty dataset" ) return make_empty_dataset() else: add_datetime(dataframe) try: self._add_converted_units(dataframe, parameter) except KeyError: log.warning( "Could not add converted units for {0}".format(parameter) ) return dataframe
Extract data from database
def foreachRDD(self, func): if func.__code__.co_argcount == 1: old_func = func func = lambda t, rdd: old_func(rdd) jfunc = TransformFunction(self._sc, func, self._jrdd_deserializer) api = self._ssc._jvm.PythonDStream api.callForeachRDD(self._jdstream, jfunc)
Apply a function to each RDD in this DStream.
def valid_loc(self,F=None): if F is not None: return [i for i,f in enumerate(F) if np.all(f < self.max_fit) and np.all(f >= 0)] else: return [i for i,f in enumerate(self.F) if np.all(f < self.max_fit) and np.all(f >= 0)]
returns the indices of individuals with valid fitness.
def get(self, varname, idx=0, units=None): if not varname in self.mapping.vars: raise fgFDMError('Unknown variable %s' % varname) if idx >= self.mapping.vars[varname].arraylength: raise fgFDMError('index of %s beyond end of array idx=%u arraylength=%u' % ( varname, idx, self.mapping.vars[varname].arraylength)) value = self.values[self.mapping.vars[varname].index + idx] if units: value = self.convert(value, self.mapping.vars[varname].units, units) return value
get a variable value
def extract_archive(archive_path, destination_path):
    """Extract the tar archive at *archive_path* into *destination_path*.

    The tar file is now closed even if extraction fails (the original
    leaked the handle); errorlevel=1 makes fatal errors raise instead of
    being printed as warnings.

    NOTE(review): extractall() trusts member paths; a malicious archive
    can escape destination_path ("tar-slip").  For untrusted input,
    validate members or pass filter="data" (Python 3.12+).
    """
    with tarfile.open(archive_path) as tar:
        tar.errorlevel = 1
        tar.extractall(destination_path)
Extracts an archive somewhere on the filesystem.
def removeAllAltWCS(hdulist,extlist): original_logging_level = log.level log.setLevel(logutil.logging.WARNING) try: hdr = hdulist[extlist[0]].header wkeys = altwcs.wcskeys(hdr) if ' ' in wkeys: wkeys.remove(' ') for extn in extlist: for wkey in wkeys: if wkey == 'O': continue altwcs.deleteWCS(hdulist,extn,wkey) hwcs = readAltWCS(hdulist,extn,wcskey='O') if hwcs is None: continue for k in hwcs.keys(): if k not in ['DATE-OBS','MJD-OBS'] and k in hdr: try: del hdr[k] except KeyError: pass except: raise finally: log.setLevel(original_logging_level)
Removes all alternate WCS solutions from the header
def _compile_int_g(self): string = ' ' self.int_g = compile(eval(string), '', 'exec')
Time Domain Simulation - update algebraic equations and Jacobian
def checksum_bytes(data):
    """Return the XOR of a list of hex-string bytes as ASCII hex bytes.

    *data* is an iterable of hex strings (e.g. ["0A", "FF"]); the result
    is zero-padded to an even number of hex digits and encoded to bytes.
    """
    from functools import reduce  # fixed: `reduce` is not a builtin on py3

    int_values = [int(x, 16) for x in data]
    int_xor = reduce(lambda x, y: x ^ y, int_values)
    hex_xor = "{:X}".format(int_xor)
    if len(hex_xor) % 2 != 0:
        hex_xor = "0" + hex_xor
    return str.encode(hex_xor)
Returns a XOR of all the bytes specified inside of the given list
def _convert_value(val): def _is_number(x, op): try: op(x) return True except ValueError: return False if isinstance(val, (list, tuple)): return [_convert_value(x) for x in val] elif val is None: return val elif _is_number(val, int): return int(val) elif _is_number(val, float): return float(val) elif val.find(";;") >= 0: return [_convert_value(v) for v in val.split(";;")] elif val.startswith(("{", "[")): return json.loads(val.replace("u'", "'").replace("'", '"')) elif val.lower() == "true": return True elif val.lower() == "false": return False else: return val
Handle multiple input type values.
def pause(self): for child in chain(self.consumers.values(), self.workers): child.pause() for child in chain(self.consumers.values(), self.workers): child.paused_event.wait()
Pauses all the worker threads.
def route(app_or_blueprint, context=default_context, **kwargs): def decorator(fn): fn = describe(**kwargs)(fn) transmute_func = TransmuteFunction(fn) routes, handler = create_routes_and_handler(transmute_func, context) for r in routes: if not hasattr(app_or_blueprint, SWAGGER_ATTR_NAME): setattr(app_or_blueprint, SWAGGER_ATTR_NAME, SwaggerSpec()) swagger_obj = getattr(app_or_blueprint, SWAGGER_ATTR_NAME) swagger_obj.add_func(transmute_func, context) app_or_blueprint.route(r, methods=transmute_func.methods)(handler) return handler return decorator
attach a transmute route.
def _lits(lexer, varname, nvars): tok = _expect_token(lexer, {OP_not, IntegerToken}) if isinstance(tok, IntegerToken) and tok.value == 0: return tuple() else: if isinstance(tok, OP_not): neg = True tok = _expect_token(lexer, {IntegerToken}) else: neg = False index = tok.value if index > nvars: fstr = "formula literal {} is greater than {}" raise Error(fstr.format(index, nvars)) lit = ('var', (varname, ), (index, )) if neg: lit = ('not', lit) return (lit, ) + _lits(lexer, varname, nvars)
Return a tuple of DIMACS CNF clause literals.
def im_files(self, room_id=None, user_name=None, **kwargs): if room_id: return self.__call_api_get('im.files', roomId=room_id, kwargs=kwargs) elif user_name: return self.__call_api_get('im.files', username=user_name, kwargs=kwargs) else: raise RocketMissingParamException('roomId or username required')
Retrieves the files from a direct message.
def hydrated_targets(build_file_addresses): targets = yield [Get(HydratedTarget, Address, a) for a in build_file_addresses.addresses] yield HydratedTargets(targets)
Requests HydratedTarget instances for BuildFileAddresses.
def _get_checker(self, user): user_perm_checkers_cache_key = user.id if not user.is_anonymous else 'anonymous' if user_perm_checkers_cache_key in self._user_perm_checkers_cache: return self._user_perm_checkers_cache[user_perm_checkers_cache_key] checker = ForumPermissionChecker(user) self._user_perm_checkers_cache[user_perm_checkers_cache_key] = checker return checker
Return a ForumPermissionChecker instance for the given user.
def fit(self): n,p = np.shape(self._X) self._df = float(n - p) self._Cx = np.linalg.pinv(np.dot(self._X.T,self._X)) self._Bhat = np.dot(np.dot(self._Cx, self._X.T), self._A) self._Y_rec = self._compute_prediction(self._X)
fit waveforms in any domain
def notify(self, new_jobs_count): if not self.use_notify(): return count = min(new_jobs_count, 100) notify_key = redis_key("notify", self) context.connections.redis.lpush(notify_key, *([1] * count)) context.connections.redis.expire(notify_key, max(1, int(context.get_current_config()["max_latency"] * 2)))
We just queued new_jobs_count jobs on this queue, wake up the workers if needed
def _load_rules(self): with self._sftp_connection.open(self.RULE_PATH) as file: data = file.read() lines = ( line.strip() for line in data.split('\n') ) rule_strings = ( line for line in lines if len(line) > 0 ) rules = ( Rule.parse(rule_string) for rule_string in rule_strings ) self._rules = [ rule for rule in rules if rule is not None ]
Loads the rules from the SSH-Connection
def _flag_transform(flags): flags = (flags & FLAG_MASK) | _wcparse.PATHNAME if flags & _wcparse.REALPATH and util.platform() == "windows": flags |= _wcparse._FORCEWIN if flags & _wcparse.FORCECASE: flags ^= _wcparse.FORCECASE return flags
Transform flags to glob defaults.
def edit_event_view(request, event_pk): page_name = "Edit Event" profile = UserProfile.objects.get(user=request.user) event = get_object_or_404(Event, pk=event_pk) if event.owner != profile and not request.user.is_superuser: return HttpResponseRedirect( reverse('events:view', kwargs={"event_pk": event_pk}), ) event_form = EventForm( request.POST or None, profile=profile, instance=event, ) if event_form.is_valid(): event = event_form.save() messages.add_message( request, messages.SUCCESS, MESSAGES['EVENT_UPDATED'].format(event=event.title), ) return HttpResponseRedirect( reverse('events:view', kwargs={"event_pk": event_pk}), ) return render_to_response('edit_event.html', { 'page_name': page_name, 'event_form': event_form, }, context_instance=RequestContext(request))
The view to edit an event.
def _run_async_task(task=None, session=None): if task is None or session is None: return None task_name = session.xenapi.task.get_name_label(task) log.debug('Running %s', task_name) while session.xenapi.task.get_status(task) == 'pending': progress = round(session.xenapi.task.get_progress(task), 2) * 100 log.debug('Task progress %.2f%%', progress) time.sleep(1) log.debug('Cleaning up task %s', task_name) session.xenapi.task.destroy(task)
Run XenAPI task in asynchronous mode to prevent timeouts
def stage(self): if 'PYPI_USER' not in os.environ or 'PYPI_PASS' not in os.environ: raise BuildError("You must set the PYPI_USER and PYPI_PASS environment variables") try: import twine except ImportError: raise BuildError("You must install twine in order to release python packages", suggestion="pip install twine") if not self.component.has_wheel: raise BuildError("You can't release a component to a PYPI repository if it doesn't have python packages") wheel = self.component.support_wheel sdist = "%s-%s.tar.gz" % (self.component.support_distribution, self.component.parsed_version.pep440_string()) wheel_path = os.path.realpath(os.path.abspath(os.path.join(self.component.output_folder, 'python', wheel))) sdist_path = os.path.realpath(os.path.abspath(os.path.join(self.component.output_folder, 'python', sdist))) if not os.path.isfile(wheel_path) or not os.path.isfile(sdist_path): raise BuildError("Could not find built wheel or sdist matching current built version", sdist_path=sdist_path, wheel_path=wheel_path) self.dists = [sdist_path, wheel_path]
Stage python packages for release, verifying everything we can about them.
def _scheduled_check_for_summaries(self): if self._analysis_process is None: return timed_out = time.time() - self._analyze_start_time > self.time_limit if timed_out: self._handle_results('Analysis timed out but managed\n' ' to get lower turn results.', 'Analysis timed out with no results.') return try: self._analysis_process.join(0.001) except AssertionError: pass if not self._analysis_process.is_alive(): self._handle_results('Completed analysis.', 'Unable to find the game on screen.') return self._base.after(self._POLL_PERIOD_MILLISECONDS, self._scheduled_check_for_summaries)
Present the results if they have become available or timed out.
def discount_rewards(r):
    """Take a 1D float array of rewards and compute the discounted reward.

    Iterates from the last step backwards, accumulating
    running_add = running_add * gamma + r[t].  `gamma` is read from an
    enclosing/global scope — NOTE(review): confirm it is defined where
    this function is used.  A nonzero reward resets the accumulator
    first (presumably an episode/game-boundary convention — verify
    against the caller).
    """
    discounted_r = np.zeros_like(r)
    running_add = 0
    for t in reversed(range(0, r.size)):
        if r[t] != 0:
            # Reset at boundaries marked by a nonzero reward.
            running_add = 0
        running_add = running_add * gamma + r[t]
        discounted_r[t] = running_add
    return discounted_r
take 1D float array of rewards and compute discounted reward
def measure(self, vid): from ambry.orm import Column if isinstance(vid, PartitionColumn): return vid elif isinstance(vid, Column): return PartitionColumn(vid) else: return PartitionColumn(self.table.column(vid), self)
Return a measure, given its vid or another reference
def check_format(actual_file, input_formats, ontology ): for afile in aslist(actual_file): if not afile: continue if "format" not in afile: raise validate.ValidationException( u"File has no 'format' defined: {}".format( json_dumps(afile, indent=4))) for inpf in aslist(input_formats): if afile["format"] == inpf or \ formatSubclassOf(afile["format"], inpf, ontology, set()): return raise validate.ValidationException( u"File has an incompatible format: {}".format( json_dumps(afile, indent=4)))
Confirms that the format present is valid for the allowed formats.
def from_dict(cls, d): kwargs = {} for key, val in d.iteritems(): if key.startswith('display'): continue if key.startswith('@'): key = key[1:] if key == 'type': key = 'type_' elif key == 'valid_since': val = str_to_datetime(val) elif key == 'date_range': val = DateRange.from_dict(val) kwargs[key.encode('ascii')] = val return cls(**kwargs)
Transform the dict to a field object and return the field.
def write_ndef(self, ndef, slot=1): if not self.capabilities.have_nfc_ndef(slot): raise yubikey_base.YubiKeyVersionError("NDEF slot %i unsupported in %s" % (slot, self)) return self._device._write_config(ndef, _NDEF_SLOTS[slot])
Write an NDEF tag configuration to the YubiKey NEO.
def save_encoder(self, name:str): "Save the encoder to `name` inside the model directory." encoder = get_model(self.model)[0] if hasattr(encoder, 'module'): encoder = encoder.module torch.save(encoder.state_dict(), self.path/self.model_dir/f'{name}.pth')
Save the encoder to `name` inside the model directory.
async def on_raw_731(self, message):
    """Handle numeric 731 (MONITOR: someone we are monitoring went offline).

    message.params[1] is a comma-separated list of nicknames.
    """
    for nick in message.params[1].split(','):
        self._destroy_user(nick, monitor_override=True)
        # BUG FIX: the original awaited on_user_offline(nickname), but
        # `nickname` is undefined here (NameError); the loop variable
        # is `nick`.
        await self.on_user_offline(nick)
Someone we are monitoring got offline.
def simplegeneric(func):
    """Make a trivial single-dispatch generic function.

    Returns a wrapper that dispatches on the class of its first
    argument, walking the MRO for the most specific registered
    implementation and falling back to *func*.  Use
    wrapper.register(type[, impl]) to add implementations (register
    also works as a decorator when *impl* is omitted).
    """
    registry = {}

    def wrapper(*args, **kw):
        ob = args[0]
        try:
            cls = ob.__class__
        except AttributeError:
            cls = type(ob)
        try:
            mro = cls.__mro__
        except AttributeError:
            # Old-style class (py2): graft it onto object to obtain an MRO.
            try:
                class cls(cls, object):
                    pass
                mro = cls.__mro__[1:]
            except TypeError:
                # E.g. types that refuse subclassing: dispatch on object only.
                mro = object,
        for t in mro:
            if t in registry:
                return registry[t](*args, **kw)
        else:
            # No registered type matched: use the default implementation.
            return func(*args, **kw)

    try:
        wrapper.__name__ = func.__name__
    except (TypeError, AttributeError):
        pass

    def register(typ, func=None):
        # Called with one argument, act as a decorator factory.
        if func is None:
            return lambda f: register(typ, f)
        registry[typ] = func
        return func

    # Mirror the wrapped function's metadata on the dispatcher.
    wrapper.__dict__ = func.__dict__
    wrapper.__doc__ = func.__doc__
    wrapper.register = register
    return wrapper
Make a trivial single-dispatch generic function
def materialize_as_ndarray(a): if type(a) in (list, tuple): if da is not None and any(isinstance(arr, da.Array) for arr in a): return da.compute(*a, sync=True) return tuple(np.asarray(arr) for arr in a) return np.asarray(a)
Convert distributed arrays to ndarrays.
def allocateFile(self, sharename, peer): peerDir = self.basepath.child(str(peer)) if not peerDir.isdir(): peerDir.makedirs() return (peerDir.child(sharename+'.incomplete'), peerDir.child(sharename))
return a 2-tuple of incompletePath, fullPath
def displayEmptyInputWarningBox(display=True, parent=None): if sys.version_info[0] >= 3: from tkinter.messagebox import showwarning else: from tkMessageBox import showwarning if display: msg = 'No valid input files found! '+\ 'Please check the value for the "input" parameter.' showwarning(parent=parent,message=msg, title="No valid inputs!") return "yes"
Displays a warning box for the 'input' parameter.
def fetch_string(self, context, name): val = self.resolve_context(context, name) if callable(val): return self._render_value(val(), context) if not is_string(val): return self.to_str(val) return val
Get a value from the given context as a basestring instance.
def wrap_exception(func: Callable) -> Callable:
    """Decorator translating IOError into BluetoothBackendException.

    functools.wraps preserves the wrapped function's name and docstring
    (the original wrapper hid them behind `_func_wrapper`).
    """
    import functools

    @functools.wraps(func)
    def _func_wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except IOError as exception:
            # Chain the original error for debuggability.
            raise BluetoothBackendException() from exception
    return _func_wrapper
Wrap all IOErrors to BluetoothBackendException
def tasks(self): tasks = set() for ctrl in self.controllers.values(): tasks.update(ctrl.tasks) return tasks
Tasks in this exact group
def isBirthday(self): if not self.birthday: return False birthday = self.birthdate() today = date.today() return (birthday.month == today.month and birthday.day == today.day)
Is it the user's birthday today?
def eval_stdin(): 'evaluate expressions read from stdin' cmd = ['plash', 'eval'] p = subprocess.Popen(cmd, stdin=sys.stdin, stdout=sys.stdout) exit = p.wait() if exit: raise subprocess.CalledProcessError(exit, cmd)
evaluate expressions read from stdin
def acquisition_function_withGradients(self, x): aqu_x = self.acquisition_function(x) aqu_x_grad = self.d_acquisition_function(x) return aqu_x, aqu_x_grad
Returns the acquisition function and its gradient at x.
def _has_data(self): return any([ len([ v for a in (s[0] if is_list_like(s) else [s]) for v in (a if is_list_like(a) else [a]) if v is not None ]) for s in self.raw_series ])
Check if there is any data
def failed_update(self, exception): f = None with self._lock: if self._future: f = self._future self._future = None if f: f.failure(exception) self._last_refresh_ms = time.time() * 1000
Update cluster state given a failed MetadataRequest.
def values(self):
    """Return every value reachable via keys, old and new, deduplicated."""
    combined = set(self.new.values())
    combined.update(self.old.values())
    return tuple(combined)
Returns all values this object can return via keys.
def idatdecomp(self, lenient=False, max_length=0): d = zlib.decompressobj() for data in self.idat(lenient): yield bytearray(d.decompress(data)) yield bytearray(d.flush())
Iterator that yields decompressed ``IDAT`` strings.
def check_version(self, name, majorv=2, minorv=7):
    """Exit unless the interpreter is at least Python majorv.minorv.

    BUG FIX: the original only compared the minor version (and only when
    the major matched), which contradicted its own ">=" error message;
    compare the (major, minor) tuple instead.
    """
    if (sys.version_info.major, sys.version_info.minor) < (majorv, minorv):
        sys.stderr.write("ERROR: %s is only for >= Python %d.%d but you are running %d.%d\n" %
                         (name, majorv, minorv,
                          sys.version_info.major, sys.version_info.minor))
        sys.exit(1)
Make sure the package runs on the supported Python version
def date_totals(entries, by):
    """Return (name, pk, {date: hours}) summarizing *entries* grouped by date.

    *entries* must already be ordered by 'date' (itertools.groupby only
    groups adjacent items).  name/pk come from the first entry of the
    *last* group, as before; on empty input return (None, None, {})
    instead of raising UnboundLocalError (the original never initialized
    name and pk).
    """
    name, pk = None, None
    date_dict = {}
    for date, date_entries in groupby(entries, lambda x: x['date']):
        if isinstance(date, datetime.datetime):
            # Normalize datetimes to plain dates for the dict key.
            date = date.date()
        d_entries = list(date_entries)
        if by == 'user':
            name = ' '.join((d_entries[0]['user__first_name'],
                             d_entries[0]['user__last_name']))
        elif by == 'project':
            name = d_entries[0]['project__name']
        else:
            name = d_entries[0][by]
        pk = d_entries[0][by]
        date_dict[date] = get_hours_summary(d_entries)
    return name, pk, date_dict
Yield a user's name and a dictionary of their hours
def _get_bokeh_html(self, chart_obj): global bokeh_renderer try: renderer = bokeh_renderer p = renderer.get_plot(chart_obj).state script, div = components(p) return script + "\n" + div except Exception as e: self.err(e, self._get_bokeh_html, "Can not get html from the Bokeh rendering engine")
Get the html for a Bokeh chart
def close(self): if self.auto_file_prompt and self.prompt_quiet_changed is True: self.device.send_config_set(["no file prompt quiet"]) self.prompt_quiet_changed = False self.prompt_quiet_configured = False self._netmiko_close()
Close the connection to the device and do the necessary cleanup.
def template_for_action(self, action):
    """Build the template path '<module>/<model>_<action>.html' (lowercased)."""
    module = self.module_name.lower()
    model = self.model_name.lower()
    return "{}/{}_{}.html".format(module, model, action)
Returns the template to use for the passed in action
def run_tile(job_ini, sites_slice): return run_job(job_ini, sites_slice=(sites_slice.start, sites_slice.stop))
Used in tiling calculations
def _collect_values(handlers, names, user, client, values): results = {} def visitor(claim_name, func): data = {'user': user, 'client': client} data.update(values.get(claim_name) or {}) claim_value = func(data) if claim_value is not None: results[claim_name] = claim_value _visit_handlers(handlers, visitor, 'claim', names) return results
Get the values from the handlers of the requested claims.
def scan(self, ids=range(254)): return [id for id in ids if self.ping(id)]
Pings all ids within the specified list, by default it finds all the motors connected to the bus.
def write_urls_index(app, exc): inventory = os.path.join(app.builder.outdir, 'objects.inv') objects = sphinx.ext.intersphinx.fetch_inventory(app, DOCS_URL, inventory) with open(os.path.join(app.builder.outdir, 'shorturls.json'), 'w') as f: json.dump(objects, f)
Generate a JSON file to serve as an index for short-URL lookups
def _property_detect_type(name, values): value_type = 'str' if values.startswith('on | off'): value_type = 'bool' elif values.startswith('yes | no'): value_type = 'bool_alt' elif values in ['<size>', '<size> | none']: value_type = 'size' elif values in ['<count>', '<count> | none', '<guid>']: value_type = 'numeric' elif name in ['sharenfs', 'sharesmb', 'canmount']: value_type = 'bool' elif name in ['version', 'copies']: value_type = 'numeric' return value_type
Detect the datatype of a property
def scrape(language, method, word, *args, **kwargs): scraper = Scrape(language, word) if hasattr(scraper, method): function = getattr(scraper, method) if callable(function): return function(*args, **kwargs) else: raise NotImplementedError('The method ' + method + '() is not implemented so far.')
Uses custom scrapers and calls provided method.
def _compress_for_consolidate(max_vol, plan, **kwargs):
    """Combine consecutive aspirates that share a dispense target.

    Walks *plan* (a list of {'aspirate': ..., 'dispense': ...} steps)
    and buffers aspirates until either the dispense target changes or
    adding the next aspirate would exceed *max_vol*; each buffered run
    is emitted as its aspirates followed by one combined dispense.
    """
    target = None
    new_target = None
    d_vol = 0
    temp_aspirates = []
    new_transfer_plan = []

    def _append_aspirates():
        # Flush the buffered aspirates plus one combined dispense step.
        nonlocal d_vol, temp_aspirates, new_transfer_plan, target
        if not temp_aspirates:
            return
        for a in temp_aspirates:
            new_transfer_plan.append({
                'aspirate': {
                    'location': a['location'], 'volume': a['volume']
                }
            })
        new_transfer_plan.append({
            'dispense': {
                'location': target, 'volume': d_vol
            }
        })
        d_vol = 0
        temp_aspirates = []

    for i, p in enumerate(plan):
        this_vol = p['aspirate']['volume']
        new_target = p['dispense']['location']
        # Flush when the target changes (identity comparison, as written)
        # or when the combined volume would overflow max_vol.
        if (new_target is not target) or (this_vol + d_vol > max_vol):
            _append_aspirates()
            target = new_target
        d_vol += this_vol
        temp_aspirates.append(p['aspirate'])
    _append_aspirates()
    return new_transfer_plan
Combines as many aspirates as can fit within the maximum volume
def get(self,oid): try: self.lock.acquire() if oid not in self.data: return "NONE" else: return self.base_oid + oid + '\n' + self.data[oid]['type'] + '\n' + str(self.data[oid]['value']) finally: self.lock.release()
Return snmp value for the given OID.
def _name_matches(name, matches): for m in matches: if name.endswith(m): return True if name.lower().endswith('_' + m.lower()): return True if name.lower() == m.lower(): return True return False
Helper function to see if given name has any of the patterns in given matches
def superuser_required(f): @functools.wraps(f) @login_required def wrapped(*args, **kwargs): if not (current_user.is_authenticated() and current_user.superuser): abort(403) return f(*args, **kwargs) return wrapped
Requires the requestor to be a super user.
def safeDbUrl(db_url):
    """Return *db_url* with any embedded password replaced by '****'."""
    password = urlparse(db_url).password
    if not password:
        return db_url
    return db_url.replace(password, "****")
Obfuscates password from a database URL.
def join(self, timeout=None): if not self._ioloop_managed: return try: self._ioloop_thread.join(timeout) except AttributeError: raise RuntimeError('Cannot join if not started')
Join managed ioloop thread, or do nothing if not managed.
def rgfromid(idstr):
    """Extract the resource-group name from an Azure resource ID string."""
    marker = 'resourceGroups/'
    start = idstr.find(marker) + len(marker)
    end = idstr.find('/providers/')
    return idstr[start:end]
get resource group name from the id string
def setup(app): app.add_config_value('jsdoc_source_root', '..', 'env') app.add_config_value('jsdoc_output_root', 'javascript', 'env') app.add_config_value('jsdoc_exclude', [], 'env') app.connect('builder-inited', generate_docs)
Sphinx extension entry point
def visit_Tuple(self, node: ast.Tuple) -> Tuple[Any, ...]: if isinstance(node.ctx, ast.Store): raise NotImplementedError("Can not compute the value of a Store on a tuple") result = tuple(self.visit(node=elt) for elt in node.elts) self.recomputed_values[node] = result return result
Visit the elements and assemble the results into a tuple.
def trigger(self, event, *args):
    """Invoke every handler registered for *event*, passing *args* through."""
    handlers = self._event_handlers[event]
    for handler in handlers:
        handler(*args)
Trigger event by name.
def session_token(self): session_token = None if self.user_id is not None: session_token = token_getter(self.remote) if session_token: token = RemoteToken.get( self.user_id, self.remote.consumer_key, access_token=session_token[0] ) return token return None
Return OAuth session token.
def determine_ip_address():
    """Return the first IPv4 address on an ethernet-ish interface.

    "Ethernet" is approximated as any interface whose name starts with
    'e' (eth0, enp0s3, ...).  Raises IndexError if no such address
    exists; relies on `psutil` and `AddressFamily` (from socket) being
    imported elsewhere in the file.
    """
    addrs = [
        x.address for k, v in psutil.net_if_addrs().items() if k[0] == "e"
        for x in v if x.family == AddressFamily.AF_INET
    ]
    return addrs[0]
Return the first IP address for an ethernet interface on the system.
def get(self, arg): try: reservations = self.get_all_instances(filters={'tag:Name': [arg]}) instance = reservations[0].instances[0] except IndexError: try: instance = self.get_all_instances([arg])[0].instances[0] except (_ResponseError, IndexError): err = "Can't find any instance with name or ID '%s'" % arg raise ValueError(err) return instance
Return instance object with given EC2 ID or nametag.
def transformer_librispeech_tpu_v2(): hparams = transformer_librispeech_v2() update_hparams_for_tpu(hparams) hparams.batch_size = 16 librispeech.set_librispeech_length_hparams(hparams) return hparams
HParams for training ASR model on Librispeech on TPU v2.
def _gzip(self, response): bytesio = six.BytesIO() with gzip.GzipFile(fileobj=bytesio, mode='w') as gz: gz.write(response) return bytesio.getvalue()
Apply gzip compression to a response.
def tfclasses(): classes = {} mydir = op.dirname(op.abspath(inspect.getfile(get_mimetype))) tfcls = {"<class 'aston.tracefile.TraceFile'>", "<class 'aston.tracefile.ScanListFile'>"} for filename in glob(op.join(mydir, '*.py')): name = op.splitext(op.basename(filename))[0] module = import_module('aston.tracefile.' + name) for clsname in dir(module): cls = getattr(module, clsname) if hasattr(cls, '__base__'): if str(cls.__base__) in tfcls: classes[cls.mime] = cls return classes
A mapping of mimetypes to every class for reading data files.