code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def make_speaker_utters(utterances: List[Utterance]) -> Dict[str, List[Utterance]]:
    """Group utterances by their speaker.

    :param utterances: utterances to group
    :return: mapping of speaker name to that speaker's utterances
    """
    by_speaker = defaultdict(list)
    for utterance in utterances:
        by_speaker[utterance.speaker].append(utterance)
    return by_speaker
Creates a dictionary mapping from speakers to their utterances.
def on_batch_begin(self, train, **kwargs):
    "Multiply the current lr if necessary."
    # Only scale the learning rate during training of the critic
    # (i.e. when the GAN is not in generator mode).
    critic_training = train and not self.learn.gan_trainer.gen_mode
    if critic_training:
        self.learn.opt.lr *= self.mult_lr
Multiply the current lr if necessary.
def puts(s='', newline=True, stream=STDOUT):
    """Print ``s`` to ``stream`` (stdout by default), honouring any active
    max-width and indentation contexts."""
    width_stack = _get_max_width_context()
    if width_stack:
        cols, separator = width_stack[-1]
        s = max_width(s, cols, separator)
    indent = ''.join(INDENT_STRINGS)
    if newline:
        # Re-indent every line of a multi-line string.
        pieces = map(str, tsplit(s, NEWLINES))
        s = ('\n' + indent).join(pieces)
    stream(indent + str(s) + ('\n' if newline else ''))
Prints given string to stdout.
def _stdio_stream_as(src_fd, dst_fd, dst_sys_attribute, mode):
    """Temporarily point ``dst_fd`` (and the matching ``sys`` attribute) at an
    open handle to ``src_fd``; restore both on exit.

    A ``src_fd`` of -1 redirects to /dev/null.
    """
    if src_fd == -1:
        src = open('/dev/null', mode)
        src_fd = src.fileno()
    saved_obj = getattr(sys, dst_sys_attribute)
    saved_fd = os.dup(dst_fd)
    if src_fd != dst_fd:
        os.dup2(src_fd, dst_fd)
    replacement = os.fdopen(dst_fd, mode)
    setattr(sys, dst_sys_attribute, replacement)
    try:
        yield
    finally:
        replacement.close()
        os.dup2(saved_fd, dst_fd)
        setattr(sys, dst_sys_attribute, saved_obj)
Replace the given dst_fd and attribute on `sys` with an open handle to the given src_fd.
def send(self, msg):
    """Send a message to the Elk panel, silently dropping it when not connected."""
    conn = self._conn
    if not conn:
        return
    conn.write_data(msg.message, msg.response_command)
Send a message to Elk panel.
def apply_settings(self):
    """Apply settings changed in 'Preferences' dialog box."""
    qapp = QApplication.instance()
    if is_gtk_desktop() and ('GTK+' in QStyleFactory.keys()):
        try:
            qapp.setStyle('gtk+')
        except Exception:
            # Narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; a failed style switch is simply ignored.
            pass
    else:
        style_name = CONF.get('appearance', 'windows_style', self.default_style)
        style = QStyleFactory.create(style_name)
        if style is not None:
            style.setProperty('name', style_name)
            qapp.setStyle(style)
    # Dock behaviour flags from preferences.
    default = self.DOCKOPTIONS
    if CONF.get('main', 'vertical_tabs'):
        default = default | QMainWindow.VerticalTabs
    if CONF.get('main', 'animated_docks'):
        default = default | QMainWindow.AnimatedDocks
    self.setDockOptions(default)
    self.apply_panes_settings()
    self.apply_statusbar_settings()
    # Cursor blinking: custom period or OS default.
    if CONF.get('main', 'use_custom_cursor_blinking'):
        qapp.setCursorFlashTime(CONF.get('main', 'custom_cursor_blinking'))
    else:
        qapp.setCursorFlashTime(self.CURSORBLINK_OSDEFAULT)
Apply settings changed in 'Preferences' dialog box
def map(self, mapper: Callable[[Any], Any]) -> 'List':
    """Map ``mapper`` over this List.

    If applying ``mapper`` raises TypeError (e.g. it requires more
    arguments), a List of partially-applied callables is returned instead.
    """
    try:
        return List.from_iterable([mapper(item) for item in self])
    except TypeError:
        return List.from_iterable([partial(mapper, item) for item in self])
Map a function over a List.
def cleanup(arg):
    """Coerce ``arg`` to a 2-D numpy array.

    Scalars and 1-D inputs become column vectors; inputs with more than
    two dimensions raise ValueError.
    """
    arr = numpy.asarray(arg)
    if arr.ndim <= 1:
        arr = arr.reshape(arr.size, 1)
    elif arr.ndim > 2:
        raise ValueError("shapes must be smaller than 3")
    return arr
Clean up the input variable.
def null_term(s):
    """Truncate ``s`` at its first NUL character (Python 3 behaviour).

    On Python 2 the value is returned unchanged; on Python 3, bytes are
    decoded as UTF-8 first.  (Parameter renamed from ``str``, which
    shadowed the builtin.)
    """
    if sys.version_info.major < 3:
        return s
    if isinstance(s, bytes):
        s = s.decode("utf-8")
    idx = s.find("\0")
    if idx != -1:
        s = s[:idx]
    return s
Truncate a string at its first null terminator (Python 3 behaviour).
def select_scheme(self, name):
    """Set the install directories by applying the named install scheme.

    Only ``install_<key>`` attributes still unset (None) are filled in.
    """
    scheme = INSTALL_SCHEMES[name]
    for key in SCHEME_KEYS:
        attr = 'install_' + key
        if getattr(self, attr) is None:
            setattr(self, attr, scheme[key])
Sets the install directories by applying the install schemes.
def which_pip(allow_global=False):
    """Return the location of virtualenv-installed pip, optionally falling
    back to a system-wide pip when ``allow_global`` is set."""
    location = os.environ.get("VIRTUAL_ENV")
    if allow_global:
        if location:
            pip = which("pip", location=location)
            if pip:
                return pip
        for candidate in ("pip", "pip3", "pip2"):
            found = system_which(candidate)
            if found:
                return found
    pip = which("pip")
    return pip or fallback_which("pip", allow_global=allow_global, location=location)
Returns the location of virtualenv-installed pip.
def slot_definition_for(self, slotname: SlotDefinitionName, cls: ClassDefinition) -> Optional[SlotDefinition]:
    """Find the most proximal definition for slotname in the context of cls."""
    def matches(slot) -> bool:
        # A slot matches on its alias (when set) or on its name.
        return bool(slot.alias and slotname == slot.alias) or slotname == slot.name

    # Parent class first, then mixins -- same precedence as before.
    ancestors = ([cls.is_a] if cls.is_a else []) + list(cls.mixins)
    # First pass: slots declared directly on each ancestor.
    for ancestor in ancestors:
        for sn in self.schema.classes[ancestor].slots:
            slot = self.schema.slots[sn]
            if matches(slot):
                return slot
    # Second pass: recurse further up the hierarchy.
    for ancestor in ancestors:
        defn = self.slot_definition_for(slotname, self.schema.classes[ancestor])
        if defn:
            return defn
    return None
Find the most proximal definition for slotname in the context of cls
def next_blob(self):
    """Read and return the next blob from the CLB file.

    Raises StopIteration at end of file.
    """
    try:
        # Little-endian 4-byte frame length prefix.
        length = struct.unpack('<i', self.blob_file.read(4))[0]
    except struct.error:
        raise StopIteration
    header = CLBHeader(file_obj=self.blob_file)
    payload_size = length - header.size
    payload = self.blob_file.read(payload_size)
    payload_io = BytesIO(payload)
    hits = []
    # Each PMT hit is 6 bytes: channel id (1), time (4), ToT (1), big-endian.
    for _ in range(int(payload_size / 6)):
        channel_id, hit_time, tot = struct.unpack('>cic', payload_io.read(6))
        hits.append(PMTData(ord(channel_id), hit_time, ord(tot)))
    return {'CLBHeader': header, 'PMTData': hits, 'PMTRawData': payload}
Generate next blob in file
def clear_feature_symlinks(self, feature_name):
    """Remove bin/lib symlinks that resolve into ``feature_name``'s install directory."""
    logger.debug("Clearing feature symlinks for %s" % feature_name)
    feature_path = self.install_directory(feature_name)
    for subdir in ('bin', 'lib'):
        directory = os.path.join(self.root_dir, subdir)
        if not os.path.exists(directory):
            continue
        for link in os.listdir(directory):
            if feature_path in os.path.realpath(os.path.join(directory, link)):
                getattr(self, 'remove_from_%s' % subdir)(link)
Clear the symlinks for a feature in the symlinked path
async def set_password(self, password):
    """Change this user's password via the controller, then cache it locally."""
    await self.controller.change_user_password(self.username, password)
    # Keep the local record in sync with the server-side change.
    self._user_info.password = password
Update this user's password.
def parse_help_text(self, file_path):
    """Load a list of commands and descriptions from a help file.

    Each non-empty line is "<command> <description...>"; blank lines are
    preserved as spacers.  Returns (commands, formatted_text).
    """
    with open(file_path) as f:
        lines = f.readlines()
    cmds = []
    descs = []
    for line in lines:
        line = line.strip()
        if len(line) == 0:
            cmds.append('')
            descs.append('')
        else:
            tokens = line.split(' ')
            # Bug fix: join description words with spaces; the old
            # ''.join collapsed "does a thing" into "doesathing".
            cmds.append(tokens[0])
            descs.append(' '.join(tokens[1:]).strip())
    max_len = len(max(cmds, key=len))
    text = ''
    for cmd, desc in zip(cmds, descs):
        if len(cmd) == 0:
            text += '\n'
        else:
            text += self.style.help(cmd.ljust(max_len + 2), desc + '\n')
    return cmds, text
Load a list of commands and descriptions from a file.
def _volume_get(self, volume_id): if self.volume_conn is None: raise SaltCloudSystemExit('No cinder endpoint available') nt_ks = self.volume_conn volume = nt_ks.volumes.get(volume_id) response = {'name': volume.display_name, 'size': volume.size, 'id': volume.id, 'description': volume.display_description, 'attachments': volume.attachments, 'status': volume.status } return response
Organize information about a volume from the volume_id
def process_delivery(message, notification):
    """Record SES delivery notifications and emit one feedback signal per recipient."""
    mail = message['mail']
    delivery = message['delivery']
    delivered_datetime = clean_time(delivery['timestamp']) if 'timestamp' in delivery else None
    deliveries = []
    for recipient in delivery['recipients']:
        deliveries.append(Delivery.objects.create(
            sns_topic=notification['TopicArn'],
            sns_messageid=notification['MessageId'],
            mail_timestamp=clean_time(mail['timestamp']),
            mail_id=mail['messageId'],
            mail_from=mail['source'],
            address=recipient,
            delivered_time=delivered_datetime,
            processing_time=int(delivery['processingTimeMillis']),
            smtp_response=delivery['smtpResponse'],
        ))
    for record in deliveries:
        signals.feedback.send(
            sender=Delivery,
            instance=record,
            message=message,
            notification=notification,
        )
    logger.info('Logged %s Deliveries(s)', str(len(deliveries)))
    return HttpResponse('Delivery Processed')
Function to process a delivery notification
def encodeDeltas(self, dx, dy):
    """Return the indices of active bits in the SDR for the delta (dx, dy)."""
    dx_sdr = self.dxEncoder.encode(dx)
    dy_sdr = self.dyEncoder.encode(dy)
    # Outer product combines the two 1-D encodings into a joint SDR.
    joint = numpy.outer(dx_sdr, dy_sdr)
    return joint.flatten().nonzero()[0]
Return the SDR for dx,dy
def accel_calibration(self):
    """Run the accelerometer calibration dialog for the currently selected IMU."""
    self.calibration_state = self.CAL_ACC
    imu = self.sk8.get_imu(self.spinIMU.value())
    self.acc_dialog = SK8AccDialog(imu, self)
    # Only compute the calibration if the dialog was accepted.
    if self.acc_dialog.exec_() != QDialog.Rejected:
        self.calculate_acc_calibration(self.acc_dialog.samples)
Perform accelerometer calibration for current IMU.
async def connect(self):
    """Open an SSL connection to the gateway and mark this client connected."""
    protocol = TCPTransport(self.frame_received_cb, self.connection_closed_cb)
    self.transport, _ = await self.loop.create_connection(
        lambda: protocol,
        host=self.config.host,
        port=self.config.port,
        ssl=self.create_ssl_context(),
    )
    self.connected = True
Connect to gateway via SSL.
def on_channel_click(self, channel_id=None, key=None, button=None):
    """Respond to a click on a channel: the left button sets the first
    channel, the right button the second, swapping when both would collide."""
    channels = self.channel_ids
    if channels is None:
        return
    if len(channels) == 1:
        self.on_select()
        return
    assert len(channels) >= 2
    slot = 0 if button == 1 else 1
    previous = channels[slot]
    if channel_id == previous:
        return  # No change.
    channels[slot] = channel_id
    # If the other slot already held this channel, swap rather than duplicate.
    if channels[1 - slot] == channel_id:
        channels[1 - slot] = previous
    assert channels[0] != channels[1]
    self.channel_ids = _uniq(channels)
    logger.debug("Choose channels %d and %d in feature view.", *channels[:2])
    self.on_select(fixed_channels=True)
Respond to the click on a channel.
def unzip_file(self, zip_path, output_path):
    """Extract every member of the archive at ``zip_path`` into ``output_path``."""
    with zipfile.ZipFile(zip_path, 'r') as archive:
        archive.extractall(output_path)
Unzip a local file into a specified directory.
def setColorRamp(self, colorRamp=None):
    """Set the color ramp of this raster converter (hue ramp by default)."""
    if colorRamp:
        self._colorRamp = colorRamp
    else:
        self._colorRamp = RasterConverter.setDefaultColorRamp(ColorRampEnum.COLOR_RAMP_HUE)
Set the color ramp of the raster converter instance
def expose_ideal_query_execution_start_points(compound_match_query, location_types, coerced_locations):
    """Ensure that OrientDB only considers desirable query start points in query planning."""
    rewritten = []
    for match_query in compound_match_query.match_queries:
        preferred_locations, eligible_locations, _ = _classify_query_locations(match_query)
        if preferred_locations:
            rewritten.append(_expose_only_preferred_locations(
                match_query, location_types, coerced_locations,
                preferred_locations, eligible_locations))
        elif eligible_locations:
            rewritten.append(_expose_all_eligible_locations(
                match_query, location_types, eligible_locations))
        else:
            raise AssertionError(u'This query has no preferred or eligible query start locations. '
                                 u'This is almost certainly a bug: {}'.format(match_query))
    return compound_match_query._replace(match_queries=rewritten)
Ensure that OrientDB only considers desirable query start points in query planning.
def graph_from_edges(edges: Iterable[Edge], **kwargs) -> BELGraph:
    """Build a BEL graph by inserting each edge into a fresh graph.

    Extra keyword arguments are forwarded to the BELGraph constructor.
    """
    graph = BELGraph(**kwargs)
    for edge in edges:
        edge.insert_into_graph(graph)
    return graph
Build a BEL graph from edges.
def make_number(num: str, repr_: str = None, speak: str = None):
    """Return a Number or Fraction dataclass for a number string, or None
    for empty/unknown values."""
    if not num or is_unknown(num):
        return None
    if num == 'CAVOK':
        return Number('CAVOK', 9999, 'ceiling and visibility ok')
    if num in SPECIAL_NUMBERS:
        return Number(repr_ or num, None, SPECIAL_NUMBERS[num])
    if '/' in num:
        numerator, denominator = [int(part) for part in num.split('/')]
        unpacked = unpack_fraction(num)
        return Fraction(repr_ or num, numerator / denominator,
                        spoken_number(unpacked), numerator, denominator, unpacked)
    # 'M' prefix denotes a negative value in METAR-style strings.
    text = num.replace('M', '-')
    value = float(text) if '.' in num else int(text)
    return Number(repr_ or num, value, spoken_number(speak or str(value)))
Returns a Number or Fraction dataclass for a number string
def ProcessHttpRequest(self, http_request):
    """Hook for pre-processing of http requests: merge in the extra headers
    and optionally log the outgoing request."""
    http_request.headers.update(self.additional_http_headers)
    if self.log_request:
        logging.info('Making http %s to %s', http_request.http_method, http_request.url)
        logging.info('Headers: %s', pprint.pformat(http_request.headers))
        body = http_request.body
        if body:
            logging.info('Body:\n%s', http_request.loggable_body or body)
        else:
            logging.info('Body: (none)')
    return http_request
Hook for pre-processing of http requests.
def chunks(l, n):
    """Yield successive n-sized chunks from l."""
    for offset in _range(0, len(l), n):
        yield l[offset:offset + n]
Yields successive n-sized chunks from l.
def store_uploaded_file(title, uploaded_file):
    """Store a temporary uploaded file under the static uploads directory.

    Returns the site-relative path of the stored file.
    """
    import shutil
    upload_dir_path = '%s/static/taskManager/uploads' % (
        os.path.dirname(os.path.realpath(__file__)))
    if not os.path.exists(upload_dir_path):
        os.makedirs(upload_dir_path)
    # Security fix: shutil.move replaces the shell-built "mv" command, which
    # was vulnerable to shell injection via the temp path or the title.
    shutil.move(uploaded_file.temporary_file_path(),
                os.path.join(upload_dir_path, title))
    return '/static/taskManager/uploads/%s' % (title)
Stores a temporary uploaded file on disk
def package_in_pypi(package):
    """Check whether ``package`` is registered on PyPI."""
    url = 'http://pypi.python.org/simple/%s' % package
    try:
        urllib.request.urlopen(url)
    except urllib.error.HTTPError as e:
        logger.debug("Package not found on pypi: %s", e)
        return False
    return True
Check whether the package is registered on pypi
def _assign_ascii_character(self, y_prev, y, y_next): char = '?' if y_next > y and y_prev > y: char = '-' elif y_next < y and y_prev < y: char = '-' elif y_prev < y and y == y_next: char = '-' elif y_prev == y and y_next < y: char = '-' elif y_next > y: char = '/' elif y_next < y: char = '\\' elif y_prev < y: char = '/' elif y_prev > y: char = '\\' elif y_next == y: char = '-' elif y == y_prev: char = '-' return char
Assign the character to be placed into the graph
def connect(self, ctx):
    """Establish an xbahn connection and store client/widget on the click context."""
    if hasattr(ctx, "conn") or "host" not in ctx.params:
        return
    # ``connect`` here is the module-level helper, not this method.
    ctx.conn = conn = connect(ctx.params["host"])
    wire = link.Link()
    wire.wire("main", receive=conn, send=conn)
    ctx.client = api.Client(link=wire)
    ctx.widget = ClientWidget(ctx.client, "engineer")
establish xbahn connection and store on click context
def json(self, **kwargs):
    """Decode the response body as JSON, honouring the detected encoding."""
    encoding = detect_encoding(self.content[:4])
    text = self.content.decode(encoding)
    return simplejson.loads(text, **kwargs)
Decodes response as JSON.
def show_menu(self, status_icon, button, activate_time):
    """Pop up the tray icon menu positioned at the status icon."""
    tray_menu = self.get_widget('tray-menu')
    tray_menu.popup(None, None, None, Gtk.StatusIcon.position_menu,
                    button, activate_time)
Show the tray icon menu.
def _obj_getattr(obj, fqdn, start=1): node = obj for chain in fqdn.split('.')[start:]: if hasattr(node, chain): node = getattr(node, chain) else: node = None break return node
Returns the attribute specified by the dotted fqdn string from obj.
def check_offline_configuration(self):
    """Verify config.txt is suitable for offline ('cabin') mode; exit on error.

    Requires a sqlite ``database_url`` and ``host`` set to localhost.
    """
    quit_on_start = False
    database_url = self.config.get('Database Parameters', 'database_url')
    host = self.config.get('Server Parameters', 'host', 'localhost')
    if database_url[:6] != 'sqlite':
        # Message fixed: was "Please change this sqllite://" (missing "to",
        # misspelled scheme).
        print("*** Error: config.txt option 'database_url' set to use "
              "mysql://. Please change this to sqlite:// while in cabin mode.")
        quit_on_start = True
    if host != 'localhost':
        print("*** Error: config option 'host' is not set to localhost. "
              "Please change this to localhost while in cabin mode.")
        quit_on_start = True
    if quit_on_start:
        exit()
Check offline configuration file
def unregister_finders():
    """Restore the import finders that PEX replaced, if any were registered."""
    global __PREVIOUS_FINDER
    if not __PREVIOUS_FINDER:
        # Nothing was registered; nothing to undo.
        return
    pkg_resources.register_finder(zipimport.zipimporter, __PREVIOUS_FINDER)
    _remove_finder(pkgutil.ImpImporter, find_wheels_on_path)
    # importlib machinery only exists on newer interpreters.
    if importlib_machinery is not None:
        _remove_finder(importlib_machinery.FileFinder, find_wheels_on_path)
    __PREVIOUS_FINDER = None
Unregister finders necessary for PEX to function properly.
def sign_data(args):
    """Sign args.datafile with an ECDSA private key and append the binary
    signature (prefixed by a 4-byte version word) to the output file."""
    sk = _load_ecdsa_signing_key(args)
    binary_content = args.datafile.read()
    signature = sk.sign_deterministic(binary_content, hashlib.sha256)
    # Sanity-check the fresh signature against the matching public key.
    sk.get_verifying_key().verify(signature, binary_content, hashlib.sha256)
    in_place = (args.output is None
                or os.path.abspath(args.output) == os.path.abspath(args.datafile.name))
    if in_place:
        args.datafile.close()
        outfile = open(args.datafile.name, "ab")
    else:
        outfile = open(args.output, "wb")
    outfile.write(binary_content)
    outfile.write(struct.pack("I", 0))
    outfile.write(signature)
    outfile.close()
    print("Signed %d bytes of data from %s with key %s" % (
        len(binary_content), args.datafile.name, args.keyfile.name))
Sign a data file with a ECDSA private key, append binary signature to file contents
def cli():
    """Allow the module to be called from the cli."""
    import argparse
    parser = argparse.ArgumentParser(description='Send data to graphite')
    parser.add_argument('metric', metavar='metric', type=str,
                        help='name.of.metric')
    parser.add_argument('value', metavar='value', type=int,
                        help='value of metric as int')
    args = parser.parse_args()
    init().send(args.metric, args.value)
Allow the module to be called from the cli.
def run_loop(agents, env, max_frames=0, max_episodes=0):
    """A run loop to have agents and an environment interact.

    Stops after ``max_frames`` frames or ``max_episodes`` episodes (0 means
    unlimited) or on Ctrl-C, then prints timing statistics.
    """
    total_frames = 0
    total_episodes = 0
    start_time = time.time()
    # Pair each agent with its own observation/action spec.
    for agent, obs_spec, act_spec in zip(agents, env.observation_spec(), env.action_spec()):
        agent.setup(obs_spec, act_spec)
    try:
        while not max_episodes or total_episodes < max_episodes:
            total_episodes += 1
            timesteps = env.reset()
            for agent in agents:
                agent.reset()
            while True:
                total_frames += 1
                actions = [agent.step(timestep)
                           for agent, timestep in zip(agents, timesteps)]
                if max_frames and total_frames >= max_frames:
                    return
                if timesteps[0].last():
                    break
                timesteps = env.step(actions)
    except KeyboardInterrupt:
        pass
    finally:
        elapsed_time = time.time() - start_time
        print("Took %.3f seconds for %s steps: %.3f fps" % (
            elapsed_time, total_frames, total_frames / elapsed_time))
A run loop to have agents and an environment interact.
def _synchronized(meth): @functools.wraps(meth) def wrapper(self, *args, **kwargs): with self._lock: return meth(self, *args, **kwargs) return wrapper
Call method while holding a lock.
def _check_stream(stream): if not isinstance(stream, type(_sys.stderr)): raise TypeError("The stream given ({}) is not a file object.".format(stream))
Check that the stream is a file
def match_blocks(hash_func, old_children, new_children):
    """Use difflib to find matching blocks between two child sequences,
    comparing children by ``hash_func``."""
    old_hashes = [hash_func(child) for child in old_children]
    new_hashes = [hash_func(child) for child in new_children]
    return difflib.SequenceMatcher(_is_junk, a=old_hashes, b=new_hashes)
Use difflib to find matching blocks.
def f2p(phrase, max_word_size=15, cutoff=3):
    """Convert a Finglish phrase to the most probable Persian phrase."""
    candidates = f2p_list(phrase, max_word_size, cutoff)
    # Take the top-ranked Persian word for each input word.
    return ' '.join(best[0][0] for best in candidates)
Convert a Finglish phrase to the most probable Persian phrase.
def _StartSshd(self):
    """Start (or reload) the SSH daemon unless systemd manages it already."""
    if os.path.exists(constants.LOCALBASE + '/bin/systemctl'):
        # systemd handles sshd activation; nothing to do.
        return
    for service in ('ssh', 'sshd'):
        if (os.path.exists('/etc/init.d/%s' % service)
                or os.path.exists('/etc/init/%s.conf' % service)):
            subprocess.call(['service', service, 'start'])
            subprocess.call(['service', service, 'reload'])
            return
Initialize the SSH daemon.
def _get_cf_grid_mapping_var(self): gmaps = ['fixedgrid_projection', 'goes_imager_projection', 'lambert_projection', 'polar_projection', 'mercator_projection'] if 'grid_mapping' in self.filename_info: gmaps = [self.filename_info.get('grid_mapping')] + gmaps for grid_mapping in gmaps: if grid_mapping in self.nc: return self.nc[grid_mapping] raise KeyError("Can't find grid mapping variable in SCMI file")
Figure out which grid mapping should be used
def find_all(self, header, list_type=None):
    """Find all direct children with ``header`` and optional list type."""
    matches = []
    for chunk in self:
        if chunk.header != header:
            continue
        # When a list type is requested, the header must be a list header
        # and the chunk's type must match.
        if list_type and not (header in list_headers and chunk.type == list_type):
            continue
        matches.append(chunk)
    return matches
Find all direct children with header and optional list type.
def memcache(self, f):
    """Cache ``f``'s results in an in-memory dictionary keyed by its args.

    Note: a result of None is never cached (treated as a miss).
    """
    cache = self.load_memcache(_fullname(f))

    @wraps(f)
    def memcached(*args):
        key = args
        result = cache.get(key, None)
        if result is None:
            result = f(*args)
            cache[key] = result
        return result

    return memcached
Cache a function in memory using an internal dictionary.
def import_fx_rates(self, rates: List[PriceModel]):
    """Imports the given prices into database. Write operation!

    Each rate is stored inverted against the default/base currency; rates
    already present for the same date are skipped.
    """
    base_currency = self.get_default_currency()
    have_new_rates = False
    for rate in rates:
        assert isinstance(rate, PriceModel)
        currency = self.get_by_symbol(rate.symbol)
        rate_date = rate.datetime.date()
        existing = currency.prices.filter(Price.date == rate_date).first()
        if existing:
            continue
        log(INFO, "Creating entry for %s, %s, %s, %s",
            base_currency.mnemonic, currency.mnemonic, rate_date, rate.value)
        # Store the inverse rate (base per unit of currency), 8 decimals.
        inverted_rate = (1 / rate.value).quantize(Decimal('.00000000'))
        Price(commodity=currency,
              currency=base_currency,
              date=rate_date,
              value=str(inverted_rate))
        have_new_rates = True
    if have_new_rates:
        log(INFO, "Saving new prices...")
        self.book.flush()
        self.book.save()
    else:
        log(INFO, "No prices imported.")
Imports the given prices into database. Write operation!
def planetType(temperature, mass, radius):
    """Return the planet type as 'temperatureType massType'.

    Prefers mass for the size classification, falling back to radius;
    returns None when neither is available.
    """
    # Bug fix: ``mass is not np.nan`` was an identity test that only caught
    # the np.nan singleton -- computed NaN values (and None) slipped through.
    if mass is not None and not np.isnan(mass):
        sizeType = planetMassType(mass)
    elif radius is not None and not np.isnan(radius):
        sizeType = planetRadiusType(radius)
    else:
        return None
    return '{0} {1}'.format(planetTempType(temperature), sizeType)
Returns the planet type as 'temperatureType massType'
def hydra_parser(in_file, options=None):
    """Parse a hydra breakpoint file, yielding BedPe namedtuples whose
    support meets ``options['min_support']`` (default 0)."""
    if options is None:
        options = {}
    BedPe = namedtuple('BedPe', ["chrom1", "start1", "end1", "chrom2",
                                 "start2", "end2", "name", "strand1",
                                 "strand2", "support"])
    min_support = options.get("min_support", 0)
    with open(in_file) as in_handle:
        for row in csv.reader(in_handle, dialect="excel-tab"):
            record = BedPe(row[0], int(row[1]), int(row[2]),
                           row[3], int(row[4]), int(row[5]),
                           row[6], row[8], row[9], float(row[18]))
            if record.support >= min_support:
                yield record
Parse hydra input file into namedtuple of values.
def _uniform_phi(M): return np.random.uniform(-np.pi, np.pi, M)
Generate M random numbers in [-pi, pi).
def select(self, nowait=True, cb=None):
    """Enable publisher confirmations on this channel.

    ``nowait`` is only honoured when the broker allows it and no callback
    was supplied.
    """
    nowait = nowait and self.allow_nowait() and not cb
    if not self._enabled:
        self._enabled = True
        # Reset message bookkeeping when confirmations are first enabled.
        self.channel.basic._msg_id = 0
        self.channel.basic._last_ack_id = 0
    args = Writer()
    args.write_bit(nowait)
    self.send_frame(MethodFrame(self.channel_id, 85, 10, args))
    if not nowait:
        self._select_cb.append(cb)
        self.channel.add_synchronous_cb(self._recv_select_ok)
Set this channel to use publisher confirmations.
def from_Composition(composition):
    """Return the LilyPond equivalent of a Composition in a string, or False
    when ``composition`` has no tracks attribute."""
    if not hasattr(composition, 'tracks'):
        return False
    result = '\\header { title = "%s" composer = "%s" opus = "%s" } '\
        % (composition.title, composition.author, composition.subtitle)
    for track in composition.tracks:
        result += from_Track(track) + ' '
    # Drop the trailing separator.
    return result[:-1]
Return the LilyPond equivalent of a Composition in a string.
def enable_debug(self):
    """Open the debug interface on the connected device.

    Raises HardwareError when the device is not in a connected state.
    """
    if not self.connected:
        raise HardwareError("Cannot enable debug if we are not in a connected state")
    self._loop.run_coroutine(self.adapter.open_interface(0, 'debug'))
Open the debug interface on the connected device.
def mouseGestureHandler(self, info):
    """Callback for MouseClickContext (passed to VideoWidget); dispatches
    single/double clicks of either mouse button."""
    print(self.pre, ": mouseGestureHandler: ")
    if info.fsingle:
        print(self.pre, ": mouseGestureHandler: single click")
        if info.button == QtCore.Qt.LeftButton:
            print(self.pre, ": mouseGestureHandler: Left button clicked")
        elif info.button == QtCore.Qt.RightButton:
            print(self.pre, ": mouseGestureHandler: Right button clicked")
            self.handle_right_single_click(info)
    elif info.fdouble:
        if info.button == QtCore.Qt.LeftButton:
            print(
                self.pre, ": mouseGestureHandler: Left button double-clicked")
            self.handle_left_double_click(info)
        elif info.button == QtCore.Qt.RightButton:
            print(
                self.pre, ": mouseGestureHandler: Right button double-clicked")
This is the callback for MouseClickContext. Passed to VideoWidget as a parameter
def parse_opera(url_data):
    """Parse an opera bookmark file and register each bookmarked URL."""
    from ..bookmarks.opera import parse_bookmark_data
    for url, name, lineno in parse_bookmark_data(url_data.get_content()):
        url_data.add_url(url, line=lineno, name=name)
Parse an opera bookmark file.
def pop_no_diff_fields(latest_config, current_config):
    """Remove fields that should not participate in the config diff."""
    no_diff_fields = ('userIdentity', 'principalId', 'userAgent',
                      'sourceIpAddress', 'requestParameters', 'eventName')
    for field in no_diff_fields:
        for config in (latest_config, current_config):
            config.pop(field, None)
Pops off fields that should not be included in the diff.
def setup():
    """Creates shared, backup and upload directories, then fires setup hooks."""
    init_tasks()
    run_hook("before_setup")
    # (path getter, permissions) pairs for the directories we manage.
    for get_path, mode in ((paths.get_shared_path, '755'),
                           (paths.get_backup_path, '750'),
                           (paths.get_upload_path, '775')):
        target = get_path()
        env.run("mkdir -p %s" % target)
        env.run("chmod %s %s" % (mode, target))
    run_hook("setup")
    run_hook("after_setup")
Creates shared and upload directory then fires setup to recipes.
def fetch_and_parse(method, uri, params_prefix=None, **params):
    """Fetch the given uri and return a python dictionary with parsed data-types."""
    response = fetch(method, uri, params_prefix, **params)
    payload = json.loads(response.text)
    return _parse(payload)
Fetch the given uri and return python dictionary with parsed data-types.
def __priority(self, pr, repo, pkg):
    """Add ``pkg`` to the blacklist when the (repo, pattern) priority rule matches.

    Patterns support a leading and/or trailing '*' wildcard; patterns
    containing '*' elsewhere never match.
    """
    rule_repo, pattern = pr[0], pr[1]
    if rule_repo != repo:
        return
    if pattern.startswith("*") and pattern.endswith("*"):
        if pattern[1:-1] in pkg:
            self.black.append(self.__add(repo, pkg))
    elif pattern.endswith("*"):
        if pkg.startswith(pattern[:-1]):
            self.black.append(self.__add(repo, pkg))
    elif pattern.startswith("*"):
        if pkg.endswith(pattern[1:]):
            self.black.append(self.__add(repo, pkg))
    elif "*" not in pattern:
        self.black.append(self.__add(repo, pkg))
Add packages in blacklist by priority
def delete(cls, resources, background=False, force=False):
    """Delete one or more ips by deleting their underlying ifaces."""
    if not isinstance(resources, (list, tuple)):
        resources = [resources]
    iface_ids = []
    for resource in resources:
        try:
            ip_ = cls.info(resource)
        except UsageError:
            cls.error("Can't find this ip %s" % resource)
        iface = Iface.info(ip_['iface_id'])
        iface_ids.append(iface['id'])
    return Iface.delete(iface_ids, background)
Delete an ip by deleting the iface
def requeue_job(self, job, queue, priority, delayed_for=None):
    """Requeue a job in a queue with the given priority, possibly delayed,
    then notify the job and log the action."""
    job.requeue(queue_name=queue._cached_name,
                priority=priority,
                delayed_for=delayed_for,
                queue_model=self.queue_model)
    # Jobs may optionally implement an on_requeued hook.
    if hasattr(job, 'on_requeued'):
        job.on_requeued(queue)
    self.log(self.job_requeue_message(job, queue))
Requeue a job in a queue with the given priority, possibly delayed
def run(self):
    """Run the hight quarter: launch the turrets, loop until ``run_time``
    elapses (or an error/interrupt occurs), then drain remaining results."""
    elapsed = 0
    run_time = self.config['run_time']
    start_time = time.time()
    now = time.time
    self.turrets_manager.start(self.transaction_context)
    self.started = True
    while elapsed <= run_time:
        try:
            self._run_loop_action()
            self._print_status(elapsed)
            elapsed = now() - start_time
        except (Exception, KeyboardInterrupt):
            print("\nStopping test, sending stop command to turrets")
            self.turrets_manager.stop()
            self.stats_handler.write_remaining()
            traceback.print_exc()
            break
    self.turrets_manager.stop()
    print("\n\nProcessing all remaining messages... This could take time depending on message volume")
    drain_start = time.time()
    self.result_collector.unbind(self.result_collector.LAST_ENDPOINT)
    self._clean_queue()
    print("took %s" % (time.time() - drain_start))
Run the hight quarter, launch the turrets and wait for results
def html_state(self):
    """Display state in HTML format for the admin form.

    Renders each app.model with its fields (plus 'uid') and foreign keys.
    """
    state = json.loads(self.state)
    # Build the fragments in a list and join once instead of the previous
    # quadratic repeated string concatenation.
    parts = []
    for app, appstate in state.items():
        for model, modelstate in appstate.items():
            parts.append("<p>%s.models.%s</p>" % (app, model))
            parts.append("<ul>")
            for field in modelstate["fields"] + ["uid"]:
                parts.append("<li>%s</li>" % field)
            for fk in modelstate["foreignkeys"]:
                parts.append("<li>%s (foreign key)</li>" % fk)
            parts.append("</ul>")
    return "".join(parts)
Display state in HTML format for the admin form.
def compile_contracts(self, target_path: Path) -> ContractManager:
    """Compile all contracts and store the resulting JSON at ``target_path``."""
    self.checksum_contracts()
    if self.overall_checksum is None:
        raise ContractSourceManagerCompilationError('Checksumming failed.')
    compiled = self._compile_all_contracts()
    target_path.parent.mkdir(parents=True, exist_ok=True)
    payload = dict(
        contracts=compiled,
        contracts_checksums=self.contracts_checksums,
        overall_checksum=self.overall_checksum,
        contracts_version=None,
    )
    with target_path.open(mode='w') as target_file:
        target_file.write(json.dumps(payload, sort_keys=True, indent=4))
    return ContractManager(target_path)
Store compiled contracts JSON at `target_path`.
def from_dict(cls, database, key, data, clear=False):
    """Create and populate a ZSet object from a data dictionary, optionally
    clearing any existing content first."""
    zset = cls(database, key)
    if clear:
        zset.clear()
    zset.add(data)
    return zset
Create and populate a ZSet object from a data dictionary.
def _get_all_indexes(self): if self.parser: return [v.index for v in self.parser.get_volumes()] + [d.index for d in self.parser.disks] else: return None
Returns all indexes available in the parser
def mv_videos(path):
    """Move files found in the immediate sub-directories of ``path`` up into
    ``path`` itself; return the number of files moved."""
    count = 0
    for entry in os.listdir(path):
        entry = os.path.join(path, entry)
        if not os.path.isdir(entry):
            continue
        for child in os.listdir(entry):
            child = os.path.join(entry, child)
            if not os.path.isfile(child):
                continue
            new_name = os.path.join(path, os.path.basename(child))
            try:
                os.rename(child, new_name)
            except OSError as e:
                # Bug fix: WindowsError is undefined on non-Windows Python 3
                # (it is an alias of OSError on Windows), so the old
                # ``except (WindowsError, OSError)`` raised NameError there.
                print('mv {} happens error: {}'.format(child, e))
            else:
                count += 1
                print('mv {} to {}'.format(child, new_name))
    return count
move videos in sub-directory of path to path.
def sd(self):
    """Return the standard deviation of the timeseries, or None when the
    variance is empty."""
    variance = self.var()
    if not len(variance):
        return None
    return np.sqrt(variance)
Calculate standard deviation of timeseries
def from_dict(self, d):
    """Initialise an API message from a transmission-safe dictionary,
    base64-decoding any 'data' fields listed in ``_encode_fields``."""
    for key, value in d.items():
        if key != 'data':
            setattr(self, str(key), value)
            continue
        for data_key, data_value in value.items():
            if data_key in self._encode_fields:
                setattr(self, str(data_key), base64.b64decode(data_value))
            else:
                setattr(self, str(data_key), data_value)
Initialise an API message from a transmission-safe dictionary.
def make_parse_err(self, err, reformat=True, include_ln=True):
    """Build a CoconutParseError from a ParseBaseException, optionally
    reformatting the offending line and adjusting the line number."""
    err_line, err_index = err.line, err.col - 1
    err_lineno = err.lineno if include_ln else None
    if reformat:
        err_line, err_index = self.reformat(err_line, err_index)
        if err_lineno is not None:
            err_lineno = self.adjust(err_lineno)
    return CoconutParseError(None, err_line, err_index, err_lineno)
Make a CoconutParseError from a ParseBaseException.
def commit_or_abort(self, ctx, timeout=None, metadata=None, credentials=None):
    """Run the commit-or-abort RPC via the underlying stub."""
    return self.stub.CommitOrAbort(
        ctx, timeout=timeout, metadata=metadata, credentials=credentials)
Runs commit or abort operation.
def parse_all_data(self):
    """Parse the master dataframe: name the columns, flag numeric states,
    and index by domain/entity/numerical/last_changed."""
    df = self._master_df
    df.columns = ["domain", "entity", "state", "last_changed"]
    df["numerical"] = df["state"].apply(lambda state: functions.isfloat(state))
    df.set_index(["domain", "entity", "numerical", "last_changed"], inplace=True)
Parses the master df.
def _add_dominance_relation(self, source, target):
    """Add a dominance relation edge from source to target in this docgraph."""
    self.add_node(target, layers={self.ns, self.ns + ':unit'})
    self.add_edge(source, target,
                  layers={self.ns, self.ns + ':discourse'},
                  edge_type=EdgeTypes.dominance_relation)
add a dominance relation to this docgraph
def tabulate(collection, headers, datetime_fmt='%Y-%m-%d %H:%M:%S', **kwargs):
    """Pretty-print a collection.

    ``headers`` is either a sequence of attribute names or a mapping of
    attribute name -> display name (None keeps the attribute name).
    """
    if isinstance(headers, dict):
        attrs = headers.keys()
        names = [attr if label is None else label
                 for attr, label in headers.items()]
    else:
        attrs = names = headers
    getter = attrgetter(*attrs)
    # NOTE(review): with a single attribute, attrgetter returns a scalar, not
    # a tuple -- presumably callers always pass at least two headers; confirm.
    rows = [(format_cell(cell, datetime_fmt=datetime_fmt) for cell in getter(item))
            for item in collection]
    return tblte(rows, headers=[name.upper() for name in names], **kwargs)
Pretty-print a collection.
def ustr(obj):
    """Convert obj to unicode on Python 2 and to a str object on Python 3.

    Bytes are decoded with DEFAULT_ENCODING; everything else is stringified.
    """
    if sys.version_info[0] == 2:
        # isinstance replaces the old exact ``type(obj) in [...]`` check;
        # on Python 2, str (bytes) values need explicit decoding.
        if isinstance(obj, str):
            return unicode(obj, DEFAULT_ENCODING)
        return unicode(obj)
    if isinstance(obj, bytes):
        return obj.decode(DEFAULT_ENCODING)
    return str(obj)
Python 2 and 3 utility method that converts an obj to unicode in python 2 and to a str object in python 3
def popleft(self):
    """Remove and return the oldest read pulse.

    Raises IndexError when the queue is empty.
    """
    self._mq.send("^", True, type=1)
    message = self._wait_receive_msg()
    value = int(message[0].decode('utf-8'))
    # The daemon signals an empty queue with -1.
    if value == -1:
        raise IndexError("pop from empty list")
    return value
Removes and returns the oldest read pulse.
def _create_scheduled_actions(conn, as_name, scheduled_actions):
    """Create each scheduled scaling action on the autoscale group
    ``as_name``, parsing start/end times from strings when needed."""
    if not scheduled_actions:
        return
    for name, action in six.iteritems(scheduled_actions):
        for time_key in ('start_time', 'end_time'):
            if time_key in action and isinstance(action[time_key], six.string_types):
                action[time_key] = datetime.datetime.strptime(
                    action[time_key], DATE_FORMAT)
        conn.create_scheduled_group_action(
            as_name, name,
            desired_capacity=action.get('desired_capacity'),
            min_size=action.get('min_size'),
            max_size=action.get('max_size'),
            start_time=action.get('start_time'),
            end_time=action.get('end_time'),
            recurrence=action.get('recurrence'))
Helper function to create scheduled actions
def decrypt(s, passphrase, curve='secp160r1', mac_bytes=10):
    """Decrypts `s' with passphrase `passphrase' using the named curve."""
    selected_curve = Curve.by_name(curve)
    private_key = selected_curve.passphrase_to_privkey(passphrase)
    return private_key.decrypt(s, mac_bytes)
Decrypts `s' with passphrase `passphrase'
def filter_with_schema(self, collection=None, context=None):
    """Filter each item of ``collection`` through the schema, if any."""
    if collection is None or self.schema is None:
        return
    # NOTE(review): the guard checks ``self.schema`` but filtering uses
    # ``self._schema`` -- presumably the former is a property over the
    # latter; confirm.
    try:
        for item in collection:
            self._schema.filter(
                model=item,
                context=context if self.use_context else None,
            )
    except TypeError:
        # Non-iterable collections are silently ignored.
        pass
Perform collection items filtering with schema
def stop_handling_end_signal(self):
    """Stop handling the SIGINT and SIGTERM signals."""
    try:
        # Restore the interpreter's default handlers.
        for sig in (signal.SIGTERM, signal.SIGINT):
            signal.signal(sig, signal.SIG_DFL)
    except ValueError:
        # signal.signal() refuses to run outside the main thread.
        self.log('Signals cannot be caught in a Thread', level='warning')
Stop handling the SIGINT and SIGTERM signals
def register_references(kb_app: kb,
                        sphinx_app: Sphinx,
                        sphinx_env: BuildEnvironment,
                        docnames: List[str]):
    """Walk the registry and add sphinx directives."""
    references: ReferencesContainer = sphinx_app.env.references
    for resource_name, resource_class in kb_app.config.resources.items():
        # Only resource classes flagged as references get an entry.
        if getattr(resource_class, 'is_reference', False):
            references[resource_name] = dict()
Walk the registry and add sphinx directives
def from_caller_module_root():
    """Return a `Path` to the root package file of the module that called us.

    Walks one frame up the stack, resolves the caller's module, takes its
    top-level package name and returns that package's ``__file__`` as a
    ``Path``.

    Raises:
        RuntimeError: if the caller does not belong to a module.
    """
    import inspect
    # NOTE: the old `all_stack = list(inspect.stack())` was dead code and
    # expensive (it walked and contextualized the entire call stack).
    caller_frames = inspect.getouterframes(inspect.currentframe(), 2)
    module = inspect.getmodule(caller_frames[1].frame)
    if not module:
        raise RuntimeError('caller is not a module')
    root_module_name = module.__name__.partition('.')[0]
    return Path(sys.modules[root_module_name].__file__)
Return a `Path` to the root of the module that includes the caller.
def _cast(self, value): if self.type != 'text': value = utils.cast(self.TYPES.get(self.type)['cast'], value) return value
Cast the specified value to the type of this setting.
def create_and_write_saml_metadata(proxy_conf, key, cert, dir, valid, split_frontend_metadata=False,
                                   split_backend_metadata=False):
    """Generate SAML metadata for the given PROXY_CONF, signed with the given KEY and associated CERT.

    Frontend and backend entity descriptors are written either split
    (one file per entity) or merged into frontend.xml / backend.xml.
    """
    satosa_config = SATOSAConfig(proxy_conf)
    security_ctx = _get_security_context(key, cert)
    frontend_entities, backend_entities = create_entity_descriptors(satosa_config)

    to_write = []
    for entities, split, merged_name in (
            (frontend_entities, split_frontend_metadata, "frontend.xml"),
            (backend_entities, split_backend_metadata, "backend.xml")):
        if not entities:
            continue
        if split:
            to_write.extend(_create_split_entity_descriptors(entities, security_ctx, valid))
        else:
            to_write.extend(_create_merged_entities_descriptors(entities, security_ctx, valid, merged_name))

    for metadata, filename in to_write:
        path = os.path.join(dir, filename)
        print("Writing metadata to '{}'".format(path))
        with open(path, "w") as f:
            f.write(metadata)
Generates SAML metadata for the given PROXY_CONF, signed with the given KEY and associated CERT.
def paginate(self, skip, limit):
    """Paginate list of records.

    Slices ``self.cursordat`` in place to the page that starts at offset
    `skip`; `skip` is expected to be a multiple of `limit`.
    """
    total = self.count()
    if not total or not limit:
        return
    skip = skip or 0
    # Map each page's starting offset to the offset where it ends.
    page_count = int(ceil(total / float(limit)))
    page_ends = {}
    previous = 0
    for page in range(page_count):
        boundary = limit * page
        page_ends[previous] = boundary
        previous = boundary
    if limit and limit < total:
        limit = page_ends.get(skip, total)
    self.cursordat = self.cursordat[skip: limit]
Paginate list of records
def _get_callback_context(env): if env.model is not None and env.cvfolds is None: context = 'train' elif env.model is None and env.cvfolds is not None: context = 'cv' return context
Return whether the current callback context is 'cv' or 'train'.
def get(self, timeout):
    """Await the next (action, msg, kwargs) triple from the underlying queue.

    On the very first call, and whenever `timeout` expires with nothing
    queued, a ``("ping", PingStats.get(), {})`` notification carrying
    server information is returned instead.
    """
    if self._first:
        # First call always reports a ping so the peer gets server info
        # immediately.
        self._first = False
        return ("ping", PingStats.get(), {})
    try:
        (action, msg, kwargs) = yield from asyncio.wait_for(super().get(), timeout)
    except asyncio.futures.TimeoutError:
        # No message arrived in time: degrade to a ping notification.
        return ("ping", PingStats.get(), {})
    return (action, msg, kwargs)
When the timeout expires, we send a ping notification with server information.
def global_start_index(self, value):
    """Set the global start index.

    Raises:
        TypeError: if `value` is neither an int nor None.
    """
    if value is not None and not isinstance(value, int):
        raise TypeError('global_start_index attribute must be of int '
                        'type.')
    self._global_start_index = value
Set the global start index.
def load(self, playerName=None):
    """Retrieve the PlayerRecord settings from the saved disk file.

    Args:
        playerName: optional name; when given, overrides ``self.name``
            before loading.

    Raises:
        ValueError: if the profile file cannot be read.
    """
    if playerName:
        self.name = playerName
    try:
        with open(self.filename, "rb") as f:
            data = f.read()
    # Narrowed from a blanket `except Exception`, which masked unrelated
    # errors (e.g. a missing attribute) as "file does not exist".
    except EnvironmentError:
        raise ValueError("invalid profile, '%s'. file does not exist: %s"%(self.name, self.filename))
    self.update(json.loads(data))
    self._matches = []
retrieve the PlayerRecord settings from saved disk file
def summarize_video(video, prefix, max_outputs=1):
    """Summarize the video using image summaries starting with prefix."""
    video_shape = shape_list(video)
    if len(video_shape) != 5:
        raise ValueError("Assuming videos given as tensors in the format "
                         "[batch, time, height, width, channels] but got one "
                         "of shape: %s" % str(video_shape))
    # Summaries are a graph-mode feature; skip entirely in eager mode.
    if tf.executing_eagerly():
        return
    static_time = video.get_shape().as_list()[1]
    if static_time is None:
        # Unknown time dimension: only the last frame can be summarized.
        tf.summary.image(
            "%s_last_frame" % prefix,
            tf.cast(video[:, -1, :, :, :], tf.uint8),
            max_outputs=max_outputs)
    else:
        # Known time dimension: emit one image summary per frame.
        for frame_index in range(video_shape[1]):
            tf.summary.image(
                "%s_frame_%d" % (prefix, frame_index),
                tf.cast(video[:, frame_index, :, :, :], tf.uint8),
                max_outputs=max_outputs)
Summarize the video using image summaries starting with prefix.
def rewrap(s, width=COLS):
    """Join all lines from the input string and wrap the result at `width`."""
    flattened = ' '.join(line.strip() for line in s.strip().split('\n'))
    return '\n'.join(textwrap.wrap(flattened, width))
Join all lines from input string and wrap it at specified width
def help_string():
  """Generate help string with contents of registry.

  The original assigned the tuple of lists to BOTH `help_str` and `lists`
  (`help_str = lists = tuple(...)`), so `help_str % lists` applied `%` to a
  tuple and raised TypeError — the template string had clearly been lost.
  Reconstructed a labeled template with one %s slot per registry list.
  """
  help_str = """
Registry contents:
------------------

  Models:
%s

  HParams:
%s

  RangedHParams:
%s

  Problems:
%s

  Optimizers:
%s

  Attacks:
%s

  Attack HParams:
%s

  Pruning HParams:
%s

  Pruning Strategies:
%s

  Env Problems:
%s
"""
  lists = tuple(
      display_list_by_prefix(entries, starting_spaces=4) for entries in [
          list_models(),
          list_hparams(),
          list_ranged_hparams(),
          list_base_problems(),
          list_optimizers(),
          list_attacks(),
          list_attack_params(),
          list_pruning_params(),
          list_pruning_strategies(),
          list_env_problems(),
      ])
  return help_str % lists
Generate help string with contents of registry.
def _iter_candidate_groups(self, init_match, edges0, edges1): sources = {} for start_vertex0, end_vertex0 in edges0: l = sources.setdefault(start_vertex0, []) l.append(end_vertex0) dests = {} for start_vertex1, end_vertex1 in edges1: start_vertex0 = init_match.reverse[start_vertex1] l = dests.setdefault(start_vertex0, []) l.append(end_vertex1) for start_vertex0, end_vertices0 in sources.items(): end_vertices1 = dests.get(start_vertex0, []) yield end_vertices0, end_vertices1
Divide the edges into groups
def this_year(self):
    """Get EighthBlocks from this school year only."""
    start, end = get_date_range_this_year()
    return self.filter(date__gte=start, date__lte=end)
Get EighthBlocks from this school year only.
def create(model_config, batch_size, vectors=None):
    """Create an IMDB dataset.

    Builds text/label fields, loads the cached IMDB splits, builds vocabs
    from the training split, and wraps everything in a TextData bundle.
    """
    dataset_path = model_config.data_dir('imdb')
    # spaCy tokenization, lowercased, batch-first tensors.
    text_field = data.Field(lower=True, tokenize='spacy', batch_first=True)
    label_field = data.LabelField(is_target=True)
    train_source, test_source = IMDBCached.splits(
        root=dataset_path, text_field=text_field, label_field=label_field)
    # Vocabulary comes from the training split only (capped at 25k tokens).
    text_field.build_vocab(train_source, max_size=25_000, vectors=vectors)
    label_field.build_vocab(train_source)
    train_iterator, test_iterator = data.BucketIterator.splits(
        (train_source, test_source),
        batch_size=batch_size,
        device=model_config.torch_device(),
        shuffle=True)
    return TextData(
        train_source, test_source, train_iterator, test_iterator,
        text_field, label_field)
Create an IMDB dataset
def getExim(exim_id):
    """Return the instrument interface for the exim_id passed in, or None.

    The old implementation subscripted the result of ``filter``, which is a
    non-subscriptable iterator on Python 3 (TypeError), and used the
    ``and/or`` idiom, which wrongly returned None for a falsy interface.
    """
    for interface_id, interface in get_instrument_interfaces():
        if interface_id == exim_id:
            return interface
    return None
Returns the instrument interface for the exim_id passed in, or None if there is no match.
def v_init_extension(ctx, stmt):
    """Find the modulename of the prefix, and set `stmt.keyword`."""
    prefix, identifier = stmt.raw_keyword
    # Resolve the prefix against the statement's module imports.
    modname, revision = prefix_to_modulename_and_revision(
        stmt.i_module, prefix, stmt.pos, ctx.errors)
    stmt.keyword = (modname, identifier)
    stmt.i_extension_modulename = modname
    stmt.i_extension_revision = revision
    stmt.i_extension = None
find the modulename of the prefix, and set `stmt.keyword`