code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def _write_xml(xmlfile, srcs):
    """Save the ROI model as an XML file.

    Parameters
    ----------
    xmlfile : str
        Path of the output XML file.
    srcs : iterable
        Source objects; each must provide a ``write_xml(root)`` method.
    """
    root = ElementTree.Element('source_library')
    root.set('title', 'source_library')
    for src in srcs:
        src.write_xml(root)
    # Context manager guarantees the handle is closed even on error
    # (the original opened the file and never closed it).
    with open(xmlfile, 'w') as output_file:
        output_file.write(utils.prettify_xml(root))
def viewlog(calc_id, host='localhost', port=8000):
base_url = 'http://%s:%s/v1/calc/' % (host, port)
start = 0
psize = 10
try:
while True:
url = base_url + '%d/log/%d:%d' % (calc_id, start, start + psize)
rows = json.load(urlopen(url))
for row in rows:
... | Extract the log of the given calculation ID from the WebUI |
def validate_url(cls, url: str) -> Optional[Match[str]]:
    """Return the regex match if this extractor can handle *url*, else None."""
    return re.match(cls._VALID_URL, url)
def filter_batch(self, batch):
    """Yield only the items of *batch* accepted by ``self.filter``.

    Rejected items bump the 'filtered_out' metadata counter; every item,
    kept or not, increments ``self.total`` and triggers a progress log.
    """
    for entry in batch:
        if self.filter(entry):
            yield entry
        else:
            rejected = self.get_metadata('filtered_out') + 1
            self.set_metadata('filtered_out', rejected)
        self.total += 1
        self._log_progress()
def _first_expander(fringe, iteration, viewer):
current = fringe[0]
neighbors = current.expand(local_search=True)
if viewer:
viewer.event('expanded', [current], [neighbors])
fringe.extend(neighbors) | Expander that expands only the first node on the fringe. |
def _handle_units_placement(changeset, units, records):
for service_name, service in sorted(changeset.bundle['services'].items()):
num_units = service.get('num_units')
if num_units is None:
continue
placement_directives = service.get('to', [])
if not isinstance(placement_... | Ensure that requires and placement directives are taken into account. |
def ms_cutall(self, viewer, event, data_x, data_y):
if not self.cancut:
return True
x, y = self.get_win_xy(viewer)
if event.state == 'move':
self._cutboth_xy(viewer, x, y)
elif event.state == 'down':
self._start_x, self._start_y = x, y
imag... | An interactive way to set the low AND high cut levels. |
def _new_cls_attr(self, clazz, name, cls=None, mult=MULT_ONE, cont=True,
                  ref=False, bool_assignment=False, position=0):
    """Create a new meta attribute of *clazz* and register it under *name*."""
    new_attr = MetaAttr(name, cls, mult, cont, ref, bool_assignment, position)
    clazz._tx_attrs[name] = new_attr
    return new_attr
def find_deprecated_usages(
    schema: GraphQLSchema, ast: DocumentNode
) -> List[GraphQLError]:
    """Return one GraphQLError per deprecated field/enum use found in *ast*."""
    info = TypeInfo(schema)
    usage_visitor = FindDeprecatedUsages(info)
    visit(ast, TypeInfoVisitor(info, usage_visitor))
    return usage_visitor.errors
def value(self):
    """Remove this object's stored value from the session repository."""
    from abilian.services.repository import session_repository
    session_repository.delete(self, self.uuid)
def deserialize(self, value, **kwargs):
kwargs.update({'trusted': kwargs.get('trusted', False)})
if self.deserializer is not None:
return self.deserializer(value, **kwargs)
if value is None:
return None
output_tuples = [
(
self.key_prop... | Return a deserialized copy of the dict |
def _configure_logging(self):
if not self.LOGGING_CONFIG:
dictConfig(self.DEFAULT_LOGGING)
else:
dictConfig(self.LOGGING_CONFIG) | Setting up logging from logging config in settings |
def visit_yield(self, node, parent):
    """Visit a Yield node and return a freshly-built equivalent node."""
    new_yield = nodes.Yield(node.lineno, node.col_offset, parent)
    if node.value is not None:
        new_yield.postinit(self.visit(node.value, new_yield))
    return new_yield
def parents(self, vertex):
    """Return the list of immediate parents of *vertex*."""
    return list(map(self.tail, self.in_edges(vertex)))
def channel(self, match):
    """Return the Channel for a Slack ID (9 chars, C/G/D prefix) or a name."""
    looks_like_id = len(match) == 9 and match[0] in ('C', 'G', 'D')
    field = 'id' if looks_like_id else 'name'
    return self._lookup(Channel, field, match)
def _initSwapInfo(self):
self._swapList = []
sysinfo = SystemInfo()
for (swap,attrs) in sysinfo.getSwapStats().iteritems():
if attrs['type'] == 'partition':
dev = self._getUniqueDev(swap)
if dev is not None:
self._swapList.append(de... | Initialize swap partition to device mappings. |
def extract_status(self, status_headers):
    """Store the status code from the status line, '-' when unusable.

    A missing code, or a 204 whose status line mentions 'Error',
    is normalized to '-'.
    """
    code = status_headers.get_statuscode()
    if not code:
        code = '-'
    elif code == '204' and 'Error' in status_headers.statusline:
        code = '-'
    self['status'] = code
def pub(self, topic, message):
    """Publish *message* on *topic*."""
    frame = ' '.join((constants.PUB, topic))
    return self.send(frame, message)
def error(self, message, code=1):
    """Write *message* to stderr, then terminate with exit status *code*."""
    print(message, end='', file=sys.stderr)
    sys.exit(code)
def send_sms_message(sms_message, backend=None, fail_silently=False):
    """Send one SMSMessage through the connection for *backend*."""
    connection_ctx = get_sms_connection(backend=backend,
                                        fail_silently=fail_silently)
    with connection_ctx as connection:
        return connection.send_messages([sms_message])
def related_obj_to_dict(obj, **kwargs):
kwargs.pop('formatter', None)
suppress_private_attr = kwargs.get("suppress_private_attr", False)
suppress_empty_values = kwargs.get("suppress_empty_values", False)
attrs = fields(obj.__class__)
return_dict = kwargs.get("dict_factory", OrderedDict)()
for a ... | Covert a known related object to a dictionary. |
def _netstat_route_netbsd():
ret = []
cmd = 'netstat -f inet -rn | tail -n+5'
out = __salt__['cmd.run'](cmd, python_shell=True)
for line in out.splitlines():
comps = line.split()
ret.append({
'addr_family': 'inet',
'destination': comps[0],
'gateway': c... | Return netstat routing information for NetBSD |
def evaluate(self, x, y, flux, x_0, y_0):
if self.xname is None:
dx = x - x_0
else:
dx = x
setattr(self.psfmodel, self.xname, x_0)
if self.xname is None:
dy = y - y_0
else:
dy = y
setattr(self.psfmodel, self.yname, y... | The evaluation function for PRFAdapter. |
def merge_versioned(releases, schema=None, merge_rules=None):
if not merge_rules:
merge_rules = get_merge_rules(schema)
merged = OrderedDict()
for release in sorted(releases, key=lambda release: release['date']):
release = release.copy()
ocid = release.pop('ocid')
merged[('oc... | Merges a list of releases into a versionedRelease. |
def duration(self):
if not self._loaded:
return 0
delta = datetime.datetime.now() - self._start_time
total_secs = (delta.microseconds +
(delta.seconds + delta.days * 24 * 3600) *
10 ** 6) / 10 ** 6
return max(0, int(round(total_secs... | Returns task's current duration in minutes. |
def only_self(self):
    """Temporarily clear ``self.others`` so matching applies to self only.

    On exit, the saved entries are prepended to whatever accumulated in
    ``self.others`` while the context was active.
    """
    saved = self.others
    self.others = []
    try:
        yield
    finally:
        self.others = saved + self.others
def shutdown(self):
self.__should_stop.set()
if self.__server_thread == threading.current_thread():
self.__is_shutdown.set()
self.__is_running.clear()
else:
if self.__wakeup_fd is not None:
os.write(self.__wakeup_fd.write_fd, b'\x00')
self.__is_shutdown.wait()
if self.__wakeup_fd is not None:
... | Shutdown the server and stop responding to requests. |
def predict(self, testing_features):
if self.clean:
testing_features = self.impute_data(testing_features)
if self._best_inds:
X_transform = self.transform(testing_features)
try:
return self._best_estimator.predict(self.transform(testing_features))
... | predict on a holdout data set. |
def start(port, root_directory, bucket_depth):
    """Run the mock S3 server on *port*, serving *root_directory*.

    Blocks on the tornado IOLoop until the loop is stopped.
    """
    app = S3Application(root_directory, bucket_depth)
    server = httpserver.HTTPServer(app)
    server.listen(port)
    ioloop.IOLoop.current().start()
def AsyncResponseMiddleware(environ, resp):
    """Test helper: wrap *resp* in a future resolved on the next loop tick."""
    fut = create_future()
    fut._loop.call_soon(fut.set_result, resp)
    return fut
def inspect(item, maxchar=80):
    """Print every attribute of *item*, one per line, to stderr.

    Values longer than *maxchar* characters are truncated with '...';
    attributes whose access or str() fails print '[ERROR]' instead.
    """
    for attr_name in dir(item):
        try:
            member = str(getattr(item, attr_name))
            if maxchar and len(member) > maxchar:
                member = member[:maxchar] + "..."
        except Exception:
            # Narrowed from a bare 'except:', which also swallowed
            # KeyboardInterrupt and SystemExit.
            member = "[ERROR]"
        print("{}: {}".format(attr_name, member), file=sys.stderr)
def write_ha_config(ip, mac, hass, port, id):
click.echo("Write configuration for Home Assistant to device %s..." % ip)
action = "get://{1}:{2}/api/mystrom?{0}={3}"
data = {
'single': action.format('single', hass, port, id),
'double': action.format('double', hass, port, id),
'long':... | Write the configuration for Home Assistant to a myStrom button. |
def spit_config(self, conf_file, firstwordonly=False):
cfg = ConfigParser.RawConfigParser()
for sec in _CONFIG_SECS:
cfg.add_section(sec)
sec = 'channels'
for i in sorted(self.pack.D):
cfg.set(sec, str(i),
self.pack.name(i, firstwordonly=firstw... | conf_file a file opened for writing. |
def dump(self):
data = dict(
sessions_active=self.sess_active,
connections_active=self.conn_active,
connections_ps=self.conn_ps.last_average,
packets_sent_ps=self.pack_sent_ps.last_average,
packets_recv_ps=self.pack_recv_ps.last_average
)
... | Return dictionary with current statistical information |
def configure_swagger(graph):
    """Build the singleton swagger-definitions endpoint; return its subject."""
    convention_config = graph.config.swagger_convention
    ns = Namespace(
        subject=convention_config.name,
        version=convention_config.version,
    )
    SwaggerConvention(graph).configure(ns, discover=tuple())
    return ns.subject
def register_combo(self, parent, legs):
    """Record the combo contract *parent* with instruments for each leg."""
    parent_key = self.ibConn.contractString(parent)
    instruments = {}
    for leg in legs:
        leg_key = self.ibConn.contractString(leg)
        instruments[leg_key] = self.get_instrument(leg_key)
    self.instrument_combos[parent_key] = instruments
def _is_good_file_for_multiqc(fpath):
(ftype, encoding) = mimetypes.guess_type(fpath)
if encoding is not None:
return False
if ftype is not None and ftype.startswith('image'):
return False
return True | Returns False if the file is binary or image. |
def disallow_positional_args(wrapped=None, allowed=None):
if wrapped is None:
return functools.partial(disallow_positional_args, allowed=allowed)
@wrapt.decorator
def disallow_positional_args_dec(fn, instance, args, kwargs):
ismethod = instance is not None
_check_no_positional(fn, args, ismethod, allo... | Requires function to be called using keyword arguments. |
def wp_draw_callback(self, points):
if len(points) < 3:
return
from MAVProxy.modules.lib import mp_util
home = self.wploader.wp(0)
self.wploader.clear()
self.wploader.target_system = self.target_system
self.wploader.target_component = self.target_component
... | callback from drawing waypoints |
def connection(self):
if self._connection is None:
self._connection = self.client[self.database_name]
if self.disable_id_injector:
incoming = self._connection._Database__incoming_manipulators
for manipulator in incoming:
if isinstance(m... | Connect to and return mongodb database object. |
def CreateHunt(hunt_obj):
data_store.REL_DB.WriteHuntObject(hunt_obj)
if hunt_obj.HasField("output_plugins"):
output_plugins_states = flow.GetOutputPluginStates(
hunt_obj.output_plugins,
source="hunts/%s" % hunt_obj.hunt_id,
token=access_control.ACLToken(username=hunt_obj.creator))
d... | Creates a hunt using a given hunt object. |
def user_exists(self, name):
    """Return True if a user with the given *name* is present."""
    return any(user['name'] == name for user in self.data['users'])
def register(cls, *args, **kwargs):
    """Register this view class as a handler, on ``cls.app`` when bound."""
    target = cls.app
    if target is None:
        # No app bound yet: fall back to the module-level register().
        return register(*args, handler=cls, **kwargs)
    return target.register(*args, handler=cls, **kwargs)
def _merge_default_values(self):
values = self._get_default_values()
for key, value in values.items():
if not self.data.get(key):
self.data[key] = value | Merge default values with resource data. |
def _mainthread_accept_clients(self):
try:
if self._accept_selector.select(timeout=self.block_time):
client = self._server_socket.accept()
logging.info('Client connected: {}'.format(client[1]))
self._threads_limiter.start_thread(target=self._subthread_... | Accepts new clients and sends them to the to _handle_accepted within a subthread |
def init_exporter(extract_images, execute, **exporter_config):
config = Config(InteractExporter=exporter_config)
preprocessors = []
if extract_images:
preprocessors.append(
'nbconvert.preprocessors.ExtractOutputPreprocessor'
)
if execute:
preprocessors.append('nbinter... | Returns an initialized exporter. |
def update_security_of_project(self, ID, data):
    """PUT new security settings *data* for project *ID*."""
    log.info('Update project %s security %s' % (ID, data))
    endpoint = 'projects/%s/security.json' % ID
    self.put(endpoint, data)
def key_add(self):
    """Store the entered API key in the bot's data, then clear the fields."""
    from .main import add_api_key
    name, value = self.key_name.get(), self.key_val.get()
    add_api_key(name, value)
    self.key_name.set("")
    self.key_val.set("")
def draw_rect(self, color, world_rect, thickness=0):
    """Draw a rectangle given in world coordinates onto the surface."""
    top_left = self.world_to_surf.fwd_pt(world_rect.tl).round()
    bottom_right = self.world_to_surf.fwd_pt(world_rect.br).round()
    screen_rect = pygame.Rect(top_left, bottom_right - top_left)
    pygame.draw.rect(self.surf, color, screen_rect, thickness)
def update(self, **kwargs):
    """Update the document in place (index untouched); 404s are ignored."""
    params = dict(index=self.name, doc_type=self.doc_type, ignore=[404])
    params.update(kwargs)
    return self._client.update(**params)
def color_parts(parts):
    """Return *parts* with an ANSI-green title and ANSI-cyan doi."""
    colored_title = Fore.GREEN + parts.title + Style.RESET_ALL
    colored_doi = Fore.CYAN + parts.doi + Style.RESET_ALL
    return parts._replace(title=colored_title, doi=colored_doi)
def lowpass(var, key, factor):
    """Exponential lowpass: blend *var* into the stored value for *key*."""
    global lowpass_data
    if key in lowpass_data:
        previous = lowpass_data[key]
        lowpass_data[key] = factor * previous + (1.0 - factor) * var
    else:
        # First sample for this key: seed the filter state.
        lowpass_data[key] = var
    return lowpass_data[key]
def _hash(self, string, hash_type):
hash_types = {
'TABLE_OFFSET': 0,
'HASH_A': 1,
'HASH_B': 2,
'TABLE': 3
}
seed1 = 0x7FED7FED
seed2 = 0xEEEEEEEE
for ch in string.upper():
if not isinstance(ch, int): ch = ord(ch)
... | Hash a string using MPQ's hash function. |
def http(container = None):
"wrap a WSGI-style class method to a HTTPRequest event handler"
def decorator(func):
@functools.wraps(func)
def handler(self, event):
return _handler(self if container is None else container, event, lambda env: func(self, env))
return handler
r... | wrap a WSGI-style class method to a HTTPRequest event handler |
def setup(app):
sphinx_compatibility._app = app
app.add_config_value('sphinx_gallery_conf', DEFAULT_GALLERY_CONF, 'html')
for key in ['plot_gallery', 'abort_on_example_error']:
app.add_config_value(key, get_default_config_value(key), 'html')
try:
app.add_css_file('gallery.css')
excep... | Setup sphinx-gallery sphinx extension |
def save_model(self, request, obj, form, change):
super(GenericPositionsAdmin, self).save_model(request, obj, form,
change)
c_type = ContentType.objects.get_for_model(obj)
try:
ObjectPosition.objects.get(content_type__pk=c_type.id... | Add an ObjectPosition to the object. |
def allow_role(role):
    """Return a processor that grants *role* permission for an action."""
    def processor(action, argument):
        grant = ActionRoles.allow(action, argument=argument, role_id=role.id)
        db.session.add(grant)
    return processor
def _logger_fh(self):
logfile = os.path.join(self.default_args.tc_log_path, self.default_args.tc_log_file)
fh = logging.FileHandler(logfile)
fh.set_name('fh')
fh.setLevel(logging.DEBUG)
fh.setFormatter(self._logger_formatter)
self.log.addHandler(fh) | Add File logging handler. |
def unscale_dict_wet(C):
    """Undo the scaling applied in `scale_dict_wet`."""
    unscaled = {}
    for key, val in C.items():
        unscaled[key] = _scale_dict[key] * val
    return unscaled
def from_iterable(cls, target_types, address_mapper, adaptor_iter):
inst = cls(target_types, address_mapper)
all_valid_addresses = set()
for target_adaptor in adaptor_iter:
inst._inject_target(target_adaptor)
all_valid_addresses.add(target_adaptor.address)
inst._validate(all_valid_addresses)... | Create a new DependentGraph from an iterable of TargetAdaptor subclasses. |
def _finish(self, update_ops, name_scope):
iter_ = self._get_iter_variable()
beta1_power, beta2_power = self._get_beta_accumulators()
with tf.control_dependencies(update_ops):
with tf.colocate_with(iter_):
def update_beta_op():
update_beta1 = beta1_power.assign(
beta1_p... | Updates beta_power variables every n batches and incrs counter. |
def make_quantile_df(data, draw_quantiles):
dens = data['density'].cumsum() / data['density'].sum()
ecdf = interp1d(dens, data['y'], assume_sorted=True)
ys = ecdf(draw_quantiles)
violin_xminvs = interp1d(data['y'], data['xminv'])(ys)
violin_xmaxvs = interp1d(data['y'], data['xmaxv'])(ys)
data = ... | Return a dataframe with info needed to draw quantile segments |
def _generator_file(self):
for path in self.paths:
if os.path.isfile(path):
if isvalid(path, self.access, self.extensions,
minsize=self.minsize):
yield os.path.abspath(path)
elif os.path.isdir(path):
for root,... | Generator for `self.filetype` of 'file |
def instruction(self, val):
self._instruction = val
if isinstance(val, tuple):
if len(val) is 2:
self._action, self.command = val
else:
self._action, self.command, self.extra = val
else:
split = val.split(" ", 1)
if ... | Set the action and command from an instruction |
def pathExists(self, path):
def commandComplete(cmd):
return not cmd.didFail()
return self.runRemoteCommand('stat', {'file': path,
'logEnviron': self.logEnviron, },
abandonOnFailure=False,
... | test whether path exists |
def values(self):
    """Return a list of all state values."""
    return [data for __, data in self.items()]
def coth(x, context=None):
    """Return the hyperbolic cotangent of x."""
    operand = BigFloat._implicit_convert(x)
    return _apply_function_in_current_context(
        BigFloat, mpfr.mpfr_coth, (operand,), context)
def cli(yamlfile, format, context):
    """Generate and print a JSONLD document from a biolink schema."""
    generator = JSONLDGenerator(yamlfile, format)
    print(generator.serialize(context=context))
def printData(self, output = sys.stdout):
self.printDatum("Name : ", self.fileName, output)
self.printDatum("Author : ", self.author, output)
self.printDatum("Repository : ", self.repository, output)
self.printDatum("Category : ", self.category, output)
self.printDatum("Downloads :... | Output all the file data to be written to any writable output |
def register(self, f, *args, **kwargs):
    """Queue *f* with its arguments for deferred invocation."""
    def deferred():
        return f(*args, **kwargs)
    self._functions.append(deferred)
def usernameAvailable(self, username, domain):
if len(username) < 2:
return [False, u"Username too short"]
for char in u"[ ,:;<>@()!\"'%&\\|\t\b":
if char in username:
return [False,
u"Username contains invalid character: '%s'" % char]
... | Check to see if a username is available for the user to select. |
def extern_create_exception(self, context_handle, msg_ptr, msg_len):
    """Build an Exception from a utf8 message and wrap it as a context value."""
    context = self._ffi.from_handle(context_handle)
    message = self.to_py_str(msg_ptr, msg_len)
    return context.to_value(Exception(message))
def detect_metadata_url_scheme(url):
scheme = None
url_lower = url.lower()
if any(x in url_lower for x in ['wms', 'service=wms']):
scheme = 'OGC:WMS'
if any(x in url_lower for x in ['wmts', 'service=wmts']):
scheme = 'OGC:WMTS'
elif all(x in url for x in ['/MapServer', 'f=json']):
... | detect whether a url is a Service type that HHypermap supports |
def init_fundamental_types(self):
    """Register the shared handler for all fundamental TypeKinds (ids 2-24)."""
    handler = self._handle_fundamental_types
    for type_id in range(2, 25):
        setattr(self, TypeKind.from_id(type_id).name, handler)
def fail(self, reason, obj, pointer=None):
    """Record a validation error; raise immediately in fail-fast mode."""
    joined = pointer_join(pointer)
    error = ValidationError(reason, obj, joined)
    if self.fail_fast:
        raise error
    self.errors.append(error)
    return error
def map_wrap(f):
    """Wrap *f* so the wrapper can be passed into 'map'-style processing."""
    @functools.wraps(f)
    def inner(*call_args, **call_kwargs):
        return f(*call_args, **call_kwargs)
    return inner
def replace_cells(self, key, sorted_row_idxs):
row, col, tab = key
new_keys = {}
del_keys = []
selection = self.grid.actions.get_selection()
for __row, __col, __tab in self.grid.code_array:
if __tab == tab and \
(not selection or (__row, __col) in selec... | Replaces cells in current selection so that they are sorted |
def fwd_chunk(self):
    """Return the chunk following this one in the free list (abstract)."""
    method_name = self.fwd_chunk.__func__.__name__
    cls_name = self.__class__.__name__
    raise NotImplementedError("%s not implemented for %s" % (method_name,
                                                             cls_name))
def on_step_end(self, step, logs=None):
    """Notify every callback in the list that a step ended.

    Prefers the callback's ``on_step_end`` hook; falls back to
    ``on_batch_end`` for Keras-style callbacks that lack it.
    """
    # None sentinel avoids the shared mutable-default-dict pitfall of
    # `logs={}`; callbacks still receive a dict as before.
    if logs is None:
        logs = {}
    for callback in self.callbacks:
        hook = getattr(callback, 'on_step_end', None)
        if callable(hook):
            hook(step, logs=logs)
        else:
            callback.on_batch_end(step, logs=logs)
def show_image(kwargs, call=None):
if call != 'function':
raise SaltCloudSystemExit(
'The show_images function must be called with '
'-f or --function'
)
if not isinstance(kwargs, dict):
kwargs = {}
location = get_location()
if 'location' in kwargs:
... | Show the details from aliyun image |
def known(self, object):
    """Return the sxtype recorded in *object*'s metadata, or None if absent.

    The lookup fails (and returns None) when the object lacks
    ``__metadata__`` or the metadata lacks ``sxtype``.
    """
    try:
        return object.__metadata__.sxtype
    except AttributeError:
        # Narrowed from a bare 'except: pass', which also swallowed
        # KeyboardInterrupt/SystemExit and hid unrelated bugs.
        return None
def link_markdown_cells(cells, modules):
    """Create documentation links for all cells in markdown with backticks."""
    # The original used enumerate() but never used the index.
    for cell in cells:
        if cell['cell_type'] == 'markdown':
            cell['source'] = link_docstring(modules, cell['source'])
def character_span(self):
    """Return (start_char, end_char) covering this annotation's token span."""
    begin, end = self.token_span
    first_char = self.sentence[begin].character_span[0]
    last_char = self.sentence[end - 1].character_span[-1]
    return (first_char, last_char)
def timeline(self, request, drip_id, into_past, into_future):
from django.shortcuts import render, get_object_or_404
drip = get_object_or_404(Drip, id=drip_id)
shifted_drips = []
seen_users = set()
for shifted_drip in drip.drip.walk(into_past=int(into_past), into_future=int(into_... | Return a list of people who should get emails. |
def __default(self, ast_token):
if self.list_level == 1:
if self.list_entry is None:
self.list_entry = ast_token
elif not isinstance(ast_token, type(self.list_entry)):
self.final_ast_tokens.append(ast_token)
elif self.list_level == 0:
s... | Handle tokens inside the list or outside the list. |
def remove(self, key):
    """Return a new ImmutableDict identical to this one minus *key*."""
    pruned = ImmutableDict()
    pruned.tree = self.tree.remove(hash(key))
    pruned._length = self._length - 1
    return pruned
async def jsk_debug(self, ctx: commands.Context, *, command_string: str):
alt_ctx = await copy_context_with(ctx, content=ctx.prefix + command_string)
if alt_ctx.command is None:
return await ctx.send(f'Command "{alt_ctx.invoked_with}" is not found')
start = time.perf_counter()
... | Run a command timing execution and catching exceptions. |
def checkoutbranch(accountable, options):
    """Create a new issue and check out a branch named after it."""
    issue = accountable.checkout_branch(options)
    headers = sorted(['id', 'key', 'self'])
    row = [issue[header] for header in headers]
    print_table(SingleTable([headers, row]))
def delete_intf_router(self, tenant_id, tenant_name, router_id):
in_sub = self.get_in_subnet_id(tenant_id)
out_sub = self.get_out_subnet_id(tenant_id)
subnet_lst = set()
subnet_lst.add(in_sub)
subnet_lst.add(out_sub)
router_id = self.get_router_id(tenant_id, tenant_name)
... | Routine to delete the router. |
def add_cancel_button(self):
    """Append a Cancel button box that rejects the dialog."""
    buttons = QDialogButtonBox(QDialogButtonBox.Cancel, self)
    buttons.rejected.connect(self.reject)
    self.layout.addWidget(buttons)
def reset(self):
paths = []
for filename in os.listdir(self.cached_repo):
if filename.startswith(".git"):
continue
path = os.path.join(self.cached_repo, filename)
if os.path.isfile(path):
paths.append(path)
elif os.path.isdi... | Initialize the remote Git repository. |
def load_config(config_file='~/.stancache.ini'):
if not os.path.exists(config_file):
logging.warning('Config file does not exist: {}. Using default settings.'.format(config_file))
return
config = configparser.ConfigParser()
config.read(config_file)
if not config.has_section('main'):
... | Load config file into default settings |
def syntax_check():
with fab_settings(warn_only=True):
for file_type in settings.SYNTAX_CHECK:
needs_to_abort = False
if 1 not in env.ok_ret_codes:
env.ok_ret_codes.append(1)
output = local(
'find -name "{}" -print'.format(file_type),
... | Runs flake8 against the codebase. |
def server(self):
    """Return the remote file's size in bytes, or ' ' when unavailable."""
    try:
        response = urllib2.urlopen(self.registry)
        headers = response.info()
        return int(headers.getheaders("Content-Length")[0])
    except (urllib2.URLError, IndexError):
        # NOTE(review): returns a string sentinel on failure — callers
        # apparently expect this; confirm before changing the type.
        return " "
def view(self, cls=None):
    """Return a copy that shares this object's identity (same ``_id``).

    NOTE(review): *cls* is accepted but unused — kept only for
    interface compatibility with callers.
    """
    duplicate = self.copy()
    duplicate._id = self._id
    return duplicate
def timed(function):
    """Decorator printing how long each call of *function* took.

    Prints '<ClassName>.<method name>: <seconds> seconds' to stdout and
    returns the wrapped function's result unchanged.
    """
    @wraps(function)
    def function_wrapper(obj, *args, **kwargs):
        name = obj.__class__.__name__ + '.' + function.__name__
        # time.clock() was removed in Python 3.8; perf_counter() is the
        # documented replacement for measuring elapsed intervals.
        start = time.perf_counter()
        result = function(obj, *args, **kwargs)
        elapsed = time.perf_counter() - start
        print('{}: {:.4f} seconds'.format(name, elapsed))
        return result
    return function_wrapper
def load_config_file(self, path, profile=None):
    """Load the standard config file via the configured reader class."""
    reader_cls = self.get_config_reader()
    return reader_cls.load_config(self, path, profile=profile)
def use_any_status_sequence_rule_enabler_view(self):
    """Set the any-status view locally and pass through to provider sessions.

    Sessions lacking the method are skipped (best-effort pass-through).
    """
    self._operable_views['sequence_rule_enabler'] = ANY_STATUS
    for session in self._get_provider_sessions():
        try:
            session.use_any_status_sequence_rule_enabler_view()
        except AttributeError:
            # Provider session doesn't support this view; ignore.
            pass
def _start_console(self):
class InputStream:
def __init__(self):
self._data = b""
def write(self, data):
self._data += data
@asyncio.coroutine
def drain(self):
if not self.ws.closed:
self.ws.send_... | Start streaming the console via telnet |
def _sincedb_init(self):
if not self._sincedb_path:
return
if not os.path.exists(self._sincedb_path):
self._log_debug('initializing sincedb sqlite schema')
conn = sqlite3.connect(self._sincedb_path, isolation_level=None)
conn.execute(
)
conn.cl... | Initializes the sincedb schema in an sqlite db |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.