code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def send_activation(self, user, sender=None, **kwargs):
if user.is_active:
return False
token = self.get_token(user)
kwargs.update({"token": token})
self.email_message(
user, self.activation_subject, self.activation_body, sender, **kwargs
).send() | Invites a user to join the site |
def signature_validate(signature, error = None) :
"is signature a valid sequence of zero or more complete types."
error, my_error = _get_error(error)
result = dbus.dbus_signature_validate(signature.encode(), error._dbobj) != 0
my_error.raise_if_set()
return \
result | is signature a valid sequence of zero or more complete types. |
def camel_to_underscore(name):
as_list = []
length = len(name)
for index, i in enumerate(name):
if index != 0 and index != length - 1 and i.isupper():
as_list.append('_%s' % i.lower())
else:
as_list.append(i.lower())
return ''.join(as_list) | convert CamelCase style to under_score_case |
def find_matching(cls, message, channel):
return (
handler
for handler in cls._registry
if isinstance(handler, cls)
and handler.match(message, channel)
) | Yield ``cls`` subclasses that match message and channel |
def show_shortcuts(self, menu):
for element in getattr(self, menu + '_menu_actions'):
if element and isinstance(element, QAction):
if element._shown_shortcut is not None:
element.setShortcut(element._shown_shortcut) | Show action shortcuts in menu |
def calc_mb_ports(self):
model = self.device_info['model']
chassis = self.device_info['chassis']
num_ports = MODEL_MATRIX[model][chassis]['ports']
ports = []
if num_ports > 0:
port_type = MODEL_MATRIX[model][chassis]['type']
for i in range(num_ports):
port_temp = {'name': PORT_TYPES[port_type] + '0/' + str(i),
'id': self.port_id,
'port_number': i,
'slot_number': 0}
ports.append(port_temp)
self.port_id += 1
self.node['ports'].extend(ports) | Add the default ports to add to a router |
def xincludeProcessFlags(self, flags):
ret = libxml2mod.xmlXIncludeProcessFlags(self._o, flags)
return ret | Implement the XInclude substitution on the XML document @doc |
def _AssAttr(self, t):
self._dispatch(t.expr)
self._write('.'+t.attrname) | Handle assigning an attribute of an object |
def DispatchSpout(*a, **kw):
spout_class_name = get_config()['Spout']
spout_class = import_name(spout_class_name, default_ns='birding.spout')
return spout_class(*a, **kw) | Factory to dispatch spout class based on config. |
def pos_int(i):
try:
if isinstance(i, string_types):
i = int(i)
if not isinstance(i, int) or i < 0:
raise Exception()
except:
raise ValueError("Not a positive integer")
return i | Simple positive integer validation. |
def _domain_event_job_completed_cb(conn, domain, params, opaque):
_salt_send_domain_event(opaque, conn, domain, opaque['event'], {
'params': params
}) | Domain job completion events handler |
def _bse_cli_list_roles(args):
all_roles = api.get_roles()
if args.no_description:
liststr = all_roles.keys()
else:
liststr = format_columns(all_roles.items())
return '\n'.join(liststr) | Handles the list-roles subcommand |
def add_plugin(self, plugin, tabs, data, icon):
self.plugins_tabs.append((tabs, plugin))
self.plugins_data.append((data, icon))
self.plugins_instances.append(plugin) | Add a plugin to display its files. |
def _set_seed(self):
if self.flags['SEED'] is not None:
tf.set_random_seed(self.flags['SEED'])
np.random.seed(self.flags['SEED']) | Set random seed for numpy and tensorflow packages |
def convert_time_to_hour_minute(hour, minute, convention):
if hour is None:
hour = 0
if minute is None:
minute = 0
if convention is None:
convention = 'am'
hour = int(hour)
minute = int(minute)
if convention.lower() == 'pm':
hour += 12
return {'hours': hour, 'minutes': minute} | Convert time to hour, minute |
def addContinuousSet(self):
self._openRepo()
dataset = self._repo.getDatasetByName(self._args.datasetName)
filePath = self._getFilePath(self._args.filePath,
self._args.relativePath)
name = getNameFromPath(self._args.filePath)
continuousSet = continuous.FileContinuousSet(dataset, name)
referenceSetName = self._args.referenceSetName
if referenceSetName is None:
raise exceptions.RepoManagerException(
"A reference set name must be provided")
referenceSet = self._repo.getReferenceSetByName(referenceSetName)
continuousSet.setReferenceSet(referenceSet)
continuousSet.populateFromFile(filePath)
self._updateRepo(self._repo.insertContinuousSet, continuousSet) | Adds a new continuous set into this repo |
def neg_log_perplexity(batch, model_predictions):
_, targets = batch
model_predictions, targets = _make_list(model_predictions, targets)
xent = []
for (prediction, target) in zip(model_predictions, targets):
hot_target = layers.one_hot(target, prediction.shape[-1])
xent.append(np.sum(prediction * hot_target, axis=-1))
return masked_mean(xent, targets) | Calculate negative log perplexity. |
def _validate_conn(self, conn):
super(HTTPSConnectionPool, self)._validate_conn(conn)
if not getattr(conn, 'sock', None):
conn.connect()
if not conn.is_verified:
warnings.warn((
'Unverified HTTPS request is being made. '
'Adding certificate verification is strongly advised. See: '
'https://urllib3.readthedocs.org/en/latest/security.html'),
InsecureRequestWarning) | Called right before a request is made, after the socket is created. |
def compile_and_process(self, in_path):
out_path = self.path_mapping[in_path]
if not self.embed:
pdebug("[%s::%s] %s -> %s" % (
self.compiler_name,
self.name,
os.path.relpath(in_path),
os.path.relpath(out_path)),
groups=["build_task"],
autobreak=True)
else:
pdebug("[%s::%s] %s -> <cache>" % (
self.compiler_name,
self.name,
os.path.relpath(in_path)),
groups=["build_task"],
autobreak=True)
compiled_string = self.compile_file(in_path)
if not self.embed:
if compiled_string != "":
with open(out_path, "w") as f:
f.write(compiled_string)
return compiled_string | compile a file, save it to the ouput file if the inline flag true |
def search_function(cls, encoding):
if encoding == cls._codec_name:
return codecs.CodecInfo(
name=cls._codec_name,
encode=cls.encode,
decode=cls.decode,
)
return None | Search function to find 'rotunicode' codec. |
def _load_managed_entries(self):
for process_name, process_entry in context.process_context.items():
if isinstance(process_entry, ManagedProcessEntry):
function = self.fire_managed_worker
else:
self.logger.warning('Skipping non-managed context entry {0} of type {1}.'
.format(process_name, process_entry.__class__.__name__))
continue
try:
self._register_process_entry(process_entry, function)
except Exception:
self.logger.error('Managed Thread Handler {0} failed to start. Skipping it.'
.format(process_entry.key), exc_info=True) | loads scheduler managed entries. no start-up procedures are performed |
def defpm(name, *lines):
'define a new package manager'
@register_macro(name, group='package managers')
@shell_escape_args
def package_manager(*packages):
if not packages:
return
sh_packages = ' '.join(pkg for pkg in packages)
expanded_lines = [line.format(sh_packages) for line in lines]
return eval([['run'] + expanded_lines])
package_manager.__doc__ = "install packages with {}".format(name) | define a new package manager |
def _usernamesToSidObjects(cls, val, **kwargs):
if not val:
return val
if isinstance(val, six.string_types):
val = val.split(',')
sids = []
for _user in val:
try:
sid = win32security.LookupAccountName('', _user)[0]
sids.append(sid)
except Exception as e:
log.exception('Handle this explicitly')
raise CommandExecutionError((
'There was an error obtaining the SID of user "{0}". Error '
'returned: {1}'
).format(_user, e))
return sids | converts a list of usernames to sid objects |
def max_neg(self):
if self.__len__() == 0:
return ArgumentError('empty set has no maximum negative value.')
if self.contains(0):
return None
negative = [interval for interval in self.intervals
if interval.right < 0]
if len(negative) == 0:
return None
return numpy.max(list(map(lambda i: i.right, negative))) | Returns maximum negative value or None. |
def uploaded_filepath(instance, filename):
ext = filename.split('.')[-1]
filename = "%s.%s" % (uuid.uuid4(), ext)
today = datetime.now().strftime('%Y-%m-%d')
return os.path.join('django-summernote', today, filename) | Returns default filepath for uploaded files. |
def read_PIA0_B_control(self, cpu_cycles, op_address, address):
value = self.pia_0_B_control.value
log.error(
"%04x| read $%04x (PIA 0 B side Control reg.) send $%02x (%s) back.\t|%s",
op_address, address, value, byte2bit_string(value),
self.cfg.mem_info.get_shortest(op_address)
)
return value | read from 0xff03 -> PIA 0 B side Control reg. |
def write_docstring(self):
line_to_cursor = self.code_editor.get_text('sol', 'cursor')
if self.is_beginning_triple_quotes(line_to_cursor):
cursor = self.code_editor.textCursor()
prev_pos = cursor.position()
quote = line_to_cursor[-1]
docstring_type = CONF.get('editor', 'docstring_type')
docstring = self._generate_docstring(docstring_type, quote)
if docstring:
self.code_editor.insert_text(docstring)
cursor = self.code_editor.textCursor()
cursor.setPosition(prev_pos, QTextCursor.KeepAnchor)
cursor.movePosition(QTextCursor.NextBlock)
cursor.movePosition(QTextCursor.EndOfLine,
QTextCursor.KeepAnchor)
cursor.clearSelection()
self.code_editor.setTextCursor(cursor)
return True
return False | Write docstring to editor. |
def _report_problem(self, problem, level=logging.ERROR):
problem = self.basename + ': ' + problem
if self._logger.isEnabledFor(level):
self._problematic = True
if self._check_raises:
raise DapInvalid(problem)
self._logger.log(level, problem) | Report a given problem |
def execute(self, request_object):
if not request_object.is_valid:
return ResponseFailure.build_from_invalid_request(
request_object)
try:
return self.process_request(request_object)
except ValidationError as err:
return ResponseFailure.build_unprocessable_error(err.normalized_messages)
except ObjectNotFoundError:
return ResponseFailure.build_not_found(
[{'identifier': 'Object with this ID does not exist.'}])
except Exception as exc:
logger.error(
f'{self.__class__.__name__} execution failed due to error {exc}',
exc_info=True)
return ResponseFailure.build_system_error([{exc.__class__.__name__: exc}]) | Generic executor method of all UseCases |
def load_object(self, kwargs):
self.object = None
self.config = None
self.model = self.get_model_class()
kwargs.pop('app', None)
kwargs.pop('model', None)
if self.model and kwargs.get('pk', False):
try:
self.object = self.model.objects.get(pk=kwargs.pop('pk'))
except Exception:
raise Exception("Could not load {}".format(self.model.__name__.lower()))
setattr(self, self.model.__name__.lower(), self.object)
return kwargs | Load object and model config and remove pk from kwargs |
def make_func_call(id_, lineno, params):
return symbols.FUNCCALL.make_node(id_, params, lineno) | This will return an AST node for a function call. |
def url(self):
path = None
nodes = self.parents()
while not nodes.empty():
path = urljoin(path, nodes.get().path())
return path | Returns the whole URL from the base to this node. |
def derive_single_object_url_pattern(slug_url_kwarg, path, action):
if slug_url_kwarg:
return r'^%s/%s/(?P<%s>[^/]+)/$' % (path, action, slug_url_kwarg)
else:
return r'^%s/%s/(?P<pk>\d+)/$' % (path, action) | Utility function called by class methods for single object views |
def add_to_line_plot(ax, x, y, color = '0.' , label = ''):
plt.sca(ax)
plt.plot(x,y, color = color, label = label)
return ax | This function takes an axes and adds one line to it |
def _read_sequences(self, graph):
for e in self._get_elements(graph, SBOL.Sequence):
identity = e[0]
c = self._get_rdf_identified(graph, identity)
c['elements'] = self._get_triplet_value(graph, identity, SBOL.elements)
c['encoding'] = self._get_triplet_value(graph, identity, SBOL.encoding)
seq = Sequence(**c)
self._sequences[identity.toPython()] = seq
self._collection_store[identity.toPython()] = seq | Read graph and add sequences to document |
def _save_trace(self):
stack_trace = stack()
try:
self.trace = []
for frm in stack_trace[5:]:
self.trace.insert(0, frm[1:])
finally:
del stack_trace | Save current stack trace as formatted string. |
def _reseed(self):
self._char = choice(self._snow_chars)
self._rate = randint(1, 3)
self._x = randint(0, self._screen.width - 1)
self._y = self._screen.start_line + randint(0, self._rate) | Randomly create a new snowflake once this one is finished. |
def l2n(l, c):
"host to network long"
c = []
c.append(int((l >> 24) & U32(0xFF)))
c.append(int((l >> 16) & U32(0xFF)))
c.append(int((l >> 8) & U32(0xFF)))
c.append(int((l ) & U32(0xFF)))
return c | host to network long |
def run_thread(agent_classes, players, map_name, visualize):
with sc2_env.SC2Env(
map_name=map_name,
players=players,
agent_interface_format=sc2_env.parse_agent_interface_format(
feature_screen=FLAGS.feature_screen_size,
feature_minimap=FLAGS.feature_minimap_size,
rgb_screen=FLAGS.rgb_screen_size,
rgb_minimap=FLAGS.rgb_minimap_size,
action_space=FLAGS.action_space,
use_feature_units=FLAGS.use_feature_units),
step_mul=FLAGS.step_mul,
game_steps_per_episode=FLAGS.game_steps_per_episode,
disable_fog=FLAGS.disable_fog,
visualize=visualize) as env:
env = available_actions_printer.AvailableActionsPrinter(env)
agents = [agent_cls() for agent_cls in agent_classes]
run_loop.run_loop(agents, env, FLAGS.max_agent_steps, FLAGS.max_episodes)
if FLAGS.save_replay:
env.save_replay(agent_classes[0].__name__) | Run one thread worth of the environment with agents. |
def _validate_index(self, index):
if isinstance(index, slice):
if index.step and index.step != 1:
raise IndexError('Step is not allowed.')
indexes = (index.start, index.stop)
else:
indexes = (index,)
for index in indexes:
if index is not None and index < 0:
raise IndexError('Negative indexes are not allowed.') | Validates given index, eventually raises errors. |
def _match_real(filename, include, exclude, follow, symlinks):
sep = '\\' if util.platform() == "windows" else '/'
if isinstance(filename, bytes):
sep = os.fsencode(sep)
if not filename.endswith(sep) and os.path.isdir(filename):
filename += sep
matched = False
for pattern in include:
if _fs_match(pattern, filename, sep, follow, symlinks):
matched = True
break
if matched:
matched = True
if exclude:
for pattern in exclude:
if _fs_match(pattern, filename, sep, follow, symlinks):
matched = False
break
return matched | Match real filename includes and excludes. |
def _emplace_transcript(transcripts, parent):
transcripts.sort(key=lambda t: (len(t), t.get_attribute('ID')))
pt = transcripts.pop()
parent.children = [pt] | Retrieve the primary transcript and discard all others. |
def _add_cli_args(self):
self._arg_parser.add_argument('-l', '--list',
action='store_true',
help='List installed sprockets apps')
self._arg_parser.add_argument('-s', '--syslog',
action='store_true',
help='Log to syslog')
self._arg_parser.add_argument('-v', '--verbose',
action='count',
help=('Verbose logging output, use -vv '
'for DEBUG level logging'))
self._arg_parser.add_argument('--version',
action='version',
version='sprockets v%s ' % __version__)
subparsers = self._arg_parser.add_subparsers(dest='controller',
help=DESCRIPTION)
for key in self._controllers:
help_text = self._get_controller_help(key)
sub_parser = subparsers.add_parser(key, help=help_text)
try:
self._controllers[key].add_cli_arguments(sub_parser)
except AttributeError:
LOGGER.debug('%s missing add_cli_arguments()', key)
self._arg_parser.add_argument('application',
action="store",
help='The sprockets app to run') | Add the cli arguments to the argument parser. |
def getHostsFromFile(filename):
valid_hostname = r"^[^ /\t=\n]+"
workers = r"\d+"
hostname_re = re.compile(valid_hostname)
worker_re = re.compile(workers)
hosts = []
with open(filename) as f:
for line in f:
if re.search('[\[\]]', line):
hosts = hosts + parseSLURM(line.strip())
else:
host = hostname_re.search(line.strip())
if host:
hostname = host.group()
n = worker_re.search(line[host.end():])
if n:
n = n.group()
else:
n = 0
hosts.append((hostname, int(n)))
return hosts | Parse a file to return a list of hosts. |
def tls_meta_data(self):
for flow in self.input_stream:
if flow['protocol'] != 'TCP':
continue
if flow['direction'] == 'CTS':
try:
tls_records, bytes_consumed = dpkt.ssl.tls_multi_factory(flow['payload'])
if bytes_consumed != len(flow['payload']):
logger.warning('Incomplete TLS record at the end...')
flow['tls'] = {'type':'TLS_CTS', 'data':{'tls_records': tls_records, 'uri':None, 'headers':None}}
except (dpkt.dpkt.NeedData, dpkt.dpkt.UnpackError, dpkt.ssl.SSL3Exception):
flow['tls'] = None
else:
try:
tls_records, bytes_consumed = dpkt.ssl.tls_multi_factory(flow['payload'])
if bytes_consumed != len(flow['payload']):
logger.warning('Incomplete TLS record at the end...')
flow['tls'] = {'type':'TLS_STC', 'data':{'tls_records': tls_records, 'uri':None, 'headers':None}}
except (dpkt.dpkt.NeedData, dpkt.dpkt.UnpackError, dpkt.ssl.SSL3Exception):
flow['tls'] = None
yield flow | Pull out the TLS metadata for each flow in the input_stream |
def _init_grps(code2nt):
seen = set()
seen_add = seen.add
groups = [nt.group for nt in code2nt.values()]
return [g for g in groups if not (g in seen or seen_add(g))] | Return list of groups in same order as in code2nt |
def vertices(self):
return {'a': self.a, 'a_neg': self.a_neg,
'b': self.b, 'b_neg': self.b_neg} | A dictionary of four points where the axes intersect the ellipse, dict. |
def avail_images(call=None):
all_servers = list_nodes_full()
templates = {}
for server in all_servers:
if server["IsTemplate"]:
templates.update({"Template Name": server["Name"]})
return templates | returns a list of images available to you |
def simulate_one(fw, name, size):
from random import choice
seq = Seq(''.join(choice('ACGT') for _ in xrange(size)))
s = SeqRecord(seq, id=name, description="Fake sequence")
SeqIO.write([s], fw, "fasta") | Simulate a random sequence with name and size |
def create_network(self):
class_ = getattr(networks, self.network_class)
return class_(max_size=self.quorum) | Create a new network by reading the configuration file. |
def to_file(cls, status):
return {
Status.PENDING: DataFile.PENDING,
Status.WAITING: DataFile.PENDING,
Status.COMPLETED: DataFile.COMPLETED,
Status.DELETED: DataFile.COMPLETED
}[status] | Returns the file in which this task is stored. |
def _fixup_cdef_enums(string, reg=re.compile(r"=\s*(\d+)\s*<<\s*(\d+)")):
def repl_shift(match):
shift_by = int(match.group(2))
value = int(match.group(1))
int_value = ctypes.c_int(value << shift_by).value
return "= %s" % str(int_value)
return reg.sub(repl_shift, string) | Converts some common enum expressions to constants |
def new_deploy(py_ver: PyVer, release_target: ReleaseTarget):
cache_file = f'app_{py_ver.name}.tar'
template = yaml.safe_load(f
)
return deploy_name(py_ver, release_target), template | Job for deploying package to pypi |
def update_tool_tip(self, service_running: bool):
if service_running:
self.setToolTip(TOOLTIP_RUNNING)
else:
self.setToolTip(TOOLTIP_PAUSED) | Slot function that updates the tooltip when the user activates or deactivates the expansion service. |
def stats_flush(self):
while True:
try:
eventlet.sleep(self.flush_interval)
if self.debug:
print "seen %d stats so far." % self.stats_seen
print "current counters: %s" % self.counters
if self.pickle_proto:
payload = self.pickle_payload()
if payload:
for batch in payload:
self.report_stats(batch)
else:
payload = self.plain_payload()
if payload:
self.report_stats(payload)
except:
self.logger.critical('Encountered error in stats_flush loop') | Periodically flush stats to graphite |
def xpointerNewRangeNodes(self, end):
if end is None: end__o = None
else: end__o = end._o
ret = libxml2mod.xmlXPtrNewRangeNodes(self._o, end__o)
if ret is None:raise treeError('xmlXPtrNewRangeNodes() failed')
return xpathObjectRet(ret) | Create a new xmlXPathObjectPtr of type range using 2 nodes |
def extract_translations(self, string):
tree = ast.parse(string)
visitor = TransVisitor(
self.tranz_functions,
self.tranzchoice_functions)
visitor.visit(tree)
return visitor.translations | Extract messages from Python string. |
def describe_tile(self, index):
if index >= len(self.tile_manager.registered_tiles):
tile = TileInfo.CreateInvalid()
else:
tile = self.tile_manager.registered_tiles[index]
return tile.registration_packet() | Get the registration information for the tile at the given index. |
def cmd_lockup_autopilot(self, args):
if len(args) > 0 and args[0] == 'IREALLYMEANIT':
print("Sending lockup command")
self.master.mav.command_long_send(self.settings.target_system, self.settings.target_component,
mavutil.mavlink.MAV_CMD_PREFLIGHT_REBOOT_SHUTDOWN, 0,
42, 24, 71, 93, 0, 0, 0)
else:
print("Invalid lockup command") | lockup autopilot for watchdog testing |
def string_to_state(s):
s = lexer(s)
attrs = {}
while True:
if s.cur == '>':
attrs['start'] = True
s.pos += 1
elif s.cur == '@':
attrs['accept'] = True
s.pos += 1
else:
break
x = parse_symbol(s)
parse_end(s)
return x, attrs | s is a string possibly preceded by > or @. |
def current_view(self):
state = self.state
return (state.lat, state.lon, state.width, state.height,
state.ground_width, state.mt.tiles_pending()) | return a tuple representing the current view |
def init_converse_args(self, parser):
parser.add_argument('-f', '--configuration', dest='config', default=DEFAULT_USER_CONFIG_PATH,
help='the path to the configuration file to use -- ./config.yaml by default')
parser.add_argument('-m', '--model', dest='model_name', help='the name of the (pretrained) dialogue model to use') | Only invoked conditionally if subcommand is 'converse' |
def from_validation_exception(cls, exception, **kwargs):
errors = []
def flatten(error, path=""):
if isinstance(error, halogen.exceptions.ValidationError):
if not path.endswith("/"):
path += "/"
if error.attr is not None:
path += error.attr
elif error.index is not None:
path += six.text_type(error.index)
for e in error.errors:
flatten(e, path)
else:
message = error
if isinstance(error, Exception):
try:
message = error.message
except AttributeError:
message = six.text_type(error)
errors.append(Error(message=message, path=path))
flatten(exception)
message = kwargs.pop("message", "Validation error.")
return cls(message=message, errors=sorted(errors, key=lambda error: error.path or ""), **kwargs) | Create an error from validation exception. |
def _get_digraph_char(self, cli):
" Return `False`, or the Digraph symbol to be used. "
if cli.quoted_insert:
return '^'
if cli.vi_state.waiting_for_digraph:
if cli.vi_state.digraph_symbol1:
return cli.vi_state.digraph_symbol1
return '?'
return False | Return `False`, or the Digraph symbol to be used. |
def error(message, code=1):
if message:
print('ERROR: {0}'.format(message), file=sys.stderr)
else:
print(file=sys.stderr)
sys.exit(code) | Prints an error message to stderr and exits with a status of 1 by default. |
def location_gmap(context, location):
gmapq = None
if getattr(MapFieldPanel, "UsingWagtailGMaps", False):
gmapq = location
return {'gmapq': gmapq} | Display a link to Google maps iff we are using WagtailGMaps |
def to_dict(self):
return {
'high': self.high,
'low': self.low,
'mean': self.mean,
'count': self.count,
'deviation': self.deviation,
} | Pack the stats computed into a dictionary. |
def _update_uncompressed(collection_name, upsert, multi, spec,
doc, safe, last_error_args, check_keys, opts):
op_update, max_bson_size = _update(
collection_name, upsert, multi, spec, doc, check_keys, opts)
rid, msg = __pack_message(2001, op_update)
if safe:
rid, gle, _ = __last_error(collection_name, last_error_args)
return rid, msg + gle, max_bson_size
return rid, msg, max_bson_size | Internal update message helper. |
def isValidPublicAddress(address: str) -> bool:
valid = False
if len(address) == 34 and address[0] == 'A':
try:
base58.b58decode_check(address.encode())
valid = True
except ValueError:
valid = False
return valid | Check if address is a valid NEO address |
def setup(self, app):
for other in app.plugins:
if not isinstance(other, MySQLPlugin):
continue
if other.keyword == self.keyword:
raise PluginError("Found another mysql plugin with conflicting settings (non-unique keyword).")
elif other.name == self.name:
self.name += '_%s' % self.keyword | Make sure that other installed plugins don't affect the same keyword argument. |
def insert(self, anchor):
self.prec = anchor.prec
self.succ = anchor
self.succ.prec = self
self.prec.succ = self | insert list item before anchor |
def request_signature(self):
signature = self.query_parameters.get(_x_amz_signature)
if signature is not None:
signature = signature[0]
else:
signature = self.authorization_header_parameters.get(_signature)
if signature is None:
raise AttributeError("Signature was not passed in the request")
return signature | The signature passed in the request. |
def copy_model_to_fp32(m, optim):
fp32_params = [m_param.clone().type(torch.cuda.FloatTensor).detach() for m_param in trainable_params_(m)]
optim_groups = [group['params'] for group in optim.param_groups]
iter_fp32_params = iter(fp32_params)
for group_params in optim_groups:
for i in range(len(group_params)):
if not group_params[i].requires_grad: continue
fp32_param = next(iter_fp32_params)
assert(fp32_param.shape == group_params[i].shape)
fp32_param.requires_grad = group_params[i].requires_grad
group_params[i] = fp32_param
return fp32_params | Creates a fp32 copy of model parameters and sets optimizer parameters |
def updateRPYLocations(self):
self.rollText.set_position((self.leftPos+(self.vertSize/10.0),-0.97+(2*self.vertSize)-(self.vertSize/10.0)))
self.pitchText.set_position((self.leftPos+(self.vertSize/10.0),-0.97+self.vertSize-(0.5*self.vertSize/10.0)))
self.yawText.set_position((self.leftPos+(self.vertSize/10.0),-0.97))
self.rollText.set_size(self.fontSize)
self.pitchText.set_size(self.fontSize)
self.yawText.set_size(self.fontSize) | Update the locations of roll, pitch, yaw text. |
def update(self, new_email_address, name):
params = {"email": self.email_address}
body = {
"EmailAddress": new_email_address,
"Name": name}
response = self._put("/admins.json",
body=json.dumps(body), params=params)
self.email_address = new_email_address | Updates the details for an administrator. |
def paste_from_clipboard(self):
to = self.get_current_path()
if os.path.isfile(to):
to = os.path.abspath(os.path.join(to, os.pardir))
mime = QtWidgets.QApplication.clipboard().mimeData()
paste_operation = None
if mime.hasFormat(self._UrlListMimeData.format(copy=True)):
paste_operation = True
elif mime.hasFormat(self._UrlListMimeData.format(copy=False)):
paste_operation = False
if paste_operation is not None:
self._paste(
self._UrlListMimeData.list_from(mime, copy=paste_operation),
to, copy=paste_operation) | Pastes files from clipboard. |
def alerts(self):
endpoint = '/'.join((self.endpoint, self.id, 'alerts'))
return self.alertFactory.find(
endpoint=endpoint,
api_key=self.api_key,
) | Query for alerts attached to this incident. |
def update(self):
for p in self._plugins:
if self._plugins[p].is_disable():
continue
self._plugins[p].input_method = 'snmp'
self._plugins[p].short_system_name = self.system_name
try:
self._plugins[p].update()
except Exception as e:
logger.error("Update {} failed: {}".format(p, e))
else:
self._plugins[p].update_stats_history()
self._plugins[p].update_views() | Update the stats using SNMP. |
def header(self, name, default=None):
wsgi_header = "HTTP_{0}".format(name.upper())
try:
return self.env_raw[wsgi_header]
except KeyError:
return default | Returns the value of the HTTP header identified by `name`. |
def _notify_change(self, model, attr, old, new, hint=None, setter=None, callback_invoker=None):
if attr == 'name':
if old is not None:
self._all_models_by_name.remove_value(old, model)
if new is not None:
self._all_models_by_name.add_value(new, model)
if hint is None:
serializable_new = model.lookup(attr).serializable_value(model)
else:
serializable_new = None
event = ModelChangedEvent(self, model, attr, old, new, serializable_new, hint, setter, callback_invoker)
self._trigger_on_change(event) | Called by Model when it changes |
def cli(obj):
client = obj['client']
metrics = client.mgmt_status()['metrics']
headers = {'title': 'METRIC', 'type': 'TYPE', 'name': 'NAME', 'value': 'VALUE', 'average': 'AVERAGE'}
click.echo(tabulate([{
'title': m['title'],
'type': m['type'],
'name': '{}.{}'.format(m['group'], m['name']),
'value': m.get('value', None) or m.get('count', 0),
'average': int(m['totalTime']) * 1.0 / int(m['count']) if m['type'] == 'timer' else None
} for m in metrics], headers=headers, tablefmt=obj['output'])) | Display API server switch status and usage metrics. |
def show_rbac_policy(self, rbac_policy_id, **_params):
return self.get(self.rbac_policy_path % rbac_policy_id,
params=_params) | Fetch information of a certain RBAC policy. |
def format_help(self):
if self._subparsers:
for action in self._subparsers._actions:
if isinstance(action, LazySubParsersAction):
for parser_name, parser in action._name_parser_map.iteritems():
action._setup_subparser(parser_name, parser)
return super(LazyArgumentParser, self).format_help() | Sets up all sub-parsers when help is requested. |
async def block(self) -> None:
await asyncio.gather(
*itertools.chain(
(sp.wait_done() for sp in self._running_set),
(sp.wait_done() for sp in self._pending_set))) | Block until all running and pending subprocesses have finished. |
def read_temperature(self):
UT = self.read_raw_temp()
X1 = ((UT - self.cal_AC6) * self.cal_AC5) >> 15
X2 = (self.cal_MC << 11) // (X1 + self.cal_MD)
B5 = X1 + X2
temp = ((B5 + 8) >> 4) / 10.0
self.logger.debug('Calibrated temperature {0} C', temp)
return temp | Gets the compensated temperature in degrees celsius. |
def exp10(x, context=None):
return _apply_function_in_current_context(
BigFloat,
mpfr.mpfr_exp10,
(BigFloat._implicit_convert(x),),
context,
) | Return ten raised to the power x. |
def list_view_changed(self, widget, event, data=None):
adj = self.scrolled_window.get_vadjustment()
adj.set_value(adj.get_upper() - adj.get_page_size()) | Function shows last rows. |
def export(cls, folder, particles, datetimes):
normalized_locations = [particle.normalized_locations(datetimes) for particle in particles]
track_coords = []
for x in xrange(0, len(datetimes)):
points = MultiPoint([loc[x].point.coords[0] for loc in normalized_locations])
track_coords.append(points.centroid.coords[0])
ls = LineString(track_coords)
if not os.path.exists(folder):
os.makedirs(folder)
filepath = os.path.join(folder, "trackline.geojson")
f = open(filepath, "wb")
f.write(json.dumps(mapping(ls)))
f.close()
return filepath | Export trackline data to GeoJSON file |
async def message_handler(self, event):
chat = await event.get_chat()
if event.is_group:
if event.out:
sprint('>> sent "{}" to chat {}'.format(
event.text, get_display_name(chat)
))
else:
sprint('<< {} @ {} sent "{}"'.format(
get_display_name(await event.get_sender()),
get_display_name(chat),
event.text
))
else:
if event.out:
sprint('>> "{}" to user {}'.format(
event.text, get_display_name(chat)
))
else:
sprint('<< {} sent "{}"'.format(
get_display_name(chat), event.text
)) | Callback method for received events.NewMessage |
def process_event(self, event):
if not isinstance(event, KindleEvent):
pass
elif isinstance(event, AddEvent):
self._data[event.asin] = BookSnapshot(event.asin)
elif isinstance(event, SetReadingEvent):
self._data[event.asin].status = ReadingStatus.CURRENT
self._data[event.asin].progress = event.initial_progress
elif isinstance(event, ReadEvent):
self._data[event.asin].progress += event.progress
elif isinstance(event, SetFinishedEvent):
self._data[event.asin].status = ReadingStatus.COMPLETED
else:
raise TypeError | Apply an event to the snapshot instance |
def current_item(self):
if self._history and self._index >= 0:
self._check_index()
return self._history[self._index] | Return the current element. |
def c_array(ctype, values):
if isinstance(values, np.ndarray) and values.dtype.itemsize == ctypes.sizeof(ctype):
return (ctype * len(values)).from_buffer_copy(values)
return (ctype * len(values))(*values) | Convert a python string to c array. |
def _collapse_device(self, node, flat):
items = [item
for branch in node.branches
for item in self._collapse_device(branch, flat)
if item]
show_all = not flat or self._quickmenu_actions == 'all'
methods = node.methods if show_all else [
method
for method in node.methods
if method.method in self._quickmenu_actions
]
if flat:
items.extend(methods)
else:
items.append(MenuSection(None, methods))
return items | Collapse device hierarchy into a flat folder. |
def query_random(**kwargs):
if 'limit' in kwargs:
limit = kwargs['limit']
elif 'num' in kwargs:
limit = kwargs['num']
else:
limit = 10
kind = kwargs.get('kind', None)
if kind:
rand_recs = TabPost.select().where(
(TabPost.kind == kind) &
(TabPost.valid == 1)
).order_by(
peewee.fn.Random()
).limit(limit)
else:
rand_recs = TabPost.select().where(
TabPost.valid == 1
).order_by(
peewee.fn.Random()
).limit(limit)
return rand_recs | Return the random records of centain kind. |
def gen500(request, baseURI, project=None):
return HttpResponseServerError(
render_to_response('plugIt/500.html', {
'context': {
'ebuio_baseUrl': baseURI,
'ebuio_userMode': request.session.get('plugit-standalone-usermode', 'ano'),
},
'project': project
}, context_instance=RequestContext(request))) | Return a 500 error |
def status(self, job_id):
check_jobid(job_id)
queue = self._get_queue()
if queue is None:
raise QueueDoesntExist
ret, output = self._call('%s %s' % (
shell_escape(queue / 'commands/status'),
job_id),
True)
if ret == 0:
directory, result = output.splitlines()
result = result.decode('utf-8')
return RemoteQueue.JOB_DONE, PosixPath(directory), result
elif ret == 2:
directory = output.splitlines()[0]
return RemoteQueue.JOB_RUNNING, PosixPath(directory), None
elif ret == 3:
raise JobNotFound
else:
raise RemoteCommandFailure(command="commands/status",
ret=ret) | Gets the status of a previously-submitted job. |
def GetKnowledgeBase(rdf_client_obj, allow_uninitialized=False):
if not allow_uninitialized:
if rdf_client_obj is None:
raise artifact_utils.KnowledgeBaseUninitializedError(
"No client snapshot given.")
if rdf_client_obj.knowledge_base is None:
raise artifact_utils.KnowledgeBaseUninitializedError(
"KnowledgeBase empty for %s." % rdf_client_obj.client_id)
kb = rdf_client_obj.knowledge_base
if not kb.os:
raise artifact_utils.KnowledgeBaseAttributesMissingError(
"KnowledgeBase missing OS for %s. Knowledgebase content: %s" %
(rdf_client_obj.client_id, kb))
if rdf_client_obj is None or rdf_client_obj.knowledge_base is None:
return rdf_client.KnowledgeBase()
version = rdf_client_obj.os_version.split(".")
kb = rdf_client_obj.knowledge_base
try:
kb.os_major_version = int(version[0])
if len(version) >= 1:
kb.os_minor_version = int(version[1])
except ValueError:
pass
return kb | Returns a knowledgebase from an rdf client object. |
def hkm_fc(fdata, Nmax, m, s):
f = fdata[:, m]
L1 = f.size
MM = int(L1 / 2)
Q = s.size
ff = np.zeros(Q, dtype=np.complex128)
for n in xrange(MM, L1):
ff[n] = f[n - MM]
for n in xrange(0, MM):
ff[n] = f[n + MM]
F = np.fft.fft(ff)
S = np.fft.fft(s)
out = 4 * np.pi * np.fft.ifft(F * S)
return out[0:Nmax + 1] | Assume fdata has even rows |
def _looks_like_lru_cache(node):
if not node.decorators:
return False
for decorator in node.decorators.nodes:
if not isinstance(decorator, astroid.Call):
continue
if _looks_like_functools_member(decorator, "lru_cache"):
return True
return False | Check if the given function node is decorated with lru_cache. |
def _score_for_model(meta):
mean_acc = list()
pipes = meta["pipeline"]
acc = meta["accuracy"]
if "tagger" in pipes:
mean_acc.append(acc["tags_acc"])
if "parser" in pipes:
mean_acc.append((acc["uas"] + acc["las"]) / 2)
if "ner" in pipes:
mean_acc.append((acc["ents_p"] + acc["ents_r"] + acc["ents_f"]) / 3)
return sum(mean_acc) / len(mean_acc) | Returns mean score between tasks in pipeline that can be used for early stopping. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.