| code (string, lengths 51-2.34k) | docstring (string, lengths 11-171) |
|---|---|
def update_project_actions(self):
    if self.recent_projects:
        self.clear_recent_projects_action.setEnabled(True)
    else:
        self.clear_recent_projects_action.setEnabled(False)
    active = bool(self.get_active_project_path())
    self.close_project_action.setEnabled(active)... | Update actions of the Projects menu |
def _check_consistent_units_orbitInput(self,orb):
    if self._roSet and orb._roSet:
        assert m.fabs(self._ro-orb._ro) < 10.**-10., 'Physical conversion for the actionAngle object is not consistent with that of the Orbit given to it'
    if self._voSet and orb._voSet:
        assert m.fabs(self._v... | Internal function to check that the set of units for this object is consistent with that for an input orbit |
def euler_options(fn):
    euler_functions = cheat, generate, preview, skip, verify, verify_all
    for option in reversed(euler_functions):
        name, docstring = option.__name__, option.__doc__
        kwargs = {'flag_value': option, 'help': docstring}
        flag = '--%s' % name.replace('_', '-')
        flags =... | Decorator to link CLI options with their appropriate functions |
def ascent(self):
    total_ascent = 0.0
    altitude_data = self.altitude_points()
    for i in range(len(altitude_data) - 1):
        diff = altitude_data[i+1] - altitude_data[i]
        if diff > 0.0:
            total_ascent += diff
    return total_ascent | Returns ascent of workout in meters |
def _zom_index(lexer):
    tok = next(lexer)
    if isinstance(tok, COMMA):
        first = _expect_token(lexer, {IntegerToken}).value
        rest = _zom_index(lexer)
        return (first, ) + rest
    else:
        lexer.unpop_token(tok)
        return tuple() | Return zero or more indices. |
def remove_outliers(series, stddev):
    return series[(series - series.mean()).abs() < stddev * series.std()] | Remove the outliers from a series. |
def update_attribute(self, attr, value):
    update = [fapi._attr_up(attr, value)]
    r = fapi.update_workspace_attributes(self.namespace, self.name,
                                        update, self.api_url)
    fapi._check_response_code(r, 200) | Set the value of a workspace attribute. |
def list_network_ip_availabilities(self, retrieve_all=True, **_params):
    return self.list('network_ip_availabilities',
                     self.network_ip_availabilities_path,
                     retrieve_all, **_params) | Fetches IP availibility information for all networks |
def mod(x, y, context=None):
    return _apply_function_in_current_context(
        BigFloat,
        mpfr_mod,
        (
            BigFloat._implicit_convert(x),
            BigFloat._implicit_convert(y),
        ),
        context,
    ) | Return the remainder of x divided by y, with sign matching that of y. |
def process_token(self, tok):
    if(tok[0].__str__() in ('Token.Comment.Multiline', 'Token.Comment',
                            'Token.Literal.String.Doc')):
        self.comments += tok[1].count('\n')+1
    elif(tok[0].__str__() in ('Token.Comment.Single')):
        self.comments += 1
    elif(self.contains_... | count comments and non-empty lines that contain code |
def link_version(self, source, target):
    if not hasattr(target, VERSION_ID):
        logger.warn("No iniatial version found for '{}'"
                    .format(repr(target)))
        return
    if not hasattr(source, REFERENCE_VERSIONS):
        source.reference_versions = {}
    target... | Link the current version of the target on the source |
def _ascii_tree(self, indent: str, no_types: bool, val_count: bool) -> str:
    def suffix(sn):
        return f" {{{sn.val_count}}}\n" if val_count else "\n"
    if not self.children:
        return ""
    cs = []
    for c in self.children:
        cs.extend(c._flatten())
    cs.sort(k... | Return the receiver's subtree as ASCII art. |
def example_sync_client(api_client):
    try:
        pprint(api_client.echo())
    except errors.RequestError as exc:
        log.exception('Exception occurred: %s', exc) | Example sync client use with. |
def log_likelihood(C, T):
    C = C.tocsr()
    T = T.tocsr()
    ind = scipy.nonzero(C)
    relT = np.array(T[ind])[0, :]
    relT = np.log(relT)
    relC = np.array(C[ind])[0, :]
    return relT.dot(relC) | implementation of likelihood of C given T |
def display(self, messages, sig="", debug=False):
    full_message = "".join(
        sig + line for line in " ".join(
            str(msg) for msg in messages
        ).splitlines(True)
    )
    if not full_message:
        full_message = sig.rstrip()
    if debug:
        printerr... | Prints an iterator of messages. |
def _flush_graph_val(self):
    if not self._graphvals2set:
        return
    delafter = {}
    for graph, key, branch, turn, tick, value in self._graphvals2set:
        if (graph, key, branch) in delafter:
            delafter[graph, key, branch] = min((
                (turn, tick),
                ... | Send all new and changed graph values to the database. |
def _finalize(self):
    container = {}
    try:
        for name in self._traces:
            container[name] = self._traces[name]._trace
        container['_state_'] = self._state_
        file = open(self.filename, 'w+b')
        std_pickle.dump(container, file)
        file.close()
    ... | Dump traces using cPickle. |
def _get_shipped_from(row):
    try:
        spans = row.find('div', {'id': 'coltextR2'}).find_all('span')
        if len(spans) < 2:
            return None
        return spans[1].string
    except AttributeError:
        return None | Get where package was shipped from. |
def check_has_path(self, api):
    if not hasattr(api, 'path'):
        msg = 'The Api class "{}" lacks a `path` attribute.'
        return [msg.format(api.__name__)] | An API class must have a `path` attribute. |
async def async_delete_all_keys(session, host, port, api_key, api_keys=[]):
    url = 'http://{}:{}/api/{}/config'.format(host, str(port), api_key)
    response = await async_request(session.get, url)
    api_keys.append(api_key)
    for key in response['whitelist'].keys():
        if key not in api_keys:
            a... | Delete all API keys except for the ones provided to the method. |
def make_random_key() -> Text:
    r = SystemRandom()
    allowed = \
        'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_+/[]'
    return ''.join([r.choice(allowed) for _ in range(0, 50)]) | Generates a secure random string |
def OnHelpSize(self, event):
    size = event.GetSize()
    config["help_window_size"] = repr((size.width, size.height))
    event.Skip() | Help window size event handler stores size in config |
def calc_csd(self):
    CSDarray = np.array([])
    CSDdict = {}
    i = 0
    for y in self.y:
        fil = os.path.join(self.populations_path,
                           self.output_file.format(y, 'CSD.h5'))
        f = h5py.File(fil)
        if i == 0:
            CSDarray = np.z... | Sum all the CSD contributions from every layer. |
def recommend(self, client_data, limit, extra_data={}):
    guids = self._curated_wl.get_randomized_guid_sample(limit)
    results = [(guid, 1.0) for guid in guids]
    log_data = (client_data["client_id"], str(guids))
    self.logger.info(
        "Curated recommendations client_id: [%s], guids: [%... | Curated recommendations are just random selections |
def limit_roles(self):
    new_roles = {}
    roles = self.options.limit.split(",")
    for key, value in self.roles.iteritems():
        for role in roles:
            role = role.strip()
            if key == role:
                new_roles[key] = value
    self.roles = new_roles | Limit the roles being scanned. |
def _read_conf_file(path):
    log.debug('Reading configuration from %s', path)
    with salt.utils.files.fopen(path, 'r') as conf_file:
        try:
            conf_opts = salt.utils.yaml.safe_load(conf_file) or {}
        except salt.utils.yaml.YAMLError as err:
            message = 'Error parsing configuration fil... | Read in a config file from a given path and process it into a dictionary |
def send_response_message(self, request_id, meta, body):
    self.response_messages.append((request_id, meta, body)) | Add the response to the deque. |
def cluster_del_slots(self, slot, *slots):
    slots = (slot,) + slots
    if not all(isinstance(s, int) for s in slots):
        raise TypeError("All parameters must be of type int")
    fut = self.execute(b'CLUSTER', b'DELSLOTS', *slots)
    return wait_ok(fut) | Set hash slots as unbound in receiving node. |
def to_json(self, skip_nulls=True):
    return json.dumps(self.to_dict(skip_nulls=skip_nulls)) | Convert object to a json string |
async def processNodeInBox(self):
    while self.nodeInBox:
        m = self.nodeInBox.popleft()
        await self.process_one_node_message(m) | Process the messages in the node inbox asynchronously. |
def complete_hosts(self, text, line, begidx, endidx):
    "Tab-complete 'creds' commands."
    commands = ["add", "remove", "dc"]
    mline = line.partition(' ')[2]
    offs = len(mline) - len(text)
    return [s[offs:] for s in commands if s.startswith(mline)] | Tab-complete 'creds' commands. |
def _load_raster_text(self, raster_path):
    with open(raster_path, 'r') as f:
        self.rasterText = f.read()
    lines = self.rasterText.split('\n')
    for line in lines[0:6]:
        spline = line.split()
        if 'north' in spline[0].lower():
            self.north = float(spline[... | Loads grass ASCII to object |
def _setuintbe(self, uintbe, length=None):
    if length is not None and length % 8 != 0:
        raise CreationError("Big-endian integers must be whole-byte. "
                            "Length = {0} bits.", length)
    self._setuint(uintbe, length) | Set the bitstring to a big-endian unsigned int interpretation. |
def nickmask(prefix: str, kwargs: Dict[str, Any]) -> None:
    if "!" in prefix and "@" in prefix:
        kwargs["nick"], remainder = prefix.split("!", 1)
        kwargs["user"], kwargs["host"] = remainder.split("@", 1)
    else:
        kwargs["host"] = prefix | store nick, user, host in kwargs if prefix is correct format |
def shutdown(self):
    'Close the hub connection'
    log.info("shutting down")
    self._peer.go_down(reconnect=False, expected=True) | Close the hub connection |
def _get_proposed_values(self):
    momentum_bar = self.momentum + 0.5 * self.stepsize * self.grad_log_position
    position_bar = self.position + self.stepsize * momentum_bar
    grad_log, _ = self.grad_log_pdf(position_bar, self.model).get_gradient_log_pdf()
    momentum_bar = momentum_bar + 0.5 * sel... | Method to perform time splitting using leapfrog |
def _parse_spectra(self, line):
    if line in ['\n', '\r\n', '//\n', '//\r\n', '', '//']:
        self.start_spectra = False
        self.current_id_meta += 1
        self.collect_meta = True
        return
    splist = line.split()
    if len(splist) > 2 and not self.ignore_additional_spec... | Parse and store the spectral details |
def jpegrescan(ext_args):
    args = copy.copy(_JPEGRESCAN_ARGS)
    if Settings.jpegrescan_multithread:
        args += ['-t']
    if Settings.destroy_metadata:
        args += ['-s']
    args += [ext_args.old_filename, ext_args.new_filename]
    extern.run_ext(args)
    return _JPEG_FORMAT | Run the EXTERNAL program jpegrescan. |
def sync_role_definitions(self):
    from superset import conf
    logging.info('Syncing role definition')
    self.create_custom_permissions()
    self.set_role('Admin', self.is_admin_pvm)
    self.set_role('Alpha', self.is_alpha_pvm)
    self.set_role('Gamma', self.is_gamma_pvm)
    self.s... | Inits the Superset application with security roles and such |
def _family_notes_path(family, data_dir):
    data_dir = fix_data_dir(data_dir)
    family = family.lower()
    if not family in get_families(data_dir):
        raise RuntimeError("Family '{}' does not exist".format(family))
    file_name = 'NOTES.' + family.lower()
    file_path = os.path.join(data_dir, file_name)
    ... | Form a path to the notes for a family |
def _log_error(self, message):
    key = (self.feature_name, self.target.get('formula'))
    self.environment.log_feature_error(key, "ERROR: " + message) | Log an error for the feature |
def _check_valgrind(xml_file):
    log(_("checking for valgrind errors..."))
    xml = ET.ElementTree(file=xml_file)
    reported = set()
    for error in xml.iterfind("error"):
        kind = error.find("kind").text
        what = error.find("xwhat/text" if kind.startswith("Leak_") else "what").text
        msg = ["\t... | Log and report any errors encountered by valgrind. |
def replication_factor(self, cluster='main'):
    if not self.config.has_section(cluster):
        raise SystemExit("Cluster '%s' not defined in %s"
                         % (cluster, self.config_file))
    return int(self.config.get(cluster, 'replication_factor')) | Return the replication factor for a cluster as an integer. |
def _get_a2(bbar, dbar, slip_moment, mmax):
    return ((dbar - bbar) / bbar) * (slip_moment / _scale_moment(mmax)) | Returns the A2 value defined in II.4 of Table 2 |
def train_input_fn(params):
    file_pattern = os.path.join(getattr(params, "data_dir", ""), "*encoded-train*")
    return _read_and_batch_from_files(
        file_pattern, params.batch_size, params.max_length, params.num_cpu_cores,
        shuffle=True, repeat=params.repeat_dataset) | Load and return dataset of batched examples for use during training. |
def nodes_ali(c_obj):
    ali_nodes = []
    try:
        ali_nodes = c_obj.list_nodes()
    except BaseHTTPError as e:
        abort_err("\r HTTP Error with AliCloud: {}".format(e))
    ali_nodes = adj_nodes_ali(ali_nodes)
    return ali_nodes | Get node objects from AliCloud. |
def sorts_query(sortables):
    stmts = []
    for sortable in sortables:
        if sortable.desc:
            stmts.append('{} DESC'.format(sortable.field))
        else:
            stmts.append('{} ASC'.format(sortable.field))
    return ' ORDER BY {}'.format(', '.join(stmts)) | Turn the Sortables into a SQL ORDER BY query |
def step(self, action):
    total_reward = 0.0
    done = None
    for i in range(self._skip):
        obs, reward, done, info = self.env.step(action)
        if i == self._skip - 2:
            self._obs_buffer[0] = obs
        if i == self._skip - 1:
            self._obs_buffer[1] = obs
        total_reward += reward
        if done:... | Repeat action, sum reward, and max over last observations. |
def _add_fcp(self, fcp):
    try:
        LOG.info("fcp %s found in CONF.volume.fcp_list, add it to db" %
                 fcp)
        self.db.new(fcp)
    except Exception:
        LOG.info("failed to add fcp %s into db", fcp) | add fcp to db if it's not in db but in fcp list and init it |
def load_each(*loaders):
    def _load_each(metadata):
        return merge(
            loader(metadata)
            for loader in loaders
        )
    return _load_each | Loader factory that combines a series of loaders. |
def calc_bin(self, _bin=None):
    if _bin is None:
        try:
            _bin = bins.bins(self.start, self.end, one=True)
        except TypeError:
            _bin = None
    return _bin | Calculate the smallest UCSC genomic bin that will contain this feature. |
def check_output(args, env=None, sp=subprocess):
    log.debug('calling %s with env %s', args, env)
    output = sp.check_output(args=args, env=env)
    log.debug('output: %r', output)
    return output | Call an external binary and return its stdout. |
def add_host(host):
    p = new_prefix()
    p.prefix = str(host['ipaddr'])
    p.type = "host"
    p.description = host['description']
    p.node = host['fqdn']
    p.avps = {}
    if 'additional' in host:
        p.comment = host['additional']
    if len(host['location']) > 0:
        p.avps['location'] = host['locat... | Put your host information in the prefix object. |
def StreamInChunks(self, callback=None, finish_callback=None,
                   additional_headers=None):
    self.StreamMedia(callback=callback, finish_callback=finish_callback,
                     additional_headers=additional_headers,
                     use_chunks=True) | Stream the entire download in chunks. |
def put(self, path, data, **options):
    data, options = self._update_request(data, options)
    return self.request('put', path, data=data, **options) | Parses PUT request options and dispatches a request |
def as_dict(self, replace_value_names=True):
    r = RootSectionTerm(doc=self)
    for s in self:
        for t in s:
            r.terms.append(t)
    return r.as_dict(replace_value_names) | Iterate, link terms and convert to a dict |
def OnMouseMotion(self, event):
    grid = self.grid
    pos_x, pos_y = grid.CalcUnscrolledPosition(event.GetPosition())
    row = grid.YToRow(pos_y)
    col = grid.XToCol(pos_x)
    tab = grid.current_table
    key = row, col, tab
    merge_area = self.grid.code_array.cell_attributes[key]["... | Mouse motion event handler |
def suggest(self, utility_function):
    if len(self._space) == 0:
        return self._space.array_to_params(self._space.random_sample())
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        self._gp.fit(self._space.params, self._space.target)
    suggestion = acq... | Most promissing point to probe next |
def unit_vector(x):
    y = np.array(x, dtype='float')
    return y/norm(y) | Return a unit vector in the same direction as x. |
def date_decoder(dic):
    if '__date__' in dic:
        try:
            d = datetime.date(**{c: v for c, v in dic.items() if not c == "__date__"})
        except (TypeError, ValueError):
            raise json.JSONDecodeError("Corrupted date format !", str(dic), 1)
    elif '__datetime__' in dic:
        try:
            ... | Add python types decoding. See JsonEncoder |
def factory(codes, base=_Exception):
    if not issubclass(base, _Exception):
        raise FactoryException("Invalid class passed as parent: Must be a subclass of an Exception class created with this function",
                               FactoryException.INVALID_EXCEPTION_CLASS, intended_parent=base)
    class Err... | Creates a custom exception class with arbitrary error codes and arguments. |
def find_root(self):
    node = self
    while node.parent is not None:
        node = node.parent
    return node | Finds the outermost context. |
def check_failed_login(self):
    last_attempt = self.get_last_failed_access_attempt()
    if not last_attempt:
        user_access = self._FailedAccessAttemptModel(ip_address=self.ip)
    elif last_attempt:
        user_access = last_attempt
    if self.request.method == 'POST':
        if ... | 'Private method', check failed logins, it's used for wath_login decorator |
def expected_part_size(self, part_number):
    last_part = self.multipart.last_part_number
    if part_number == last_part:
        return self.multipart.last_part_size
    elif part_number >= 0 and part_number < last_part:
        return self.multipart.chunk_size
    else:
        raise Mul... | Get expected part size for a particular part number. |
def paint_pattern(self):
    x = 0
    while x < self.width:
        y = 0
        while y < self.height:
            self.paint_cube(x, y)
            y += self.cube_size
        x += self.cube_size | Paints all the cubes. |
def rename_retract_ar_transition(portal):
    logger.info("Renaming 'retract_ar' transition to 'invalidate'")
    wf_tool = api.get_tool("portal_workflow")
    workflow = wf_tool.getWorkflowById("bika_ar_workflow")
    if "invalidate" not in workflow.transitions:
        workflow.transitions.addTransition("invalidate")... | Renames retract_ar transition to invalidate |
def default_select(identifier, all_entry_points):
    if len(all_entry_points) == 0:
        raise PluginMissingError(identifier)
    elif len(all_entry_points) == 1:
        return all_entry_points[0]
    elif len(all_entry_points) > 1:
        raise AmbiguousPluginError(all_entry_points) | Raise an exception when we have ambiguous entry points. |
def run(self, ket: State) -> State:
    res = [op.run(ket) for op in self.operators]
    probs = [asarray(ket.norm()) * w for ket, w in zip(res, self.weights)]
    probs = np.asarray(probs)
    probs /= np.sum(probs)
    newket = np.random.choice(res, p=probs)
    return newket.normalize() | Apply the action of this Kraus quantum operation upon a state |
def closeLog(self):
    self._logPtr.close()
    if self._namePtr:
        self._namePtr.close()
    self.log = 0 | Closes the log file. |
def random_point_triangle(triangle, use_int_coords=True):
    xs, ys = triangle.exterior.coords.xy
    A, B, C = zip(xs[:-1], ys[:-1])
    r1, r2 = np.random.rand(), np.random.rand()
    rx, ry = (1 - sqrt(r1)) * np.asarray(A) + sqrt(r1) * (1 - r2) * np.asarray(B) + sqrt(r1) * r2 * np.asarray(C)
    ... | Selects a random point in interior of a triangle |
def api_related(self, query):
    url = "{0}/{1}/related/?format=json".format(self.base_url, query)
    response = requests.get(url, headers=self.headers, verify=self.verify_ssl)
    if response.status_code == 200:
        return response.json()
    else:
        self.error('Received status code... | Find related objects through SoltraEdge API |
def load_settings_file(self, settings_file=None):
    if not settings_file:
        settings_file = self.get_json_or_yaml_settings()
    if not os.path.isfile(settings_file):
        raise ClickException("Please configure your zappa_settings file or call `zappa init`.")
    path, ext = os.path.split... | Load our settings file. |
def _ranging_attributes(attributes, param_class):
    next_attributes = {param_class.next_in_enumeration(attribute) for attribute in attributes}
    in_first = attributes.difference(next_attributes)
    in_second = next_attributes.difference(attributes)
    if len(in_first) == 1 and len(in_second) == 1:
        for x i... | Checks if there is a continuous range |
def dump(self):
    for modpath in sorted(self.map):
        title = 'Used by %s' % modpath
        print('\n' + title + '\n' + '-'*len(title))
        for origin in sorted(self.get_used_origins(modpath)):
            print(' %s' % origin) | Prints out the contents of the usage map. |
def use_comparative_asset_view(self):
    self._object_views['asset'] = COMPARATIVE
    for session in self._get_provider_sessions():
        try:
            session.use_comparative_asset_view()
        except AttributeError:
            pass | Pass through to provider AssetLookupSession.use_comparative_asset_view |
def pil_image3d(input, size=(800, 600), pcb_rotate=(0, 0, 0), timeout=20, showgui=False):
    f = tempfile.NamedTemporaryFile(suffix='.png', prefix='eagexp_')
    output = f.name
    export_image3d(input, output=output, size=size,
                   pcb_rotate=pcb_rotate, timeout=timeout, showgui=showgui)
    im = Imag... | same as export_image3d, but there is no output file, PIL object is returned instead |
def _perspective_warp(c, magnitude:partial(uniform,size=8)=0, invert=False):
    "Apply warp of `magnitude` to `c`."
    magnitude = magnitude.view(4,2)
    targ_pts = [[x+m for x,m in zip(xs, ms)] for xs, ms in zip(_orig_pts, magnitude)]
    return _do_perspective_warp(c, targ_pts, invert) | Apply warp of `magnitude` to `c`. |
def currentRepoTreeItemChanged(self):
    currentItem, currentIndex = self.getCurrentItem()
    hasCurrent = currentIndex.isValid()
    assert hasCurrent == (currentItem is not None), \
        "If current idex is valid, currentIndex may not be None"
    if hasCurrent:
        logger.info("Addin... | Called to update the GUI when a repo tree item has changed or a new one was selected. |
def cli(env, identifier):
    mgr = SoftLayer.ObjectStorageManager(env.client)
    credential_limit = mgr.limit_credential(identifier)
    table = formatting.Table(['limit'])
    table.add_row([
        credential_limit,
    ])
    env.fout(table) | Credential limits for this IBM Cloud Object Storage account. |
def indent_func(input_):
    if isinstance(input_, six.string_types):
        lbl = input_
        return _indent_decor(lbl)
    elif isinstance(input_, (bool, tuple)):
        func = input_
        return func
    else:
        func = input_
        lbl = '[' + meta_util_six.get_funcname(func) + ']'
        return _in... | Takes either no arguments or an alias label |
def conns(self, value: Set[str]) -> None:
    if not self._conns == value:
        old = self._conns
        self._conns = value
        ins = value - old
        outs = old - value
        logger.display("{}'s connections changed from {} to {}".format(self, old, value))
        self._connsC... | Updates the connection count of this node if not already done. |
def transform_using_this_method(original_sample):
    new_sample = original_sample.copy()
    new_data = new_sample.data
    new_data['Y2-A'] = log(new_data['Y2-A'])
    new_data = new_data.dropna()
    new_sample.data = new_data
    return new_sample | This function implements a log transformation on the data. |
def view(grid):
    "Show a grid human-readably."
    p_mark, q_mark = player_marks(grid)
    return grid_format % tuple(p_mark if by_p else q_mark if by_q else '.'
                               for by_p, by_q in zip(*map(player_bits, grid))) | Show a grid human-readably. |
def getblockhash(self, index: int) -> str:
    return cast(str, self.api_fetch('getblockhash?index=' + str(index))) | Returns the hash of the block at ; index 0 is the genesis block. |
def node_stat_copy(self, node_or_char, node=None):
    if node is None:
        node = node_or_char
    else:
        node = self._real.character[node_or_char].node[node]
    return {
        k: v.unwrap() if hasattr(v, 'unwrap') and not hasattr(v, 'no_unwrap') else v
        for (k, v) in n... | Return a node's stats, prepared for pickling, in a dictionary. |
def del_label(self, name):
    labels_tag = self.root[0]
    labels_tag.remove(self._find_label(name)) | Delete a label by name. |
def live_scores(self, live_scores):
    scores = sorted(live_scores, key=lambda x: x["league"])
    for league, games in groupby(scores, key=lambda x: x["league"]):
        self.league_header(league)
        for game in games:
            self.scores(self.parse_result(game), add_new_line=False)
        ... | Prints the live scores in a pretty format |
def dispatch_request(self, *args, **kwargs):
    if self.validation:
        specs = {}
        attrs = flasgger.constants.OPTIONAL_FIELDS + [
            'parameters', 'definitions', 'responses',
            'summary', 'description'
        ]
        for attr in attrs:
            specs... | If validation=True perform validation |
def _extract_options(orig_script):
    first = (orig_script + '\n').splitlines()[0]
    match = _first_line_re().match(first)
    options = match.group(1) or '' if match else ''
    return options.strip() | Extract any options from the first line of the script. |
def _displaystr2num(st):
    num = None
    for s, n in [('DFP-', 16), ('TV-', 8), ('CRT-', 0)]:
        if st.startswith(s):
            try:
                curnum = int(st[len(s):])
                if 0 <= curnum <= 7:
                    num = n + curnum
                    break
            except Exception:
                ... | Return a display number from a string |
def link_head(self, node):
    assert not node.tail
    old_head = self.head
    if old_head:
        assert old_head.tail == self
        old_head.tail = node
        node.head = old_head
    node.tail = self
    self.head = node | Add a node to the head. |
def handle_user(self, params):
    params = params.split(' ', 3)
    if len(params) != 4:
        raise IRCError.from_name(
            'needmoreparams',
            'USER :Not enough parameters')
    user, mode, unused, realname = params
    self.user = user
    self.mode = mode
    ... | Handle the USER command which identifies the user to the server. |
def load_decorate(package):
    from acorn.logging.decoration import set_decorating, decorating
    origdecor = decorating
    set_decorating(True)
    import sys
    from importlib import import_module
    apack = import_module(package)
    from acorn.logging.decoration import decorate
    decorate(apack)
    ... | Imports and decorates the package with the specified name. |
def format_datetime(time):
    user_time_zone = timezone.get_current_timezone()
    if time.tzinfo is None:
        time = time.replace(tzinfo=pytz.utc)
        user_time_zone = pytz.timezone(getattr(settings, 'USER_TIME_ZONE', 'GMT'))
    time = time.astimezone(user_time_zone)
    return time.strftime("%b %d, %Y %H:%M... | Formats a date, converting the time to the user timezone if one is specified |
def difference_update(self, other):
    return self.client.sdiffstore(self.name, [self.name, other.name]) | Remove all elements of another set from this set. |
def _concat_same_dtype(self, to_concat, name):
    attribs = self._get_attributes_dict()
    attribs['name'] = name
    if len({str(x.dtype) for x in to_concat}) != 1:
        raise ValueError('to_concat must have the same tz')
    new_data = type(self._values)._concat_same_type(to_concat).asi8
    ... | Concatenate to_concat which has the same class. |
def parse_rss_bytes(data: bytes) -> RSSChannel:
    root = parse_xml(BytesIO(data)).getroot()
    return _parse_rss(root) | Parse an RSS feed from a byte-string containing XML data. |
def _init_kws(self, **kws_usr):
    kws_self = {}
    user_keys = set(kws_usr)
    for objname, expset in self.exp_keys.items():
        usrkeys_curr = user_keys.intersection(expset)
        kws_self[objname] = get_kwargs(kws_usr, usrkeys_curr, usrkeys_curr)
    dpi = str(kws_self['dag'].get('dp... | Return a dict containing user-specified plotting options. |
def train_agent(real_env, learner, world_model_dir, hparams, epoch):
    initial_frame_chooser = rl_utils.make_initial_frame_chooser(
        real_env, hparams.frame_stack_size, hparams.simulation_random_starts,
        hparams.simulation_flip_first_random_for_beginning
    )
    env_fn = rl.make_simulated_env_fn_from_hparams(
        ... | Train the PPO agent in the simulated environment. |
def _read_pyMatch(fn, precursors):
    with open(fn) as handle:
        reads = defaultdict(realign)
        for line in handle:
            query_name, seq, chrom, reference_start, end, mism, add = line.split()
            reference_start = int(reference_start)
            if query_name not in reads:
                r... | read pyMatch file and perform realignment of hits |