| code (string, 51–2.34k chars) | docstring (string, 11–171 chars) |
|---|---|
def _parse_mut(mut):
multiplier = 1
if mut.startswith("-"):
mut = mut[1:]
multiplier = -1
nt = mut.strip('0123456789')
pos = int(mut[:-2]) * multiplier
return nt, pos | Parse mutation field to get position and nts. |
def install_config(self):
    """Install supervisor main config file.

    Renders the supervisord template with the part's options and installs
    it as a deployment configuration file.
    """
    rendered = templ_config.render(**self.options)
    options = {'deployment': self.deployment_name,
               'text': rendered}
    config = Configuration(self.buildout, 'supervisord.conf', options)
    return [config.install()]
def auth(self, encoded):
    """Validate integrity of encoded bytes.

    Raises:
        AuthenticatorInvalidSignature: when the embedded signature does
        not match the one computed over the message.
    """
    message, signature = self.split(encoded)
    expected = self.sign(message)
    # Constant-time comparison avoids timing side channels.
    if not hmac.compare_digest(signature, expected):
        raise AuthenticatorInvalidSignature
def reject(self):
    """Rejects the snapshot and closes the widget."""
    # Call hideWindow() once and reuse the result; the original invoked it
    # twice, repeating any side effects of the lookup.
    hidden = self.hideWindow()
    if hidden:
        hidden.show()
    self.close()
    self.deleteLater()
def build_graph(formula):
graph = {}
for clause in formula:
for (lit, _) in clause:
for neg in [False, True]:
graph[(lit, neg)] = []
for ((a_lit, a_neg), (b_lit, b_neg)) in formula:
add_edge(graph, (a_lit, a_neg), (b_lit, not b_neg))
add_edge(graph, (b_lit... | Builds the implication graph from the formula |
def _find_integer_tolerance(epsilon, v_max, min_tol):
int_tol = min(epsilon / (10 * v_max), 0.1)
min_tol = max(1e-10, min_tol)
if int_tol < min_tol:
eps_lower = min_tol * 10 * v_max
logger.warning(
'When the maximum flux is {}, it is recommended that'
' epsilon > {} t... | Find appropriate integer tolerance for gap-filling problems. |
def create_csp_header(cspDict):
    """Create CSP header string.

    Joins each ``directive value`` pair with ``'; '``, skipping entries
    whose value is the empty string.
    """
    parts = ['%s %s' % (directive, value)
             for directive, value in cspDict.items()
             if value != '']
    return '; '.join(parts)
def collab_learner(data, n_factors:int=None, use_nn:bool=False, emb_szs:Dict[str,int]=None, layers:Collection[int]=None,
ps:Collection[float]=None, emb_drop:float=0., y_range:OptRange=None, use_bn:bool=True,
bn_final:bool=False, **learn_kwargs)->Learner:
"Create a Learner for... | Create a Learner for collaborative filtering on `data`. |
def upgrade_defaults(self):
    """Upgrade config file and reload."""
    defaults = self.defaults
    defaults.upgrade()
    # Reload from the (possibly rewritten) defaults file.
    self.reset_defaults(defaults.filename)
def redact_http_basic_auth(output):
url_re = '(https?)://.*@'
redacted = r'\1://<redacted>@'
if sys.version_info >= (2, 7):
return re.sub(url_re, redacted, output, flags=re.IGNORECASE)
else:
if re.search(url_re, output.lower()):
return re.sub(url_re, redacted, output.lower())... | Remove HTTP user and password |
def import_backend(config):
backend_name = config['backend']
path = backend_name.split('.')
backend_mod_name, backend_class_name = '.'.join(path[:-1]), path[-1]
backend_mod = importlib.import_module(backend_mod_name)
backend_class = getattr(backend_mod, backend_class_name)
return backend_class(c... | Imports and initializes the Backend class. |
def __make_scubadir(self):
    """Make temp directory where all ancillary files are bind-mounted."""
    host_path = tempfile.mkdtemp(prefix='scubadir')
    self.__scubadir_hostpath = host_path
    self.__scubadir_contpath = '/.scuba'
    # Bind-mount the host temp dir into the container at /.scuba.
    self.add_volume(host_path, self.__scubadir_contpath)
def example_ylm(m=0, n=2, shape=128, limits=[-4, 4], draw=True, show=True, **kwargs):
import ipyvolume.pylab as p3
__, __, __, r, theta, phi = xyz(shape=shape, limits=limits, spherical=True)
radial = np.exp(-(r - 2) ** 2)
data = np.abs(scipy.special.sph_harm(m, n, theta, phi) ** 2) * radial
if draw:... | Show a spherical harmonic. |
def _insert_html_configs(c, *, project_name, short_project_name):
c['templates_path'] = [
'_templates',
lsst_sphinx_bootstrap_theme.get_html_templates_path()]
c['html_theme'] = 'lsst_sphinx_bootstrap_theme'
c['html_theme_path'] = [lsst_sphinx_bootstrap_theme.get_html_theme_path()]
c['htm... | Insert HTML theme configurations. |
def convertDate(date):
    """Convert DATE string into a decimal year.

    Expects an ISO-style ``YYYY-MM-DDThh:mm:ss`` value and splits it on
    the ``T`` separator.
    """
    day, obstime = date.split('T')
    return decimal_date(day, timeobs=obstime)
def comply(self, path):
    """Issues a chown and chmod to the file paths specified."""
    owner = self.user.pw_name
    group = self.group.gr_name
    utils.ensure_permissions(path, owner, group, self.mode)
def to_det_oid(self, det_id_or_det_oid):
    """Convert det OID or ID to det OID.

    Numeric values are IDs and need a lookup; anything that does not
    parse as an int is assumed to already be an OID.
    """
    try:
        int(det_id_or_det_oid)
    except ValueError:
        # Not numeric: already an OID, return as-is.
        return det_id_or_det_oid
    return self.get_det_oid(det_id_or_det_oid)
def msg(self, target, message, formatted=True, tags=None):
    """Send a privmsg to the given target.

    Formatted messages are unescaped before transmission.
    """
    body = unescape(message) if formatted else message
    self.send('PRIVMSG', params=[target, body], source=self.nick, tags=tags)
def _check_status(self):
    """Check repo status and raise DirtyException if dirty."""
    logger.info('Checking repo status')
    output = self.log_call(
        ['git', 'status', '--porcelain'],
        callwith=subprocess.check_output,
        cwd=self.cwd,
    )
    # Any porcelain output means uncommitted changes.
    if output:
        raise DirtyException(output)
def _remove_finder(importer, finder):
existing_finder = _get_finder(importer)
if not existing_finder:
return
if isinstance(existing_finder, ChainedFinder):
try:
existing_finder.finders.remove(finder)
except ValueError:
return
if len(existing_finder.finders) == 1:
pkg_resources.re... | Remove an existing finder from pkg_resources. |
def _merge_sorted_items(self, index):
def load_partition(j):
path = self._get_spill_dir(j)
p = os.path.join(path, str(index))
with open(p, 'rb', 65536) as f:
for v in self.serializer.load_stream(f):
yield v
disk_items = [load_partit... | load a partition from disk, then sort and group by key |
def user_terms_updated(sender, **kwargs):
    """Called when user terms and conditions is changed - to force cache clearing."""
    LOGGER.debug("User T&C Updated Signal Handler")
    # Look up the instance once instead of re-fetching it from kwargs.
    instance = kwargs.get('instance')
    if instance.user:
        cache.delete('tandc.not_agreed_terms_' + instance.user.get_username())
def cmd_up(self, args):
if len(args) == 0:
adjust = 5.0
else:
adjust = float(args[0])
old_trim = self.get_mav_param('TRIM_PITCH_CD', None)
if old_trim is None:
print("Existing trim value unknown!")
return
new_trim = int(old_trim + (... | adjust TRIM_PITCH_CD up by 5 degrees |
def to_unit(C, val, unit=None):
    """Convert a string measurement to a Unum.

    Returns None when ``val`` does not match ``<number><unit>``.
    """
    match = re.match(r'^(?P<num>[\d\.]+)(?P<unit>.*)$', val)
    if match is None:
        return None
    quantity = float(match.group('num')) * CSS.units[match.group('unit')]
    if unit is None:
        return quantity
    return quantity.asUnit(unit)
def send(self, to, from_, body):
    """Send BODY to TO from FROM as an SMS!

    Raises:
        twilio.TwilioRestException: re-raised so callers can handle API
        failures themselves.
    """
    try:
        msg = self.client.sms.messages.create(
            body=body,
            to=to,
            from_=from_
        )
        # Fixed: Python 2 print statement converted to the print() function.
        print(msg.sid)
    except twilio.TwilioRestException:
        raise
def show_grid_from_file(self, fname):
    """Reads a saved grid file and paints it on the canvas."""
    with open(fname, "r") as f:
        for y, row in enumerate(f):
            # Strip the trailing newline so it is not painted as a cell
            # (the original passed '\n' to draw_cell for every row).
            for x, val in enumerate(row.rstrip('\n')):
                self.draw_cell(y, x, val)
def _apply_orthogonal_view(self):
    """Orthogonal view with respect to current aspect ratio."""
    # Near/far planes are fixed at -10 and 0.
    bounds = self.get_view_coordinates()
    glOrtho(*bounds, -10, 0)
def system_summary(providername=None):
    """Returns SystemSummary class from mentioned provider.

    Falls back to provider_check() when no provider name is given.
    """
    provider = providername or provider_check()
    module = import_module('netshowlib.%s.system_summary' % provider)
    return module.SystemSummary()
def emit(self, record):
    """Emit record after checking if message triggers later sending of e-mail."""
    trigger = self.triggerLevelNo
    # Records at or above the trigger level arm the email send.
    if trigger is not None and record.levelno >= trigger:
        self.triggered = True
    logging.handlers.BufferingHandler.emit(self, record)
def unquote_string(self, string):
value = string[1:-1]
forbidden_sequences = {ESCAPE_SUBS[STRING_QUOTES[string[0]]]}
valid_sequences = set(ESCAPE_SEQUENCES) - forbidden_sequences
for seq in ESCAPE_REGEX.findall(value):
if seq not in valid_sequences:
raise self... | Return the unquoted value of a quoted string. |
def combine_hla_fqs(hlas, out_file, data):
if not utils.file_exists(out_file):
with file_transaction(data, out_file) as tx_out_file:
with open(tx_out_file, "w") as out_handle:
for hla_type, hla_fq in hlas:
if utils.file_exists(hla_fq):
... | OptiType performs best on a combination of all extracted HLAs. |
def machine_listings_to_file_entries(listings: Iterable[dict]) -> \
        Iterable[FileEntry]:
    """Convert results from parsing machine listings to FileEntry list."""
    for listing in listings:
        entry = FileEntry(
            listing['name'],
            type=listing.get('type'),
            size=listing.get('size'),
            date=listing.get('modify'),
        )
        yield entry
def crop_box(im, box=False, **kwargs):
    """Uses box coordinates to crop an image without resizing it first.

    Returns the image unchanged when no box is given.
    """
    return im.crop(box) if box else im
def execfile(fname, variables):
    """This is builtin in python2, but we have to roll our own on py3."""
    with open(fname) as source:
        compiled = compile(source.read(), fname, 'exec')
    exec(compiled, variables)
def initialize(self):
    """Initialize croniter and related times (no-op if already set)."""
    if self.croniter is not None:
        return
    # Capture all clocks together so they describe the same instant.
    self.time = time.time()
    self.datetime = datetime.now(self.tz)
    self.loop_time = self.loop.time()
    self.croniter = croniter(self.spec, start_time=self.datetime)
def start_index(self):
    """Return the 1-based index of the first item on this page."""
    paginator = self.paginator
    # An empty result set has no first item.
    if paginator.count == 0:
        return 0
    if self.number == 1:
        return 1
    offset = (self.number - 2) * paginator.per_page
    return offset + paginator.first_page + 1
def _initial_interior_point(self, buses, generators, xmin, xmax, ny):
Va = self.om.get_var("Va")
va_refs = [b.v_angle * pi / 180.0 for b in buses
if b.type == REFERENCE]
x0 = (xmin + xmax) / 2.0
x0[Va.i1:Va.iN + 1] = va_refs[0]
if ny > 0:
yvar = sel... | Selects an interior initial point for interior point solver. |
def sink_storage(client, to_delete):
bucket = _sink_storage_setup(client)
to_delete.append(bucket)
SINK_NAME = "robots-storage-%d" % (_millis(),)
FILTER = "textPayload:robot"
DESTINATION = "storage.googleapis.com/%s" % (bucket.name,)
sink = client.sink(SINK_NAME, filter_=FILTER, destination=DEST... | Sink log entries to storage. |
def save(self, fname=''):
if fname != '':
with open(fname, 'w') as f:
for i in self.lstPrograms:
f.write(self.get_file_info_line(i, ','))
filemap = mod_filemap.FileMap([], [])
object_fileList = filemap.get_full_filename(filemap.find_type('OBJECT'),... | Save the list of items to AIKIF core and optionally to local file fname |
def get(self, *args, **kwargs):
self.before_get(args, kwargs)
relationship_field, model_relationship_field, related_type_, related_id_field = self._get_relationship_data()
obj, data = self._data_layer.get_relationship(model_relationship_field,
... | Get a relationship details |
def size(self):
try:
return os.fstat(self.file.fileno()).st_size
except io.UnsupportedOperation:
pass
if is_seekable(self.file):
with wpull.util.reset_file_offset(self.file):
self.file.seek(0, os.SEEK_END)
return self.file.tell(... | Return the size of the file. |
async def _clean_shutdown(self):
remaining_tasks = []
for task in self._tasks.get(None, []):
self._logger.debug("Cancelling task at shutdown %s", task)
task.cancel()
remaining_tasks.append(task)
asyncio.gather(*remaining_tasks, return_exceptions=True)
... | Cleanly shutdown the emulation loop. |
def piece_wise_linear(scale, points):
assert len(points) >= 2
assert points[0][0] == 0
assert points[-1][0] == 1
assert all(i < j for i, j in zip(points[:-1], points[1:]))
out = numpy.zeros((scale, 3))
p1, c1 = points[0]
p2, c2 = points[1]
next_pt = 2
for i in range(1, scale):
v = i / scale
if... | Create a palette that is piece-wise linear given some colors at points. |
def updateSiteName(self, block_name, origin_site_name):
if not origin_site_name:
dbsExceptionHandler('dbsException-invalid-input',
"DBSBlock/updateSiteName. origin_site_name is mandatory.")
conn = self.dbi.connection()
trans = conn.begin()
try:... | Update the origin_site_name for a given block name |
def to_geojson(self, filename):
    """Save vector as geojson."""
    # Always serialize in WGS84 coordinates.
    record = self.to_record(WGS84_CRS)
    with open(filename, 'w') as fd:
        json.dump(record, fd)
def _on_sphinx_thread_error_msg(self, error_msg):
self._sphinx_thread.wait()
self.plain_text_action.setChecked(True)
sphinx_ver = programs.get_module_version('sphinx')
QMessageBox.critical(self,
_('Help'),
_("The following error occured when ... | Display error message on Sphinx rich text failure |
def _load_names(self) -> List[str]:
    """Return list of thirdparty modules from requirements."""
    return [
        self._normalize_name(name)
        for path in self._get_files()
        for name in self._get_names(path)
    ]
def update(self):
self._controller.update(self._id, wake_if_asleep=False)
data = self._controller.get_charging_params(self._id)
if data:
self.__battery_range = data['battery_range']
self.__est_battery_range = data['est_battery_range']
self.__ideal_battery_rang... | Update the battery range state. |
def mirror():
slack_mirror = read_config(
read_file("{0}{1}".format(etc_slackpkg, "mirrors")))
if slack_mirror:
return slack_mirror + changelog_txt
else:
print("\nYou do not have any mirror selected in /etc/slackpkg/mirrors"
"\nPlease edit that file and uncomment ONE mi... | Get mirror from slackpkg mirrors file |
def format_error(module, error):
    """Format the error for the given module, print it, and exit(1)."""
    logging.error(module)
    # Fixed: Python 2 print statements converted to print() calls.
    print(error.message)
    print(json.dumps(error.error, sort_keys=True, indent=4,
                     separators=(',', ': ')))
    exit(1)
def filter_sum(self, inst_rc, threshold, take_abs=True):
    """Filter a network's rows or columns based on the sum across rows or columns."""
    df = self.dat_to_df()
    # inst_rc selects the axis; anything else leaves the frame untouched.
    if inst_rc == 'row':
        df = run_filter.df_filter_row_sum(df, threshold, take_abs)
    elif inst_rc == 'col':
        df = run_filter.df_filter_col_sum(df, threshold, take_abs)
    self.df_to_dat(df)
def json_error_formatter(body, status, title, environ):
    """A json_formatter for webob exceptions."""
    detail = webob.exc.strip_tags(body)
    # Status strings look like "404 Not Found"; keep only the numeric code.
    status_code = int(status.split(None, 1)[0])
    return {
        'errors': [{
            'status': status_code,
            'title': title,
            'detail': detail,
        }]
    }
def _call_one_middleware(self, middleware):
args = {}
for arg in middleware['args']:
if hasattr(self, arg):
args[arg] = reduce(getattr, arg.split('.'), self)
self.logger.debug('calling middleware event {}'
.format(middleware['name']))
... | Evaluate arguments and execute the middleware function |
def add_tags(self, *tags):
    """Add a list of strings to the statement as tags."""
    for tag in tags:
        self.tags.append(Tag(name=tag))
def to_dict(self):
    """Returns a dictionary representation of the dataset."""
    result = {
        'doses': self.doses,
        'ns': self.ns,
        'incidences': self.incidences,
    }
    # Extra keyword arguments are merged on top of the core fields.
    result.update(self.kwargs)
    return result
def complete(self, uio, dropped=False):
if self.dropped and not dropped:
return
for end in ['src', 'dst']:
if getattr(self, end):
continue
uio.show('\nEnter ' + end + ' for transaction:')
uio.show('')
uio.show(self.summary())
... | Query for all missing information in the transaction |
def pout(*args, **kwargs):
    """Print a message, maintaining indent level.

    NOTE(review): the original description said "print to stdout", but
    output is written to ``sys.stderr`` — confirm which is intended.
    """
    if not should_msg(kwargs.get("groups", ["normal"])):
        return
    pieces = indent_text(*args, **kwargs)
    sys.stderr.write("".join(pieces))
    sys.stderr.write("\n")
def random_crop_and_flip(x, pad_rows=4, pad_cols=4):
rows = tf.shape(x)[1]
cols = tf.shape(x)[2]
channels = x.get_shape()[3]
def _rand_crop_img(img):
return tf.random_crop(img, [rows, cols, channels])
with tf.device('/CPU:0'):
x = tf.image.resize_image_with_crop_or_pad(x, rows + pad_rows,
... | Augment a batch by randomly cropping and horizontally flipping it. |
def uninstall(self):
    """Delete code inside NApp directory, if existent."""
    if not self.is_installed():
        return
    installed = self.installed_dir()
    # Symlinked installs are simply unlinked; real dirs are removed.
    if installed.is_symlink():
        installed.unlink()
    else:
        shutil.rmtree(str(installed))
def align_cell(fmt, elem, width):
    """Returns an aligned element.

    ``"<"`` pads on the right, ``">"`` pads on the left; any other format
    returns the element unchanged.
    """
    if fmt == "<":
        return elem.ljust(width)
    if fmt == ">":
        return elem.rjust(width)
    return elem
def resample(self, target_sr):
    """Returns a new sound with a samplerate of target_sr."""
    resampled = librosa.core.resample(self.y, self.sr, target_sr)
    return Sound(resampled, target_sr)
def fetchone(table, cols="*", where=(), group="", order=(), limit=(), **kwargs):
    """Convenience wrapper for database SELECT and fetch one."""
    cursor = select(table, cols, where, group, order, limit, **kwargs)
    return cursor.fetchone()
def build_data_list(lst):
    """Returns the HTML with supplied list as an HTML list.

    NOTE(review): ``List`` is a name from the enclosing module, not
    defined in this function — confirm it holds the intended heading text.
    """
    # Fixed unbalanced markup: the heading is closed with </H3> and the
    # list with </UL> (the original emitted <H3>...<H3> and <UL>...<UL>).
    txt = '<H3>' + List + '</H3><UL>'
    for item in lst:
        txt += '<LI>' + item + '</LI>'
    txt += '</UL>'
    return txt
def reset(self) -> None:
    """Reset task execution to `DELAY` seconds from now."""
    # Fixed: the format string had a %s placeholder but no argument was
    # supplied; pass the task itself so the message renders.
    Log.debug('resetting timer task %s', self)
    self.target = self.time() + self.DELAY
def __roll(self, unrolled):
rolled = []
index = 0
for count in range(len(self.__sizes) - 1):
in_size = self.__sizes[count]
out_size = self.__sizes[count+1]
theta_unrolled = np.matrix(unrolled[index:index+(in_size+1)*out_size])
theta_rolled = theta_... | Converts parameter array back into matrices. |
def keyPressEvent(self, ev):
    """Stop editing on Enter/Return, cancel on Escape, else defer to base."""
    key = ev.key()
    if key in (Qt.Key_Enter, Qt.Key_Return):
        self._startOrStopEditing()
    elif key == Qt.Key_Escape:
        self._cancelEditing()
    else:
        Kittens.widgets.ClickableTreeWidget.keyPressEvent(self, ev)
def loadJson(self, filename):
    """Read, parse and return given Json config file.

    Returns an empty dict when the file does not exist.
    """
    if not os.path.isfile(filename):
        return {}
    # Use a context manager so the handle is closed (the original leaked
    # it) and json.load instead of joining readlines().
    with open(filename, 'r') as fp:
        return json.load(fp)
def verify_convention_version(self, ds):
try:
for convention in getattr(ds, "Conventions", '').replace(' ', '').split(','):
if convention == 'ACDD-' + self._cc_spec_version:
return ratable_result((2, 2), None, [])
m = ["Conventions does not contain 'AC... | Verify that the version in the Conventions field is correct |
def check_missing(self, param, action):
assert action in ('debug', 'info', 'warn', 'error'), action
if self.inputs.get(param):
msg = '%s_file in %s is ignored in %s' % (
param, self.inputs['job_ini'], self.calculation_mode)
if action == 'error':
ra... | Make sure the given parameter is missing in the job.ini file |
def readn(self, n):
    """Keep receiving data until exactly `n` bytes have been read.

    Raises:
        socket.error: if the peer closes before `n` bytes arrive.
    """
    # Accumulate bytes, not str: socket.recv returns bytes on Python 3,
    # so starting from '' raised TypeError on the first concatenation.
    data = b''
    while len(data) < n:
        chunk = self.sock.recv(n - len(data))
        if not chunk:
            raise socket.error('no data read from socket')
        data += chunk
    return data
def print_verbose(*args, **kwargs):
    """Utility to print something only if verbose=True is given."""
    # Pop so 'verbose' is never forwarded to gprint.
    verbose = kwargs.pop('verbose', False)
    if verbose is True:
        gprint(*args, **kwargs)
def add_log_error(self, x, flag_also_show=False, E=None):
if len(x) == 0:
x = "(empty error)"
tb.print_stack()
x_ = x
if E is not None:
a99.get_python_logger().exception(x_)
else:
a99.get_python_logger().info("ERROR: {}".format(x_))... | Sets text of labelError. |
def _get_folds(n_rows, n_folds, use_stored):
if use_stored is not None:
with open(os.path.expanduser(use_stored)) as json_file:
json_data = json.load(json_file)
if json_data['N_rows'] != n_rows:
raise Exception('N_rows from folds doesnt match the number of... | Get the used CV folds |
def ticket1to2(old):
if isinstance(old.benefactor, Multifactor):
types = list(chain(*[b.powerupNames for b in
old.benefactor.benefactors('ascending')]))
elif isinstance(old.benefactor, InitializerBenefactor):
types = list(chain(*[b.powerupNames for b in
old.benefa... | change Ticket to refer to Products and not benefactor factories. |
def toimages(self):
    """Convert blocks to images."""
    from thunder.images.images import Images
    if self.mode == 'spark':
        values = self.values.values_to_keys((0,)).unchunk()
    if self.mode == 'local':
        values = self.values.unchunk()
    # NOTE(review): a mode other than 'spark'/'local' leaves `values`
    # unbound and raises NameError here — confirm both are the only modes.
    return Images(values)
def _create_memory_database_interface(self) -> GraphDatabaseInterface:
Base = declarative_base()
engine = sqlalchemy.create_engine("sqlite://", poolclass=StaticPool)
Session = sessionmaker(bind=engine)
dbi: GraphDatabaseInterface = create_graph_database_interface(
sqlalchemy,... | Creates and returns the in-memory database interface the graph will use. |
def main(conf):
    """Main function, entry point of the program."""
    global config
    config = load_configuration(conf)
    app.conf.update(config['celery'])
    valigator = config['valigator']
    run(host=valigator['bind'], port=valigator['port'])
def remove_update_callback(self, callback, device):
    """Remove a registered update callback."""
    # Build the [callback, device] pair once for both the membership test
    # and the removal.
    entry = [callback, device]
    if entry in self._update_callbacks:
        self._update_callbacks.remove(entry)
        _LOGGER.debug('Removed update callback %s for %s',
                      callback, device)
def update(self):
yield from self.update_remotes()
yield from self.rename_local_untracked()
yield from self.reset_deleted_files()
if self.repo_is_dirty():
yield from self.ensure_lock()
yield from execute_cmd(['git', 'commit', '-am', 'WIP', '--allow-empty'], cwd=se... | Do the pulling if necessary |
def colorgamut(self):
    """The color gamut information of the light, or None if unavailable."""
    try:
        spec = self.controlcapabilities
        points = tuple(XYPoint(*xy) for xy in spec['colorgamut'])
        return GamutType(*points)
    except KeyError:
        # Light does not advertise a color gamut.
        return None
def total_length_per_neurite(neurites, neurite_type=NeuriteType.all):
    """Get the path length per neurite in a collection."""
    filtered = iter_neurites(neurites, filt=is_type(neurite_type))
    return [sum(section.length for section in neurite.iter_sections())
            for neurite in filtered]
def main(argv=None):
arguments = cli_common(__doc__, argv=argv)
report = ReportNode(arguments['CAMPAIGN-DIR'])
jobs = wait_for_completion(report, float(arguments['--interval']))
status = ReportStatus(report, jobs)
if not arguments['--silent']:
fmt = arguments['--format'] or 'log'
sta... | ben-wait entry point |
def add_basic_info(self, run_id, timestamp):
datetime = time.strftime('%A %b %d, %Y %H:%M:%S', time.localtime(timestamp))
user = getpass.getuser()
machine = socket.gethostname()
buildroot = get_buildroot()
self.add_infos(('id', run_id), ('timestamp', timestamp), ('datetime', datetime),
... | Adds basic build info. |
def _accept_as_blank(self, url_info: URLInfo):
    """Mark the URL as OK in the pool.

    Loads an empty robots.txt, which permits everything.
    """
    _logger.debug(__('Got empty robots.txt for {0}.', url_info.url))
    self._robots_txt_pool.load_robots_txt(url_info, '')
def inspect_current_object(self):
    """Inspect current object in the Help plugin."""
    editor = self.get_current_editor()
    # NOTE(review): connect() runs on every call; repeated calls may add
    # duplicate signal connections — confirm this is intended.
    editor.sig_display_signature.connect(self.display_signature_help)
    line, col = editor.get_cursor_line_column()
    editor.request_hover(line, col)
def selectByIdx(self, rowIdxs):
    """Select given row indexes, without progress bar."""
    chosen = (self.rows[i] for i in rowIdxs)
    self.select(chosen, progress=False)
def setbit(self, key, offset, value):
key = self._encode(key)
index, bits, mask = self._get_bits_and_offset(key, offset)
if index >= len(bits):
bits.extend(b"\x00" * (index + 1 - len(bits)))
prev_val = 1 if (bits[index] & mask) else 0
if value:
bits[index]... | Set the bit at ``offset`` in ``key`` to ``value``. |
def increment_title(title):
    """Increment a string that ends in a number.

    E.g. ``"part 1" -> "part 2"``; leading zeros are not preserved beyond
    what the larger number needs (``"v09" -> "v10"``).

    Raises:
        AttributeError: if ``title`` does not end with digits (unchanged
        from the original behavior).
    """
    # Raw string avoids the invalid-escape-sequence warning '\d' triggers
    # on modern Python.
    count = re.search(r'\d+$', title).group(0)
    return title[:-len(count)] + str(int(count) + 1)
def save_twi(self, rootpath, raw=False, as_int=True):
self.twi = np.ma.masked_array(self.twi, mask=self.twi <= 0,
fill_value=-9999)
self.twi[self.flats] = 0
self.twi.mask[self.flats] = True
self.save_array(self.twi, None, 'twi', rootpath, raw, as_int... | Saves the topographic wetness index to a file |
def __embed_branch_recursive(u, dfs_data):
for v in dfs_data['adj'][u]:
nonplanar = True
if a(v, dfs_data) == u:
if b(v, dfs_data) == u:
successful = __insert_branch(u, v, dfs_data)
if not successful:
nonplanar = True
... | A recursive implementation of the EmbedBranch function, as defined on pages 8 and 22 of the paper. |
def main(name, output, font):
bootstrapped_directory = os.getcwd() + os.sep + name.lower().replace(' ', '-') + os.sep
copy_tree(get_real_path(os.sep + 'my-cool-os-template'), bootstrapped_directory)
start_byte = int('0xb8000', 16)
instructions_list = []
for c in output:
char_as_hex = '0x02'... | Easily bootstrap an OS project to fool HR departments and pad your resume. |
def delete(self):
with self.draft_context():
draft = self.one(Q._uid == self._uid)
if draft:
super(PublisherFrame, draft).delete()
with self.published_context():
published = self.one(Q._uid == self._uid)
if published:
super(... | Delete this document and any counterpart document |
def wait_until_done(self, timeout=None):
start = datetime.now()
if not self.__th:
raise IndraDBRestResponseError("There is no thread waiting to "
"complete.")
self.__th.join(timeout)
now = datetime.now()
dt = now - start
... | Wait for the background load to complete. |
def applyVoucherCodesFinal(sender,**kwargs):
logger.debug('Signal fired to mark voucher codes as applied.')
finalReg = kwargs.pop('registration')
tr = finalReg.temporaryRegistration
tvus = TemporaryVoucherUse.objects.filter(registration=tr)
for tvu in tvus:
vu = VoucherUse(voucher=tvu.... | Once a registration has been completed, vouchers are used and referrers are awarded |
def format_users():
lines = []
u = users()
count = u['count']
if not count:
raise DapiCommError('Could not find any users on DAPI.')
for user in u['results']:
line = user['username']
if user['full_name']:
line += ' (' + user['full_name'] + ')'
lines.append... | Formats a list of users available on Dapi |
def _iter_vals(key):
    """! Iterate over values of a key."""
    # QueryInfoKey returns (num_subkeys, num_values, last_modified).
    num_values = winreg.QueryInfoKey(key)[1]
    for idx in range(num_values):
        yield winreg.EnumValue(key, idx)
def overlap(self, query, subject):
    """Accessory function to check if two ranges overlap.

    Two ranges overlap when any endpoint of one lies within the other.
    """
    # Idiom fix: return the boolean expression directly instead of the
    # `if cond: return True / return False` pattern; bool() keeps the
    # return type identical to the original.
    return bool(self.pt_within(query[0], subject)
                or self.pt_within(query[1], subject)
                or self.pt_within(subject[0], query)
                or self.pt_within(subject[1], query))
def _calculate_average_field_lengths(self):
accumulator = defaultdict(int)
documents_with_field = defaultdict(int)
for field_ref, length in self.field_lengths.items():
_field_ref = FieldRef.from_string(field_ref)
field = _field_ref.field_name
documents_with_fi... | Calculates the average document length for this index |
def unproject(self, xy):
    """Returns the (lng, lat) coordinates in degrees from a position in meters."""
    (x, y) = xy
    lng = x / EARTH_RADIUS * RAD_TO_DEG
    # BUG FIX: the whole inverse-Mercator expression must be converted
    # from radians to degrees; the original multiplied only the pi/2 term
    # by RAD_TO_DEG due to operator precedence.
    lat = (2 * atan(exp(y / EARTH_RADIUS)) - pi / 2) * RAD_TO_DEG
    return (lng, lat)
def training_data(self):
    """Returns (keys, values) of the data dictionary from training.pkl."""
    path = os.path.join(self.repopath, 'training.pkl')
    # Open in binary mode (required by pickle on Python 3) and use a
    # context manager; the original leaked a text-mode file handle.
    with open(path, 'rb') as fp:
        data = pickle.load(fp)
    return data.keys(), data.values()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.