code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def standings(self):
    """Get standings from the community's account.

    Fetches /standings.phtml and returns a list of tab-separated strings:
    position, div id, team name and two stat columns per table row.
    """
    headers = {
        "Content-type": "application/x-www-form-urlencoded",
        "Accept": "text/plain",
        "User-Agent": user_agent,  # module-level UA string
    }
    req = self.session.get('http://' + self.domain + '/standings.phtml',
                           headers=headers).content
    soup = BeautifulSoup(req)
    table = soup.find('table', {'id': 'tablestandings'}).find_all('tr')
    clasificacion = []
    # Plain loop instead of a list comprehension used only for its
    # side effect; also look up the <td> cells once per row.
    for tablas in table[1:]:  # table[0] is the header row
        cells = tablas.find_all('td')
        clasificacion.append('%s\t%s\t%s\t%s\t%s' % (
            tablas.find('td').text,
            tablas.find('div')['id'],
            tablas.a.text,
            cells[3].text,
            cells[4].text,
        ))
    return clasificacion
def uniquify_list(L):
    """Return the unique elements of ``L`` preserving first-seen order.

    Uses a seen-set for O(n) total work instead of the original
    O(n^2) ``L.index`` scan per element. Elements must be hashable
    (the common case; the original equality-scan also accepted
    unhashable elements).
    """
    seen = set()
    unique = []
    for element in L:
        if element not in seen:
            seen.add(element)
            unique.append(element)
    return unique
def store(self, moments):
    """Store a new moments object, merging into bounded storage.

    When storage is full (== self.nsave) the new moments are combined
    into the most recent entry; otherwise they are appended. Afterwards
    adjacent tail entries are merged while self._can_merge_tail() says so,
    keeping the storage list compacted.
    """
    if len(self.storage) == self.nsave:
        # Storage full: fold the new moments into the newest entry.
        self.storage[-1].combine(moments, mean_free=self.remove_mean)
    else:
        self.storage.append(moments)
    # Compact the tail: repeatedly merge the last entry into its
    # predecessor while the merge criterion holds.
    while self._can_merge_tail():
        M = self.storage.pop()
        self.storage[-1].combine(M, mean_free=self.remove_mean)
def _is_memory_usage_qualified(self):
def f(l):
return 'mixed' in l or 'string' in l or 'unicode' in l
return any(f(l) for l in self._inferred_type_levels) | return a boolean if we need a qualified .info display |
def reset(self):
    """Clear the sensorgraph state held in RAM.

    Resets all persisted-* bookkeeping, empties the graph and clears
    per-streamer status. (Despite the original summary mentioning flash,
    only in-memory state is touched here — TODO confirm flash handling
    happens elsewhere.)
    """
    self.persisted_exists = False
    self.persisted_nodes = []
    self.persisted_streamers = []
    self.persisted_constants = []
    self.graph.clear()
    self.streamer_status = {}
def removeJob(self, jobBatchSystemID):
    """Remove a job from the issued-jobs bookkeeping and return its node.

    Decrements the preemptable and/or service counters that were
    incremented when the job was issued, keeping them consistent.
    Raises AssertionError if the id is unknown.
    """
    assert jobBatchSystemID in self.jobBatchSystemIDToIssuedJob
    jobNode = self.jobBatchSystemIDToIssuedJob[jobBatchSystemID]
    if jobNode.preemptable:
        # Must be issued before it can be removed.
        assert self.preemptableJobsIssued > 0
        self.preemptableJobsIssued -= 1
    del self.jobBatchSystemIDToIssuedJob[jobBatchSystemID]
    # Service jobs are tracked by separate counters.
    if jobNode.jobStoreID in self.toilState.serviceJobStoreIDToPredecessorJob:
        if jobNode.preemptable:
            self.preemptableServiceJobsIssued -= 1
        else:
            self.serviceJobsIssued -= 1
    return jobNode
def stopService(self):
    """Stop calling persistent timed events.

    Delegates to the parent service shutdown, then cancels and drops
    any pending timer so no further scheduled calls fire.
    """
    super(_SiteScheduler, self).stopService()
    if self.timer is not None:
        self.timer.cancel()
        self.timer = None
def maximum_address(self):
    """The maximum address of the data, or ``None`` if the file is empty.

    The segment address is expressed in bytes and converted to words.
    """
    top = self._segments.maximum_address
    if top is None:
        return None
    return top // self.word_size_bytes
def cli(env):
    """List object storage accounts.

    Prints a table of account id, username and detected API type
    ('Swift', 'S3', or None when neither vendor matches).
    """
    mgr = SoftLayer.ObjectStorageManager(env.client)
    accounts = mgr.list_accounts()
    table = formatting.Table(['id', 'name', 'apiType'])
    table.sortby = 'id'
    for account in accounts:
        # Reset for every account: previously api_type was initialised
        # once before the loop, so an account matching neither branch
        # silently inherited the previous account's API type.
        api_type = None
        if 'vendorName' in account and account['vendorName'] == 'Swift':
            api_type = 'Swift'
        elif 'Cleversafe' in account['serviceResource']['name']:
            api_type = 'S3'
        table.add_row([
            account['id'],
            account['username'],
            api_type,
        ])
    env.fout(table)
def _get_last_snapshot(config='root'):
    """Return the last existing created snapshot (highest id)."""
    snapshots = list_snapshots(config)
    ordered = sorted(snapshots, key=lambda snap: snap['id'])
    return ordered[-1]
def _send_ssh_pub(self, load, ssh_minions=False):
    """Take a load and send it across the network to ssh minions.

    Only acts when SSH minions are enabled in config AND the caller
    requested them; the send runs in a fire-and-forget thread.
    """
    if self.opts['enable_ssh_minions'] is True and ssh_minions is True:
        log.debug('Send payload to ssh minions')
        # Detached thread: the result of ssh_client.cmd is discarded.
        threading.Thread(target=self.ssh_client.cmd, kwargs=load).start()
def clear(self):
    """Clear the container of all data.

    Resets size/level and rebuilds the head node with empty forward
    links and unit spans across all SKIPLIST_MAXLEVEL levels.
    """
    self._size = 0
    self._level = 1
    # Head sentinel: no value, all-None next pointers, span 1 per level.
    self._head = Node('HEAD', None,
                      [None]*SKIPLIST_MAXLEVEL,
                      [1]*SKIPLIST_MAXLEVEL)
def togglePopup(self):
    """Toggle whether or not the popup is visible.

    Hidden popups are shown; visible popups are closed unless they are
    in Dialog mode (dialogs stay open).
    """
    if not self._popupWidget.isVisible():
        self.showPopup()
    elif self._popupWidget.currentMode() != self._popupWidget.Mode.Dialog:
        self._popupWidget.close()
def rbac_policy_list(request, **kwargs):
    """Return the list of RBAC policies wrapped as RBACPolicy objects."""
    response = neutronclient(request).list_rbac_policies(**kwargs)
    return [RBACPolicy(policy) for policy in response.get('rbac_policies')]
def _vpn_signal_handler(self, args):
    """Called on NetworkManager PropertiesChanged signal.

    Refreshes the module only when the set of active connections
    actually changed (order-insensitive comparison).
    """
    active = "ActiveConnections"
    if active in args.keys() and sorted(self.active) != sorted(args[active]):
        self.active = args[active]
        self.py3.update()
def _close_connection(self):
    """Close the connection to the easyfire unit.

    Closes whichever transport matches the configured mode
    (serial, TCP socket, or file); unknown modes are a no-op.
    """
    mode = self._mode
    if mode == PROP_MODE_SERIAL:
        self._serial.close()
    elif mode == PROP_MODE_TCP:
        self._socket.close()
    elif mode == PROP_MODE_FILE:
        self._file.close()
def acquire(self, resources, prop_name):
    """Starting with self, walk parents until prop_name is found, else None.

    Resolution order: (1) a prop set directly on self; (2) walking up the
    parents, an acquired prop registered for this resource's rtype;
    (3) an acquired prop registered under the 'all' key. Returns None
    implicitly when nothing matches.
    """
    custom_prop = getattr(self.props, prop_name, None)
    if custom_prop:
        return custom_prop
    for parent in self.parents(resources):
        acquireds = parent.props.acquireds
        if acquireds:
            # rtype-specific acquireds take precedence over 'all'.
            rtype_acquireds = acquireds.get(self.rtype)
            if rtype_acquireds:
                prop_acquired = rtype_acquireds.get(prop_name)
                if prop_acquired:
                    return prop_acquired
            all_acquireds = acquireds.get('all')
            if all_acquireds:
                prop_acquired = all_acquireds.get(prop_name)
                if prop_acquired:
                    return prop_acquired
    return
def read(source, channels, start=None, end=None, series_class=TimeSeries,
         scaled=None):
    """Read data from one or more GWF files using the LALFrame API.

    Parameters: `source` is passed to open_data_source; `channels` is an
    iterable of channel names; `start`/`end` are optional GPS bounds
    clipped to the stream's extent; `series_class` determines the output
    container type. `scaled` is unsupported by lalframe and only emits
    a warning. Returns a series_class.DictClass keyed by channel name.
    """
    if scaled is not None:
        # ADC scaling is not available through lalframe.
        warnings.warn(
            "the `scaled` keyword argument is not supported by lalframe, "
            "if you require ADC scaling, please install "
            "python-ldas-tools-framecpp",
        )
    stream = open_data_source(source)
    # Stream epoch as a LAL GPS time.
    epoch = lal.LIGOTimeGPS(stream.epoch.gpsSeconds,
                            stream.epoch.gpsNanoSeconds)
    streamdur = get_stream_duration(stream)
    if start is None:
        start = epoch
    else:
        # Never start before the data actually begins.
        start = max(epoch, lalutils.to_lal_ligotimegps(start))
    if end is None:
        offset = float(start - epoch)
        duration = streamdur - offset
    else:
        # Never read past the end of the stream.
        end = min(epoch + streamdur, lalutils.to_lal_ligotimegps(end))
        duration = float(end - start)
    out = series_class.DictClass()
    for name in channels:
        out[name] = series_class.from_lal(
            _read_channel(stream, str(name), start=start, duration=duration),
            copy=False)
        # Rewind so each channel read starts from the stream epoch.
        lalframe.FrStreamSeek(stream, epoch)
    return out
def str_to_class(s):
    """Map a dotted string name (e.g. ``pkg.mod.Class``) to the object it names.

    Returns the attribute, or None if the module cannot be imported or
    does not define the attribute.

    Fix: ``__import__('a.b')`` returns the *top-level* package ``a``, so
    the original getattr lookup failed for any name nested more than one
    level deep; ``importlib.import_module`` returns the leaf module.
    """
    import importlib

    module_name, _, klass = s.rpartition(".")
    try:
        mod = importlib.import_module(module_name)
    except ImportError:
        return None
    return getattr(mod, klass, None)
def train(self, text, className):
    """Enhance the trained data using the given text and class.

    Increments the class count, then tokenizes the text and counts each
    cleaned token (stop words and punctuation removed) for the class.
    """
    self.data.increaseClass(className)
    tokens = self.tokenizer.tokenize(text)
    for token in tokens:
        # Clean each token before counting it.
        token = self.tokenizer.remove_stop_words(token)
        token = self.tokenizer.remove_punctuation(token)
        self.data.increaseToken(token, className)
def orient_import2(self, event):
    """Open the dialog to import an AzDip format file into the working directory.

    `event` is the GUI event object (unused beyond being the handler arg).
    """
    pmag_menu_dialogs.ImportAzDipFile(self.parent, self.parent.WD)
def _GetCallingPrototypeAsString(self, flow_cls):
output = []
output.append("flow.StartAFF4Flow(client_id=client_id, ")
output.append("flow_name=\"%s\", " % flow_cls.__name__)
prototypes = []
if flow_cls.args_type:
for type_descriptor in flow_cls.args_type.type_infos:
if not type_descriptor.hidden:
prototypes.append("%s=%s" %
(type_descriptor.name, type_descriptor.name))
output.append(", ".join(prototypes))
output.append(")")
return "".join(output) | Get a description of the calling prototype for this flow class. |
def bbox_vflip(bbox, rows, cols):
    """Flip a bounding box vertically around the x-axis.

    Coordinates appear to be normalized to [0, 1] (flip uses ``1 - y``);
    ``rows``/``cols`` are accepted for API symmetry but unused here.
    """
    x_min, y_min, x_max, y_max = bbox
    flipped_y_min = 1 - y_max
    flipped_y_max = 1 - y_min
    return [x_min, flipped_y_min, x_max, flipped_y_max]
def line_range(self, line_number):
    """Return a slice for the given (1-based) line number.

    If the line starts a known logical line, the slice spans that
    logical line; otherwise it covers just the single physical line.
    Raises IndexError for out-of-range numbers.
    """
    if not (0 < line_number <= len(self.lines)):
        raise IndexError('NOTE: Python file line numbers are offset by 1.')
    if line_number in self.logical_lines:
        start, stop, _ = self.logical_lines[line_number]
        return slice(start, stop)
    return slice(line_number, line_number + 1)
def RegisterLateBindingCallback(target_name, callback, **kwargs):
    """Register a callback to be invoked when the named RDFValue is declared.

    Appends (callback, kwargs) under target_name in the module-level
    _LATE_BINDING_STORE, creating the list on first registration.
    """
    _LATE_BINDING_STORE.setdefault(target_name, []).append((callback, kwargs))
def output_to_terminal(sources):
    """Print statistics from the available sources to the terminal, then exit.

    Later sources overwrite earlier keys in the merged summary. Note this
    calls sys.exit() unconditionally at the end.
    """
    results = OrderedDict()
    for source in sources:
        if source.get_is_available():
            source.update()
            results.update(source.get_summary())
    for key, value in results.items():
        sys.stdout.write(str(key) + ": " + str(value) + ", ")
    sys.stdout.write("\n")
    sys.exit()
def _got_request_exception(self, sender, exception, **extra):
    """Signal handler for the got_request_exception signal.

    Logs the exception as a 500 with summary context and marks the
    request (flask.g) as having seen an exception.

    NOTE(review): the local assignment below shadows the ``extra``
    keyword-arguments parameter — the incoming signal kwargs are
    intentionally (?) discarded; confirm before relying on them.
    """
    extra = self.summary_extra()
    extra['errno'] = 500
    self.summary_logger.error(str(exception), extra=extra)
    g._has_exception = True
def unsort_vector(data, indices_of_increasing):
    """Un-permute 1-D ``data`` that was sorted by ``indices_of_increasing``.

    ``indices_of_increasing[k]`` gives the original position of sorted
    element ``k``; the result places each element back there.

    Fix: the original called ``list.index`` once per element (O(n^2));
    inverse-permutation assignment does the same in O(n).
    """
    arr = numpy.asarray(data)
    out = numpy.empty_like(arr)
    # out[original_position_of_k] = sorted_element_k
    out[numpy.asarray(indices_of_increasing)] = arr
    return out
def update(self, async_=False, **kw):
    """Update online model parameters on the server.

    Serializes the relevant fields (which depend on whether this is an
    offline-model-backed service), PUTs them, reloads local state, and
    unless async_ is set, blocks until the service is ready.
    """
    # Accept legacy 'async' keyword alias (reserved word in py3.7+).
    async_ = kw.get('async', async_)
    headers = {'Content-Type': 'application/xml'}
    new_kw = dict()
    # Field set differs for offline-model-backed vs. resource-backed models.
    if self.offline_model_name:
        upload_keys = ('_parent', 'name', 'offline_model_name', 'offline_model_project', 'qos', 'instance_num')
    else:
        upload_keys = ('_parent', 'name', 'qos', '_model_resource', 'instance_num', 'predictor', 'runtime')
    for k in upload_keys:
        new_kw[k] = getattr(self, k)
    new_kw.update(kw)
    # Build a fresh object purely to serialize the payload.
    obj = type(self)(version='0', **new_kw)
    data = obj.serialize()
    self._client.put(self.resource(), data, headers=headers)
    self.reload()
    if not async_:
        self.wait_for_service()
def _move_files_to_compute(compute, project_id, directory, files_path):
    """Move the files under ``files_path`` to a remote compute (generator).

    Uploads every file found under directory/files_path (paths relative
    to ``directory``), then removes the local tree. Must be driven via
    ``yield from`` / iteration — it is a coroutine-style generator.
    """
    location = os.path.join(directory, files_path)
    if os.path.exists(location):
        for (dirpath, dirnames, filenames) in os.walk(location):
            for filename in filenames:
                path = os.path.join(dirpath, filename)
                # Destination path is relative to the project directory.
                dst = os.path.relpath(path, directory)
                yield from _upload_file(compute, project_id, path, dst)
        # Only delete locally after all uploads completed.
        shutil.rmtree(os.path.join(directory, files_path))
def filter(self, sids):
    """Extract a submap of self for the given sids.

    Unknown sids are silently skipped; the result is a new instance
    with the same shape parameters.
    """
    submap = self.__class__(self.shape_y, self.shape_z)
    for sid in sids:
        try:
            submap[sid] = self[sid]
        except KeyError:
            continue
    return submap
def execution_environment():
    """A convenient bundling of the current execution context.

    Collects charm config, relation data (only when run inside a
    relation hook), unit info, peer relations and the OS environment
    into a single dict.
    """
    context = {}
    context['conf'] = config()
    # Relation keys only exist when executing within a relation hook.
    if relation_id():
        context['reltype'] = relation_type()
        context['relid'] = relation_id()
        context['rel'] = relation_get()
    context['unit'] = local_unit()
    context['rels'] = relations()
    context['env'] = os.environ
    return context
def iter_fit_shifts(xy,uv,nclip=3,sigma=3.0):
    """Perform an iterative fit with ``nclip`` sigma-clipping iterations.

    Each iteration drops points whose residual magnitude exceeds
    sigma * std and refits. The surviving coordinates are attached to
    the returned fit dict as 'img_coords'/'ref_coords'.
    """
    fit = fit_shifts(xy,uv)
    if nclip is None: nclip = 0
    for n in range(nclip):
        resids = compute_resids(xy,uv,fit)
        # Per-point residual magnitude.
        resids1d = np.sqrt(np.power(resids[:,0],2)+np.power(resids[:,1],2))
        sig = resids1d.std()
        # Keep only points within sigma*std and refit on them.
        goodpix = resids1d < sigma*sig
        xy = xy[goodpix]
        uv = uv[goodpix]
        fit = fit_shifts(xy,uv)
    fit['img_coords'] = xy
    fit['ref_coords'] = uv
    return fit
def cleanup(self):
    """Release resources used during memory capture.

    Closes the socket and output file if open, and finalizes the
    progress bar if one was shown.
    """
    if self.sock is not None:
        self.sock.close()
    if self.outfile is not None:
        self.outfile.close()
    if self.bar is not None:
        self.update_progress(complete=True)
def push(self, channel_id, data):
    """Push a message with POST ``data`` for ``channel_id``.

    Returns the decoded JSON response body.
    """
    channel_path = self.channel_path(channel_id)
    response = requests.post(channel_path, data)
    return response.json()
def piper(self, in_sock, out_sock, out_addr, onkill):
    """Worker thread for data reading: pump bytes in_sock -> out_sock.

    On EOF from in_sock, half-closes out_sock for writing (killing the
    paired greenlet if the shutdown fails). Send errors are ignored so
    the reader keeps draining. Exits quietly when its greenlet is killed.
    """
    try:
        while True:
            written = in_sock.recv(32768)
            if not written:
                # EOF on the input side: propagate a write-shutdown.
                try:
                    out_sock.shutdown(socket.SHUT_WR)
                except socket.error:
                    # Peer already gone: kill the companion pipe thread.
                    self.threads[onkill].kill()
                break
            try:
                out_sock.sendall(written)
            except socket.error:
                # Best-effort forwarding; drop data if the peer is gone.
                pass
            self.data_handled += len(written)
    except greenlet.GreenletExit:
        return
def ReadHashBlobReferences(self, hashes, cursor):
    """Read blob references for a given set of hashes.

    Returns a dict mapping every requested hash id to a list of blob
    references, or None for hashes with no stored row.
    """
    query = ("SELECT hash_id, blob_references FROM hash_blob_references WHERE "
             "hash_id IN {}").format(mysql_utils.Placeholders(len(hashes)))
    cursor.execute(query, [hash_id.AsBytes() for hash_id in hashes])
    # Pre-seed with None so missing hashes are still present in the result.
    results = {hash_id: None for hash_id in hashes}
    for hash_id, blob_references in cursor.fetchall():
        sha_hash_id = rdf_objects.SHA256HashID.FromBytes(hash_id)
        refs = rdf_objects.BlobReferences.FromSerializedString(blob_references)
        results[sha_hash_id] = list(refs.items)
    return results
def format(self, method, data):
    """Serialize ``data`` for a list or detail response.

    Missing data on a GET raises NotFound; on other methods an empty
    body is returned. Otherwise the configured formatter is applied.
    """
    if data is not None:
        return self._meta.formatter.format(data)
    if method == 'GET':
        raise NotFound()
    return ''
def follow_fd(fd):
    """Dump each non-blank line of input to stdout.

    Parses each line with load() and, when it yields a status, renders
    it via Dump(), flushing stdout/stderr around each write.
    """
    dump = Dump()
    for line in fd:
        if not line.strip():
            continue
        # Flush around each record so output interleaves predictably.
        with flushing(sys.stdout, sys.stderr):
            status = load(line)
            if status:
                dump(status)
def _str_to_datetime(self, str_value):
try:
ldt = [int(f) for f in str_value.split('-')]
dt = datetime.datetime(*ldt)
except (ValueError, TypeError):
return None
return dt | Parses a `YYYY-MM-DD` string into a datetime object. |
def delete_cloud_obj(self, cloud_obj):
    """Delete an object from the configured container."""
    self._connection.delete_object(
        container=self.container_name,
        obj=cloud_obj,
    )
def example_rgb_to_xyz():
    """Example: convert an sRGB color to XYZ (D50 illuminant) and print both."""
    print("=== RGB Example: RGB->XYZ ===")
    # NOTE(review): sRGBColor usually expects 0-1 floats; 120/130/140 look
    # like 8-bit values — confirm is_upscaled handling in the library.
    rgb = sRGBColor(120, 130, 140)
    print(rgb)
    xyz = convert_color(rgb, XYZColor, target_illuminant='D50')
    print(xyz)
    print("=== End Example ===\n")
def _prepare_graph_terms(self, default_screen):
    """Helper for to_graph and to_execution_plan.

    Returns a copy of the column terms with the screen (or the supplied
    default) inserted under SCREEN_NAME.
    """
    columns = self.columns.copy()
    screen = self.screen
    if screen is None:
        screen = default_screen
    columns[SCREEN_NAME] = screen
    return columns
def copy_figure(self):
    """Copy the current figure to the clipboard.

    Raster formats go through a QPixmap; SVG uses a dedicated helper.
    Unsupported formats are ignored (no blink feedback).
    """
    if self.fmt in ['image/png', 'image/jpeg']:
        qpixmap = QPixmap()
        qpixmap.loadFromData(self.fig, self.fmt.upper())
        QApplication.clipboard().setImage(qpixmap.toImage())
    elif self.fmt == 'image/svg+xml':
        svg_to_clipboard(self.fig)
    else:
        return
    # Visual feedback that the copy happened.
    self.blink_figure()
def create(self, repo_name, scm='git', private=True, **kwargs):
    """Create a new repository on own Bitbucket account and return it."""
    url = self.bitbucket.url('CREATE_REPO')
    return self.bitbucket.dispatch('POST', url, auth=self.bitbucket.auth, name=repo_name, scm=scm, is_private=private, **kwargs)
def _set_package_directory():
package_directory = os.path.normpath(os.path.join(os.path.dirname(__file__), "../"))
package_directory not in sys.path and sys.path.append(package_directory) | Sets the Application package directory in the path. |
def unhexlify(blob):
    """Take a hexlified script and turn it back into a string of Python code.

    Expects line-oriented records (first line skipped) where bytes 9..-2
    of each line hold hex data. Validates a 'MP' magic in the first
    chunk, strips its 4-byte header and trailing NUL padding, and
    returns '' on any format/decoding failure.
    """
    lines = blob.split('\n')[1:]
    output = []
    for line in lines:
        # Record layout: fixed 9-char prefix and 2-char suffix around
        # the hex payload — assumed from the slicing; TODO confirm.
        output.append(binascii.unhexlify(line[9:-2]))
    if (output[0][0:2].decode('utf-8') != u'MP'):
        # Missing magic marker: not a valid payload.
        return ''
    output[0] = output[0][4:]
    output[-1] = output[-1].strip(b'\x00')
    script = b''.join(output)
    try:
        result = script.decode('utf-8')
        return result
    except UnicodeDecodeError:
        return ''
def _grid_widgets(self):
    """Place the widgets in the Toplevel using the grid geometry manager."""
    self._canvas.grid(sticky="nswe")
    self.header_label.grid(row=1, column=1, sticky="nswe", pady=5, padx=5)
    self.text_label.grid(row=3, column=1, sticky="nswe", pady=6, padx=5)
def init_celery(project_name):
    """Initialize and return a Celery app for a Django project.

    Points DJANGO_SETTINGS_MODULE at '<project>.settings' (if unset),
    configures Celery from Django settings and autodiscovers tasks.
    """
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', '%s.settings' % project_name)
    app = Celery(project_name)
    app.config_from_object('django.conf:settings')
    app.autodiscover_tasks(settings.INSTALLED_APPS, related_name='tasks')
    return app
def ConvertFromWireFormat(self, value, container=None):
    """Deserialize from the wire format (a plain string payload).

    Parses value[2] into a fresh instance of self.type; ``container``
    is unused here.
    """
    result = self.type()
    ReadIntoObject(value[2], 0, result)
    return result
def indent(txt, spacing=4):
    """Indent given text using custom spacing, default is set to 4.

    Fix: build the pad with string repetition instead of joining a
    one-character list comprehension.
    """
    return prefix(str(txt), ' ' * spacing)
def hex2bin(hexstr):
    """Convert a hexadecimal string to a binary string, zero-filled
    to 4 bits per hex digit.

    Fix: dropped the redundant ``int()`` cast around num_of_bits,
    which is already an int.
    """
    num_of_bits = len(hexstr) * 4
    return bin(int(hexstr, 16))[2:].zfill(num_of_bits)
def tco_return_handle(tokens):
    """Process tail-call-optimizable return statements.

    tokens is (callee, call_args_text) where call_args_text includes the
    surrounding parentheses. Rewrites the call into _coconut_tail_call.
    """
    internal_assert(len(tokens) == 2, "invalid tail-call-optimizable return statement tokens", tokens)
    if tokens[1].startswith("()"):
        # No arguments: any trailing text after '()' is preserved.
        return "return _coconut_tail_call(" + tokens[0] + ")" + tokens[1][2:]
    else:
        # tokens[1][1:] drops the opening '(' but keeps the closing ')'.
        return "return _coconut_tail_call(" + tokens[0] + ", " + tokens[1][1:]
def build_base_parameters(request):
    """Build the parameters to forward from the GET and POST parameters.

    Returns (getParameters, postParameters, files). Keys prefixed with
    'ebuio_' are internal and filtered out everywhere; single-valued
    query keys are flattened to their scalar value.
    """
    def _filtered_params(querydict):
        # Shared GET/POST filtering, previously duplicated inline.
        params = {}
        for key in querydict:
            if key[:6] != 'ebuio_':
                values = querydict.getlist(key)
                params[key] = values[0] if len(values) == 1 else values
        return params

    getParameters = _filtered_params(request.GET)
    if request.method == 'POST':
        postParameters = _filtered_params(request.POST)
    else:
        postParameters = {}
    files = {}
    for key in request.FILES:
        if key[:6] != 'ebuio_':
            files[key] = request.FILES[key]
    return (getParameters, postParameters, files)
def list_devices(self, project_id, conditions=None, params=None):
    """Retrieve the devices in a project, optionally filtered by conditions.

    ``params`` override the default per_page=1000 query; matching raw
    device dicts are wrapped as packet.Device objects.
    """
    default_params = {'per_page': 1000}
    if params:
        default_params.update(params)
    data = self.api('projects/%s/devices' % project_id, params=default_params)
    devices = []
    for device in self.filter(conditions, data['devices']):
        devices.append(packet.Device(device, self.manager))
    return devices
def connected(self, node_id):
    """Return True iff the node_id has an established connection."""
    conn = self._conns.get(node_id)
    return conn is not None and conn.connected()
def csv2yaml(in_file, out_file=None):
    """Convert a CSV SampleSheet to YAML run_info format.

    Writes next to in_file (same stem, .yaml) unless out_file is given;
    returns the output path.
    """
    if out_file is None:
        out_file = "%s.yaml" % os.path.splitext(in_file)[0]
    barcode_ids = _generate_barcode_ids(_read_input_csv(in_file))
    lanes = _organize_lanes(_read_input_csv(in_file), barcode_ids)
    with open(out_file, "w") as out_handle:
        out_handle.write(yaml.safe_dump(lanes, default_flow_style=False))
    return out_file
def _get_persist_command(self):
    """Return the first available persistence command (setsid/nohup), or ''.

    Probes each candidate via `which` over adb; logs a warning when
    neither is present since USB-disconnect tests may then fail.
    """
    for command in [_SETSID_COMMAND, _NOHUP_COMMAND]:
        try:
            if command in self._adb.shell(['which',
                                           command]).decode('utf-8'):
                return command
        except adb.AdbError:
            # Probe failure for this candidate; try the next one.
            continue
    self.log.warning(
        'No %s and %s commands available to launch instrument '
        'persistently, tests that depend on UiAutomator and '
        'at the same time performs USB disconnection may fail',
        _SETSID_COMMAND, _NOHUP_COMMAND)
    return ''
def peer_ips(peer_relation='cluster', addr_key='private-address'):
    """Return a dict mapping peer units to their private-address."""
    return {
        unit: relation_get(addr_key, rid=r_id, unit=unit)
        for r_id in relation_ids(peer_relation)
        for unit in relation_list(r_id)
    }
def update(self, data):
    """Update metadata, handling virtual hierarchy.

    Keys starting with '/' describe children: the first path component
    names the child and the remainder becomes a nested key in its data.
    Other keys update this node's own data directly. No-op for None.
    """
    if data is None:
        return
    for key, value in sorted(data.items()):
        if key.startswith('/'):
            name = key.lstrip('/')
            # Split '<child>/<rest>' — rest (with its leading '/')
            # becomes a key inside the child's data.
            match = re.search("([^/]+)(/.*)", name)
            if match:
                name = match.groups()[0]
                value = {match.groups()[1]: value}
            self.child(name, value)
        else:
            self.data[key] = value
    log.debug("Data for '{0}' updated.".format(self))
    log.data(pretty(self.data))
def add_scanner_param(self, name, scanner_param):
    """Add a scanner parameter and refresh the start_scan command schema.

    Rebuilds the 'scanner_params' element of the start_scan command
    from the full current parameter set.
    """
    assert name
    assert scanner_param
    self.scanner_params[name] = scanner_param
    command = self.commands.get('start_scan')
    command['elements'] = {
        'scanner_params':
        {k: v['name'] for k, v in self.scanner_params.items()}}
def zone_schedules_backup(self, filename):
    """Backup all zones on this control system to the given JSON file.

    Includes the DHW schedule (if present) and every heating zone,
    keyed by zone id with its name and schedule.
    """
    _LOGGER.info("Backing up schedules from ControlSystem: %s (%s)...",
                 self.systemId, self.location.name)
    schedules = {}
    # Domestic hot water is optional and stored like a zone entry.
    if self.hotwater:
        _LOGGER.info("Retrieving DHW schedule: %s...",
                     self.hotwater.zoneId)
        schedule = self.hotwater.schedule()
        schedules[self.hotwater.zoneId] = {
            'name': 'Domestic Hot Water',
            'schedule': schedule}
    for zone in self._zones:
        zone_id = zone.zoneId
        name = zone.name
        _LOGGER.info("Retrieving Zone schedule: %s - %s", zone_id, name)
        schedule = zone.schedule()
        schedules[zone_id] = {'name': name, 'schedule': schedule}
    schedule_db = json.dumps(schedules, indent=4)
    _LOGGER.info("Writing to backup file: %s...", filename)
    with open(filename, 'w') as file_output:
        file_output.write(schedule_db)
    _LOGGER.info("Backup completed.")
def set(self, key, value):
    """Store a key/value pair in redis.

    The key gets the configured prefix; the value is JSON-encoded
    (NumpyEncoder handles numpy scalar/array types).
    """
    key = "{0}{1}".format(self.prefix, key)
    value = json.dumps(value, cls=NumpyEncoder)
    self.redis.set(key, value)
def ToolMatches(tools=None, version='HEAD'):
    """Get the tool paths and versions that were specified.

    Each tool is a (path, version) pair. Empty versions fall back to the
    default; paths are normalized to start with '/' (a trailing '/' is
    stripped and '.' maps to the empty path).
    """
    matches = []
    if not tools:
        return matches
    for tool in tools:
        match_version = tool[1] if tool[1] != '' else version
        path = tool[0]
        if path.endswith('/'):
            match = path[:-1]
        elif path != '.':
            match = path
        else:
            match = ''
        if match != '' and not match.startswith('/'):
            match = '/' + match
        matches.append((match, match_version))
    return matches
def abstracts(soup):
    """Find the article abstracts and format them.

    For each <abstract> tag, collects its type, optional title, a plain
    text 'content' (paragraphs joined with spaces) and an HTML
    'full_content' (<p>-wrapped), skipping DOI paragraphs.
    """
    abstracts = []
    abstract_tags = raw_parser.abstract(soup)
    for tag in abstract_tags:
        abstract = {}
        abstract["abstract_type"] = tag.get("abstract-type")
        title_tag = raw_parser.title(tag)
        if title_tag:
            abstract["title"] = node_text(title_tag)
        abstract["content"] = None
        if raw_parser.paragraph(tag):
            abstract["content"] = ""
            abstract["full_content"] = ""
            good_paragraphs = remove_doi_paragraph(raw_parser.paragraph(tag))
            # Join paragraphs with single spaces (no leading space).
            glue = ""
            for p_tag in good_paragraphs:
                abstract["content"] += glue + node_text(p_tag)
                glue = " "
            for p_tag in good_paragraphs:
                abstract["full_content"] += '<p>' + node_contents_str(p_tag) + '</p>'
        abstracts.append(abstract)
    return abstracts
def parse_csv_response(data, unit_handler):
    """Handle CSV-formatted HTTP responses.

    Splits the body into blank-line-separated datasets, parses each and
    flattens the results with squish().
    """
    return squish([parse_csv_dataset(d, unit_handler) for d in data.split(b'\n\n')])
def create_content(self, cli, width, height):
    """Create a UIContent object for this completions-menu control.

    Renders one line per completion (plus optional meta column) with the
    cursor on the currently selected index; empty when there is no
    active completion state.
    """
    complete_state = cli.current_buffer.complete_state
    if complete_state:
        completions = complete_state.current_completions
        index = complete_state.complete_index
        # Pre-compute column widths once for all lines.
        menu_width = self._get_menu_width(width, complete_state)
        menu_meta_width = self._get_menu_meta_width(width - menu_width, complete_state)
        show_meta = self._show_meta(complete_state)
        def get_line(i):
            c = completions[i]
            is_current_completion = (i == index)
            result = self._get_menu_item_tokens(c, is_current_completion, menu_width)
            if show_meta:
                result += self._get_menu_item_meta_tokens(c, is_current_completion, menu_meta_width)
            return result
        return UIContent(get_line=get_line,
                         cursor_position=Point(x=0, y=index or 0),
                         line_count=len(completions),
                         default_char=Char(' ', self.token))
    return UIContent()
def remove_update_callback(self, group, name=None, cb=None):
    """Remove the supplied callback for a group or a group.name parameter.

    Without a name the callback is removed from the group-level
    registry; with a name it is removed from the '<group>.<name>'
    parameter registry. No-op when cb is falsy or not registered.
    """
    if not cb:
        return
    if not name:
        if group in self.group_update_callbacks:
            self.group_update_callbacks[group].remove_callback(cb)
    else:
        paramname = '{}.{}'.format(group, name)
        if paramname in self.param_update_callbacks:
            self.param_update_callbacks[paramname].remove_callback(cb)
def update(self, pointvol):
    """Update the N-sphere radii using the current set of live points.

    Optionally uses a KD-tree and a worker pool depending on
    configuration, applies an enlargement factor to the ball volume,
    and returns a deep copy of the updated radfriends object.
    """
    if self.use_kdtree:
        kdtree = spatial.KDTree(self.live_u)
    else:
        kdtree = None
    if self.use_pool_update:
        pool = self.pool
    else:
        pool = None
    self.radfriends.update(self.live_u, pointvol=pointvol,
                           rstate=self.rstate, bootstrap=self.bootstrap,
                           pool=pool, kdtree=kdtree)
    if self.enlarge != 1.:
        # Inflate the per-point ball volume by the configured factor.
        self.radfriends.scale_to_vol(self.radfriends.vol_ball *
                                     self.enlarge)
    # Deep copy so callers get a snapshot decoupled from future updates.
    return copy.deepcopy(self.radfriends)
def service_list():
    """List "services" on the REST server; returns the decoded 'dict' field."""
    r = salt.utils.http.query(DETAILS['url']+'service/list', decode_type='json', decode=True)
    return r['dict']
def _c_base_var(self):
if self.opts.no_structs:
return self.name
return 'windll->{}.{}'.format(
self.name, self.opts.base
) | Return the name of the module base variable. |
def increase_weight(self, proxy):
    """Increase a proxy's weight by multiplying inc_ratio, capped at 1.0."""
    proxy.weight = min(proxy.weight * self.inc_ratio, 1.0)
def instruction_LSR_register(self, opcode, register):
    """Logical shift right of a register.

    Reads the register, applies the CPU's LSR operation (which is
    assumed to update flags — confirm in self.LSR), writes back.
    """
    a = register.value
    r = self.LSR(a)
    register.set(r)
def from_int(value):
    """Create a raw 2-byte big-endian parameter value out of an int.

    Raises PyVLXException for non-int input or out-of-range values.
    """
    if not isinstance(value, int):
        raise PyVLXException("value_has_to_be_int")
    if not Parameter.is_valid_int(value):
        raise PyVLXException("value_out_of_range")
    # High byte first, then low byte.
    return bytes([value >> 8 & 255, value & 255])
def removeZeroLenPadding(str, blocksize=AES_blocksize):
    """Remove zero-byte padding whose last byte equals the padding length.

    NOTE(review): the parameter shadows the builtin ``str``; renaming
    would be cleaner but could break keyword callers. The asserts are
    stripped under ``python -O`` — consider raising instead.
    """
    try:
        # py2 str / text input: the last element is a 1-char string.
        pad_len = ord(str[-1])
    except TypeError:
        # py3 bytes indexing already yields an int.
        pad_len = str[-1]
    assert pad_len < blocksize, 'padding error'
    assert pad_len < len(str), 'padding error'
    return str[:-pad_len]
def load_ssl_context(cert_file, pkey_file):
    """Load an SSL context from a certificate and private key file.

    Uses pyOpenSSL with the SSLv23 method (protocol auto-negotiation).
    """
    from OpenSSL import SSL
    ctx = SSL.Context(SSL.SSLv23_METHOD)
    ctx.use_certificate_file(cert_file)
    ctx.use_privatekey_file(pkey_file)
    return ctx
def execute_transaction(conn, statements: Iterable):
    """Execute several statements in a single DB transaction.

    NOTE(review): the explicit commit after the ``with conn`` block is
    likely redundant for drivers where the context manager commits —
    harmless, but confirm the driver's semantics.
    """
    with conn:
        with conn.cursor() as cursor:
            for statement in statements:
                cursor.execute(statement)
    conn.commit()
def vt_ip_check(ip, vt_api):
    """Check VirusTotal for occurrences of an IP address.

    Returns the decoded JSON report, or None for a non-IPv4 input or
    an unparsable response body.
    """
    if not is_IPv4Address(ip):
        return None
    url = 'https://www.virustotal.com/vtapi/v2/ip-address/report'
    parameters = {'ip': ip, 'apikey': vt_api}
    response = requests.get(url, params=parameters)
    try:
        return response.json()
    except ValueError:
        # Empty / non-JSON body (e.g. rate-limited responses).
        return None
def do_rm(self, path):
    """Delete a file or directory and refresh completion caches.

    ``path`` arrives as a parsed argument list; only the first entry
    is used, resolved relative to the current path.
    """
    path = path[0]
    self.n.delete(self.current_path + path)
    # Refresh tab-completion candidates after the deletion.
    self.dirs = self.dir_complete()
    self.files = self.file_complete()
def modeldeclarations(self):
    """Attribute declarations of the Cython model class.

    Emits the cdef class header plus public attribute declarations;
    numconsts/numvars are included only when the wrapped model
    defines them.
    """
    lines = Lines()
    lines.add(0, '@cython.final')
    lines.add(0, 'cdef class Model(object):')
    lines.add(1, 'cdef public int idx_sim')
    lines.add(1, 'cdef public Parameters parameters')
    lines.add(1, 'cdef public Sequences sequences')
    if hasattr(self.model, 'numconsts'):
        lines.add(1, 'cdef public NumConsts numconsts')
    if hasattr(self.model, 'numvars'):
        lines.add(1, 'cdef public NumVars numvars')
    return lines
def _generate_relative_positions_matrix(length_q, length_k,
                                        max_relative_position,
                                        cache=False):
    """Generate a matrix of relative positions between inputs.

    Distances are clipped to [-max_relative_position,
    max_relative_position] and shifted to be non-negative so they can
    index an embedding table. With cache=True (incremental decoding)
    only the single-query row is produced.
    """
    if not cache:
        if length_q == length_k:
            range_vec_q = range_vec_k = tf.range(length_q)
        else:
            # Queries attend to the trailing length_q key positions.
            range_vec_k = tf.range(length_k)
            range_vec_q = range_vec_k[-length_q:]
        distance_mat = range_vec_k[None, :] - range_vec_q[:, None]
    else:
        # Single new query position relative to all cached keys.
        distance_mat = tf.expand_dims(tf.range(-length_k+1, 1, 1), 0)
    distance_mat_clipped = tf.clip_by_value(distance_mat, -max_relative_position,
                                            max_relative_position)
    # Shift into [0, 2*max_relative_position] for embedding lookup.
    final_mat = distance_mat_clipped + max_relative_position
    return final_mat
def digraph_walker_backwards(graph, element, call_back):
    """Walk the digraph backwards from element, invoking call_back.

    NOTE(review): each predecessor receives call_back twice — once in
    the explicit loop here and once again as the root of its own
    recursive call; nodes reachable through several paths are also
    revisited. Confirm whether duplicate invocations are intended.
    """
    call_back(graph, element)
    for predecessor in graph.predecessors(element):
        call_back(graph, predecessor)
    for predecessor in graph.predecessors(element):
        digraph_walker_backwards(graph, predecessor, call_back)
def package_assets(example_path):
    """Generate pseudo-packages for the examples directory.

    Regenerates examples, walks the tree collecting packages/extensions
    (via the module-level walker), and records the results into the
    global setup_args used by setup.py.
    """
    examples(example_path, force=True, root=__file__)
    for root, dirs, files in os.walk(example_path):
        walker(root, dirs+files)
    # Mutates the module-level setup_args in place.
    setup_args['packages'] += packages
    for p, exts in extensions.items():
        if exts:
            setup_args['package_data'][p] = exts
def normalized_messages(self, no_field_name='_entity'):
    """Return all the error messages as a dictionary.

    Dict messages pass through unchanged; otherwise the messages are
    keyed by each field name, or by ``no_field_name`` when the error
    is not tied to any field.
    """
    messages = self.messages
    if isinstance(messages, dict):
        return messages
    names = self.field_names
    if not names:
        return {no_field_name: messages}
    return {name: messages for name in names}
def pawns_at(self, x, y):
    """Iterate over pawns that collide with the given point (generator)."""
    for pawn in self.pawn.values():
        if pawn.collide_point(x, y):
            yield pawn
def showBindingsForActionSet(self, unSizeOfVRSelectedActionSet_t, unSetCount, originToHighlight):
    """Show the current bindings for all actions in the specified action sets.

    Thin ctypes wrapper; returns (result, pSets) where pSets is the
    out-parameter structure filled by the native call.
    """
    fn = self.function_table.showBindingsForActionSet
    pSets = VRActiveActionSet_t()
    result = fn(byref(pSets), unSizeOfVRSelectedActionSet_t, unSetCount, originToHighlight)
    return result, pSets
def connect(self, region):
    """Create a connection to the API server (AWS EC2 or Eucalyptus)."""
    if self.eucalyptus:
        conn = boto.connect_euca(host=self.eucalyptus_host)
        # Pin the API version Eucalyptus understands.
        conn.APIVersion = '2010-08-31'
    else:
        conn = self.connect_to_aws(ec2, region)
    return conn
def show(self, ax:plt.Axes=None, figsize:tuple=(3,3), title:Optional[str]=None, hide_axis:bool=True, **kwargs):
    "Show the `ImagePoints` on `ax`, creating a new figure when none is given."
    if ax is None: _,ax = plt.subplots(figsize=figsize)
    # Rescale flow to pixel coords and swap axes for scatter's (x, y).
    pnt = scale_flow(FlowField(self.size, self.data), to_unit=False).flow.flip(1)
    params = {'s': 10, 'marker': '.', 'c': 'r', **kwargs}
    ax.scatter(pnt[:, 0], pnt[:, 1], **params)
    if hide_axis: ax.axis('off')
    if title: ax.set_title(title)
def create(cls, user, **kwargs):
    """Create a sub-resource (tornado coroutine).

    Validates that the parent exists and is editable; non-editable
    parents reject child creation. The created resource gets its parent
    attached and is returned via tornado's Return.
    """
    parent_id = kwargs.get(cls.parent_resource.resource_type + '_id')
    try:
        parent = yield cls.parent_resource.get(parent_id)
    except couch.NotFound:
        msg = 'Parent {} with id {} not found'.format(
            cls.parent_resource.resource_type,
            parent_id)
        raise exceptions.ValidationError(msg)
    # If the parent resource is not in an editable state, children
    # must not be created under it.
    if not parent.editable:
        err = 'Cannot create child of {} resource'.format(parent.state.name)
        raise exceptions.Unauthorized(err)
    resource = yield super(SubResource, cls).create(user, **kwargs)
    resource._parent = parent
    raise Return(resource)
def mix_wave(src, dst):
    """Mix two wave bodies into one, writing into (and returning) the longer one.

    Samples are assumed to be 0-255 values centered at 128; quiet pairs
    are multiplied, louder pairs use the complementary blend.
    """
    if len(src) > len(dst):
        # Always mix the shorter buffer into the longer one.
        src, dst = dst, src
    for idx in range(len(src)):
        s_val = src[idx]
        d_val = dst[idx]
        if s_val < 128 and d_val < 128:
            mixed = s_val * d_val / 128
        else:
            mixed = 2 * (s_val + d_val) - s_val * d_val / 128 - 256
        dst[idx] = int(mixed)
    return dst
def _compile_and_collapse(self):
    """Actually compile the requested regex.

    Compiles with the stored args/kwargs and mirrors the configured
    attributes of the real compiled pattern onto this proxy object.
    """
    self._real_regex = self._real_re_compile(*self._regex_args,
                                             **self._regex_kwargs)
    for attr in self._regex_attributes_to_copy:
        setattr(self, attr, getattr(self._real_regex, attr))
def to_internal_value(self, data):
    """Restore a model instance from its URL.

    Returns None for empty input; raises ValidationError for malformed
    URLs, unresolvable objects, or models outside self.related_models.
    """
    if not data:
        return None
    request = self._get_request()
    user = request.user
    try:
        obj = core_utils.instance_from_url(data, user=user)
        model = obj.__class__
    except ValueError:
        raise serializers.ValidationError(_('URL is invalid: %s.') % data)
    except (Resolver404, AttributeError, MultipleObjectsReturned, ObjectDoesNotExist):
        raise serializers.ValidationError(_("Can't restore object from url: %s") % data)
    # Only whitelisted models may participate in this relationship.
    if model not in self.related_models:
        raise serializers.ValidationError(_('%s object does not support such relationship.') % six.text_type(obj))
    return obj
def secured_apps_copy(self, apps):
    """Return the https app list: all apps except the Let's Encrypt verify app."""
    excluded = (self.LETSENCRYPT_VERIFY_APP_NAME,)
    secure_apps = []
    for app_name, path in apps:
        if app_name not in excluded:
            secure_apps.append([app_name, path])
    return secure_apps
def any_text_to_fernet_key(self, text):
    """Convert any text to a fernet key for encryption.

    Derives an MD5 fingerprint of the text and base64-encodes it.
    NOTE(review): Fernet requires *urlsafe* base64 of 32 bytes;
    standard b64encode of the (hex?) digest may not satisfy that —
    verify against the fingerprint helper's output format.
    """
    md5 = fingerprint.fingerprint.of_text(text)
    fernet_key = base64.b64encode(md5.encode("utf-8"))
    return fernet_key
def main_view(request, ident, stateless=False, cache_id=None, **kwargs):
    """Main view for a dash app: locate it and return its rendered index."""
    _, app = DashApp.locate_item(ident, stateless, cache_id=cache_id)
    view_func = app.locate_endpoint_function()
    resp = view_func()
    return HttpResponse(resp)
def _get_file_list(load):
    """Get a sorted list of all files on the fileserver for an environment.

    Walks every configured repo at the ref matching load['saltenv'],
    mapping manifest paths under each repo's mountpoint; paths escaping
    the repo root are skipped. Empty list for unknown environments.
    """
    if 'env' in load:
        # Drop the legacy 'env' key (superseded by 'saltenv').
        load.pop('env')
    if 'saltenv' not in load or load['saltenv'] not in envs():
        return []
    ret = set()
    for repo in init():
        repo['repo'].open()
        ref = _get_ref(repo, load['saltenv'])
        if ref:
            manifest = repo['repo'].manifest(rev=ref[1])
            for tup in manifest:
                relpath = os.path.relpath(tup[4], repo['root'])
                # Ignore files outside the configured repo root.
                if not relpath.startswith('../'):
                    ret.add(os.path.join(repo['mountpoint'], relpath))
        repo['repo'].close()
    return sorted(ret)
def _dirdiffandupdate(self, dir1, dir2):
    """Private helper: diff dir1 against dir2 and apply updates."""
    self._dowork(dir1, dir2, None, self._update)
def _get_session(region, key, keyid, profile):
    """Build a boto3 session from explicit args, a profile, or salt config.

    A profile (name or dict) overrides key/keyid/region; otherwise the
    region falls back to the datapipeline.region option and finally
    'us-east-1'.
    """
    if profile:
        # Profile may be a config option name or an inline dict.
        if isinstance(profile, six.string_types):
            _profile = __salt__['config.option'](profile)
        elif isinstance(profile, dict):
            _profile = profile
        key = _profile.get('key', None)
        keyid = _profile.get('keyid', None)
        region = _profile.get('region', None)
    if not region and __salt__['config.option']('datapipeline.region'):
        region = __salt__['config.option']('datapipeline.region')
    if not region:
        region = 'us-east-1'
    return boto3.session.Session(
        region_name=region,
        aws_secret_access_key=key,
        aws_access_key_id=keyid,
    )
def _CollectSignedData(self, extent):
    """Extract the signedData blob from a PECOFF binary and parse its first layer.

    Reads (start, length) from the file and walks WIN_CERTIFICATE
    entries: <IHH> little-endian header (dwLength, wRevision,
    wCertificateType) followed by the cert blob, entries padded to
    8 bytes. Returns a list of (revision, cert_type, cert_bytes).
    """
    start, length = extent
    self.file.seek(start, os.SEEK_SET)
    buf = self.file.read(length)
    signed_data = []
    while len(buf) >= 8:
        dw_length, w_revision, w_cert_type = struct.unpack('<IHH', buf[:8])
        if dw_length < 8:
            # Malformed entry; stop and return what we have.
            return signed_data
        b_cert = buf[8:dw_length]
        # Advance to the next entry, rounded up to 8-byte alignment.
        buf = buf[(dw_length + 7) & 0x7ffffff8:]
        signed_data.append((w_revision, w_cert_type, b_cert))
    return signed_data
def make_vel(self):
    """Make a set of velocities to be randomly chosen for emitted particles.

    Draws 16 normal samples and clamps magnitudes away from zero
    (minimum 0.125/size, sign preserved) so no particle is ~static.
    """
    self.vel = random.normal(self.vel_mu, self.vel_sigma, 16)
    for i, vel in enumerate(self.vel):
        if abs(vel) < 0.125 / self._size:
            if vel < 0:
                self.vel[i] = -0.125 / self._size
            else:
                self.vel[i] = 0.125 / self._size
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.