code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def round_to_int(number, precision):
    """Round ``number`` to the nearest multiple of ``precision``.

    Both arguments are coerced to int and the result is an int.  The
    original used float division (``precision / 2``), so in Python 3 it
    returned a float despite the function's name.
    """
    precision = int(precision)
    # Integer half keeps the whole computation in int arithmetic.  For odd
    # precisions no integer multiple can fall in the half-step gap, so the
    # rounding result is unchanged -- only the return type is fixed.
    rounded = (int(number) + precision // 2) // precision * precision
    return rounded
def _create_id(self):
cursor = self._db.cursor()
cursor.execute('INSERT INTO Ids DEFAULT VALUES')
return cursor.lastrowid | Returns a freshly created DB-wide unique ID. |
def hasFeature(self, prop, check_softs=False):
    """Return whether a property with that name exists.

    With ``check_softs`` enabled, feature sets registered under
    ``SoftFeatures.SOFT`` are searched recursively as well.
    """
    if prop in self.props:
        return True
    if not check_softs:
        return False
    soft_sets = self.props.get(SoftFeatures.SOFT, [])
    return any(fs.hasFeature(prop) for fs in soft_sets)
def namedb_update_must_equal( rec, change_fields ):
    """Generate the list of fields that must stay the same across an update.

    Every key of ``rec`` not listed in ``change_fields`` must keep its
    value.  When ``change_fields`` is empty, nothing is pinned.
    """
    if len(change_fields) == 0:
        return []
    return [field for field in rec.keys() if field not in change_fields]
def unholdAction(self):
    """Unhold the action from being blocked on the leave event.

    Re-enables action switching and re-syncs the current action with
    whatever currently lies under the mouse cursor.
    """
    self._actionHeld = False
    # Map the global cursor position into widget coordinates, then make
    # the action under the cursor current again.
    point = self.mapFromGlobal(QCursor.pos())
    self.setCurrentAction(self.actionAt(point))
def during(self, year, month):
    """Return Transfers created within the given year and month."""
    matching = self.filter(created__year=year, created__month=month)
    return matching
def push(self, *args, **kwargs):
    """Push commits to the remote, streaming git's output to stdout.

    Reads the child process one byte at a time so progress output is
    echoed live; returns the accumulated (stripped) stdout bytes.

    Raises:
        GitError: when the underlying git command exits non-zero.
    """
    stdout = six.b('')
    cmd = self.git.push(as_process=True, *args, **kwargs)
    while True:
        # Single-byte reads keep progress output responsive.
        output = cmd.stdout.read(1)
        sys.stdout.write(output.decode('utf-8'))
        sys.stdout.flush()
        stdout += output
        if output == six.b(""):
            break
    try:
        cmd.wait()
    except GitCommandError as error:
        message = "'{0}' returned exit status {1}".format(
            ' '.join(str(c) for c in error.command),
            error.status
        )
        raise GitError(message, stderr=error.stderr, stdout=stdout)
    return stdout.strip()
def save_email(self, list_name, email_msg, index):
    """Save an email message into the database.

    Skips messages without a Message-ID.  Creates the mailing list on
    first sight; otherwise only stores the message when it is not already
    present in one of the list's threads.  Always records the last
    imported index on the list.
    """
    msg_id = email_msg.get('Message-ID')
    if not msg_id:
        return
    mailinglist, created = MailingList.objects.get_or_create(
        name=list_name
    )
    mailinglist.last_imported_index = index
    if created:
        self.create_email(mailinglist, email_msg)
    else:
        try:
            # EAFP: look the message up and only create it when missing.
            Message.all_objects.get(
                message_id=msg_id,
                thread__mailinglist=mailinglist
            )
        except Message.DoesNotExist:
            self.create_email(mailinglist, email_msg)
    mailinglist.save()
def via_clb_upi(self, clb_upi, det_id):
    """Return the DOM matching the given CLB UPI and detector OID.

    NOTE(review): when no entry matches, this logs critically and falls
    through, implicitly returning None -- confirm callers handle that.
    """
    try:
        return DOM.from_json([
            d for d in self._json
            if d["CLBUPI"] == clb_upi and d["DetOID"] == det_id
        ][0])
    except IndexError:
        log.critical("No DOM found for CLB UPI '{0}'".format(clb_upi))
def tile_sprite(self, out_format="sprite.json", out_folder=None):
    """Download this service's sprite image/metadata resource.

    Args:
        out_format: sprite resource name to request (e.g. "sprite.json").
        out_folder: download directory; defaults to the system temp dir.

    Returns:
        Whatever ``self._get`` returns for the downloaded resource.
    """
    url = "{url}/resources/sprites/{f}".format(url=self._url,
                                               f=out_format)
    if out_folder is None:
        out_folder = tempfile.gettempdir()
    # NOTE(review): proxy_url is populated from self._proxy_host while
    # proxy_port comes from self._proxy_port -- confirm the naming.
    return self._get(url=url,
                     param_dict={},
                     out_folder=out_folder,
                     securityHandler=self._securityHandler,
                     proxy_port=self._proxy_port,
                     proxy_url=self._proxy_host)
def device_status(self):
    """Return the status of the device as a string.

    Falls back to the empty-status representation when the nested
    ``status``/``status1`` keys are absent from the payload.
    """
    try:
        raw_status = self.data.get('status').get('status1')
        return self.device_status_simple(raw_status)
    except (KeyError, AttributeError):
        return self.device_status_simple('')
def unpack_attribute(att):
    """Unpack an embedded attribute into a Python or numpy object.

    Returns:
        tuple: ``(attribute name, decoded value)``.
    """
    if att.unsigned:
        log.warning('Unsupported unsigned attribute!')
    if att.len == 0:
        val = None
    elif att.dataType == stream.STRING:
        val = att.sdata
    elif att.dataType:
        # Typed numeric payload: big-endian buffer of att.len items.
        val = np.frombuffer(att.data,
                            dtype='>' + _dtypeLookup[att.dataType], count=att.len)
    elif att.type:
        val = np.frombuffer(att.data,
                            dtype=_attrConverters[att.type], count=att.len)
    elif att.sdata:
        val = att.sdata
    else:
        # NOTE(review): this fallback indexes _dtypeLookup with a falsy
        # dataType, which looks unreachable/suspect -- confirm against
        # the stream format.
        val = np.array(att.data, dtype=_dtypeLookup[att.dataType])
    if att.len == 1:
        # Scalar attributes are unwrapped from their 1-element container.
        val = val[0]
    return att.name, val
def _get_view_result(view, raw_result, **kwargs):
if raw_result:
return view(**kwargs)
if kwargs:
return Result(view, **kwargs)
return view.result | Get view results helper. |
def runGetCallSet(self, id_):
    """Return the call set with the given compound id.

    Resolves dataset -> variant set -> call set from the compound id,
    then renders it through the standard GET pipeline.
    """
    compoundId = datamodel.CallSetCompoundId.parse(id_)
    dataset = self.getDataRepository().getDataset(compoundId.dataset_id)
    variantSet = dataset.getVariantSet(compoundId.variant_set_id)
    callSet = variantSet.getCallSet(id_)
    return self.runGetRequest(callSet)
def create_bcm_socket(channel):
    """Create a CAN broadcast-manager socket connected to ``channel``.

    Uses native socket.connect when the interpreter supports CAN,
    otherwise falls back to a raw libc connect on the file descriptor.
    """
    s = socket.socket(PF_CAN, socket.SOCK_DGRAM, CAN_BCM)
    if HAS_NATIVE_SUPPORT:
        s.connect((channel,))
    else:
        addr = get_addr(s, channel)
        libc.connect(s.fileno(), addr, len(addr))
    return s
def copy(self, _):
    """Return a copy of this SimMemory with branched pages and copied strategies."""
    c = SimSymbolicDbgMemory(
        mem=self.mem.branch(),
        memory_id=self.id,
        endness=self.endness,
        abstract_backer=self._abstract_backer,
        # Strategies are stateful, so the copy gets its own instances.
        read_strategies=[ s.copy() for s in self.read_strategies ],
        write_strategies=[ s.copy() for s in self.write_strategies ],
        stack_region_map=self._stack_region_map,
        generic_region_map=self._generic_region_map
    )
    return c
def _wrap_result(self, result, block=None, obj=None):
    """Wrap a single raw result back into a pandas object.

    ndarray results become a Series (1-D, using the selected object's
    index) or the original frame type; timedelta blocks round-trip
    through to_timedelta to preserve their dtype.  Anything else passes
    through untouched.
    """
    if obj is None:
        obj = self._selected_obj
    index = obj.index
    if isinstance(result, np.ndarray):
        if block is not None:
            if is_timedelta64_dtype(block.values.dtype):
                from pandas import to_timedelta
                result = to_timedelta(
                    result.ravel(), unit='ns').values.reshape(result.shape)
        if result.ndim == 1:
            from pandas import Series
            return Series(result, index, name=obj.name)
        # NOTE(review): the 2-D path dereferences block.columns even
        # though block may be None -- confirm callers always pass one.
        return type(obj)(result, index=index, columns=block.columns)
    return result
def _validate_incident_date_range(incident, numdays):
try:
datetime_object = datetime.datetime.strptime(incident.get('date'), '%m/%d/%y %I:%M %p')
except ValueError:
raise ValueError("Incorrect date format, should be MM/DD/YY HH:MM AM/PM")
timedelta = datetime.timedelta(days=numdays)
today = datetime.datetime.now()
if today - datetime_object <= timedelta:
return True
return False | Returns true if incident is within date range |
def save_json_metadata(self, package_info: Dict) -> bool:
    """Persist freshly fetched JSON metadata to disk.

    Writes the JSON file via utils.rewrite and maintains the PyPI-style
    symlink next to it.

    Returns:
        bool: False when the JSON could not be written, True otherwise.
    """
    try:
        with utils.rewrite(self.json_file) as jf:
            dump(package_info, jf, indent=4, sort_keys=True)
    except Exception as e:
        logger.error(
            "Unable to write json to {}: {}".format(self.json_file, str(e))
        )
        return False
    symlink_dir = self.json_pypi_symlink.parent
    if not symlink_dir.exists():
        symlink_dir.mkdir()
    try:
        self.json_pypi_symlink.symlink_to(self.json_file)
    except FileExistsError:
        # Symlink already maintained by a previous run.
        pass
    return True
def css_files(self):
    """Return the list of CSS file paths found under the BokehJS directory.

    Walks the tree rooted at ``bokehjsdir()`` and collects every file
    whose name ends in ``.css``.
    """
    bokehjsdir = self.bokehjsdir()
    # Renamed from the misleading ``js_files`` -- this accumulates CSS paths.
    css_files = []
    for root, dirnames, files in os.walk(bokehjsdir):
        for fname in files:
            if fname.endswith(".css"):
                css_files.append(join(root, fname))
    return css_files
def _connect_signals():
    """Connect post_save/post_delete signals for every configured index model."""
    for index in settings.get_index_names():
        for model in settings.get_index_models(index):
            _connect_model_signals(model)
def ensure_context(**vars):
    """Ensure a context is on the stack, creating (and later popping) one if needed.

    NOTE(review): this is a generator that yields once -- presumably
    wrapped with ``contextlib.contextmanager`` at the definition site;
    confirm.
    """
    ctx = _context_stack.top
    stacked = False
    if not ctx:
        ctx = Context()
        stacked = True
        _context_stack.push(ctx)
    ctx.update(vars)
    try:
        yield ctx
    finally:
        # Only pop what we pushed ourselves.
        if stacked:
            _context_stack.pop()
def bump(db, aid):
    """Bump the regular-episode watched count for anime ``aid`` by one.

    No-op for anime already marked complete.  Updates watched state and
    completion status inside a single transaction.
    """
    anime = lookup(db, aid)
    if anime.complete:
        return
    episode = anime.watched_episodes + 1
    with db:
        set_watched(db, aid, get_eptype(db, 'regular').id, episode)
        set_status(
            db, aid,
            # Complete only when an end date exists and we reached the count.
            anime.enddate and episode >= anime.episodecount,
            episode)
def valid_host_with_port(hostport):
    """Match a hostname or an IP address, optionally followed by ``:port``."""
    if ":" in hostport:
        host, port = hostport.rsplit(":", 1)
    else:
        host, port = hostport, None
    if not (valid_ipv4(host) or valid_host(host)):
        return False
    if port is not None and not valid_port(port):
        return False
    return True
def render_items(self, placeholder, items, parent_object=None, template_name=None, cachable=None):
    """The main placeholder rendering sequence.

    Resolves cached output per item where possible, renders the rest,
    and merges everything into a single output object.
    """
    is_queryset = False
    if hasattr(items, "non_polymorphic"):
        is_queryset = True
        # Defer polymorphic upcasting until we know which items actually
        # need real instances (cache hits don't).
        if not items.polymorphic_disabled and items._result_cache is None:
            items = items.non_polymorphic()
    if not items:
        logger.debug("- no items in placeholder '%s'", get_placeholder_debug_name(placeholder))
        return ContentItemOutput(mark_safe(u"<!-- no items in placeholder '{0}' -->".format(escape(get_placeholder_name(placeholder)))), cacheable=True)
    result = self.result_class(
        request=self.request,
        parent_object=parent_object,
        placeholder=placeholder,
        items=items,
        all_cacheable=self._can_cache_merged_output(template_name, cachable),
    )
    if self.edit_mode:
        # Edit-mode output embeds wrappers that must never be cached.
        result.set_uncachable()
    if is_queryset:
        self._fetch_cached_output(items, result=result)
        result.fetch_remaining_instances()
    else:
        result.add_remaining_list(items)
    if result.remaining_items:
        self._render_uncached_items(result.remaining_items, result=result)
    return self.merge_output(result, items, template_name)
def glob_files_locally(folder_path, pattern):
    """Yield ``(full_path, relative_path)`` for files matching ``pattern``.

    A falsy pattern matches every file under ``folder_path``.
    """
    if pattern:
        pattern = os.path.join(folder_path, pattern.lstrip('/'))
    else:
        pattern = None
    prefix_len = len(folder_path) + 1
    for root, _, filenames in os.walk(folder_path):
        for name in filenames:
            full_path = os.path.join(root, name)
            if not pattern or _match_path(full_path, pattern):
                yield (full_path, full_path[prefix_len:])
def generate_heightmap(self, buffer=False, as_array=False):
    """Return a heightmap of the highest solid blocks in this chunk.

    For each (x, z) column this records y+1 of the topmost block whose id
    is not in the non-solid list.

    Args:
        buffer: if True, return a BytesIO of a big-endian length prefix
            followed by the packed heightmap bytes.
        as_array: if True, return the plain list of heights instead of
            packed bytes.
    """
    non_solids = [0, 8, 9, 10, 11, 38, 37, 32, 31]
    if buffer:
        return BytesIO(pack(">i", 256)+self.generate_heightmap())
    else:
        # Renamed from ``bytes`` to avoid shadowing the builtin.
        heights = []
        for z in range(16):
            for x in range(16):
                # Scan each column from the sky downwards.
                for y in range(127, -1, -1):
                    offset = y + z*128 + x*128*16
                    if (self.blocksList[offset] not in non_solids or y == 0):
                        heights.append(y+1)
                        break
        if (as_array):
            return heights
        else:
            # array.tostring() was removed in Python 3.9; tobytes() is
            # the byte-identical replacement.
            return array.array('B', heights).tobytes()
def _getf(self, id):
    """Open a cached file with the given id for reading.

    Best-effort: expired entries are purged first, and any failure
    (missing file, bad permissions) silently yields None so callers
    treat it as a cache miss.
    """
    try:
        filename = self.__filename(id)
        self.__remove_if_expired(filename)
        return self.__open(filename, "rb")
    except Exception:
        # Deliberate swallow: a broken cache entry is just a miss.
        pass
def _GenerateClientInfo(self, client_id, client_fd):
    """Yield archive chunks for a client's client_info.yaml summary."""
    summary_dict = client_fd.ToPrimitiveDict(stringify_leaf_fields=True)
    summary = yaml.Dump(summary_dict).encode("utf-8")
    client_info_path = os.path.join(self.prefix, client_id, "client_info.yaml")
    # Synthesize a minimal stat result: mode 0644 plus the payload size.
    st = os.stat_result((0o644, 0, 0, 0, 0, 0, len(summary), 0, 0, 0))
    yield self.archive_generator.WriteFileHeader(client_info_path, st=st)
    yield self.archive_generator.WriteFileChunk(summary)
    yield self.archive_generator.WriteFileFooter()
def download(self, project, filename, targetfilename, loadmetadata=None):
    """Download an output file from ``project`` to ``targetfilename``.

    ``loadmetadata`` falls back to the client-wide setting when None.
    """
    if loadmetadata is None: loadmetadata = self.loadmetadata
    f = clam.common.data.CLAMOutputFile(self.url + project, filename, loadmetadata, self)
    f.copy(targetfilename)
def _get_query(self, **query_dict):
param_dict = query_dict.copy()
return self._send_query(do_post=False, **param_dict) | Perform a GET query against Solr and return the response as a Python dict. |
def validate_required_attributes(self, *attributes: str) -> None:
    """Validate that the schema spec contains the given required attributes.

    Delegates to the module-level function of the same name and records
    any resulting errors on this object.
    """
    self.add_errors(
        validate_required_attributes(self.fully_qualified_name, self._spec, *attributes))
def _make_headers(config, kwargs):
headers = kwargs.get('headers')
headers = headers.copy() if headers is not None else {}
headers['User-Agent'] = config.args.user_agent
kwargs = kwargs.copy()
kwargs['headers'] = headers
return kwargs | Replace the kwargs with one where the headers include our user-agent |
def _init_rgb(self, r: int, g: int, b: int) -> None:
    """Initialize this color from red, green, blue components.

    In rgb mode the exact values are kept; otherwise they are snapped to
    the nearest terminal color and the escape code is derived from it.
    """
    if self.rgb_mode:
        self.rgb = (r, g, b)
        self.hexval = rgb2hex(r, g, b)
    else:
        self.rgb = hex2rgb(rgb2termhex(r, g, b))
        self.hexval = rgb2termhex(r, g, b)
        self.code = hex2term(self.hexval)
def ln(self, h=''):
    """Line Feed; default value is last cell height."""
    # Return to the left margin.
    self.x=self.l_margin
    # The default '' sentinel means "move down by the last cell height";
    # a numeric h moves down by that amount.
    # NOTE(review): ``basestring`` is Python 2 only.
    if(isinstance(h, basestring)):
        self.y+=self.lasth
    else:
        self.y+=h
def authentication_required(function):
    """Decorator for methods that require authentication.

    The wrapped method raises AuthenticationError unless the instance
    carries a token or an API key.
    """
    import functools

    # functools.wraps preserves __name__/__doc__ of the decorated method,
    # which the original decorator clobbered.
    @functools.wraps(function)
    def wrapped(self, *args, **kwargs):
        if not (self.token or self.apiKey):
            msg = "You must be authenticated to use this method"
            raise AuthenticationError(msg)
        return function(self, *args, **kwargs)
    return wrapped
def prepare_storage_dir(storage_directory):
    """Expand ``~`` in the path and ensure the directory exists.

    Returns the expanded path.  Uses makedirs with exist_ok so missing
    intermediate directories are created and concurrent creation does
    not race (os.mkdir failed on both).
    """
    storage_directory = os.path.expanduser(storage_directory)
    os.makedirs(storage_directory, exist_ok=True)
    return storage_directory
def _edit1(self, w):
    """Return the set of strings at edit distance 1 from ``w``.

    Generates deletions, adjacent transpositions, single-character
    replacements and insertions over ``Spelling.ALPHA``.
    """
    pairs = [(w[:i], w[i:]) for i in range(len(w) + 1)]
    deletes = [head + tail[1:] for head, tail in pairs if tail]
    transposes = [head + tail[1] + tail[0] + tail[2:]
                  for head, tail in pairs if len(tail) > 1]
    replaces = [head + ch + tail[1:]
                for head, tail in pairs for ch in Spelling.ALPHA if tail]
    inserts = [head + ch + tail
               for head, tail in pairs for ch in Spelling.ALPHA]
    return set(deletes + transposes + replaces + inserts)
def _show_organisation_logo(self):
    """Show the organisation logo in the dock, scaled to fit, if possible."""
    dock_width = float(self.width())
    maximum_height = 100.0
    pixmap = QPixmap(self.organisation_logo_path)
    if pixmap.height() < 1 or pixmap.width() < 1:
        # Invalid or unreadable image: nothing to show.
        return
    # Width the logo would have at the 100px height cap.
    height_ratio = maximum_height / pixmap.height()
    maximum_width = int(pixmap.width() * height_ratio)
    # Still wider than the dock: constrain by width instead.
    if maximum_width > dock_width:
        width_ratio = dock_width / float(pixmap.width())
        maximum_height = int(pixmap.height() * width_ratio)
        maximum_width = dock_width
    too_high = pixmap.height() > maximum_height
    too_wide = pixmap.width() > dock_width
    if too_wide or too_high:
        pixmap = pixmap.scaled(
            maximum_width, maximum_height, Qt.KeepAspectRatio)
    self.organisation_logo.setMaximumWidth(maximum_width)
    self.organisation_logo.setScaledContents(False)
    self.organisation_logo.setPixmap(pixmap)
    self.organisation_logo.show()
def main():
    """Check the given files in parallel; return 0 on success, 1 on any error."""
    args = parse_args()
    if not args.files:
        return 0
    with enable_sphinx_if_possible():
        status = 0
        pool = multiprocessing.Pool(multiprocessing.cpu_count())
        try:
            if len(args.files) > 1:
                results = pool.map(
                    _check_file,
                    [(name, args) for name in args.files])
            else:
                # Single file: skip the pool to keep tracebacks simple.
                results = [_check_file((args.files[0], args))]
            for (filename, errors) in results:
                for error in errors:
                    line_number = error[0]
                    message = error[1]
                    # Tag untagged messages with a default severity.
                    if not re.match(r'\([A-Z]+/[0-9]+\)', message):
                        message = '(ERROR/3) ' + message
                    output_message('{}:{}: {}'.format(filename,
                                                      line_number,
                                                      message))
                    status = 1
        except (IOError, UnicodeError) as exception:
            output_message(exception)
            status = 1
        return status
async def _connect(self) -> "Connection":
    """Lazily connect to the underlying sqlite database; returns self."""
    if self._connection is None:
        self._connection = await self._execute(self._connector)
    return self
def functions(self):
    """Return a sorted, wrapped list of the callable attribute names on the object."""
    names = []
    # The original looped with enumerate() but never used the index;
    # iterate the keys directly.
    for k in self.obj:
        if _(self.obj[k]).isCallable():
            names.append(k)
    return self._wrap(sorted(names))
def best_genomes(self, n):
    """Return the ``n`` most fit genomes ever seen, best first."""
    ranked = sorted(self.most_fit_genomes,
                    key=lambda genome: genome.fitness,
                    reverse=True)
    return ranked[:n]
def _optlist(self):
res = ""
for o in self.options:
res += o.short
if o.argName is not None:
res += ":"
return res | Get a string representation of the options in short format. |
def _get_conn(ret=None):
    """Return a sqlite3 database connection built from the returner options.

    Raises:
        Exception: when the ``database`` or ``timeout`` option is missing.
    """
    _options = _get_options(ret)
    database = _options.get('database')
    timeout = _options.get('timeout')
    if not database:
        raise Exception(
            'sqlite3 config option "sqlite3.database" is missing')
    if not timeout:
        raise Exception(
            'sqlite3 config option "sqlite3.timeout" is missing')
    log.debug('Connecting the sqlite3 database: %s timeout: %s', database, timeout)
    conn = sqlite3.connect(database, timeout=float(timeout))
    return conn
def from_label(cls, label, datacenter=None):
    """Return the disk image id associated with ``label``, or None."""
    images = cls.list(datacenter=datacenter)
    label_to_id = {image['label']: image['disk_id'] for image in images}
    return label_to_id.get(label)
def register_job(self, job_details):
    """Register a job in this JobArchive, refreshing an existing entry if present.

    Returns the JobDetails actually stored (the refreshed old record or
    the newly appended one).
    """
    try:
        # EAFP: refresh the existing record when the job is already known.
        job_details_old = self.get_details(job_details.jobname,
                                           job_details.jobkey)
        if job_details_old.status <= JobStatus.running:
            job_details_old.status = job_details.status
            # dbkey is 1-based; row index is dbkey - 1.
            job_details_old.update_table_row(
                self._table, job_details_old.dbkey - 1)
        job_details = job_details_old
    except KeyError:
        # Unknown job: append a brand-new row.
        job_details.dbkey = len(self._table) + 1
        job_details.get_file_ids(
            self._file_archive, creator=job_details.dbkey)
        job_details.append_to_tables(self._table, self._table_ids)
        self._table_id_array = self._table_ids['file_id'].data
    self._cache[job_details.fullkey] = job_details
    return job_details
def ready(self):
    """Register GeoDjango model fields with DRF's serializer field mapping."""
    from django.contrib.gis.db import models
    from rest_framework.serializers import ModelSerializer
    from .fields import GeometryField
    try:
        # Some DRF releases expose the private _field_mapping; newer ones
        # provide serializer_field_mapping instead.
        field_mapping = ModelSerializer._field_mapping.mapping
    except AttributeError:
        field_mapping = ModelSerializer.serializer_field_mapping
    # All geometry field flavours serialize through the same GeometryField.
    field_mapping.update({
        models.GeometryField: GeometryField,
        models.PointField: GeometryField,
        models.LineStringField: GeometryField,
        models.PolygonField: GeometryField,
        models.MultiPointField: GeometryField,
        models.MultiLineStringField: GeometryField,
        models.MultiPolygonField: GeometryField,
        models.GeometryCollectionField: GeometryField
    })
def in_pathing_grid(self, pos: Union[Point2, Point3, Unit]) -> bool:
    """Return True if a unit can pass through the grid point at ``pos``.

    NOTE(review): pathability is taken as grid value == 0 here -- confirm
    against the game_info pathing grid encoding.
    """
    assert isinstance(pos, (Point2, Point3, Unit))
    pos = pos.position.to2.rounded
    return self._game_info.pathing_grid[pos] == 0
def heirarchical_help(shovel, prefix):
    """Render a hierarchical [sic] listing of the tasks in ``shovel``.

    Directory-like entries (no docstring) are shown with a trailing
    slash; tasks show their docstring truncated to 50 characters.
    """
    result = []
    tuples = heirarchical_helper(shovel, prefix)
    if not tuples:
        return ''
    longest = max(len(name + ' ' * level) for name, _, level in tuples)
    fmt = '%%%is => %%-50s' % longest
    for name, docstring, level in tuples:
        if docstring is None:  # was ``== None``; identity test is correct
            result.append(' ' * level + name + '/')
        else:
            # Collapse whitespace and clamp to the 50-char column.
            docstring = re.sub(r'\s+', ' ', docstring).strip()
            if len(docstring) > 50:
                docstring = docstring[:47] + '...'
            result.append(fmt % (name, docstring))
    return '\n'.join(result)
def serve_once(self):
    """Listen for and handle a single serial request (ADU).

    Raises:
        ValueError: when the serial read returns no data.
    """
    # An ADU is at most 256 bytes.
    request_adu = self.serial_port.read(256)
    log.debug('<-- {0}'.format(hexlify(request_adu)))
    if len(request_adu) == 0:
        raise ValueError
    response_adu = self.process(request_adu)
    self.respond(response_adu)
def from_dict(cls, d):
    """Build a Dos object from its dict representation.

    Density keys are spins serialized as strings; they are converted
    back to Spin members.
    """
    return Dos(d["efermi"], d["energies"],
               {Spin(int(k)): v
                for k, v in d["densities"].items()})
def all_query(expression):
    """Build a query matching arrays that contain all elements of ``expression``.

    ``expression`` may also be a callable producing the iterable.  The
    returned closure intersects the index keys of every element.

    Raises (from the closure):
        AttributeError: when the evaluated expression is not iterable.
    """
    def _all(index, expression=expression):
        ev = expression() if callable(expression) else expression
        try:
            iter(ev)
        except TypeError:
            raise AttributeError('$all argument must be an iterable!')
        hashed_ev = [index.get_hash_for(v) for v in ev]
        # Removed a dead ``store_keys = set()`` assignment that was
        # immediately overwritten below.
        if len(hashed_ev) == 0:
            return []
        store_keys = set(index.get_keys_for(hashed_ev[0]))
        for value in hashed_ev[1:]:
            store_keys &= set(index.get_keys_for(value))
        return list(store_keys)
    return _all
def parse(class_, f, include_transactions=True, include_offsets=None, check_merkle_hash=True):
    """Parse a Block from the file-like object ``f``.

    Reads the header first; when ``include_transactions`` is set, also
    reads the transaction count and the transactions themselves,
    optionally verifying the merkle root.
    """
    block = class_.parse_as_header(f)
    if include_transactions:
        count = parse_struct("I", f)[0]
        txs = block._parse_transactions(f, count, include_offsets=include_offsets)
        block.set_txs(txs, check_merkle_hash=check_merkle_hash)
    return block
def api_request(self, method, path):
    """Query the Sensu API and return the ``requests`` response object.

    Args:
        method: 'get' or 'post' (case-insensitive).
        path: path component appended to the configured host/port.

    Raises:
        ValueError: when api.json settings are absent or the method is
            unsupported.
    """
    if not hasattr(self, 'api_settings'):
        # The original constructed this ValueError but never raised it.
        raise ValueError('api.json settings not found')
    if method.lower() == 'get':
        _request = requests.get
    elif method.lower() == 'post':
        _request = requests.post
    else:
        # Previously fell through to an UnboundLocalError on _request.
        raise ValueError('unsupported method: {}'.format(method))
    domain = self.api_settings['host']
    uri = '{}:{}/{}'.format(domain, self.api_settings['port'], path)
    if self.api_settings.get('user') and self.api_settings.get('password'):
        auth = (self.api_settings['user'], self.api_settings['password'])
    else:
        auth = ()
    req = _request(uri, auth=auth)
    return req
def run_queue(self, pool):
    """Submit jobs from the queue to the pool until the queue drains.

    Skipped children are completed without running; accumulated errors
    abort processing via _raise_set_error.
    """
    def callback(result):
        # Runs on worker completion: record the result and free the node.
        self._handle_result(result)
        self.job_queue.mark_done(result.node.unique_id)
    while not self.job_queue.empty():
        node = self.job_queue.get()
        self._raise_set_error()
        runner = self.get_runner(node)
        if runner.node.unique_id in self._skipped_children:
            cause = self._skipped_children.pop(runner.node.unique_id)
            runner.do_skip(cause=cause)
        args = (runner,)
        self._submit(pool, args, callback)
    # Wait for in-flight jobs, then surface any error they recorded.
    self.job_queue.join()
    self._raise_set_error()
    return
def cut_distant_injections(workflow, inj_file, out_dir, tags=None):
    """Set up a workflow job removing injections too distant to be seen.

    Returns the job's single output file.
    """
    if tags is None:
        tags = []
    node = Executable(workflow.cp, 'inj_cut', ifos=workflow.ifos,
                      out_dir=out_dir, tags=tags).create_node()
    node.add_input_opt('--input', inj_file)
    node.new_output_file_opt(workflow.analysis_time, '.xml', '--output-file')
    workflow += node
    return node.output_files[0]
def debug_sleep(self, timeout):
    """Suspend the connection for ``timeout`` seconds via DEBUG SLEEP."""
    fut = self.execute(b'DEBUG', b'SLEEP', timeout)
    return wait_ok(fut)
def delete_network_precommit(self, context):
    """Delete the DB entries corresponding to this network's VLAN.

    Bails out early for unsupported segments or a missing VLAN id; also
    drops SP/vNIC template entries when any UCSM configures them.
    """
    segments = context.network_segments
    for segment in segments:
        if not self.check_segment(segment):
            return
    # NOTE(review): uses the loop's last ``segment`` for the VLAN id --
    # confirm the single-segment assumption.
    vlan_id = segment.get(api.SEGMENTATION_ID)
    if not vlan_id:
        return
    self.ucsm_db.delete_vlan_entry(vlan_id)
    if any([True for ip, ucsm in CONF.ml2_cisco_ucsm.ucsms.items()
            if ucsm.sp_template_list]):
        self.ucsm_db.delete_sp_template_for_vlan(vlan_id)
    if any([True for ip, ucsm in CONF.ml2_cisco_ucsm.ucsms.items()
            if ucsm.vnic_template_list]):
        self.ucsm_db.delete_vnic_template_for_vlan(vlan_id)
def atlasdb_reset_zonefile_tried_storage( con=None, path=None ):
    """Clear the tried_storage flag on zonefiles that are not present.

    Makes absent zonefiles eligible for another fetch from storage.
    Always returns True.
    """
    with AtlasDBOpen(con=con, path=path) as dbcon:
        sql = "UPDATE zonefiles SET tried_storage = ? WHERE present = ?;"
        args = (0, 0)
        cur = dbcon.cursor()
        res = atlasdb_query_execute( cur, sql, args )
        dbcon.commit()
    return True
def convert_from_sliced_object(data):
    """Fix the memory layout of a multi-dimensional sliced numpy object.

    A non-contiguous view of another ndarray is copied (with a warning),
    because it would otherwise double peak memory cost in LightGBM.
    """
    is_ndarray_view = (data.base is not None
                       and isinstance(data, np.ndarray)
                       and isinstance(data.base, np.ndarray))
    if is_ndarray_view and not data.flags.c_contiguous:
        warnings.warn("Usage of np.ndarray subset (sliced data) is not recommended "
                      "due to it will double the peak memory cost in LightGBM.")
        return np.copy(data)
    return data
def put(properties, ttl=None, ctx=None):
    """Decorator dedicated to put properties on an element."""
    def _decorator(elt):
        return put_properties(elt=elt, properties=properties, ttl=ttl, ctx=ctx)
    return _decorator
def consistent(self, lab):
    """Check whether the labeling ``lab`` satisfies every constraint."""
    return all(constraint.consistent(lab) for constraint in self.constraints)
def _clean_salt_variables(params, variable_prefix="__"):
list(list(map(params.pop, [k for k in params if k.startswith(variable_prefix)])))
return params | Pops out variables from params which starts with `variable_prefix`. |
def download_and_expand(self):
    """Download and expand the RPM Python binding source.

    Prefers git when a branch is configured; otherwise tries the archive
    URL first and falls back to git clone when the remote file is absent.

    Returns:
        str: name of the expanded top-level directory.
    """
    top_dir_name = None
    if self.git_branch:
        # A branch is configured: go straight to git.
        top_dir_name = self._download_and_expand_by_git()
    else:
        try:
            top_dir_name = self._download_and_expand_from_archive_url()
        except RemoteFileNotFoundError:
            Log.info('Try to download by git clone.')
            top_dir_name = self._download_and_expand_by_git()
    return top_dir_name
def run(X_train, X_test, y_train, y_test, PARAMS):
    """Train the model, score it on the test split and report the result to NNI.

    NOTE(review): ``PARAMS`` is unused here and ``model`` comes from
    module scope -- confirm the tuner wiring.
    """
    model.fit(X_train, y_train)
    predict_y = model.predict(X_test)
    score = r2_score(y_test, predict_y)
    LOG.debug('r2 score: %s' % score)
    nni.report_final_result(score)
def _draw_linenumber(self, posno, lineno):
    """Queue a right-justified line number drawable to paint later."""
    self._draw_text(
        self._get_linenumber_pos(posno),
        str(lineno).rjust(self.line_number_chars),
        font=self.fonts.get_font(self.line_number_bold,
                                 self.line_number_italic),
        fill=self.line_number_fg,
    )
def paste(self):
    """Paste text from the clipboard into this edit.

    Plain-text mode converts clipboard contents to ASCII before
    inserting; rich-text mode defers to the default Qt paste.
    """
    html = QApplication.clipboard().text()
    if not self.isRichTextEditEnabled():
        self.insertPlainText(projex.text.toAscii(html))
    else:
        super(XTextEdit, self).paste()
def update_reserved_switch_binding(switch_ip, state):
    """Update the reserved switch binding for ``switch_ip`` to ``state``."""
    # Reserved switch bindings use the sentinel VLAN/VNI id and device id.
    update_reserved_binding(
        const.NO_VLAN_OR_VNI_ID,
        switch_ip,
        const.RESERVED_NEXUS_SWITCH_DEVICE_ID_R1,
        state)
def sub(self, num):
    """Subtract ``num`` from the current value, clamping at zero.

    When the current value cannot be read or subtracted, the result
    falls back to ``-num`` (then clamped), matching the original
    best-effort intent.
    """
    try:
        val = self.value() - num
    except Exception:  # was a bare ``except:`` that also swallowed SystemExit
        val = -num
    self.set(max(0, val))
def update_bounds(self, bounds):
    """Update the rendered bounds in place and refresh the widget."""
    self.bounds = np.array(bounds, dtype='float32')
    vertices, directions = self._gen_bounds(self.bounds)
    # Push the regenerated geometry into the existing VBOs.
    self._verts_vbo.set_data(vertices)
    self._directions_vbo.set_data(directions)
    self.widget.update()
def save(self, force_insert=False, force_update=False, using=None,
         update_fields=None):
    """Save as a brand-new configuration entry and clear cached values.

    The pk is reset so every save inserts a new row (append-only config
    history); the per-key and key-values caches are invalidated.
    """
    self.pk = None
    super(ConfigurationModel, self).save(
        force_insert,
        force_update,
        using,
        update_fields
    )
    cache.delete(self.cache_key_name(*[getattr(self, key) for key in self.KEY_FIELDS]))
    if self.KEY_FIELDS:
        cache.delete(self.key_values_cache_key_name())
def as_raw_link(self):
    """Return a GithubLink pointing at the raw content for this link.

    Raw links are returned unchanged; blob links are converted; tree
    links have no downloadable form and raise ValueError.
    """
    if self.link_type == "raw":
        return self
    if self.link_type != "blob":
        raise ValueError("Cannot get a download link from a tree link")
    raw_variant = self.__class__(
        self.gitid,
        "raw",
        self.branch_or_commit,
        self.path,
        self.token
    )
    return raw_variant
def better(old_value, new_value, mode):
    """Return True when ``new_value`` improves on ``old_value``.

    Missing values are never better: a None/NaN new value yields False,
    while any real value beats a None/NaN old value.  The original
    crashed with a TypeError when comparing a None new value against a
    real old value.

    Args:
        mode: 'min' (lower is better) or 'max' (higher is better).

    Raises:
        RuntimeError: for an unsupported mode.
    """
    def _missing(value):
        return value is None or np.isnan(value)

    if _missing(new_value):
        return False
    if _missing(old_value):
        return True
    if mode == 'min':
        return new_value < old_value
    elif mode == 'max':
        return new_value > old_value
    else:
        raise RuntimeError(f"Mode '{mode}' value is not supported")
def populateFromRow(self, peerRecord):
    """Populate this peer from a model record; returns self for chaining."""
    # setUrl returns the fluent receiver, which we keep chaining on.
    chained = self.setUrl(peerRecord.url)
    chained.setAttributesJson(peerRecord.attributes)
    return self
def complete_worker(self, text, line, begidx, endidx):
    """Tab-complete the worker command."""
    return [command for command in PsiturkNetworkShell.worker_commands
            if command.startswith(text)]
def start_new_log(self):
    """Open a new dataflash log file and reset all transfer state."""
    filename = self.new_log_filepath()
    self.block_cnt = 0
    self.logfile = open(filename, 'w+b')
    print("DFLogger: logging started (%s)" % (filename))
    # Reset counters, timers and block-tracking bookkeeping.
    self.prev_cnt = 0
    self.download = 0
    self.prev_download = 0
    self.last_idle_status_printed_time = time.time()
    self.last_status_time = time.time()
    self.missing_blocks = {}
    self.acking_blocks = {}
    self.blocks_to_ack_and_nack = []
    self.missing_found = 0
    self.abandoned = 0
def style_print(*values, **kwargs):
    """Apply style_format to every value, then print them.

    The optional ``style`` keyword is consumed here; all remaining
    keyword arguments are forwarded to print().
    """
    chosen_style = kwargs.pop("style", None)
    styled = [style_format(value, chosen_style) for value in values]
    print(*styled, **kwargs)
def describe_data_and_buckets(data_statistics: DataStatistics, bucket_batch_sizes: List[BucketBatchSize]):
    """Log per-bucket statistics: samples, batches, tokens and length ratios.

    Raises (via check_condition) when the batch-size list and the
    statistics disagree on the number of buckets.
    """
    check_condition(len(bucket_batch_sizes) == len(data_statistics.buckets),
                    "Number of bucket batch sizes (%d) does not match number of buckets in statistics (%d)."
                    % (len(bucket_batch_sizes), len(data_statistics.buckets)))
    if data_statistics.length_ratio_stats_per_bucket:
        # Length-ratio statistics available: include mean/stddev per bucket.
        for bucket_batch_size, num_seq, (lr_mean, lr_std) in zip(bucket_batch_sizes,
                                                                 data_statistics.num_sents_per_bucket,
                                                                 data_statistics.length_ratio_stats_per_bucket):
            if num_seq > 0:
                logger.info("Bucket %s: %d samples in %d batches of %d, ~%.1f tokens/batch, "
                            "trg/src length ratio: %.2f (+-%.2f)",
                            bucket_batch_size.bucket,
                            num_seq,
                            math.ceil(num_seq / bucket_batch_size.batch_size),
                            bucket_batch_size.batch_size,
                            bucket_batch_size.average_words_per_batch,
                            lr_mean, lr_std)
    else:
        for bucket_batch_size, num_seq in zip(bucket_batch_sizes, data_statistics.num_sents_per_bucket):
            if num_seq > 0:
                logger.info("Bucket %s: %d samples in %d batches of %d, ~%.1f tokens/batch, ",
                            bucket_batch_size.bucket,
                            num_seq,
                            math.ceil(num_seq / bucket_batch_size.batch_size),
                            bucket_batch_size.batch_size,
                            bucket_batch_size.average_words_per_batch)
def fit_linear(X, y):
    """Fit an OLS linear regression of ``y`` on ``X`` and return the fitted model."""
    model = linear_model.LinearRegression()
    model.fit(X, y)
    return model
def _extract_email(gh):
return next(
(x.email for x in gh.emails() if x.verified and x.primary), None) | Get user email from github. |
def permute(self, qubits: Qubits) -> 'Channel':
    """Return a copy of this channel with its qubits in the given order."""
    vec = self.vec.permute(qubits)
    return Channel(vec.tensor, qubits=vec.qubits)
def tbframes(tb):
    """Unwind a traceback's tb_next chain into a list of frames."""
    frames = [tb.tb_frame]
    while tb.tb_next:
        tb = tb.tb_next
        frames.append(tb.tb_frame)
    return frames
def parse_yaml(self, y):
    """Parse a YAML specification of a target port into this object.

    Returns self for chaining.
    """
    super(TargetPort, self).parse_yaml(y)
    self.port_name = y['portName']
    return self
def getPlugItObject(hproPk):
    """Return ``(plugIt, baseURI, hproject)`` for a hosted-project key.

    ``hproPk`` may be a primary key or a custom URL key.  Raises Http404
    when the project is unknown or has no PlugIt endpoint configured.
    """
    from hprojects.models import HostedProject
    try:
        hproject = HostedProject.objects.get(pk=hproPk)
    except (HostedProject.DoesNotExist, ValueError):
        # Not a pk (or not found): fall back to the custom URL key.
        try:
            hproject = HostedProject.objects.get(plugItCustomUrlKey=hproPk)
        except HostedProject.DoesNotExist:
            raise Http404
    if hproject.plugItURI == '' and not hproject.runURI:
        raise Http404
    plugIt = PlugIt(hproject.plugItURI)
    if hasattr(hproject, 'plugItCustomUrlKey') and hproject.plugItCustomUrlKey:
        baseURI = reverse('plugIt.views.main', args=(hproject.plugItCustomUrlKey, ''))
    else:
        baseURI = reverse('plugIt.views.main', args=(hproject.pk, ''))
    return (plugIt, baseURI, hproject)
def count_tables(filename_or_fobj, encoding="utf-8", table_tag="table"):
    """Return the number of ``table_tag`` elements in the given HTML source."""
    source = Source.from_file(
        filename_or_fobj, plugin_name="html", mode="rb", encoding=encoding
    )
    html = source.fobj.read().decode(source.encoding)
    html_tree = document_fromstring(html)
    tables = html_tree.xpath("//{}".format(table_tag))
    result = len(tables)
    if source.should_close:
        # Only close file objects we opened ourselves.
        source.fobj.close()
    return result
def __response_url(self, message_id):
    """Build the URL for responding to an agent request.

    NOTE(review): implicitly returns None when the sender pid is 0 --
    confirm callers expect that.
    """
    if self.from_.pid != 0:
        path = AGENT_RESPONSE_PATH % (self.from_.pid, message_id)
        return "http://%s:%s/%s" % (self.host, self.port, path)
def _createConnection(self, connections):
    """Create GSSHAPY Connection objects from parsed connection dicts.

    Each new Connection is linked back to this storm pipe network file.
    """
    for c in connections:
        connection = Connection(slinkNumber=c['slinkNumber'],
                                upSjuncNumber=c['upSjunc'],
                                downSjuncNumber=c['downSjunc'])
        connection.stormPipeNetworkFile = self
def cli(ctx, cmd):
    """Execute commands using Apio packages and exit with their status."""
    ctx.exit(util.call(cmd))
def probe_sync(self):
    """Synchronously probe for devices on this adapter.

    Wraps the asynchronous probe with an Event and returns a dict with
    ``success`` and ``failure_reason`` keys.
    """
    done = threading.Event()
    result = {}
    def probe_done(adapter_id, status, reason):
        # Callback invoked by probe_async when the probe completes.
        result['success'] = status
        result['failure_reason'] = reason
        done.set()
    self.probe_async(probe_done)
    done.wait()
    return result
def ask_path(question: str, default: str = None) -> str:
    """Ask the user for a path until an existing directory (or the default) is given.

    The default is accepted on empty input; a non-existent directory
    re-prompts recursively.
    """
    # The original interpolated an already bracketed-and-coloned default
    # into a second "[{1}]: " wrapper, producing prompts like
    # "question [ [default: X]: ]: ".  Build the prompt once instead.
    suffix = " [default: {0}]".format(default) if default is not None else ""
    answer = input("{0}{1}: ".format(question, suffix))
    if answer == "":
        return default
    if os.path.isdir(answer):
        return answer
    print(
        "No such directory: {answer}, please try again".format(answer=answer))
    return ask_path(question, default)
def subscribe(self, request, *args, **kwargs):
    """Subscribe the requesting user to this object, then redirect."""
    self.object = self.get_object()
    self.object.subscribers.add(request.user)
    messages.success(self.request, self.success_message)
    return HttpResponseRedirect(self.get_success_url())
def setCachedDataKey(engineVersionHash, key, value):
    """Set the cached data value for the given engine version hash and key."""
    cacheFile = CachedDataManager._cacheFileForHash(engineVersionHash)
    return JsonDataManager(cacheFile).setKey(key, value)
def _ScheduleGenericHunt(hunt_obj):
    """Build, validate and persist a foreman rule for a generic hunt."""
    foreman_condition = foreman_rules.ForemanCondition(
        creation_time=rdfvalue.RDFDatetime.Now(),
        expiration_time=hunt_obj.init_start_time + hunt_obj.duration,
        description="Hunt %s %s" % (hunt_obj.hunt_id, hunt_obj.args.hunt_type),
        client_rule_set=hunt_obj.client_rule_set,
        hunt_id=hunt_obj.hunt_id)
    foreman_condition.Validate()
    data_store.REL_DB.WriteForemanRule(foreman_condition)
def include(_name_or_func, *args,
            _module=None, _default='includeme', **kwargs):
    """Resolve ``_name_or_func`` to a callable if needed, then call it."""
    if callable(_name_or_func):
        target = _name_or_func
    else:
        target = resolve_str(_name_or_func, _module, _default)
    target(*args, **kwargs)
def subdomain_row_factory(cls, cursor, row):
    """Dict row factory mapping each subdomain column name to its value."""
    return {column[0]: row[position]
            for position, column in enumerate(cursor.description)}
async def runs(self, date: datetime.date = None, days: int = None) -> list:
    """Return past program runs, optionally limited to ``days`` days from ``date``.

    Both ``date`` and ``days`` must be given for the range to apply;
    otherwise all past runs are returned.
    """
    endpoint = 'watering/past'
    if date and days:
        endpoint = '{0}/{1}/{2}'.format(
            endpoint, date.strftime('%Y-%m-%d'), days)
    data = await self._request('get', endpoint)
    return data['pastValues']
def csv_dict(d):
    """Format a dict as a brace-wrapped string of comma-separated pairs."""
    if len(d) == 0:
        return "{}"
    pairs = ["'{}': {}".format(key, quotable(value)) for key, value in d.items()]
    return "{" + ', '.join(pairs) + "}"
def added(self):
    """Return the added FileNode objects for this changeset.

    A changeset without parents treats every file as added.
    """
    if not self.parents:
        return list(self._get_file_nodes())
    return AddedFileNodesGenerator([n for n in
        self._get_paths_for_status('added')], self)
def add_device_net(self, name, destname=None):
    """Move the network device ``name`` into this running container.

    Wireless devices are moved via their phy80211 name with ``iw``;
    others via ``ip link set ... netns``.  Optionally renames the device
    to ``destname`` inside the container.

    Returns:
        bool: True on success; False when the container is not running
        or any underlying command fails.
    """
    if not self.running:
        return False
    if os.path.exists("/sys/class/net/%s/phy80211/name" % name):
        # Wireless device: move the underlying phy into the namespace.
        with open("/sys/class/net/%s/phy80211/name" % name) as fd:
            phy = fd.read().strip()
        if subprocess.call(['iw', 'phy', phy, 'set', 'netns',
                            str(self.init_pid)]) != 0:
            return False
        if destname:
            def rename_interface(args):
                # Runs attached inside the container's network namespace.
                old, new = args
                return subprocess.call(['ip', 'link', 'set',
                                        'dev', old, 'name', new])
            return self.attach_wait(rename_interface, (name, destname),
                                    namespaces=(CLONE_NEWNET)) == 0
        return True
    if not destname:
        destname = name
    if not os.path.exists("/sys/class/net/%s/" % name):
        return False
    return subprocess.call(['ip', 'link', 'set',
                            'dev', name,
                            'netns', str(self.init_pid),
                            'name', destname]) == 0
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.