code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def LoadCheckFromFile(file_path, check_id, overwrite_if_exists=True):
    """Load a single check from a file, validate it and register it.

    NOTE(review): ``configs.get(check_id)`` returns None for an unknown id,
    which makes ``Check(**conf)`` raise TypeError — confirm ids are
    validated upstream.
    """
    configs = LoadConfigsFromFile(file_path)
    conf = configs.get(check_id)
    check = Check(**conf)
    check.Validate()
    CheckRegistry.RegisterCheck(
        check,
        source="file:%s" % file_path,
        overwrite_if_exists=overwrite_if_exists)
    logging.debug("Loaded check %s from %s", check.check_id, file_path)
    return check
def as_for_ip(self, ip):
    """Get the AS (autonomous system) information for a given IP address."""
    if not Investigate.IP_PATTERN.match(ip):
        raise Investigate.IP_ERR
    return self.get_parse(self._uris["as_for_ip"].format(ip))
def setFaces(variant):
    """Set the default faces variant (Chaldean or triplicity)."""
    global FACES
    FACES = tables.CHALDEAN_FACES if variant == CHALDEAN_FACES else tables.TRIPLICITY_FACES
def add_general_optgroup(parser):
    """Add the option group for general-use features of all javatool CLIs."""
    g = parser.add_argument_group("General Options")
    g.add_argument("-q", "--quiet", dest="silent",
                   action="store_true", default=False)
    # NOTE(review): nargs=0 relies on the custom _opt_cb_verbose action;
    # stock argparse actions reject nargs=0 — confirm the action supports it.
    g.add_argument("-v", "--verbose", nargs=0, action=_opt_cb_verbose)
    g.add_argument("-o", "--output", dest="output", default=None)
    g.add_argument("-j", "--json", dest="json",
                   action="store_true", default=False)
    g.add_argument("--show-ignored", action="store_true", default=False)
    g.add_argument("--show-unchanged", action="store_true", default=False)
    g.add_argument("--ignore", action=_opt_cb_ignore,
                   help="comma-separated list of ignores")
def signal_wrapper(f):
    """Decorator converting a function's arguments from dbus types to Python types."""
    @wraps(f)
    def wrapper(*args, **kwds):
        converted_args = [convert(a) for a in args]
        converted_kwds = {convert(k): convert(v) for k, v in kwds.items()}
        return f(*converted_args, **converted_kwds)
    return wrapper
def uninstall(self, xmlpath):
    """Uninstall the repository with the specified XML path from the server.

    Drops the repo from the in-memory registries, persists the updated
    archive/installed lists, and warns if the path was never installed.
    """
    from os import path
    fullpath = path.abspath(path.expanduser(xmlpath))
    if fullpath in self.installed:
        repo = RepositorySettings(self, fullpath)
        if repo.name.lower() in self.repositories:
            del self.repositories[repo.name.lower()]
        if repo.name.lower() in self.archive:
            del self.archive[repo.name.lower()]
            self._save_archive()
        self.installed.remove(fullpath)
        self._save_installed()
    else:
        warn("The repository at {} was not installed to begin with.".format(fullpath))
def _parse_docstring(fh):
    """Parse the docstring of a script to find 'fades'-marked dependencies."""
    find_fades = re.compile(r'\b(fades)\b:').search
    # find the first line that opens a quoted docstring
    for line in fh:
        if line.startswith("'"):
            quote = "'"
            break
        if line.startswith('"'):
            quote = '"'
            break
    else:
        return {}
    # a doubled quote right after the first means a triple-quoted docstring
    if line[1] == quote:
        endquote = quote * 3
    else:
        endquote = quote
    # the docstring may open and close on the same line
    if endquote in line[len(endquote):]:
        docstring_lines = [line[:line.index(endquote)]]
    else:
        docstring_lines = [line]
        for line in fh:
            if endquote in line:
                docstring_lines.append(line[:line.index(endquote)])
                break
            docstring_lines.append(line)
    docstring_lines = iter(docstring_lines)
    # only lines AFTER the fades marker are treated as requirements
    for doc_line in docstring_lines:
        if find_fades(doc_line):
            break
    else:
        return {}
    return _parse_requirement(list(docstring_lines))
def _check_index_in_compilations(context: BaseContext, index: str):
    """Return True if *index* is present in the context's 'compilations' shared data."""
    compilations = 'compilations'
    if compilations not in context.shared_data:
        return False
    return index in context.shared_data[compilations]
def do_annealing_poly(start:Number, end:Number, pct:float, degree:Number)->Number:
    """Helper function for `anneal_poly`: polynomial interpolation from start to end."""
    remaining = (1 - pct) ** degree
    return end + (start - end) * remaining
def add_text(self, text, x, y, side='left', size=None,
             rotation=None, ha='left', va='center',
             family=None, **kws):
    """Add *text* at the supplied (x, y) position and redraw.

    side='right' targets the right-hand axes; when *size* is None the
    legend font size is used and the text is flagged as dynamically sized
    so later font changes can update it.
    """
    axes = self.axes
    if side == 'right':
        axes = self.get_right_axes()
    dynamic_size = False
    if size is None:
        size = self.conf.legendfont.get_size()
        dynamic_size = True
    t = axes.text(x, y, text, ha=ha, va=va, size=size,
                  rotation=rotation, family=family, **kws)
    self.conf.added_texts.append((dynamic_size, t))
    self.draw()
def portalAdmin(self):
    """Return a PortalAdministration facade bound to this portal's host and credentials."""
    from ..manageportal import PortalAdministration
    # NOTE: 'initalize' (sic) is the parameter name defined by PortalAdministration
    return PortalAdministration(admin_url="https://%s/portaladmin" % self.portalHostname,
                                securityHandler=self._securityHandler,
                                proxy_url=self._proxy_url,
                                proxy_port=self._proxy_port,
                                initalize=False)
def update_pos(pos_dict, start_key, nbr=2):
    """Shift every position whose key is (case-insensitively) >= start_key by *nbr*."""
    threshold = str.lower(start_key)
    for key in pos_dict:
        if str.lower(key) >= threshold:
            pos_dict[key] = pos_dict[key] + nbr
    return pos_dict
def _request_with_error_ignored(self, rd):
    """Send the request *rd*; log a warning and return None on any error."""
    try:
        return self._request(rd)
    except Exception as err:
        LOG.warning(six.text_type(err))
def crpss(self):
    """Calculate the continuous ranked probability skill score from existing data."""
    forecast_crps = float(self.crps())
    climo_crps = float(self.crps_climo())
    return 1.0 - forecast_crps / climo_crps
def _get_axis(self, name_or_index: AxisIdentifier) -> int:
    """Resolve an axis given by number or name to a zero-based index.

    Raises ValueError for an out-of-range index or unknown name, and
    TypeError for any other argument type.
    """
    if isinstance(name_or_index, int):
        if name_or_index < 0 or name_or_index >= self.ndim:
            raise ValueError("No such axis, must be from 0 to {0}".format(self.ndim-1))
        return name_or_index
    elif isinstance(name_or_index, str):
        if name_or_index not in self.axis_names:
            # only mention axes that actually have a name in the error message
            named_axes = [name for name in self.axis_names if name]
            raise ValueError("No axis with such name: {0}, available names: {1}. In most places, you can also use numbers."
                             .format(name_or_index, ", ".join(named_axes)))
        return self.axis_names.index(name_or_index)
    else:
        raise TypeError("Argument of type {0} not understood, int or str expected.".format(type(name_or_index)))
def slice(self, start=0, end=0):
    """Return a new SparseMap holding this map's data in [start, end)."""
    tmp = Gauged.map_new()
    if tmp is None:
        raise MemoryError
    if not Gauged.map_concat(tmp, self.ptr, start, end, 0):
        # free the partially-built native map before propagating the failure
        Gauged.map_free(tmp)
        raise MemoryError
    return SparseMap(tmp)
def logout(request, template_name=None,
           next_page=None,
           redirect_field_name=REDIRECT_FIELD_NAME,
           current_app=None, extra_context=None):
    """Log the user out, then redirect (if a next page is known) or render the template."""
    auth_logout(request)
    if next_page is not None:
        next_page = resolve_url(next_page)
    if (redirect_field_name in request.POST or
            redirect_field_name in request.GET):
        next_page = request.POST.get(redirect_field_name,
                                     request.GET.get(redirect_field_name))
        # only follow redirects to hosts we trust
        if not is_safe_url(url=next_page, host=request.get_host()):
            next_page = request.path
    if next_page:
        return HttpResponseRedirect(next_page)
    current_site = get_current_site(request)
    context = {
        'site': current_site,
        'site_name': current_site.name,
        'title': _('Logged out')
    }
    if extra_context is not None:
        context.update(extra_context)
    if current_app is not None:
        request.current_app = current_app
    return TemplateResponse(request, template_name, context)
def asgate(self) -> Gate:
    """Return the action of this circuit as a single composed Gate."""
    gate = identity_gate(self.qubits)
    for elem in self.elements:
        # compose left-to-right: later elements act after earlier ones
        gate = elem.asgate() @ gate
    return gate
def index_service(self, service_id):
    """Index a service and all of its layers in the search engine (skips invalid services)."""
    from hypermap.aggregator.models import Service
    service = Service.objects.get(id=service_id)
    if not service.is_valid:
        LOGGER.debug('Not indexing service with id %s in search engine as it is not valid' % service.id)
        return
    LOGGER.debug('Indexing service %s' % service.id)
    layer_to_process = service.layer_set.all()
    for layer in layer_to_process:
        # with celery enabled use cached indexing; otherwise index directly
        if not settings.REGISTRY_SKIP_CELERY:
            index_layer(layer.id, use_cache=True)
        else:
            index_layer(layer.id)
def filepattern(self, *args, **kwargs):
    """Return a list of filepatterns, one for each sub-problem."""
    patterns = []
    for problem in self.problems:
        patterns.append(problem.filepattern(*args, **kwargs))
    return patterns
def _install_requirements(path):
    """Find a blueprint's requirements.txt and offer to pip-install it.

    Looks in _blueprint/, _base/ and the path itself. Returns False when
    the user declines an install, True otherwise. (Python 2 code:
    raw_input; `sh` and `puts` come from the surrounding project.)
    """
    locations = [os.path.join(path, "_blueprint"), os.path.join(path, "_base"), path]
    success = True
    for location in locations:
        try:
            with open(os.path.join(location, "requirements.txt")):
                puts("\nRequirements file found at {0}".format(os.path.join(location, "requirements.txt")))
                install_reqs = raw_input("Install requirements now with pip install -r requirements.txt? [Y/n] ")
                if not install_reqs or install_reqs.lower() == 'y':
                    pip = sh.pip.bake(_cwd=location)
                    puts("\nInstalling requirements...")
                    puts(pip("install", "-r", "requirements.txt"))
                else:
                    success = False
                    puts("Not installing requirements. This may break everything! Vaya con dios.")
        except IOError:
            # no requirements.txt at this location — keep looking
            pass
    return success
def validate_path_to_api_path(path, paths, basePath='', context=None, **kwargs):
    """Match *path* to an api_path; wrap lookup failures in ValidationError."""
    context = context if context is not None else {}
    try:
        return match_path_to_api_path(
            path_definitions=paths,
            target_path=path,
            base_path=basePath,
            context=context,
        )
    except (LookupError, MultiplePathsFound) as err:
        raise ValidationError(str(err))
def user_saw_task(self, username, courseid, taskid):
    """Record that the user has viewed this task (no-op if a record already exists)."""
    # $setOnInsert + upsert: creates a fresh record only when none exists
    self._database.user_tasks.update({"username": username, "courseid": courseid, "taskid": taskid},
                                     {"$setOnInsert": {"username": username, "courseid": courseid, "taskid": taskid,
                                                       "tried": 0, "succeeded": False, "grade": 0.0, "submissionid": None, "state": ""}},
                                     upsert=True)
def _get_database(self, options):
    """Resolve the database to restore; return (name, settings dict).

    Raises CommandError when the project has several databases and none was
    chosen, or when the chosen name does not exist.
    """
    database_name = options.get('database')
    if not database_name:
        if len(settings.DATABASES) > 1:
            errmsg = "Because this project contains more than one database, you"\
                " must specify the --database option."
            raise CommandError(errmsg)
        database_name = list(settings.DATABASES.keys())[0]
    if database_name not in settings.DATABASES:
        raise CommandError("Database %s does not exist." % database_name)
    return database_name, settings.DATABASES[database_name]
def getXML(self):
    """Return the XML of all pysvg elements making up the turtle's path as one string."""
    # join() avoids the quadratic behavior of repeated string concatenation
    return ''.join(element.getXML() for element in self._svgElements)
def _get_rpt_fmt(fld, val, itemid2name=None):
    """Return values in a format amenable to printing in a table."""
    if fld.startswith("ratio_"):
        return "{N}/{TOT}".format(N=val[0], TOT=val[1])
    if fld in ('study_items', 'pop_items', 'alt_ids'):
        items = val if itemid2name is None else [itemid2name.get(v, v) for v in val]
        return ", ".join(str(v) for v in sorted(items))
    return val
def apply(self, builder):
    """Apply this slide configuration to *builder* (currently only the 'theme' attribute)."""
    if 'theme' in self.attributes:
        builder.apply_theme(
            self.attributes['theme'],
            builder.theme_options,
        )
def _get_first_all_link_record(self):
    """Request the first ALL-Link record.

    If the ALDB is already loaded, arms 3 NAK retries and replays the NAK
    handler instead of re-querying; otherwise clears the ALDB and sends
    GetFirstAllLinkRecord, waiting up to 0.5 s for a NAK.
    """
    _LOGGER.debug("Starting: _get_first_all_link_record")
    _LOGGER.info('Requesting ALL-Link Records')
    if self.aldb.status == ALDBStatus.LOADED:
        self._next_all_link_rec_nak_retries = 3
        self._handle_get_next_all_link_record_nak(None)
        return
    self.aldb.clear()
    self._next_all_link_rec_nak_retries = 0
    msg = GetFirstAllLinkRecord()
    self.send_msg(msg, wait_nak=True, wait_timeout=.5)
    _LOGGER.debug("Ending: _get_first_all_link_record")
def run(data):
    """Run miRge2.0 on the samples in *data*; return the produced file paths."""
    sample = data[0][0]
    work_dir = dd.get_work_dir(sample)
    out_dir = os.path.join(work_dir, "mirge")
    lib = _find_lib(sample)
    mirge = _find_mirge(sample)
    bowtie = _find_bowtie(sample)
    sps = dd.get_species(sample)
    species = SPS.get(sps, "")
    if not species:
        raise ValueError("species not supported (hsa, mmu, rno, dre, cel, dme): %s" % sps)
    if not lib:
        raise ValueError("-lib option is not set up in resources for mirge tool."
                         " Read above warnings lines.")
    if not utils.file_exists(out_dir):
        with tx_tmpdir() as tmp_dir:
            sample_file = _create_sample_file(data, tmp_dir)
            # _cmd() is a command template filled from the locals above
            # (mirge, bowtie, lib, species, sample_file, ...)
            do.run(_cmd().format(**locals()), "Running miRge2.0.")
            shutil.move(tmp_dir, out_dir)
    return [os.path.abspath(fn) for fn in glob.glob(os.path.join(out_dir, "*", "*"))]
def listen_now_dismissed_items(self):
    """Return a listing of items dismissed from the Listen Now tab."""
    response = self._call(
        mc_calls.ListenNowGetDismissedItems
    )
    dismissed_items = response.body.get('items', [])
    return dismissed_items
def _get_login_shell(proc_cmd):
    """Prefer $SHELL over the supplied command; return (basename lowered, full command)."""
    shell_from_env = os.environ.get('SHELL', '')
    if shell_from_env:
        proc_cmd = shell_from_env
    else:
        # strip the leading element/character marking a login shell
        proc_cmd = proc_cmd[1:]
    return (os.path.basename(proc_cmd).lower(), proc_cmd)
def _link_vertex_and_edge_types(self):
    """For each edge class, link it to the vertex types it connects.

    Abstract edges without endpoint types are skipped; concrete ones raise.
    Connections are propagated to all subclasses of both endpoints.
    """
    for edge_class_name in self._edge_class_names:
        edge_element = self._elements[edge_class_name]
        if (EDGE_SOURCE_PROPERTY_NAME not in edge_element.properties or
                EDGE_DESTINATION_PROPERTY_NAME not in edge_element.properties):
            if edge_element.abstract:
                continue
            else:
                raise AssertionError(u'Found a non-abstract edge class with undefined '
                                     u'endpoint types: {}'.format(edge_element))
        from_class_name = edge_element.properties[EDGE_SOURCE_PROPERTY_NAME].qualifier
        to_class_name = edge_element.properties[EDGE_DESTINATION_PROPERTY_NAME].qualifier
        edge_schema_element = self._elements[edge_class_name]
        # every subclass of the source type can emit this edge
        for from_class in self._subclass_sets[from_class_name]:
            from_schema_element = self._elements[from_class]
            from_schema_element.out_connections.add(edge_class_name)
            edge_schema_element.in_connections.add(from_class)
        # every subclass of the destination type can receive this edge
        for to_class in self._subclass_sets[to_class_name]:
            to_schema_element = self._elements[to_class]
            edge_schema_element.out_connections.add(to_class)
            to_schema_element.in_connections.add(edge_class_name)
def parse(self):
    """Parse ACIS rows, converting each 'll' value into separate longitude/latitude."""
    super(AcisIO, self).parse()
    for row in self.data:
        if 'meta' in row:
            row = row['meta']
        if 'll' in row:
            # 'll' is ordered (longitude, latitude)
            row['longitude'], row['latitude'] = row['ll']
            del row['ll']
def packagePlugin(self, dir=None, extraArgs=None):
    """Package a build of the Unreal plugin in *dir* (default: cwd), suitable
    for use as a prebuilt Engine module; output goes to <dir>/dist.

    Bug fix: the original used ``dir=os.getcwd()`` (evaluated once at import
    time, not per call) and a shared mutable ``extraArgs=[]`` default.
    """
    if dir is None:
        dir = os.getcwd()
    if extraArgs is None:
        extraArgs = []
    distDir = os.path.join(os.path.abspath(dir), 'dist')
    self.runUAT([
        'BuildPlugin',
        '-Plugin=' + self.getPluginDescriptor(dir),
        '-Package=' + distDir
    ] + list(extraArgs))
def size_request(self, widget, requisition):
    """Size-request callback: report our desired size to the toolkit."""
    requisition.width, requisition.height = self.get_desired_size()
    return True
def send_message(client, message):
    """Send *message* to the client as a minimal HTTP 200 response, then close the connection."""
    print(message)
    client.send("HTTP/1.1 200 OK\r\n\r\n{}".format(message).encode("utf-8"))
    client.close()
def send_comment_email(email, package_owner, package_name, commenter):
    """Email the package owner about a new comment on their package."""
    link = '{CATALOG_URL}/package/{owner}/{pkg}/comments'.format(
        CATALOG_URL=CATALOG_URL, owner=package_owner, pkg=package_name)
    subject = "New comment on {package_owner}/{package_name}".format(
        package_owner=package_owner, package_name=package_name)
    # render both HTML and plain-text bodies from the same context
    html = render_template('comment_email.html', commenter=commenter, link=link)
    body = render_template('comment_email.txt', commenter=commenter, link=link)
    send_email(recipients=[email], sender=DEFAULT_SENDER, subject=subject,
               html=html, body=body)
def create_log(self):
    """Return an EventLog services facade bound to this client's credentials."""
    return EventLog(
        self.networkapi_url,
        self.user,
        self.password,
        self.user_ldap)
def _1_0set_screen_config(self, size_id, rotation, config_timestamp, timestamp=X.CurrentTime):
    """Set the screen to the specified size and rotation (RandR 1.0 request).

    NOTE(review): 'extname' comes from module scope — confirm it is defined
    where this method lives.
    """
    return _1_0SetScreenConfig(
        display=self.display,
        opcode=self.display.get_extension_major(extname),
        drawable=self,
        timestamp=timestamp,
        config_timestamp=config_timestamp,
        size_id=size_id,
        rotation=rotation,
    )
def isHouse1Asc(self):
    """Return True if House1's cusp coincides with the Ascendant (within ~1 arcsec)."""
    house1 = self.getHouse(const.HOUSE1)
    asc = self.getAngle(const.ASC)
    dist = angle.closestdistance(house1.lon, asc.lon)
    return abs(dist) < 0.0003
def _set_cdn_defaults(self):
    """Reset all CDN-related attributes to their default (disabled) values."""
    # FAULT is the sentinel for "never fetched"; normalize it to disabled
    if self._cdn_enabled is FAULT:
        self._cdn_enabled = False
    self._cdn_uri = None
    self._cdn_ttl = DEFAULT_CDN_TTL
    self._cdn_ssl_uri = None
    self._cdn_streaming_uri = None
    self._cdn_ios_uri = None
    self._cdn_log_retention = False
def calc_weight_of_multiple_paths(path_scores, impact_div=12):
    """Calculate the weight of multiple paths.

    Returns (weighted score, highest score, number of paths). With a single
    path the raw score is returned unchanged. *path_scores* is expected to
    expose a pandas-like 'Scores' column (iterable, supports .iloc).
    """
    number_of_paths = len(path_scores)
    if number_of_paths > 1:
        score_total = 0.0
        highest_score = 0.0
        for score in path_scores.Scores:
            score_total += score
            if highest_score < score:
                highest_score = score
        score_mean = score_total / number_of_paths
        # more paths -> larger boost, saturating as the count grows past impact_div
        weight_count_impact = number_of_paths / float(number_of_paths +
                                                      impact_div)
        new_score = highest_score + ((1 + weight_count_impact) * score_mean)
        return new_score, highest_score, number_of_paths
    else:
        return (path_scores.Scores.iloc[0], path_scores.Scores.iloc[0],
                number_of_paths)
def read_config_files(files):
    """Read and merge a list of config files; later files override earlier ones."""
    config = ConfigObj()
    for _file in files:
        _config = read_config_file(_file)
        if bool(_config) is True:
            config.merge(_config)
            # remember the last file actually merged
            config.filename = _config.filename
    return config
def update_combobox(self):
    """Recreate the scheme combobox contents, preserving the current selection.

    Built-in scheme names come first, then a separator, then custom names.
    The u'Custom' placeholder entry is excluded.
    """
    index = self.current_scheme_index
    # suppress change signals while rebuilding the item list
    self.schemes_combobox.blockSignals(True)
    names = self.get_option("names")
    try:
        names.pop(names.index(u'Custom'))
    except ValueError:
        pass
    custom_names = self.get_option("custom_names", [])
    for n in names + custom_names:
        self.scheme_choices_dict[self.get_option('{0}/name'.format(n))] = n
    if custom_names:
        # None marks where the separator will go
        choices = names + [None] + custom_names
    else:
        choices = names
    combobox = self.schemes_combobox
    combobox.clear()
    for name in choices:
        if name is None:
            continue
        combobox.addItem(self.get_option('{0}/name'.format(name)), name)
    if custom_names:
        combobox.insertSeparator(len(names))
    self.schemes_combobox.blockSignals(False)
    self.schemes_combobox.setCurrentIndex(index)
def index(self):
    """Return this pedalboard's position within its bank; raise IndexError if unbanked."""
    if self.bank is None:
        raise IndexError('Pedalboard not contains a bank')
    return self.bank.pedalboards.index(self)
def getDynMeth(name):
    """Resolve "module.path.Class.fname" to an unbound method, or None."""
    cls_path, func_name = name.rsplit('.', 1)
    cls = getDynLocal(cls_path)
    return getattr(cls, func_name, None) if cls is not None else None
def battery_voltage(self):
    """Return the battery voltage in mV, read from the AXP209 registers."""
    msb = self.bus.read_byte_data(AXP209_ADDRESS, BATTERY_VOLTAGE_MSB_REG)
    lsb = self.bus.read_byte_data(AXP209_ADDRESS, BATTERY_VOLTAGE_LSB_REG)
    # 12-bit reading: 8 MSBs + low nibble of the LSB register; 1 step = 1.1 mV
    voltage_bin = msb << 4 | lsb & 0x0f
    return voltage_bin * 1.1
def _session_key(self):
    """Return (and cache) the redis key for this request's session.

    Generates a new session id and sets the secure cookie when no valid
    id is present in the request.
    """
    if not hasattr(self, "_cached_session_key"):
        session_id_bytes = self.get_secure_cookie("session_id")
        session_id = None
        if session_id_bytes:
            try:
                session_id = session_id_bytes.decode('utf-8')
            except:  # NOTE(review): bare except — UnicodeDecodeError would suffice
                pass
        if not session_id:
            session_id = oz.redis_sessions.random_hex(20)
            session_time = oz.settings["session_time"]
            kwargs = dict(
                name="session_id",
                value=session_id.encode('utf-8'),
                domain=oz.settings.get("cookie_domain"),
                httponly=True,
            )
            if session_time:
                kwargs["expires_days"] = round(session_time/60/60/24)
            self.set_secure_cookie(**kwargs)
        password_salt = oz.settings["session_salt"]
        self._cached_session_key = "session:%s:v4" % oz.redis_sessions.password_hash(session_id, password_salt=password_salt)
    return self._cached_session_key
def trim_empty_columns(self):
    """Remove all trailing empty columns, updating ncols; return self."""
    if self.nrows != 0 and self.ncols != 0:
        last_filled = -1
        # scan every row for the right-most non-empty cell
        for row in self.rows:
            for col in range(last_filled + 1, len(row)):
                if not self.is_cell_empty(row[col]):
                    last_filled = col
        width = last_filled + 1
        self.rows = [row[:width] for row in self.rows]
        self.ncols = width
    return self
def startup(api=None):
    """Register the decorated function to run at startup of the given (or inferred) API."""
    def startup_wrapper(startup_function):
        # bind to the given api, or infer it from where the function is defined
        apply_to_api = hug.API(api) if api else hug.api.from_object(startup_function)
        apply_to_api.add_startup_handler(startup_function)
        return startup_function
    return startup_wrapper
def first_sunday(self, year, month):
    """Return the first Sunday of the given month as a datetime."""
    month_start = datetime(year, month, 1, 0)
    # weekday(): Monday=0 ... Sunday=6
    offset = 6 - month_start.weekday()
    return month_start + timedelta(days=offset)
def _deserialize(self, stream):
    """Initialize this instance with index values read from *stream*; return self."""
    # the trailing content sha from read_cache is not stored on the instance
    self.version, self.entries, self._extension_data, _content_sha = read_cache(stream)
    return self
def cache_result(cache_key, timeout):
    """Decorator caching a function's result under *cache_key* for *timeout* seconds.

    The wrapped function gains an ``invalidate()`` attribute that clears
    the cached entry.
    """
    def decorator(f):
        cache_name = settings.WAFER_CACHE
        @functools.wraps(f)
        def wrapper(*args, **kw):
            cache = caches[cache_name]
            result = cache.get(cache_key)
            if result is None:
                result = f(*args, **kw)
                cache.set(cache_key, result, timeout)
            return result
        def invalidate():
            cache = caches[cache_name]
            cache.delete(cache_key)
        wrapper.invalidate = invalidate
        return wrapper
    return decorator
def dir_empty(d):
    """Return True if the given directory is empty, False otherwise."""
    # NOTE: glob('*') ignores dot-files, matching the original semantics
    return not glob.glob(os.path.join(d, '*'))
def cache(self, f):
    """Cache function *f* using the context's joblib cache directory.

    Returns *f* unchanged when joblib is unavailable. Bound-method style
    functions have their 'self' argument excluded from the cache key.
    """
    if self._memory is None:
        logger.debug("Joblib is not installed: skipping cacheing.")
        return f
    assert f
    # py3 fix: inspect.getargspec was removed in Python 3.11
    if 'self' in inspect.getfullargspec(f).args:
        ignore = ['self']
    else:
        ignore = None
    disk_cached = self._memory.cache(f, ignore=ignore)
    return disk_cached
def _warn_unsafe_for_prod(self):
    """Log a warning for each setting whose current value is unsafe for production."""
    # (setting path, value considered safe in production)
    safe_settings_list = [
        ('DEBUG', False),
        ('DEBUG_GMN', False),
        ('STAND_ALONE', False),
        ('DATABASES.default.ATOMIC_REQUESTS', True),
        ('SECRET_KEY', '<Do not modify this placeholder value>'),
        ('STATIC_SERVER', False),
    ]
    for setting_str, setting_safe in safe_settings_list:
        setting_current = self._get_setting(setting_str)
        if setting_current != setting_safe:
            logger.warning(
                'Setting is unsafe for use in production. setting="{}" current="{}" '
                'safe="{}"'.format(setting_str, setting_current, setting_safe)
            )
def XXX_REMOVEME(func):
    """Decorator for dead code removal: any call to the wrapped method raises RuntimeError."""
    @wraps(func)
    def decorator(self, *args, **kwargs):
        # py3 fix: func.func_name is Python 2 only; use __name__
        msg = "~~~~~~~ XXX REMOVEME marked method called: {}.{}".format(
            self.__class__.__name__, func.__name__)
        raise RuntimeError(msg)
        return func(self, *args, **kwargs)  # unreachable by design
    return decorator
def make_inst2():
    """Create example data set 2.

    Returns (I, J, c, d, M): customer ids I with demands d, facility ids J
    with capacities M, and transport costs c[(i, j)].
    """
    I,d = multidict({1:45, 2:20, 3:30 , 4:30})
    J,M = multidict({1:35, 2:50, 3:40})
    c = {(1,1):8, (1,2):9, (1,3):14 ,
         (2,1):6, (2,2):12, (2,3):9 ,
         (3,1):10, (3,2):13, (3,3):16 ,
         (4,1):9, (4,2):7, (4,3):5 ,
         }
    return I,J,c,d,M
def _fetch_socket_data(self, parsed_url):
    """Query a haproxy stats socket (TCP or UNIX) and return the response lines."""
    self.log.debug("Fetching haproxy stats from socket: %s" % parsed_url.geturl())
    if parsed_url.scheme == 'tcp':
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        splitted_loc = parsed_url.netloc.split(':')
        host = splitted_loc[0]
        port = int(splitted_loc[1])
        sock.connect((host, port))
    else:
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.connect(parsed_url.path)
    sock.send(b"show stat\r\n")
    response = ""
    # read until the peer closes the connection
    output = sock.recv(BUFSIZE)
    while output:
        response += output.decode("ASCII")
        output = sock.recv(BUFSIZE)
    sock.close()
    return response.splitlines()
def target_to_ipv4_long(target):
    """Parse a 'start-end' IPv4 range string into an address list, or None on bad input."""
    splitted = target.split('-')
    if len(splitted) != 2:
        return None
    try:
        start_packed = inet_pton(socket.AF_INET, splitted[0])
        end_packed = inet_pton(socket.AF_INET, splitted[1])
    except socket.error:
        return None
    # packed big-endian addresses compare correctly as byte strings
    if end_packed < start_packed:
        return None
    return ipv4_range_to_list(start_packed, end_packed)
def _getHeaders(self, updateParams=None):
    """Create the list of security-policy headers for the flask wrapper.

    *updateParams* maps policy names (dashes allowed) to values used to
    update the default policies; policies set to None are skipped.
    """
    if not updateParams:
        updateParams = {}
    policies = self.defaultPolicies
    if len(updateParams) > 0:
        for k, v in updateParams.items():
            k = k.replace('-', '_')
            # policy classes are looked up by name in module globals
            c = globals()[k](v)
            try:
                policies[k] = c.update_policy(self.defaultPolicies[k])
            except Exception:  # py3 fix: was `except Exception, e:` (Python 2 syntax)
                raise
    return [globals()[k](v).create_header()
            for k, v in policies.items() if v is not None]
def total_memory(self, image='ubuntu'):
    """Get the available RAM of the docker machine in kB (None on failure).

    NOTE(review): `f` (the shell command passed to check_output) is not
    defined in this snippet — it must come from an enclosing scope; confirm.
    Parses the second whitespace-separated field of the command's output.
    """
    try:
        ret = subprocess.check_output(
            f,
            shell=True,
            stdin=subprocess.DEVNULL)
        self.tot_mem = int(ret.split()[1])
    except Exception:
        self.tot_mem = None
    return self.tot_mem
def check_owner_permission(payload, allow_user_owner):
    """Validate 'owner' permission changes found in *payload*.

    Raises ParseError when owner would be assigned to a group, and
    PermissionDenied when owner is granted/revoked for users while
    *allow_user_owner* is False.
    """
    for entity_type in ['users', 'groups']:
        for perm_type in ['add', 'remove']:
            for perms in payload.get(entity_type, {}).get(perm_type, {}).values():
                if 'owner' in perms:
                    if entity_type == 'users' and allow_user_owner:
                        continue
                    if entity_type == 'groups':
                        raise exceptions.ParseError("Owner permission cannot be assigned to a group")
                    raise exceptions.PermissionDenied("Only owners can grant/revoke owner permission")
def _trivialgraph_default_namer(thing, is_edge=True):
    """Return a readable label for *thing* in printed trivialgraph output."""
    if is_edge:
        # unnamed/temporary wires get no label; others show "name/bitwidth"
        if thing.name is None or thing.name.startswith('tmp'):
            return ''
        else:
            return '/'.join([thing.name, str(len(thing))])
    elif isinstance(thing, Const):
        return str(thing.val)
    elif isinstance(thing, WireVector):
        return thing.name or '??'
    else:
        try:
            return thing.op + str(thing.op_param or '')
        except AttributeError:
            raise PyrtlError('no naming rule for "%s"' % str(thing))
def _sort_kw_matches(skw_matches, limit=0):
    """Return the keyword matches sorted by the comparator, truncated to *limit* if non-zero.

    Bug fix: the original called ``sorted(...)`` and discarded its return
    value, so the list was never actually sorted.
    """
    sorted_keywords = sorted(skw_matches.items(),
                             key=cmp_to_key(_skw_matches_comparator))
    return limit and sorted_keywords[:limit] or sorted_keywords
def unit_normal(pt_a, pt_b, pt_c):
    """Unit normal vector of the plane defined by points pt_a, pt_b, and pt_c.

    Returns (0, 0, 0) for degenerate (collinear) inputs. Bug fix: the
    original divided by the magnitude BEFORE the near-zero guard, raising
    ZeroDivisionError for exactly-collinear points.
    """
    x_val = np.linalg.det([[1, pt_a[1], pt_a[2]], [1, pt_b[1], pt_b[2]], [1, pt_c[1], pt_c[2]]])
    y_val = np.linalg.det([[pt_a[0], 1, pt_a[2]], [pt_b[0], 1, pt_b[2]], [pt_c[0], 1, pt_c[2]]])
    z_val = np.linalg.det([[pt_a[0], pt_a[1], 1], [pt_b[0], pt_b[1], 1], [pt_c[0], pt_c[1], 1]])
    magnitude = (x_val**2 + y_val**2 + z_val**2)**.5
    if magnitude < 0.00000001:
        return (0, 0, 0)
    return (x_val/magnitude, y_val/magnitude, z_val/magnitude)
def pack(self, value=None):
    """Pack this match (or a given Match *value*), padding the last byte."""
    if isinstance(value, Match):
        return value.pack()
    elif value is None:
        self._update_match_length()
        packet = super().pack()
        return self._complete_last_byte(packet)
    raise PackException(f'Match can\'t unpack "{value}".')
def parse(self, lines):
    """Parse ssh-config lines, tracking the current Host block for each line."""
    cur_entry = None
    for line in lines:
        kv_ = _key_value(line)
        if len(kv_) > 1:
            key, value = kv_
            if key.lower() == "host":
                cur_entry = value
                self.hosts_.add(value)
            self.lines_.append(ConfigLine(line=line, host=cur_entry, key=key, value=value))
        else:
            # non key/value line (blank or comment): keep it verbatim
            self.lines_.append(ConfigLine(line=line))
def cdx_clamp(cdx_iter, from_ts, to_ts):
    """Yield only cdx entries whose timestamp lies within [from_ts, to_ts].

    Short timestamps are padded to 14 digits — down for the start bound,
    up for the end bound — before comparing.
    """
    if from_ts and len(from_ts) < 14:
        from_ts = pad_timestamp(from_ts, PAD_14_DOWN)
    if to_ts and len(to_ts) < 14:
        to_ts = pad_timestamp(to_ts, PAD_14_UP)
    for cdx in cdx_iter:
        if from_ts and cdx[TIMESTAMP] < from_ts:
            continue
        if to_ts and cdx[TIMESTAMP] > to_ts:
            continue
        yield cdx
def construct(self, force=False):
    """Construct the raw IRC message, prepending tags when present.

    Raises ProtocolViolation when the tagged message exceeds the length
    limit, unless *force* is set.
    """
    message = super().construct(force=force)
    if self.tags:
        raw_tags = []
        for tag, value in self.tags.items():
            # a True value means a flag-style tag with no value part
            if value == True:
                raw_tags.append(tag)
            else:
                raw_tags.append(tag + TAG_VALUE_SEPARATOR + value)
        message = TAG_INDICATOR + TAG_SEPARATOR.join(raw_tags) + ' ' + message
        if len(message) > TAGGED_MESSAGE_LENGTH_LIMIT and not force:
            raise protocol.ProtocolViolation('The constructed message is too long. ({len} > {maxlen})'.format(len=len(message), maxlen=TAGGED_MESSAGE_LENGTH_LIMIT), message=message)
    return message
def formatter(self, key, value):
    """Format a (key, value) sample as a collectd PUTVAL line."""
    template = "PUTVAL {host}/fedmsg/fedmsg_wallboard-{key} " +\
        "interval={interval} {timestamp}:{value}"
    timestamp = int(time.time())
    interval = self.hub.config['collectd_interval']
    return template.format(
        host=self.host,
        timestamp=timestamp,
        value=value,
        interval=interval,
        key=key,
    )
def _unzip_handle(handle):
    """Transparently open *handle* (a filename or an open file object) through gzip.

    py3 fix: the original tested ``isinstance(handle, basestring)``, which
    raises NameError on Python 3.
    """
    if isinstance(handle, str):
        handle = _gzip_open_filename(handle)
    else:
        handle = _gzip_open_handle(handle)
    return handle
def run(self):
    """Worker loop: pull (path, params) jobs from the queue and queue the resulting errors."""
    while True:
        path, params = self.path_queue.get()
        # calls the module-level run(), not this method
        errors = run(path, **params)
        self.result_queue.put(errors)
        self.path_queue.task_done()
def update_battery(self):
    """Refresh the battery-level label for the connected SK8, if any."""
    if self.sk8 is None:
        return
    battery = self.sk8.get_battery_level()
    self.lblBattery.setText('Battery: {}%'.format(battery))
def list_joysticks():
    """Print the id and name of every joystick pygame can see."""
    print('Available joysticks:')
    print()
    for jid in range(pygame.joystick.get_count()):
        j = pygame.joystick.Joystick(jid)
        print('({}) {}'.format(jid, j.get_name()))
def dilated_attention_1d(x,
                         hparams,
                         attention_type="masked_dilated_1d",
                         q_padding="VALID",
                         kv_padding="VALID",
                         gap_size=2):
    """Dilated 1d self attention over *x*; restores a 4-D shape when the input was 4-D."""
    x, x_shape, is_4d = maybe_reshape_4d_to_3d(x)
    with tf.variable_scope("masked_dilated_1d"):
        y = common_attention.multihead_attention(
            x,
            None,
            None,
            hparams.attention_key_channels or hparams.hidden_size,
            hparams.attention_value_channels or hparams.hidden_size,
            hparams.hidden_size,
            hparams.num_heads,
            hparams.attention_dropout,
            attention_type=attention_type,
            block_width=hparams.block_width,
            block_length=hparams.block_length,
            q_padding=q_padding,
            kv_padding=kv_padding,
            q_filter_width=hparams.q_filter_width,
            kv_filter_width=hparams.kv_filter_width,
            gap_size=gap_size,
            num_memory_blocks=hparams.num_memory_blocks,
            name="self_attention")
    if is_4d:
        y = tf.reshape(y, x_shape)
        y.set_shape([None, None, None, hparams.hidden_size])
    return y
def on_default_shell_changed(self, combo):
    """Persist the selected default shell to dconf (reset when 'user shell' is chosen)."""
    citer = combo.get_active_iter()
    if not citer:
        return
    shell = combo.get_model().get_value(citer, 0)
    if shell == USER_SHELL_VALUE:
        self.settings.general.reset('default-shell')
    else:
        self.settings.general.set_string('default-shell', shell)
def __check_http_err(self, status_code):
    """Raise an API exception for known HTTP error codes; otherwise return False."""
    if status_code == 403:
        raise exceptions.APIAuthenticationError(self.api_key)
    if status_code == 503:
        raise exceptions.APITimeoutError()
    return False
def all_pkgs_by_name_regex(self, regex_name, flags=0):
    """Return the names of installed packages whose name matches *regex_name*."""
    pattern = re.compile(regex_name, flags)
    matching = []
    for name in self.all_pkgs().keys():
        if pattern.match(name):
            matching.append(name)
    return matching
def lincon(self, x, theta=0.01):
    """Ridge-like linear function with one linear constraint: NaN when x[0] < 0."""
    if x[0] < 0:
        # np.nan: the np.NaN alias was removed in NumPy 2.0
        return np.nan
    return theta * x[1] + x[0]
def construct_url(ip_address: str) -> str:
    """Construct a usable base URL from an IP address or host string."""
    has_scheme = 'http://' in ip_address or 'https://' in ip_address
    if not has_scheme:
        ip_address = 'http://' + ip_address
    if ip_address[-1] == '/':
        ip_address = ip_address[:-1]
    return ip_address
def __up_cmp(self, obj1, obj2):
    """Three-way comparison on update_order, defining how updatables are sorted."""
    a = obj1.update_order
    b = obj2.update_order
    return (a > b) - (a < b)
def _refresh_channel(self):
    """Recreate the request channel (e.g. after an interruption) and return it."""
    self.channel = salt.transport.client.ReqChannel.factory(self.opts)
    return self.channel
def resize(self, new_data_size):
    """Resize the underlying file data and update the chunk size bookkeeping."""
    resize_bytes(
        self.__fileobj, self.data_size, new_data_size, self.data_offset)
    self._update_size(new_data_size)
def __get_queue_opts(queue=None, backend=None):
    """Build consistent options for the queued runners, falling back to config defaults."""
    if queue is None:
        queue = __opts__.get('runner_queue', {}).get('queue')
    if backend is None:
        backend = __opts__.get('runner_queue', {}).get('backend', 'pgjsonb')
    return {'backend': backend,
            'queue': queue}
def CheckPreviousBarline(self, staff):
    """Adjust the previous measure's barlines relative to the last measure.

    When the second-to-last measure's right barline carries an ending and
    the last measure's left barline does not continue it (or is absent),
    mark the ending as 'discontinue'.
    """
    measure_before_last = self.getMeasureAtPosition(-2, staff)
    last_measure = self.getMeasureAtPosition(-1, staff)
    if last_measure is not None and measure_before_last is not None:
        bline1 = measure_before_last.GetBarline("right")
        bline2 = last_measure.GetBarline("left")
        if bline1 is not None:
            if hasattr(bline1, "ending"):
                if bline2 is not None:
                    if not hasattr(bline2, "ending"):
                        bline1.ending.type = "discontinue"
                else:
                    bline1.ending.type = "discontinue"
async def weather_type_classe(self):
    """Retrieve the (Portuguese) translation for each weather type.

    Fetches the weather-type list from the API, caches the id ->
    description mapping on ``self.weather_type`` and returns it.
    """
    payload = await self.retrieve(url=API_WEATHER_TYPE)
    self.weather_type = {
        entry['idWeatherType']: entry['descIdWeatherTypePT']
        for entry in payload['data']
    }
    return self.weather_type
def attach_model(subscription, rgname, vmssvm_model, diskname, lun):
    """Attach a managed data disk to a VMSS VM model.

    Builds the ARM resource id for the disk, appends an 'Attach' entry to
    the model's dataDisks list (in place) and returns the mutated model.
    """
    disk_id = ('/subscriptions/{}/resourceGroups/{}'
               '/providers/Microsoft.Compute/disks/{}').format(
                   subscription, rgname, diskname)
    new_disk = {
        'lun': lun,
        'createOption': 'Attach',
        'caching': 'None',
        'managedDisk': {'storageAccountType': 'Standard_LRS', 'id': disk_id},
    }
    vmssvm_model['properties']['storageProfile']['dataDisks'].append(new_disk)
    return vmssvm_model
def make_multi_cols(self, num_class, name):
    """Make column names for multi-class predictions.

    Produces ``['c0_<name>', 'c1_<name>', ...]`` with one entry per class.

    Parameters
    ----------
    num_class : int
        Number of classes / columns to generate.
    name : str
        Suffix appended to each column name.

    Returns
    -------
    list of str
    """
    # List comprehension instead of the Python-2-only xrange + map
    # (xrange raises NameError and map returns an iterator on Python 3).
    return ['c{}_{}'.format(i, name) for i in range(num_class)]
def save_direction(self, rootpath, raw=False, as_int=False):
    """Save the direction of the slope to a file under *rootpath*.

    Thin wrapper around ``self.save_array`` using the 'ang' name tag.
    The exact meaning of *raw* and *as_int* is defined by ``save_array``
    (presumably raw vs. processed output and integer casting — confirm
    against that method).
    """
    self.save_array(self.direction, None, 'ang', rootpath, raw, as_int=as_int)
def _write_service_config(self):
    """Write the service config (``self.data``) out to ``self.config_path``.

    The JSON is pretty-printed with sorted keys so the file diffs cleanly
    between writes.
    """
    with open(self.config_path, 'w') as output:
        # json.dump streams straight to the file handle instead of
        # building the whole document in memory with dumps() first.
        json.dump(self.data, output, sort_keys=True, indent=4)
def sort_by_priority(iterable, reverse=False, default_priority=10):
    """Return a list of objects sorted by their ``priority`` attribute.

    Objects without a ``priority`` attribute sort as *default_priority*.
    The sort is stable, so ties keep their original relative order.
    """
    def priority_of(obj):
        return getattr(obj, 'priority', default_priority)

    return sorted(iterable, key=priority_of, reverse=reverse)
def _getPOS(self, token, onlyFirst=True):
    """Return the POS tag(s) of the current *token*.

    With ``onlyFirst`` (the default) only the POS of the first analysis
    is returned; otherwise a list with one tag per analysis.
    """
    analyses = token[ANALYSIS]
    if onlyFirst:
        return analyses[0][POSTAG]
    return [analysis[POSTAG] for analysis in analyses]
def _any_pandas_objects(terms):
    """Return True if any term's value is an instance of PandasObject."""
    for term in terms:
        if isinstance(term.value, pd.core.generic.PandasObject):
            return True
    return False
def generate(env):
    """Add Builders and construction variables for nasm to an Environment."""
    # Obtain (or create) the standard static/shared object builders.
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
    # Plain-assembly suffixes: assembled directly by $AS.
    for suffix in ASSuffixes:
        static_obj.add_action(suffix, SCons.Defaults.ASAction)
        static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
    # Preprocessed-assembly suffixes: run through the C preprocessor
    # first (see ASPPCOM below, which invokes $CC).
    for suffix in ASPPSuffixes:
        static_obj.add_action(suffix, SCons.Defaults.ASPPAction)
        static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
    # Construction variables.  NOTE(review): only the static object
    # builder is wired up here; shared_obj is fetched but unused.
    env['AS'] = 'nasm'
    env['ASFLAGS'] = SCons.Util.CLVar('')
    env['ASPPFLAGS'] = '$ASFLAGS'
    env['ASCOM'] = '$AS $ASFLAGS -o $TARGET $SOURCES'
    env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES'
def CheckBlobsExist(self, blob_ids, cursor=None):
    """Check which of the given blob ids exist in the blobs table.

    Returns a dict mapping every requested blob id to True/False.
    """
    if not blob_ids:
        return {}
    # Start with everything marked absent; flip to True for each row found.
    result = dict.fromkeys(blob_ids, False)
    query = ("SELECT blob_id "
             "FROM blobs "
             "FORCE INDEX (PRIMARY) "
             "WHERE blob_id IN {}".format(
                 mysql_utils.Placeholders(len(blob_ids))))
    cursor.execute(query, [b.AsBytes() for b in blob_ids])
    for row in cursor.fetchall():
        result[rdf_objects.BlobID.FromBytes(row[0])] = True
    return result
def deserialize(self, data, status_code):
    """Deserialize a JSON string into a dictionary.

    A 204 (No Content) response is passed through unchanged; otherwise
    the 'body' field of the deserialized payload is returned.
    """
    if status_code == 204:
        return data
    parsed = serializer.Serializer().deserialize(data)
    return parsed['body']
def announce(self, discovery):
    """Attempt to announce the given Discovery object to the host agent.

    Returns the HTTP response on completion, or None when the request
    timed out / could not connect (or failed before being sent).
    """
    # Initialize before the try so the finally-return below can never hit
    # an UnboundLocalError if __discovery_url() or to_json() raises.
    response = None
    try:
        url = self.__discovery_url()
        logger.debug("making announce request to %s" % (url))
        response = self.client.put(url,
                                   data=self.to_json(discovery),
                                   headers={"Content-Type": "application/json"},
                                   timeout=0.8)
        # Use ==, not "is": identity comparison against an int literal
        # only works by accident of CPython's small-int cache.
        if response.status_code == 200:
            self.last_seen = datetime.now()
    except (requests.ConnectTimeout, requests.ConnectionError):
        logger.debug("announce", exc_info=True)
    finally:
        # NOTE(review): returning from "finally" also swallows any
        # unexpected exception raised above; kept for backward
        # compatibility with existing callers.
        return response
def all_files(file_or_directory):
    """Return all files under *file_or_directory*.

    A plain file path is returned as a single-element list; a directory
    is walked recursively and every contained file path is returned.
    """
    if not os.path.isdir(file_or_directory):
        return [file_or_directory]
    collected = []
    for dirpath, _dirnames, filenames in os.walk(file_or_directory):
        collected.extend(os.path.join(dirpath, fname) for fname in filenames)
    return collected
def parse_config(app):
    """Process the Sphinx Gallery configuration.

    Completes the user-supplied ``sphinx_gallery_conf`` with builder
    settings, stores it back on ``app.config`` and returns it.
    """
    # plot_gallery may arrive as a string (e.g. "True" from the command
    # line) or already as a bool.  NOTE(review): eval() on a config value
    # executes arbitrary code; tolerable only because conf.py is already
    # trusted code, but a literal/bool parse would be safer.
    try:
        plot_gallery = eval(app.builder.config.plot_gallery)
    except TypeError:
        # Non-string value (bool/int): eval() raised TypeError, use as-is.
        plot_gallery = bool(app.builder.config.plot_gallery)
    src_dir = app.builder.srcdir
    abort_on_example_error = app.builder.config.abort_on_example_error
    lang = app.builder.config.highlight_language
    # Merge user options with defaults (helper defined elsewhere).
    gallery_conf = _complete_gallery_conf(
        app.config.sphinx_gallery_conf, src_dir, plot_gallery,
        abort_on_example_error, lang, app.builder.name)
    # Store the completed config back and expose the gallery's static
    # assets to the HTML builder.
    app.config.sphinx_gallery_conf = gallery_conf
    app.config.html_static_path.append(glr_path_static())
    return gallery_conf
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.