code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def convert_machine_list_time_val(text: str) -> datetime.datetime:
    """Convert an RFC 3659 time-val string to a UTC datetime.

    Any fractional-second suffix beyond the first 14 digits is discarded.
    Raises ValueError when fewer than 14 characters are available.
    """
    stamp = text[:14]
    if len(stamp) != 14:
        raise ValueError('Time value not 14 chars')
    year = int(stamp[:4])
    month, day, hour, minute, second = (
        int(stamp[i:i + 2]) for i in range(4, 14, 2))
    return datetime.datetime(year, month, day, hour, minute, second,
                             tzinfo=datetime.timezone.utc)
def gpu_mem_restore(func):
    "Reclaim GPU RAM if CUDA out of memory happened, or execution was interrupted"
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # FASTAI_TB_CLEAR_FRAMES: "0" disables traceback stripping, "1" forces it
        tb_clear_frames = os.environ.get('FASTAI_TB_CLEAR_FRAMES', None)
        if not IS_IN_IPYTHON or tb_clear_frames=="0":
            return func(*args, **kwargs)
        try:
            return func(*args, **kwargs)
        except Exception as e:
            if ("CUDA out of memory" in str(e) or
                "device-side assert triggered" in str(e) or
                tb_clear_frames == "1"):
                # rebuild the exception with frames stripped so it holds no
                # references pinning GPU tensors, then force a collection
                type, val, tb = get_ref_free_exc_info()
                gc.collect()
                if "device-side assert triggered" in str(e):
                    # NOTE(review): warn() is called with no arguments —
                    # presumably a placeholder; confirm the intended message
                    warn()
                raise type(val).with_traceback(tb) from None
            else: raise
    return wrapper | Reclaim GPU RAM if CUDA out of memory happened, or execution was interrupted |
def since(self, ts):
    """Tail the oplog starting from *ts*, yielding documents forever.

    After each batch the cursor position advances to the last document's
    'ts', so new writes keep being followed.
    NOTE(review): when a batch comes back empty this loop re-queries
    immediately (busy-wait) — confirm the parent class throttles.
    """
    while True:
        items = super(TailingOplog, self).since(ts)
        for doc in items:
            yield doc
            # resume from the last seen timestamp on the next query
            ts = doc['ts'] | Tail the oplog, starting from ts. |
def isinstance(self, instance, class_name):
    """Check whether a BaseNode *instance* is an instance of the dynamic
    class registered under *class_name*; False when no such class exists."""
    if not isinstance(instance, BaseNode):
        raise TypeError("This function can only be used for BaseNode objects")
    klass = self.dynamic_node_classes.get(class_name, None)
    return bool(klass) and isinstance(instance, klass)
def _print_title(self):
if self.title:
self._stream_out('{}\n'.format(self.title))
self._stream_flush() | Prints tracking title at initialization. |
def count_rows_with_nans(X):
    """Count the rows of a 2-D array that contain at least one NaN.

    Returns None (implicitly, as the original did) when ``X`` is not 2-D.
    """
    if X.ndim == 2:
        # any() per row is clearer than the original np.where(...!=0,1,0)
        return np.isnan(X).any(axis=1).sum()
def todo_tasks(self):
    """Return the tasks in this loop whose state is still pending."""
    pending_state = NewTask._PENDING
    return [task for task in self.all_tasks if task._state == pending_state]
def checkForDeadlocks(self):
    """Check whether the system is deadlocked running service jobs.

    A potential deadlock exists when every running batch job is an active
    service job.  If the same set of service jobs stays in that state for
    at least config.deadlockWait seconds, raise DeadlockException.
    """
    totalRunningJobs = len(self.batchSystem.getRunningBatchJobIDs())
    totalServicesIssued = self.serviceJobsIssued + self.preemptableServiceJobsIssued
    # a deadlock is only possible when every running job could be a service
    if totalServicesIssued >= totalRunningJobs and totalRunningJobs > 0:
        serviceJobs = [x for x in list(self.jobBatchSystemIDToIssuedJob.keys()) if isinstance(self.jobBatchSystemIDToIssuedJob[x], ServiceJobNode)]
        runningServiceJobs = set([x for x in serviceJobs if self.serviceManager.isRunning(self.jobBatchSystemIDToIssuedJob[x])])
        assert len(runningServiceJobs) <= totalRunningJobs
        if len(runningServiceJobs) == totalRunningJobs:
            # all running jobs are services: start or continue the clock
            if self.potentialDeadlockedJobs != runningServiceJobs:
                self.potentialDeadlockedJobs = runningServiceJobs
                self.potentialDeadlockTime = time.time()
            elif time.time() - self.potentialDeadlockTime >= self.config.deadlockWait:
                raise DeadlockException("The system is service deadlocked - all %d running jobs are active services" % totalRunningJobs)
        else:
            # a non-service job is running: not deadlocked, reset tracking
            self.potentialDeadlockedJobs = set()
            self.potentialDeadlockTime = 0
    else:
        self.potentialDeadlockedJobs = set()
        self.potentialDeadlockTime = 0
def list_securitygroup(call=None):
    """Return a mapping of security-group id -> attributes for the region.

    Must be invoked with -f/--function (not as an action).  Returns an
    empty dict when the API reports an error payload.
    """
    if call == 'action':
        raise SaltCloudSystemExit(
            # fixed: message previously referred to 'list_nodes'
            'The list_securitygroup function must be called with -f or --function.'
        )
    params = {
        'Action': 'DescribeSecurityGroups',
        'RegionId': get_location(),
        'PageSize': '50',
    }
    result = query(params)
    if 'Code' in result:
        # error payload from the API: report nothing rather than raise
        return {}
    # NOTE: only the first page (up to 50 groups) is returned
    return {sg['SecurityGroupId']: dict(sg)
            for sg in result['SecurityGroups']['SecurityGroup']}
def make_slice_strings(cls, slice_key):
    """Convert the given slice key into (start, size) query-string parts."""
    start_index = slice_key.start
    return str(start_index), str(slice_key.stop - start_index)
def update(self, x, w=1):
    """Update the t-digest with value *x* and weight *w*.

    Distributes the weight among randomly chosen nearest centroids,
    subject to each centroid's quantile-based size threshold; leftover
    weight becomes a new centroid.  Compresses once the digest exceeds
    K/delta centroids.
    """
    self.n += w
    if len(self) == 0:
        self._add_centroid(Centroid(x, w))
        return
    S = self._find_closest_centroids(x)
    while len(S) != 0 and w > 0:
        # pick a random candidate among the closest centroids
        j = choice(list(range(len(S))))
        c_j = S[j]
        q = self._compute_centroid_quantile(c_j)
        # candidate already at its threshold: discard and try another
        if c_j.count + w > self._threshold(q):
            S.pop(j)
            continue
        # absorb as much remaining weight as the threshold allows
        delta_w = min(self._threshold(q) - c_j.count, w)
        self._update_centroid(c_j, x, delta_w)
        w -= delta_w
        S.pop(j)
    if w > 0:
        # weight no centroid could absorb starts a new centroid
        self._add_centroid(Centroid(x, w))
    if len(self) > self.K / self.delta:
        self.compress()
    return
def check_pypi_exists(dependencies):
    """Check that every 'pypi' dependency actually exists in PyPI.

    Returns False as soon as one dependency is missing; raises FadesError
    (chained to the underlying error) when PyPI could not be queried.
    """
    for dependency in dependencies.get('pypi', []):
        logger.debug("Checking if %r exists in PyPI", dependency)
        try:
            exists = _pypi_head_package(dependency)
        except Exception as error:
            logger.error("Error checking %s in PyPI: %r", dependency, error)
            # fixed: chain the original exception for easier debugging
            raise FadesError("Could not check if dependency exists in PyPI") from error
        if not exists:
            # fixed: grammar in the log message ("doesn't exists")
            logger.error("%s doesn't exist in PyPI.", dependency)
            return False
    return True
def send_action(action, params=None):
    """Helper for sending an action message over the comm channel."""
    payload = {"msg_type": "action", "action": action}
    if params is not None:
        payload["params"] = params
    _comm.send(payload)
def isoncurve(self, p):
    """Verify that point *p* lies on the curve y^2 = x^3 + a*x + b
    (the zero point / point at infinity is always on the curve)."""
    if p.iszero():
        return True
    return p.y ** 2 == p.x ** 3 + self.a * p.x + self.b
def _iris_cell_methods_to_str(cell_methods_obj):
cell_methods = []
for cell_method in cell_methods_obj:
names = ''.join(['{}: '.format(n) for n in cell_method.coord_names])
intervals = ' '.join(['interval: {}'.format(interval)
for interval in cell_method.intervals])
comments = ' '.join(['comment: {}'.format(comment)
for comment in cell_method.comments])
extra = ' '.join([intervals, comments]).strip()
if extra:
extra = ' ({})'.format(extra)
cell_methods.append(names + cell_method.method + extra)
return ' '.join(cell_methods) | Converts a Iris cell methods into a string |
def check_garden_requirements(self):
    """Ensure required garden packages are available to be included.

    Skips all work when the cached requirement list matches the config;
    otherwise wipes gardenlibs_dir and reinstalls every requirement via
    Kivy-Garden inside the virtualenv.
    """
    garden_requirements = self.config.getlist('app',
        'garden_requirements', '')
    # nothing to do when the cache already matches the requested list
    if exists(self.gardenlibs_dir) and \
            self.state.get('cache.gardenlibs', '') == garden_requirements:
        self.debug('Garden requirements already installed, pass')
        return
    # start from a clean slate
    self.rmdir(self.gardenlibs_dir)
    if not garden_requirements:
        self.state['cache.gardenlibs'] = garden_requirements
        return
    self._ensure_virtualenv()
    # pinned garden tool version used to fetch the packages
    self.cmd('pip install Kivy-Garden==0.1.1', env=self.env_venv)
    self.mkdir(self.gardenlibs_dir)
    for requirement in garden_requirements:
        self._install_garden_package(requirement)
    # record success so future calls can short-circuit
    self.state['cache.gardenlibs'] = garden_requirements
def loadTextureD3D11_Async(self, textureId, pD3D11Device):
    """Create a D3D11 texture and load data into it.

    Returns (error_code, texture_pointer).
    """
    texture_ptr = c_void_p()
    error = self.function_table.loadTextureD3D11_Async(
        textureId, pD3D11Device, byref(texture_ptr))
    return error, texture_ptr.value
def create_api_environment_vip(self):
    """Get an instance of the Api Environment Vip services facade."""
    return ApiEnvironmentVip(self.networkapi_url, self.user,
                             self.password, self.user_ldap)
def unset_default_org(self):
    """Clear the 'default' flag on every org that currently has it."""
    for org_name in self.list_orgs():
        config = self.get_org(org_name)
        if config.default:
            del config.config["default"]
            self.set_org(config)
def recover_all_handler(self):
    """Re-attach every cached file handler to the logger, then clear the cache."""
    for cached_handler in self._handler_cache:
        self.logger.addHandler(cached_handler)
    self._handler_cache = []
def convert_references_json(ref_content, soup=None):
    """Check for references that will not pass schema validation; fix or
    convert them to the "unknown" type.

    A reference is converted when its type is "other", or when it is
    missing any of the fields required for its type.
    """
    # Required extra fields per reference type (replaces the original long
    # boolean chain, which also used the non-idiomatic `not "x" in d`).
    required_fields = {
        "journal": ("articleTitle", "pages", "journal"),
        "book-chapter": ("editors", "pages", "publisher"),
        "book": ("publisher", "bookTitle"),
        "report": ("publisher",),
        "thesis": ("publisher",),
        "software": ("publisher",),
        "data": ("source",),
        "conference-proceeding": ("conference",),
    }
    ref_type = ref_content.get("type")
    missing_required = any(
        field not in ref_content
        for field in required_fields.get(ref_type, ()))
    if ref_type == "other" or missing_required:
        ref_content = references_json_to_unknown(ref_content, soup)
    return ref_content
def getTileUrlsByLatLngExtent(self, xmin, ymin, xmax, ymax, level):
    """Return a list of tile URLs covering the lat/lng extent at *level*,
    iterating rows from the bottom tile row upward."""
    tileXMin, tileYMin = self.tileUtils.convertLngLatToTileXY(xmin, ymax, level)
    tileXMax, tileYMax = self.tileUtils.convertLngLatToTileXY(xmax, ymin, level)
    urls = []
    for tile_y in range(tileYMax, tileYMin - 1, -1):
        urls.extend(self.createTileUrl(tile_x, tile_y, level)
                    for tile_x in range(tileXMin, tileXMax + 1))
    return urls
def initRnaQuantificationSet(self):
    """Initialize an empty RNA quantification set backing store."""
    rnaseq2ga.RnaSqliteStore(self._args.filePath).createTables()
def dim_lower_extent_dict(self):
    """Returns a mapping of dimension name to lower_extent.

    NOTE(review): uses dict.itervalues(), so this is Python 2-only code.
    """
    return { d.name: d.lower_extent for d in self._dims.itervalues()}
def removeSettingsGroup(groupName, settings=None):
    """Remove *groupName* from the persistent settings (a fresh QSettings
    instance is used when none is supplied)."""
    logger.debug("Removing settings group: {}".format(groupName))
    if settings is None:
        settings = QtCore.QSettings()
    settings.remove(groupName)
def _update_vdr_vxrheadtail(self, f, vdr_offset, VXRoffset):
self._update_offset_value(f, vdr_offset+28, 8, VXRoffset)
self._update_offset_value(f, vdr_offset+36, 8, VXRoffset) | This sets a VXR to be the first and last VXR in the VDR |
def restore(self):
    """Restore SIGINT/SIGTERM handlers (and SIGBREAK on Windows) to the
    originals captured earlier on this object."""
    signal.signal(signal.SIGINT, self.original_sigint)
    signal.signal(signal.SIGTERM, self.original_sigterm)
    if os.name == 'nt':
        # SIGBREAK only exists on Windows
        signal.signal(signal.SIGBREAK, self.original_sigbreak)
def rollback(self):
    """Roll back this transaction on the database.

    Marks the transaction as rolled back and detaches it from its session.
    """
    self._check_state()
    database = self._session._database
    api = database.spanner_api
    metadata = _metadata_with_prefix(database.name)
    api.rollback(self._session.name, self._transaction_id, metadata=metadata)
    self._rolled_back = True
    # detach so the session can start a new transaction
    del self._session._transaction
def _toggle_filming(self):
if self._filming:
self.log("Stopping operation")
self._filming = False
self.timer.stop()
else:
self.log("Starting operation")
self._filming = True
self.timer.start() | Toggles the camera system recording state |
def dxdy(line):
    """Return the normalised direction (unit) vector of a 2-D line segment."""
    x0, y0, x1, y1 = line
    run = float(x1 - x0)
    rise = float(y1 - y0)
    length = hypot(run, rise)
    return run / length, rise / length
def add_mavlink_packet(self, msg):
    """Add data from a mavlink packet to the graph.

    Messages whose type the graph does not track are ignored; each tracked
    field expression is re-evaluated against the latest messages, and the
    values are pushed to the live graph when one is attached.
    """
    mtype = msg.get_type()
    if mtype not in self.msg_types:
        return
    for i in range(len(self.fields)):
        # only re-evaluate fields that depend on this message type
        if mtype not in self.field_types[i]:
            continue
        f = self.fields[i]
        self.values[i] = mavutil.evaluate_expression(f, self.state.master.messages)
    if self.livegraph is not None:
        self.livegraph.add_values(self.values)
def rounddown(ctx, number, num_digits):
    """Round a number down, toward zero, to *num_digits* digits."""
    value = conversions.to_decimal(number, ctx)
    digits = conversions.to_integer(num_digits, ctx)
    return decimal_round(value, digits, ROUND_DOWN)
def root(self, value):
    """Set a new XML tree root, keeping the serialized form (_xml) in sync."""
    self._xml = t2s(value)
    self._root = value
def Subtract(self, other):
    """Subtract the frequencies in *other* from this histogram, in place."""
    for value, frequency in other.Items():
        self.Incr(value, -frequency)
def _get_tag(repo, name):
    """Find the requested tag in the specified repo.

    Returns the matching tag tuple, or False when absent.
    """
    # fixed: stop at the first match lazily instead of materialising the
    # whole tag list just to take element [0]
    return next((tag for tag in _all_tags(repo) if tag[0] == name), False)
def can_view(self, user):
    """Return whether `user` can view information about this user.

    True for admins and for the user themself.  NOTE(review): the final
    branch returns a (possibly empty) set, not a bool — callers must treat
    the result as truthy only.
    """
    return user.is_admin or self == user \
        or set(self.classes).intersection(user.admin_for)
def _extract_etag(response):
if response and response.headers:
for name, value in response.headers:
if name.lower() == 'etag':
return value
return None | Extracts the etag from the response headers. |
def ascii_text(text):
    """Transliterate *text* and force the result to plain ASCII."""
    transliterated = latinize_text(text, ascii=True)
    if isinstance(transliterated, six.text_type):
        return transliterated.encode('ascii', 'ignore').decode('ascii')
    return transliterated
def find_spec(self, fullname, path, target=None):
    """Claim modules under the configured package prefix (ipynb.fs).

    Returns a ModuleSpec for the first candidate file that exists, else
    None so other finders get a chance.
    """
    if not fullname.startswith(self.package_prefix):
        return None
    # fixed: the original loop variable shadowed the `path` parameter
    for candidate in self._get_paths(fullname):
        if os.path.exists(candidate):
            return ModuleSpec(
                name=fullname,
                loader=self.loader_class(fullname, candidate),
                origin=candidate,
                is_package=(candidate.endswith('__init__.ipynb')
                            or candidate.endswith('__init__.py')),
            )
def _handle_info(self, *args, **kwargs):
if 'version' in kwargs:
self.api_version = kwargs['version']
print("Initialized API with version %s" % self.api_version)
return
try:
info_code = str(kwargs['code'])
except KeyError:
raise FaultyPayloadError("_handle_info: %s" % kwargs)
if not info_code.startswith('2'):
raise ValueError("Info Code must start with 2! %s", kwargs)
output_msg = "_handle_info(): %s" % kwargs
log.info(output_msg)
try:
self._code_handlers[info_code]()
except KeyError:
raise UnknownWSSInfo(output_msg) | Handles info messages and executed corresponding code |
def visit_nonlocal(self, node, parent):
    """Visit a Nonlocal AST node and return a new astroid Nonlocal node."""
    lineno = getattr(node, "lineno", None)
    col_offset = getattr(node, "col_offset", None)
    return nodes.Nonlocal(node.names, lineno, col_offset, parent)
def _connect_signal(self, index):
    """Create and register the post-save/post-delete signals (plus any
    dependency signals) that keep the given index up to date."""
    post_save_signal = ElasticSignal(index, 'build')
    post_save_signal.connect(post_save, sender=index.object_type)
    self.signals.append(post_save_signal)
    post_delete_signal = ElasticSignal(index, 'remove_object')
    post_delete_signal.connect(post_delete, sender=index.object_type)
    self.signals.append(post_delete_signal)
    for dependency in index.get_dependencies():
        # normalize raw many-to-many fields into Dependency objects
        if isinstance(dependency, (models.ManyToManyField, ManyToManyDescriptor)):
            dependency = ManyToManyDependency(dependency)
        elif not isinstance(dependency, Dependency):
            raise TypeError("Unsupported dependency type: {}".format(repr(dependency)))
        # a dependency may register several signals
        signal = dependency.connect(index)
        self.signals.extend(signal)
def memory_enumerator(buffer_, *args, **kwargs):
    """Return an enumerator that knows how to read raw archive data held
    in memory (*buffer_*)."""
    _LOGGER.debug("Enumerating through (%d) bytes of archive data.",
                  len(buffer_))
    def opener(archive_res):
        # deferred: invoked once libarchive hands us an archive resource
        _LOGGER.debug("Opening from (%d) bytes (memory_enumerator).",
                      len(buffer_))
        _archive_read_open_memory(archive_res, buffer_)
    if 'entry_cls' not in kwargs:
        kwargs['entry_cls'] = _ArchiveEntryItReadable
    return _enumerator(opener,
                       *args,
                       **kwargs)
def use_plenary_sequence_rule_enabler_view(self):
    """Pass through to provider
    SequenceRuleEnablerLookupSession.use_plenary_sequence_rule_enabler_view."""
    self._object_views['sequence_rule_enabler'] = PLENARY
    for session in self._get_provider_sessions():
        # sessions that do not support this view are silently skipped
        try:
            session.use_plenary_sequence_rule_enabler_view()
        except AttributeError:
            pass
def SplitV(a, splits, axis):
    """Split op with multiple split sizes along *axis*."""
    boundaries = np.cumsum(splits)
    return tuple(np.split(np.copy(a), boundaries, axis=axis))
def fields_with_locales(self):
    """Group serialized field values by real field id, then by locale."""
    grouped = {}
    for locale, fields in self._fields.items():
        for field_key, value in fields.items():
            field_id = self._real_field_id_for(field_key)
            grouped.setdefault(field_id, {})[locale] = self._serialize_value(value)
    return grouped
def log(self, msg, level="info"):
    """Log *msg* to syslog, or to the user-configured log file.

    Container messages are pretty-printed; multi-line messages start on
    their own line in the file.
    """
    if not self.config.get("log_file"):
        # map symbolic level names to syslog constants when known
        level = LOG_LEVELS.get(level, level)
        syslog(level, u"{}".format(msg))
    else:
        with open(self.config["log_file"], "ab") as f:
            log_time = time.strftime("%Y-%m-%d %H:%M:%S")
            if isinstance(msg, (dict, list, set, tuple)):
                msg = pformat(msg)
            if "\n" in msg:
                msg = u"\n" + msg
            out = u"{} {} {}\n".format(log_time, level.upper(), msg)
            try:
                f.write(out.encode("utf-8"))
            except (AttributeError, UnicodeDecodeError):
                # presumably a Python 2 fallback for byte strings — confirm
                f.write(out)
def monthly(date=None):
    """Return the first day of the month for *date* (default: today).

    Bug fixed: the original default ``date=datetime.date.today()`` was
    evaluated once at import time, so long-running processes got a stale
    "today".
    """
    if date is None:
        date = datetime.date.today()
    return datetime.date(date.year, date.month, 1)
def _clean(self, t, capitalize=None):
if self._from_bibtex:
t = latex_to_unicode(t, capitalize=capitalize)
t = ' '.join([el.rstrip('.') if el.count('.') == 1 else el for el in t.split()])
return t | Convert to normalized unicode and strip trailing full stops. |
def connect(self, name, callback):
    """Attach *callback* to the named hook; error if already connected."""
    if self._callbacks[name]:
        raise HookAlreadyConnectedError('Callback hook already connected.')
    self._callbacks[name] = callback
def remove_cts_record(file_name, map, position):
    """Remove the CTS record on MAP at POSITION and save the database back
    to *file_name*."""
    database = XonoticDB.load_path(file_name)
    database.remove_cts_record(map, position)
    database.save(file_name)
def walk_subclasses(root):
    """Yield every (transitive) subclass of *root*.

    The input class itself is not yielded; `type` is skipped.
    """
    pending = [root]
    seen = set()
    while pending:
        current = pending.pop()
        if current is type or current in seen:
            continue
        pending.extend(current.__subclasses__())
        seen.add(current)
        if current is not root:
            yield current
def alphafilter(request, queryset, template):
    """Render *template* with the queryset filtered by the first
    '__istartswith' GET parameter found (alphabetical index filter).

    The unfiltered queryset is also passed to the template context.
    """
    qs_filter = {}
    for key in list(request.GET.keys()):
        if '__istartswith' in key:
            qs_filter[str(key)] = request.GET[key]
            # only the first matching parameter is honoured
            break
    return render_to_response(
        template,
        {'objects': queryset.filter(**qs_filter),
         'unfiltered_objects': queryset},
        context_instance=RequestContext(request)
    )
def _auth():
    """Return (and lazily cache in __context__) the auth object used to
    talk to the salt master.

    NOTE(review): if authentication fails the 'auth' key is never set and
    the final lookup raises KeyError — confirm intended.  The error message
    is also missing a space between its two sentences.
    """
    if 'auth' not in __context__:
        try:
            __context__['auth'] = salt.crypt.SAuth(__opts__)
        except SaltClientError:
            log.error('Could not authenticate with master.'
                      'Mine data will not be transmitted.')
    return __context__['auth']
def _define_format(self, occur):
    """Return the serialization format string for a define node.

    *occur* is unused here; presumably kept for signature parity with
    sibling format methods — TODO confirm.
    """
    if hasattr(self, "default"):
        self.attr["nma:default"] = self.default
    middle = self._chorder() if self.rng_children() else "<empty/>%s"
    # escape literal % so later %-formatting of the template is safe
    return (self.start_tag() + self.serialize_annots().replace("%", "%%")
            + middle + self.end_tag())
def pop_viewport(self):
    """Pop and return the top viewport, restoring the previous one (or the
    full physical size when the stack becomes empty)."""
    popped = self._vp_stack.pop()
    if self._vp_stack:
        self.context.set_viewport(*self._vp_stack[-1])
    else:
        self.context.set_viewport(0, 0, *self.physical_size)
    self._update_transforms()
    return popped
def add_stale_devices_callback(self, callback):
    """Register *callback* to be invoked when stale devices exist."""
    self._stale_devices_callbacks.append(callback)
    _LOGGER.debug('Added stale devices callback to %s', callback)
def _random_candidates(self, n=1000):
    """Generate an (n, num_tunables) matrix of random parameter values,
    one column per tunable, sampled uniformly within each range."""
    candidates = np.zeros((n, len(self.tunables)))
    for i, tunable in enumerate(self.tunables):
        # each tunable is a (name, param) pair
        param = tunable[1]
        lo, hi = param.range
        if param.is_integer:
            # +1 makes the integer upper bound inclusive
            column = np.random.randint(lo, hi + 1, size=n)
        else:
            diff = hi - lo
            column = lo + diff * np.random.rand(n)
        candidates[:, i] = column
    return candidates
def add_feature(self, pr_name, pr_value):
    """Add or update a node's feature (attribute) and record its name."""
    self.features.add(pr_name)
    setattr(self, pr_name, pr_value)
def psf_convolution(self, grid, grid_scale, psf_subgrid=False, subgrid_res=1):
    """Convolve a pixel grid with this object's PSF.

    'NONE' returns the grid unchanged; 'GAUSSIAN' applies a gaussian filter
    with sigma scaled into pixel units; 'PIXEL' convolves with a kernel,
    optionally evaluated on a subgrid.  Raises ValueError for other types.
    """
    psf_type = self.psf_type
    if psf_type == 'NONE':
        return grid
    elif psf_type == 'GAUSSIAN':
        # convert sigma from physical units into pixel units
        sigma = self._sigma_gaussian/grid_scale
        img_conv = ndimage.filters.gaussian_filter(grid, sigma, mode='nearest', truncate=self._truncation)
        return img_conv
    elif psf_type == 'PIXEL':
        if psf_subgrid:
            kernel = self.subgrid_pixel_kernel(subgrid_res)
        else:
            kernel = self._kernel_pixel
        img_conv1 = signal.fftconvolve(grid, kernel, mode='same')
        return img_conv1
    else:
        raise ValueError('PSF type %s not valid!' % psf_type)
def trace2array(self, sl):
    """Return an array with the traces of all stochastics, sliced by *sl*
    and stacked horizontally.  Raises AttributeError on a missing trace."""
    collected = []
    for stochastic in self.stochastics:
        trace_values = stochastic.trace.gettrace(slicing=sl)
        if trace_values is None:
            raise AttributeError
        collected.append(trace_values)
    return np.hstack(collected)
def cache_param(self, value):
    """Return a troposphere Ref to *value* cached as a template parameter.

    Parameter logical names are single letters 'A', 'B', ... assigned in
    insertion order.  NOTE(review): past 26 cached values the name runs
    beyond 'Z' — confirm that cannot happen here.
    """
    if value not in self.cf_parameters:
        keyname = chr(ord('A') + len(self.cf_parameters))
        param = self.cf_template.add_parameter(troposphere.Parameter(
            keyname, Type="String", Default=value, tags=self.tags
        ))
        self.cf_parameters[value] = param
    return troposphere.Ref(self.cf_parameters[value])
def _read_files(files):
    """Read each listed file, pairing its parsed title with its contents."""
    return [(_parse_title(file_path), _read_file(file_path))
            for file_path in files]
def _parse_text(self, el, refs=None, specials=None, element_cls=Paragraph):
    """Like _parse_element, but always collapse the result into a single
    element (joining sub-elements with single spaces)."""
    if specials is None:
        specials = {}
    if refs is None:
        refs = {}
    elements = self._parse_element_r(el, specials=specials, refs=refs, element_cls=element_cls)
    if not elements:
        # guarantee the one-element contract even for empty input
        return [element_cls('')]
    element = elements[0]
    for next_element in elements[1:]:
        # elements support '+' concatenation; join with a space element
        element += element_cls(' ') + next_element
    return [element]
def remove_file(paths):
    """Remove each existing file among *paths* (scalar or list)."""
    for candidate in force_list(paths):
        if os.path.exists(candidate):
            os.remove(candidate)
def verify_retry_data(self, retry_data):
    """Validate a state retry configuration dict.

    Missing or wrongly-typed values fall back to the documented defaults;
    a non-dict input yields the full default configuration.
    """
    retry_defaults = {
        'until': True,
        'attempts': 2,
        'splay': 0,
        'interval': 30,
    }
    expected_data = {
        'until': bool,
        'attempts': int,
        'interval': int,
        'splay': int,
    }
    if not isinstance(retry_data, dict):
        log.warning(('State is set to retry, but a valid dict for retry '
                     'configuration was not found. Using retry defaults'))
        return retry_defaults
    validated_retry_data = {}
    for expected_key, value_type in six.iteritems(expected_data):
        if expected_key not in retry_data:
            validated_retry_data[expected_key] = retry_defaults[expected_key]
        elif isinstance(retry_data[expected_key], value_type):
            validated_retry_data[expected_key] = retry_data[expected_key]
        else:
            log.warning(
                'An invalid value was passed for the retry %s, '
                'using default value \'%s\'',
                expected_key, retry_defaults[expected_key]
            )
            validated_retry_data[expected_key] = retry_defaults[expected_key]
    return validated_retry_data
def detach(self):
    """Returns a new NDArray, detached from the current computation graph."""
    from . import _ndarray_cls
    hdl = NDArrayHandle()
    # MXNDArrayDetach fills `hdl` with the handle of the detached array
    check_call(_LIB.MXNDArrayDetach(self.handle, ctypes.byref(hdl)))
    return _ndarray_cls(hdl)
def _spec_to_globs(address_mapper, specs):
    """Given a Specs object, return a PathGlobs for the build files it matches."""
    patterns = {pattern
                for spec in specs
                for pattern in spec.make_glob_patterns(address_mapper)}
    return PathGlobs(include=patterns, exclude=address_mapper.build_ignore_patterns)
def complete(self):
    """Return whether this manager has reached a "completed" state.

    False when there are no techniques, or none of them override
    complete(); otherwise the configured completion_mode combines the
    overriding techniques' results.
    """
    if not self._techniques:
        return False
    overriders = [t for t in self._techniques if t._is_overriden('complete')]
    if not overriders:
        return False
    return self.completion_mode(t.complete(self) for t in overriders)
def cgi_parameter_exists(form: cgi.FieldStorage, key: str) -> bool:
    """Does the CGI form contain *key* (with a non-None string value)?"""
    return get_cgi_parameter_str(form, key) is not None
def _send(self, message):
    """Directly invoke the lambda function for this task with *message*.

    Raises AsyncException when the JSON payload exceeds the Lambda async
    limit; sets self.sent True when the invocation is accepted (202).
    """
    message['command'] = 'zappa.asynchronous.route_lambda_task'
    payload = json.dumps(message).encode('utf-8')
    if len(payload) > LAMBDA_ASYNC_PAYLOAD_LIMIT:
        raise AsyncException("Payload too large for async Lambda call")
    # 'Event' = asynchronous invocation; Lambda replies 202 on acceptance
    self.response = self.client.invoke(
        FunctionName=self.lambda_function_name,
        InvocationType='Event',
        Payload=payload
    )
    self.sent = (self.response.get('StatusCode', 0) == 202)
def create_page(slug, post_data):
    """Create a wiki page under *slug*.

    Returns False when the slug already exists or the stripped title is
    shorter than 2 characters.
    """
    logger.info('Call create Page')
    if MWiki.get_by_uid(slug):
        return False
    title = post_data['title'].strip()
    if len(title) < 2:
        return False
    # NOTE(review): the double-underscore name is mangled if this function
    # sits inside a class — confirm MWiki.__create_rec is reachable here
    return MWiki.__create_rec(slug, '2', post_data=post_data)
def _guess_available_methods(self):
    """Guess which HTTP methods the subclass implements by comparing its
    API_<METHOD> attributes against the APIPage base class."""
    subclass = type(self)
    verbs = ("GET", "POST", "PUT", "DELETE", "PATCH", "HEAD", "OPTIONS")
    return [verb for verb in verbs
            if getattr(subclass, "API_{}".format(verb))
            != getattr(APIPage, "API_{}".format(verb))]
def add_synonym(self, syn):
    """Add a synonym entry to the metadata of the synonym's node."""
    node = self.node(syn.class_id)
    meta = node.setdefault('meta', {})
    meta.setdefault('synonyms', []).append(syn.as_dict())
def as_dash_app(self):
    """Return a (cached) DjangoDash instance of the dash application."""
    # renamed the mistyped local "dateless_dash_app" to a meaningful name
    stateless_app = getattr(self, '_stateless_dash_app_instance', None)
    if not stateless_app:
        # lazily resolve and cache the stateless app on first use
        stateless_app = get_stateless_by_name(self.app_name)
        setattr(self, '_stateless_dash_app_instance', stateless_app)
    return stateless_app
def send_request(self, method, url, headers=None,
                 data=None, is_retry=False):
    """Send an authenticated request to Abode.

    Logs in lazily when no token is held, retries once with a fresh token
    after a failure, and raises AbodeException when the retry also fails.
    """
    if not self._token:
        self.login()
    if not headers:
        headers = {}
    headers['Authorization'] = 'Bearer ' + self._oauth_token
    headers['ABODE-API-KEY'] = self._token
    try:
        # method names the session attribute, e.g. 'get' / 'post'
        response = getattr(self._session, method)(
            url, headers=headers, json=data)
        if response and response.status_code < 400:
            return response
    except RequestException:
        _LOGGER.info("Abode connection reset...")
    if not is_retry:
        # drop the token so the retry performs a fresh login
        self._token = None
        return self.send_request(method, url, headers, data, True)
    raise AbodeException((ERROR.REQUEST))
def view_creatr(filename):
    """Create an empty HTML view file named *filename* in public/templates.

    Aborts with an error message when not run inside a bast app.
    """
    if not check():
        click.echo(Fore.RED + 'ERROR: Ensure you are in a bast app to run the create:view command')
        return
    path = os.path.abspath('.') + '/public/templates'
    if not os.path.exists(path):
        os.makedirs(path)
    filename_ = str(filename + ".html").lower()
    # fixed: use a context manager so the handle is closed on any error
    with open(path + "/" + filename_, 'w+') as view_file:
        view_file.write("")
    # fixed: missing space before "created" in the success message
    click.echo(Fore.GREEN + "View file " + filename_ + " created in public/template folder")
def AddHashEntry(self, hash_entry, timestamp):
    """Register a hash entry at a given timestamp.

    Raises db.Error on a duplicate write for the same timestamp; creates a
    new PathInfo when none exists at that timestamp, otherwise updates the
    existing one in place.
    """
    if timestamp in self._hash_entries:
        message = ("Duplicated hash entry write for path '%s' of type '%s' at "
                   "timestamp '%s'. Old: %s. New: %s.")
        message %= ("/".join(self._components), self._path_type, timestamp,
                    self._hash_entries[timestamp], hash_entry)
        raise db.Error(message)
    if timestamp not in self._path_infos:
        path_info = rdf_objects.PathInfo(
            path_type=self._path_type,
            components=self._components,
            timestamp=timestamp,
            hash_entry=hash_entry)
        # presumably AddPathInfo also records the entry in
        # self._hash_entries — confirm
        self.AddPathInfo(path_info)
    else:
        self._path_infos[timestamp].hash_entry = hash_entry
def gen_secret(length=64):
    """Generate a random alphanumeric secret of the given length.

    Fixed: the original constructed a new random.SystemRandom() for every
    character; one instance suffices (it draws from os.urandom each call).
    """
    rng = random.SystemRandom()
    charset = string.ascii_letters + string.digits
    return ''.join(rng.choice(charset) for _ in range(length))
def move(self, source, destination):
    """Move *source* to *destination* with unix ``mv`` semantics: copy,
    then remove the original (directories recursively)."""
    is_file = source.isfile()
    if is_file:
        source.copy(destination)
    else:
        source.copy(destination, recursive=True)
    if is_file:
        source.remove()
    else:
        source.remove('r')
def _write(self, item, labels, features):
    """Write a single item (with its labels and features) to the owned file,
    appending to the configured group."""
    wrapped = Data([item], [labels], [features])
    self._writer.write(wrapped, self.groupname, append=True)
def find_learning_rate_from_args(args: argparse.Namespace) -> None:
    """Start the learning-rate finder using parsed CLI *args*."""
    params = Params.from_file(args.param_path, args.overrides)
    find_learning_rate_model(params, args.serialization_dir,
                             start_lr=args.start_lr,
                             end_lr=args.end_lr,
                             num_batches=args.num_batches,
                             linear_steps=args.linear,
                             stopping_factor=args.stopping_factor,
                             force=args.force)
def tamper_file_at(path, pos=0, replace_str=None):
    """Overwrite the file at *path* at byte offset *pos* with *replace_str*.

    A negative *pos* counts back from the end of the file.  Returns True on
    success, False when the file could not be opened or written.

    Bug fixed: the file is opened in binary mode, so the replacement must
    be bytes — the original wrote a ``str`` (default "\\x00"), which raises
    TypeError on Python 3.  str inputs are now encoded.
    """
    if not replace_str:
        replace_str = b"\x00"
    elif isinstance(replace_str, str):
        replace_str = replace_str.encode()
    try:
        # `with` replaces the original's manual close-in-finally dance
        with open(path, "r+b") as fh:
            if pos < 0:
                pos += os.fstat(fh.fileno()).st_size
            fh.seek(pos)
            fh.write(replace_str)
    except IOError:
        return False
    return True
def stack_files(files, hemi, source, target):
    """Stack a list of CSV distance files and write them as one CSV in the
    current working directory; returns the output file path.

    Fixed: removed the unused ``import csv``; joined the path without
    string concatenation quirks.
    """
    import os
    import numpy as np
    out_name = "sdist_%s_%s_%s.csv" % (hemi, source, target)
    out_path = os.path.join(os.getcwd(), out_name)
    stacked = np.array([np.genfromtxt(dist_file, delimiter=',')
                        for dist_file in files])
    # write as a flat comma-separated text file, as the original did
    stacked.tofile(out_path, ",")
    return out_path
def lint():
    """Run flake8 over the current working tree; exit non-zero when lint
    checks fail."""
    path = os.path.realpath(os.getcwd())
    cmd = 'flake8 %s' % path
    opt = ''
    print(">>> Linting codebase with the following command: %s %s" % (cmd, opt))
    try:
        # fixed: with shell=True the command must be a single string; the
        # original passed a list, which hands `opt` to the shell itself
        # (and breaks entirely on Windows)
        return_code = call('%s %s' % (cmd, opt), shell=True)
        if return_code < 0:
            print(">>> Terminated by signal", -return_code, file=sys.stderr)
        elif return_code != 0:
            sys.exit('>>> Lint checks failed')
        else:
            print(">>> Lint checks passed", return_code, file=sys.stderr)
    except OSError as e:
        print(">>> Execution failed:", e, file=sys.stderr)
def load_scripts():
    """Import every module named in REGISTERED_SCRIPTS into
    registered_modules, logging and skipping any that fail to import."""
    # directory containing this very module, added to sys.path so the
    # relative imports below resolve
    scrypture_dir = os.path.realpath(
        os.path.abspath(
            os.path.split(
                inspect.getfile( inspect.currentframe() ))[0]))
    if scrypture_dir not in sys.path:
        sys.path.insert(0, scrypture_dir)
    registered_scripts = app.config['REGISTERED_SCRIPTS']
    for script in registered_scripts:
        try:
            s = import_module('.'+script,
                              package=os.path.split(app.config['SCRIPTS_DIR'])[-1])
            s.package = s.__name__.split('.')[1]
            script_name = script.split('.')[-1]
            registered_modules[script_name] = s
        except Exception as e:
            # NOTE(review): e.message is Python 2-only; on Python 3 this
            # raises AttributeError inside the handler — confirm target
            logging.warning('Could not import ' + \
                            str(script)+': '+str(e.message))
            logging.debug(str(traceback.format_exc()))
            continue
def max_pool(x_input, pool_size):
    """Downsample *x_input* with max-pooling over pool_size x pool_size
    windows (stride = pool_size, SAME padding)."""
    window = [1, pool_size, pool_size, 1]
    return tf.nn.max_pool(x_input, ksize=window, strides=window, padding='SAME')
def draw(self):
    """Draw the Mesh if visible: lazily build the VAO on first draw,
    refresh dynamic VBOs, bind textures, send uniforms, then issue the
    draw call and unbind the textures."""
    if not self.vao:
        # first draw: create and populate the vertex array object
        self.vao = VAO(indices=self.array_indices)
        self._fill_vao()
    if self.visible:
        if self.dynamic:
            # push updated vertex data for dynamic meshes
            for vbo in self.vbos:
                vbo._buffer_subdata()
        if self.drawmode == gl.GL_POINTS:
            gl.glPointSize(self.point_size)
        for texture in self.textures:
            texture.bind()
        with self.vao as vao:
            self.uniforms.send()
            vao.draw(mode=self.drawmode)
        for texture in self.textures:
            texture.unbind()
def makeGly(segID, N, CA, C, O, geo):
    """Create a Glycine residue from the given backbone atoms.

    *geo* is accepted for signature parity with the other residue builders
    but is unused (glycine has no side chain to place).
    """
    res = Residue((' ', segID, ' '), "GLY", ' ')
    for atom in (N, CA, C, O):
        res.add(atom)
    return res
def play(self, target=None, index=None, choose=None):
    """Queue a Play action on the card.

    Validates choose-one choices, playability and targeting before handing
    off to game.play_card; returns self so calls can be chained.  Raises
    InvalidAction on any rule violation.
    """
    if choose:
        if self.must_choose_one:
            # resolve the chosen card from this card's choose-one options
            choose = card = self.choose_cards.filter(id=choose)[0]
            self.log("%r: choosing %r", self, choose)
        else:
            raise InvalidAction("%r cannot be played with choice %r" % (self, choose))
    else:
        if self.must_choose_one:
            raise InvalidAction("%r requires a choice (one of %r)" % (self, self.choose_cards))
        card = self
    if not self.is_playable():
        raise InvalidAction("%r isn't playable." % (self))
    if card.requires_target():
        if not target:
            raise InvalidAction("%r requires a target to play." % (self))
        elif target not in self.play_targets:
            raise InvalidAction("%r is not a valid target for %r." % (target, self))
    elif target:
        # harmless: a target was supplied for a card that needs none
        self.logger.warning("%r does not require a target, ignoring target %r", self, target)
    self.game.play_card(self, target, index, choose)
    return self
def compute_file_hashes(file_path, hashes=frozenset(['md5'])):
    """Digest the file at *file_path* with the requested hash algorithms.

    Returns None (after logging a warning) when the file does not exist;
    I/O errors during hashing are logged and re-raised.
    """
    if not os.path.exists(file_path):
        logging.warning("%s does not exist" % file_path)
        return None
    logging.debug("Computing [%s] hashes for file [%s]" % (','.join(hashes), file_path))
    try:
        with open(file_path, 'rb') as fd:
            return compute_hashes(fd, hashes)
    except (IOError, OSError) as e:
        logging.warning("Error while calculating digest(s) for file %s: %s" % (file_path, str(e)))
        raise
def all_names(self):
    """The variable names for each of the arrays in this list; nested
    ArrayLists contribute their own all_names recursively."""
    names = []
    for arr in self:
        if isinstance(arr, ArrayList):
            names.append(arr.all_names)
        else:
            names.append(_get_variable_names(arr))
    return names
def describe_snitch(self, ):
    """Return a Deferred that fires with the snitch used by this cluster.

    A pending request is registered under a fresh sequence id before the
    wire request is sent.
    """
    self._seqid += 1
    d = self._reqs[self._seqid] = defer.Deferred()
    self.send_describe_snitch()
    return d
def run_job(self, name):
    """Run the named schedule job immediately.

    Accepts 'function', 'func' or 'fun' keys (in that order of precedence)
    naming the callable(s) to execute.
    """
    data = self._get_schedule().get(name, {})
    if 'function' in data:
        func = data['function']
    elif 'func' in data:
        func = data['func']
    elif 'fun' in data:
        func = data['fun']
    else:
        func = None
    if not isinstance(func, list):
        func = [func]
    for _func in func:
        if _func not in self.functions:
            # NOTE(review): an invalid function is only logged — the job is
            # still executed below; confirm this is intentional
            log.error(
                'Invalid function: %s in scheduled job %s.',
                _func, name
            )
        if 'name' not in data:
            data['name'] = name
        log.info('Running Job: %s', name)
        # 'run' may be set False in the schedule data to suppress execution
        run = data.get('run', True)
        if run:
            self._run_job(_func, data)
def _fullqualname_method_py3(obj):
if inspect.isclass(obj.__self__):
cls = obj.__self__.__qualname__
else:
cls = obj.__self__.__class__.__qualname__
return obj.__self__.__module__ + '.' + cls + '.' + obj.__name__ | Fully qualified name for 'method' objects in Python 3. |
async def log(
        self,
        date: datetime.date = None,
        days: int = None,
        details: bool = False) -> list:
    """Get watering log information, optionally detailed.

    When both *date* and *days* are given, the query is ranged to *days*
    days starting at *date*; otherwise the full log endpoint is used.
    """
    endpoint = 'watering/log'
    if details:
        endpoint += '/details'
    if date and days:
        endpoint = '{0}/{1}/{2}'.format(
            endpoint, date.strftime('%Y-%m-%d'), days)
    data = await self._request('get', endpoint)
    return data['waterLog']['days']
def read_cluster(data, id=1):
    """Read a JSON cluster record and populate a ``cluster`` instance.

    Bug fixes: the *id* argument was ignored (a literal ``1`` was passed
    through), and the populated cluster was never returned.
    NOTE(review): written for Python 2 (``dict.keys()[0]``); on Python 3
    use ``next(iter(s))``.
    """
    cl = cluster(id)
    names = [s.keys()[0] for s in data['seqs']]
    cl.add_id_member(names, id)
    freq = defaultdict()
    [freq.update({s.keys()[0]: s.values()[0]}) for s in data['freq']]
    # TODO(review): `freq` is computed but never attached to the cluster —
    # confirm the intended use before removing it
    return cl
def _is_inline(button):
    """Returns ``True`` if the button belongs to an inline keyboard."""
    inline_button_types = (
        types.KeyboardButtonCallback,
        types.KeyboardButtonSwitchInline,
        types.KeyboardButtonUrl,
    )
    return isinstance(button, inline_button_types)
def ddl(self, dialect=None, creates=True, drops=True):
    """Returns SQL to define the table (and, recursively, its children).

    DROP statements come first when *drops* is set; CREATE statements
    (followed by wrapped column comments) when *creates* is set.
    """
    dialect = self._dialect(dialect)
    creator = CreateTable(self.table).compile(mock_engines[dialect])
    # strip blank lines from the compiled CREATE statement
    creator = "\n".join(l for l in str(creator).splitlines() if l.strip())
    comments = "\n\n".join(self._comment_wrapper.fill("in %s: %s" %
                                                      (col, self.comments[col]))
                           for col in self.comments)
    result = []
    if drops:
        result.append(self._dropper(dialect) + ';')
    if creates:
        result.append("%s;\n%s" % (creator, comments))
    for child in self.children.values():
        result.append(child.ddl(dialect=dialect, creates=creates,
                                drops=drops))
    return '\n\n'.join(result)
def export_gpg_key(key):
    """Export a GPG key and return it base64-encoded.

    Raises CryptoritoError when the GPG invocation fails.
    """
    cmd = flatten([gnupg_bin(), gnupg_verbose(), gnupg_home(),
                   "--export", key])
    handle, gpg_stderr = stderr_handle()
    try:
        gpg_proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                    stderr=gpg_stderr)
        output, _err = gpg_proc.communicate()
        if handle:
            handle.close()
        return portable_b64encode(output)
    except subprocess.CalledProcessError as exception:
        # NOTE(review): Popen/communicate never raise CalledProcessError
        # (only check_call/check_output/run(check=True) do) — this handler
        # looks unreachable; confirm
        LOGGER.debug("GPG Command %s", ' '.join(exception.cmd))
        LOGGER.debug("GPG Output %s", exception.output)
        raise CryptoritoError('GPG encryption error')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.