code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def get_status(self, instance):
    """Retrieve the status of a field from the cache.

    Statuses in state 'complete' or 'error' are evicted on read, so they
    will not be returned by subsequent calls.
    """
    key, current = self._get_status(instance)
    terminal = current['state'] in ('complete', 'error')
    if terminal:
        cache.delete(key)
    return current
def get_timestamp_expression(self, time_grain):
label = utils.DTTM_ALIAS
db = self.table.database
pdf = self.python_date_format
is_epoch = pdf in ('epoch_s', 'epoch_ms')
if not self.expression and not time_grain and not is_epoch:
sqla_col = column(self.column_name, ty... | Getting the time component of the query |
def expire_hit(self, hit_id):
    """Expire a HIT, moving it to "Reviewable" status so it can be deleted.

    :raises MTurkServiceException: when the underlying API call fails.
    :return: True on success.
    """
    try:
        self.mturk.update_expiration_for_hit(HITId=hit_id, ExpireAt=0)
    except Exception as ex:
        message = "Failed to expire HIT {}: {}".format(hit_id, str(ex))
        raise MTurkServiceException(message)
    return True
def plot(self, plot_grouped=False):
    """Plot the cumulative number of detections in time.

    :param plot_grouped: forwarded to ``cumulative_detections`` —
        presumably groups detections by template; confirm against that
        function's documentation.
    """
    cumulative_detections(detections=self.detections,
                          plot_grouped=plot_grouped)
def dist_sq(self, other=None):
    """Return the squared length of this vector, or of ``self - other``.

    Squared distances allow fast length comparison without the sqrt.

    :param other: optional vector to subtract before measuring; ``None``
        (the default) measures ``self`` itself.  The test is ``is not None``
        rather than truthiness, so a falsy (e.g. zero) operand is still
        subtracted instead of being silently ignored.
    :return: sum of the squared components.
    """
    v = self - other if other is not None else self
    return sum(a * a for a in v)
def _add_ce_record(self, curr_dr_len, thislen):
    """Add a new length to the Continuation Entry, creating it on demand.

    Parameters:
     curr_dr_len - The current Directory Record length.
     thislen - The new length to add to the Continuation Entry.
    Returns:
     The (possibly grown) Directory Record length.
    """
    entries = self.dr_entries
    if entries.ce_record is None:
        ce = RRCERecord()
        ce.new()
        entries.ce_record = ce
        # A freshly created CE record itself consumes Directory Record space.
        curr_dr_len += RRCERecord.length()
    entries.ce_record.add_record(thislen)
    return curr_dr_len
def _find_image_id(self, image_id):
if not self._images:
connection = self._connect()
self._images = connection.get_all_images()
image_id_cloud = None
for i in self._images:
if i.id == image_id or i.name == image_id:
image_id_cloud = i.id
... | Finds an image id to a given id or name.
:param str image_id: name or id of image
:return: str - identifier of image |
def new_from_url(cls, url, verify=True):
    """Construct a new WebPage for ``url``, fetching the HTML via ``requests``.

    Parameters
    ----------
    url : str
        Address to fetch.
    verify : bool
        Passed through to ``requests.get`` for TLS verification.
    """
    resp = requests.get(url, verify=verify, timeout=2.5)
    return cls.new_from_response(resp)
def set_margins(self, top=None, bottom=None):
if (top is None or top == 0) and bottom is None:
self.margins = None
return
margins = self.margins or Margins(0, self.lines - 1)
if top is None:
top = margins.top
else:
top = max(0, min(top - 1,... | Select top and bottom margins for the scrolling region.
:param int top: the smallest line number that is scrolled.
:param int bottom: the biggest line number that is scrolled. |
def _AbortJoin(self, timeout=None):
for pid, process in iter(self._processes_per_pid.items()):
logger.debug('Waiting for process: {0:s} (PID: {1:d}).'.format(
process.name, pid))
process.join(timeout=timeout)
if not process.is_alive():
logger.debug('Process {0:s} (PID: {1:d}) sto... | Aborts all registered processes by joining with the parent process.
Args:
timeout (int): number of seconds to wait for processes to join, where
None represents no timeout. |
def _clone(self):
    """Duplicate the Key.

    Simple attributes are recreated; the long-lived ``parent`` is shared
    with the clone rather than copied.

    :rtype: :class:`google.cloud.datastore.key.Key`
    :returns: A new ``Key`` instance with the same data as the current one.
    """
    klass = self.__class__
    duplicate = klass(
        *self.flat_path,
        project=self.project,
        namespace=self.namespace,
    )
    duplicate._parent = self._parent
    return duplicate
def set_ownership(self):
    """Recursively set the parent, section and doc on all children."""
    assert self.section is not None
    for child in self.children:
        child.parent = self
        child._section = self.section
        child.doc = self.doc
        child.set_ownership()
def get_version_info():
    """Return astropy and photutils versions.

    Returns
    -------
    result : str
        The astropy and photutils versions.
    """
    from astropy import __version__ as astropy_version
    from photutils import __version__ as photutils_version
    return 'astropy: {0}, photutils: {1}'.format(astropy_version,
                                                 photutils_version)
def is_rotation(self, other):
    """Determine whether two sequences are identical up to rotation.

    :param other: The sequence to check for rotational equality.
    :type other: coral.sequence._sequence.Sequence
    """
    if len(self) != len(other):
        return False
    return any(self.rotate(i) == other for i in range(len(self)))
def evaluate_binop_comparison(self, operation, left, right, **kwargs):
if not operation in self.binops_comparison:
raise ValueError("Invalid comparison binary operation '{}'".format(operation))
if left is None or right is None:
return None
if not isinstance(left, (list, L... | Evaluate given comparison binary operation with given operands. |
def is_locked(self, key):
    """Check whether the lock for the specified key is held.

    **Warning: This method uses __hash__ and __eq__ methods of binary form
    of the key, not the actual implementations of __hash__ and __eq__
    defined in key's class.**

    :param key: the key whose lock state is queried (must not be None).
    :return: ``true`` if the lock is acquired, ``false`` otherwise.
    """
    check_not_none(key, "key can't be None")
    encoded = self._to_data(key)
    return self._encode_invoke_on_key(map_is_locked_codec, encoded, key=encoded)
def _read_config(self):
try:
self.config = self.componentmodel.find_one(
{'name': self.uniquename})
except ServerSelectionTimeoutError:
self.log("No database access! Check if mongodb is running "
"correctly.", lvl=critical)
if self.con... | Read this component's configuration from the database |
def repr2_json(obj_, **kwargs):
import utool as ut
kwargs['trailing_sep'] = False
json_str = ut.repr2(obj_, **kwargs)
json_str = str(json_str.replace('\'', '"'))
json_str = json_str.replace('(', '[')
json_str = json_str.replace(')', ']')
json_str = json_str.replace('None', 'null')
return... | hack for json reprs |
def rejoin(self, group_id):
    """Rejoin a former group.

    :param str group_id: the group_id of a group
    :return: the group
    :rtype: :class:`~groupy.api.groups.Group`
    """
    endpoint = utils.urljoin(self.url, 'join')
    response = self.session.post(endpoint, json={'group_id': group_id})
    return Group(self, **response.data)
def _repeat_length(cls, part):
repeat_len = len(part)
if repeat_len == 0:
return repeat_len
first_digit = part[0]
limit = repeat_len // 2 + 1
indices = (i for i in range(1, limit) if part[i] == first_digit)
for index in indices:
(quot, rem) = divmo... | The length of the repeated portions of ``part``.
:param part: a number
:type part: list of int
:returns: the first index at which part repeats
:rtype: int
If part does not repeat, result is the length of part.
Complexity: O(len(part)^2) |
def _updateTargetFromNode(self):
if not self.autoRangeCti or not self.autoRangeCti.configValue:
padding = 0
elif self.paddingCti.configValue == -1:
padding = None
else:
padding = self.paddingCti.configValue / 100
targetRange = self.calculateRange()
... | Applies the configuration to the target axis. |
def get_archive_name(self):
name = self.get_local_name().split('.')[0]
case = self.case_id
label = self.commons['cmdlineopts'].label
date = ''
rand = ''.join(random.choice(string.ascii_lowercase) for x in range(7))
if self.name_pattern == 'legacy':
nstr = "sos... | This function should return the filename of the archive without the
extension.
This uses the policy's name_pattern attribute to determine the name.
There are two pre-defined naming patterns - 'legacy' and 'friendly'
that give names like the following:
legacy - 'sosreport-tux.12... |
def get_health(self, consumers=2, messages=100):
    """Return whether the transport & Redis connections report healthy."""
    payload = json.dumps({'consumers': consumers, 'messages': messages})
    try:
        self._request('GET', '/health', data=payload)
    except SensuAPIException:
        return False
    return True
def _sanitizer(self, obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
if hasattr(obj, "to_dict"):
return obj.to_dict()
return obj | Sanitizer method that will be passed to json.dumps. |
def baseimage(self, new_image):
    """Replace the image of the final-stage FROM instruction."""
    stages = self.parent_images or [None]
    stages[-1] = new_image
    self.parent_images = stages
def lock(self) -> asyncio.Lock:
    """Return the lock for this request, creating it on first access."""
    store = self.request.custom_content
    if self.lock_key not in store:
        store[self.lock_key] = asyncio.Lock()
    return store[self.lock_key]
def hacking_no_author_tags(physical_line):
for regex in AUTHOR_TAG_RE:
if regex.match(physical_line):
physical_line = physical_line.lower()
pos = physical_line.find('moduleauthor')
if pos < 0:
pos = physical_line.find('author')
return (pos, "H1... | Check that no author tags are used.
H105 don't use author tags |
def stop(self) -> None:
    """Stop listening for new connections.

    Requests currently in progress may still continue after the server
    is stopped; calling stop() more than once is a no-op.
    """
    if self._stopped:
        return
    self._stopped = True
    for fd, sock in self._sockets.items():
        assert sock.fileno() == fd
        remove_handler = self._handlers.pop(fd)
        remove_handler()
        sock.close()
def filter_roidb(self):
    """Drop roidb entries that have no usable ground-truth rois."""
    before = len(self._roidb)
    self._roidb = [rec for rec in self._roidb if len(rec['gt_classes'])]
    logger.info('filter roidb: {} -> {}'.format(before, len(self._roidb)))
def get_assessment_bank_assignment_session(self, proxy):
    """Get the ``OsidSession`` for the assessment bank assignment service.

    arg:    proxy (osid.proxy.Proxy): a proxy
    return: (osid.assessment.AssessmentBankAssignmentSession) - an
            ``AssessmentBankAssignmentSession``
    raise:  NullArgument - ``proxy`` is ``null``
    raise:  Unimplemented - bank assignment is not supported
    """
    if not self.supports_assessment_bank_assignment():
        raise errors.Unimplemented()
    return sessions.AssessmentBankAssignmentSession(
        proxy=proxy, runtime=self._runtime)
def length(cls, dataset):
    """Return the total number of samples in the dataset.

    The total is the product of the lengths of all dimension coordinates.
    Uses ``np.prod``: the ``np.product`` alias was deprecated and removed
    in NumPy 2.0.
    """
    dim_lengths = [len(d.points)
                   for d in dataset.data.coords(dim_coords=True)]
    return np.prod(dim_lengths, dtype=np.intp)
def _filter_seqs(fn):
out_file = op.splitext(fn)[0] + "_unique.fa"
idx = 0
if not file_exists(out_file):
with open(out_file, 'w') as out_handle:
with open(fn) as in_handle:
for line in in_handle:
if line.startswith("@") or line.startswith(">"):
... | Convert names of sequences to unique ids |
def save(self, *args, **kwargs):
    """Force Interface.type to 'ethernet' before delegating to the parent save."""
    self.type = INTERFACE_TYPES.get('ethernet')
    super(Ethernet, self).save(*args, **kwargs)
def observed(cls, _func):
def wrapper(*args, **kwargs):
self = args[0]
assert(isinstance(self, Observable))
self._notify_method_before(self, _func.__name__, args, kwargs)
res = _func(*args, **kwargs)
self._notify_method_after(self, _func.__name__, res,... | Decorate methods to be observable. If they are called on an instance
stored in a property, the model will emit before and after
notifications. |
def table(self):
if self._table is None:
column_names = []
for fileid in self.header.file_ids:
for column_name in self.header.column_names:
column_names.append("{}_{}".format(column_name, fileid))
column_names.append("ZP_{}".format(file... | The astropy.table.Table object that will contain the data result
@rtype: Table
@return: data table |
def list_nodes_min(conn=None, call=None):
if call == 'action':
raise SaltCloudSystemExit(
'The list_nodes_min function must be called with -f or --function.'
)
if conn is None:
conn = get_conn()
ret = {}
for node in conn.list_servers(bare=True):
ret[node.name]... | Return a list of VMs with minimal information
CLI Example
.. code-block:: bash
salt-cloud -f list_nodes_min myopenstack |
def import_certificate(self, certificate_data, bay_number=None):
uri = "{}/https/certificaterequest".format(self.data['uri'])
if bay_number:
uri += "?bayNumber=%d" % (bay_number)
headers = {'Content-Type': 'application/json'}
return self._helper.do_put(uri, certificate_data, ... | Imports a signed server certificate into the enclosure.
Args:
certificate_data: Dictionary with Signed certificate and type.
bay_number: OA to which the signed certificate will be imported.
Returns:
Enclosure. |
def move_up(self):
    """Select the button above the currently selected one.

    If no button is there, wrap down to the bottom of the menu and select
    the last button.
    """
    previous = self.current_index
    self.current_index -= 1
    self.__wrap_index()
    self.__handle_selections(previous, self.current_index)
def send_audio(chat_id, audio,
caption=None, duration=None, performer=None, title=None, reply_to_message_id=None, reply_markup=None,
disable_notification=False, parse_mode=None, **kwargs):
files = None
if isinstance(audio, InputFile):
files = [audio]
audio = None
... | Use this method to send audio files, if you want Telegram clients to display them in the music player.
Your audio must be in the .mp3 format. On success, the sent Message is returned. Bots can currently send audio
files of up to 50 MB in size, this limit may be changed in the future.
For backward compatib... |
def create_stream(self, uidList=None):
    """Create a messaging stream for the given users.

    :param uidList: list of user ids to include; defaults to an empty
        list.  The previous mutable default ``uidList=[]`` is a classic
        Python pitfall and is replaced by the ``None`` sentinel.
    :return: ``(status_code, response)`` from the POST call.
    """
    if uidList is None:
        uidList = []
    req_hook = 'pod/v1/im/create'
    req_args = json.dumps(uidList)
    status_code, response = self.__rest__.POST_query(req_hook, req_args)
    self.logger.debug('%s: %s' % (status_code, response))
    return status_code, response
def get_batch_unlock(
end_state: NettingChannelEndState,
) -> Optional[MerkleTreeLeaves]:
if len(end_state.merkletree.layers[LEAVES]) == 0:
return None
lockhashes_to_locks = dict()
lockhashes_to_locks.update({
lock.lockhash: lock
for secrethash, lock in end_state.secrethashes... | Unlock proof for an entire merkle tree of pending locks
The unlock proof contains all the merkle tree data, tightly packed, needed by the token
network contract to verify the secret expiry and calculate the token amounts to transfer. |
def replace(self, year=None, month=None, day=None, hour=None, minute=None,
second=None, microsecond=None, tzinfo=None):
if year is None:
year = self.year
if month is None:
month = self.month
if day is None:
day = self.day
if hour is Non... | Returns a new datetime.datetime or asn1crypto.util.extended_datetime
object with the specified components replaced
:return:
A datetime.datetime or asn1crypto.util.extended_datetime object |
def execution_timer(value):
def _invoke(method, key_arg_position, *args, **kwargs):
start_time = time.time()
result = method(*args, **kwargs)
duration = time.time() - start_time
key = [method.func_name]
if key_arg_position is not None:
key.append(args[key_arg_posi... | The ``execution_timer`` decorator allows for easy instrumentation of
the duration of function calls, using the method name in the key.
The following example would add duration timing with the key ``my_function``
.. code: python
@statsd.execution_timer
def my_function(foo):
pas... |
def Conditional(self, i, j, val, name=''):
    """Get the conditional distribution of the indicated variable.

    Distribution of vs[i], conditioned on vs[j] = val.

    i: index of the variable we want
    j: which variable is conditioned on
    val: the value the jth variable has to have

    Returns: Pmf
    """
    pmf = Pmf(name=name)
    for vs, prob in self.Items():
        if vs[j] == val:
            pmf.Incr(vs[i], prob)
    pmf.Normalize()
    return pmf
def gist(self, id_num):
    """Fetch the gist with the given id number.

    :param int id_num: (required), unique id of the gist
    :returns: :class:`Gist <github3.gists.Gist>` or None
    """
    url = self._build_url('gists', str(id_num))
    json = self._json(self._get(url), 200)
    if json:
        return Gist(json, self)
    return None
def expand(self, other):
if not isinstance(other, Result):
raise ValueError("Provided argument has to be instance of overpy:Result()")
other_collection_map = {Node: other.nodes, Way: other.ways, Relation: other.relations, Area: other.areas}
for element_type, own_collection in self._c... | Add all elements from an other result to the list of elements of this result object.
It is used by the auto resolve feature.
:param other: Expand the result with the elements from this result.
:type other: overpy.Result
:raises ValueError: If provided parameter is not instance of :clas... |
def get_idxs(exprs):
    """Find all sympy.tensor.indexed.Idx instances in ``exprs``.

    Returns them sorted by their string representation.
    """
    found = set()
    for expr in exprs:
        found.update(expr.find(sympy.Idx))
    return sorted(found, key=str)
def ossos_release_with_metadata():
discoveries = []
observations = ossos_discoveries()
for obj in observations:
discov = [n for n in obj[0].mpc_observations if n.discovery.is_discovery][0]
tno = parameters.tno()
tno.dist = obj[1].distance
tno.ra_discov = discov.coordinate.ra.... | Wrap the objects from the Version Releases together with the objects instantiated from fitting their mpc lines |
def delete_object(self, obj, view_kwargs):
if obj is None:
url_field = getattr(self, 'url_field', 'id')
filter_value = view_kwargs[url_field]
raise ObjectNotFound('{}: {} not found'.format(self.model.__name__, filter_value),
source={'parameter... | Delete an object through sqlalchemy
:param DeclarativeMeta item: an item from sqlalchemy
:param dict view_kwargs: kwargs from the resource view |
def log(self, message, level=None):
    """Log a message to Storm at the given level.

    :param message: the log message to send to Storm.
    :type message: str
    :param level: logging level name looked up in ``_STORM_LOG_LEVELS``
        (e.g. trace, debug, info, warn, ...); unknown or missing levels
        fall back to info.
    """
    storm_level = _STORM_LOG_LEVELS.get(level, _STORM_LOG_INFO)
    self.send_message(
        {"command": "log", "msg": str(message), "level": storm_level})
def delete_job(self, id, jobstore=None):
    """DEPRECATED, use remove_job instead.

    Remove a job, preventing it from being run any more.

    :param str id: the identifier of the job
    :param str jobstore: alias of the job store that contains the job
    """
    warnings.warn(
        'delete_job has been deprecated, use remove_job instead.',
        DeprecationWarning)
    self.remove_job(id, jobstore)
def compute_tls13_traffic_secrets(self):
hkdf = self.prcs.hkdf
self.tls13_master_secret = hkdf.extract(self.tls13_handshake_secret,
None)
cts0 = hkdf.derive_secret(self.tls13_master_secret,
b"client application tra... | Ciphers key and IV are updated accordingly for Application data.
self.handshake_messages should be ClientHello...ServerFinished. |
def get_authorization_url(self):
    """Build the OAuth2 authorization URL for the current client."""
    query = {
        'response_type': 'code',
        'client_id': self.client.get('client_id', ''),
        'redirect_uri': self.client.get('redirect_uri', ''),
    }
    return self._format_url(OAUTH2_ROOT + 'authorize', query=query)
def _activate_texture(mesh, name):
if name == True or isinstance(name, int):
keys = list(mesh.textures.keys())
idx = 0 if not isinstance(name, int) or name == True else name
if idx > len(keys):
idx = 0
try:
name = keys[idx]
... | Grab a texture and update the active texture coordinates. This makes
sure to not destroy old texture coordinates
Parameters
----------
name : str
The name of the texture and texture coordinates to activate
Return
------
vtk.vtkTexture : The active te... |
def get_user_flagger():
user_klass = get_user_model()
try:
user = user_klass.objects.get(pk=COMMENT_FLAG_USER_ID)
except user_klass.DoesNotExist:
try:
user = user_klass.objects.get(
**{user_klass.USERNAME_FIELD: FLAGGER_USERNAME})
except user_klass.DoesNot... | Return an User instance used by the system
when flagging a comment as trackback or pingback. |
def get_by_location(cls, location, include_deactivated=False):
if include_deactivated:
view = views.service_location
else:
view = views.active_service_location
result = yield view.first(key=location, include_docs=True)
parent = cls.parent_resource(**result['doc'])... | Get a service by it's location |
def get_quoted_columns(self, platform):
    """Return the quoted names of the columns this constraint covers.

    A name is quoted only if it was defined with quoting or collides with
    a keyword reserved by the platform; otherwise the plain value as
    inserted is returned.

    :param platform: The platform used to resolve quoting rules.
    :rtype: list
    """
    return [col.get_quoted_name(platform) for col in self._columns.values()]
def view(location, browser=None, new="same", autoraise=True):
try:
new = { "same": 0, "window": 1, "tab": 2 }[new]
except KeyError:
raise RuntimeError("invalid 'new' value passed to view: %r, valid values are: 'same', 'window', or 'tab'" % new)
if location.startswith("htt... | Open a browser to view the specified location.
Args:
location (str) : Location to open
If location does not begin with "http:" it is assumed
to be a file path on the local filesystem.
browser (str or None) : what browser to use (default: None)
... |
def _init_typedef(self, typedef_curr, name, lnum):
    """Start a new typedef record, erroring if the previous one is still open.

    :param typedef_curr: the typedef currently being built, or None.
    :param name: record name used in the error message.
    :param lnum: line number reported on error.
    """
    if typedef_curr is not None:
        self._die(
            "PREVIOUS {REC} WAS NOT TERMINATED AS EXPECTED".format(REC=name),
            lnum)
        return None
    return TypeDef()
def tagsOf(self, obj):
    """Return an iterator of unicode strings: the names of the tags that
    apply to the given object."""
    query = self.store.query(
        Tag,
        AND(Tag.catalog == self, Tag.object == obj))
    return query.getColumn("name")
def benchmark():
pool_size = multiprocessing.cpu_count() - 1
if pool_size < 1:
pool_size = 1
pool = multiprocessing.Pool(processes=pool_size, maxtasksperchild=1)
results = pool.imap_unordered(run_scenario, Benchmark.scenarii)
pool.close()
pool.join()
benchmark = Benchmark()
bench... | Run a benchmarking suite and measure time taken by the solver.
Each scenario is run in an isolated process, and results are appended to
CSV file. |
def forward(self, input_tensor):
ones = input_tensor.data.new_ones(input_tensor.shape[0], input_tensor.shape[-1])
dropout_mask = torch.nn.functional.dropout(ones, self.p, self.training, inplace=False)
if self.inplace:
input_tensor *= dropout_mask.unsqueeze(1)
return None
... | Apply dropout to input tensor.
Parameters
----------
input_tensor: ``torch.FloatTensor``
A tensor of shape ``(batch_size, num_timesteps, embedding_dim)``
Returns
-------
output: ``torch.FloatTensor``
A tensor of shape ``(batch_size, num_timesteps... |
def is_type_I_branch(u, v, dfs_data):
    """Determine whether a branch uv is a type I branch.

    True exactly when u is the low-point parent a(v) and also equals L2(v).
    """
    return u == a(v, dfs_data) and u == L2(v, dfs_data)
def extract_name_max_chars(name, max_chars=64, blank=" "):
    """Truncate ``name`` to at most ``max_chars``, cutting at a word end.

    :param name: string to shorten
    :param max_chars: maximum length of the result
    :param blank: char that represents the blank between words
    :return: stripped name, truncated to the nearest word boundary
    """
    shortened = name.strip()
    if len(shortened) <= max_chars:
        return shortened
    shortened = shortened[:max_chars]
    cut = shortened.rfind(blank)
    if cut > 0:
        shortened = shortened[:cut]
    return shortened
def tridi_inverse_iteration(d, e, w, x0=None, rtol=1e-8):
eig_diag = d - w
if x0 is None:
x0 = np.random.randn(len(d))
x_prev = np.zeros_like(x0)
norm_x = np.linalg.norm(x0)
x0 /= norm_x
while np.linalg.norm(np.abs(x0) - np.abs(x_prev)) > rtol:
x_prev = x0.copy()
tridisol... | Perform an inverse iteration to find the eigenvector corresponding
to the given eigenvalue in a symmetric tridiagonal system.
Parameters
----------
d : ndarray
main diagonal of the tridiagonal system
e : ndarray
offdiagonal stored in e[:-1]
w : float
eigenvalue of the eigenve... |
def process_temporary_file(self, tmp_file):
if len(tmp_file.filename) > 100:
base_filename = tmp_file.filename[:tmp_file.filename.rfind(".")]
tmp_file.filename = "%s.%s" % (base_filename[:99-len(tmp_file.extension)], tmp_file.extension)
tmp_file.save()
data = {
... | Truncates the filename if necessary, saves the model, and returns a response |
def overlay_depth(obj):
    """Compute the depth of a DynamicMap overlay when determinable.

    Returns None when the depth cannot be determined; any non-DynamicMap
    object counts as depth 1.
    """
    if not isinstance(obj, DynamicMap):
        return 1
    last = obj.last
    if isinstance(last, CompositeOverlay):
        return len(last)
    if last is None:
        return None
    return 1
def encode_categorical(table, columns=None, **kwargs):
if isinstance(table, pandas.Series):
if not is_categorical_dtype(table.dtype) and not table.dtype.char == "O":
raise TypeError("series must be of categorical dtype, but was {}".format(table.dtype))
return _encode_categorical_series(t... | Encode categorical columns with `M` categories into `M-1` columns according
to the one-hot scheme.
Parameters
----------
table : pandas.DataFrame
Table with categorical columns to encode.
columns : list-like, optional, default: None
Column names in the DataFrame to be encoded.
... |
def example_bigbeds():
    """Return absolute paths of the example bigBed files in the data dir."""
    d = data_dir()
    return [
        os.path.abspath(os.path.join(d, fn))
        for fn in os.listdir(d)
        if os.path.splitext(fn)[-1] == '.bigBed'
    ]
def _file_model_from_path(self, path, content=False, format=None):
model = base_model(path)
model["type"] = "file"
if self.fs.isfile(path):
model["last_modified"] = model["created"] = self.fs.lstat(path)["ST_MTIME"]
else:
model["last_modified"] = model["created"] ... | Build a file model from database record. |
def cool_paginate(context, **kwargs) -> dict:
names = (
'size',
'next_name',
'previous_name',
'elastic',
'page_obj',
)
return_dict = {name: value for name, value in zip(names, map(kwargs.get, names))}
if context.get('request'):
return_dict['request'] = con... | Main function for pagination process. |
def autocommit(f):
    "A decorator to commit to the storage if autocommit is set to True."
    @wraps(f)
    def inner(self, *args, **kwargs):
        value = f(self, *args, **kwargs)
        # Persist immediately when the instance is configured to auto-commit.
        if self._meta.commit_ready():
            self.commit()
        return value
    return inner
def check_nonstandard_section_name(self):
std_sections = ['.text', '.bss', '.rdata', '.data', '.rsrc', '.edata', '.idata',
'.pdata', '.debug', '.reloc', '.stab', '.stabstr', '.tls',
'.crt', '.gnu_deb', '.eh_fram', '.exptbl', '.rodata']
for i in range(200):... | Checking for an non-standard section name |
def stderr_with_input(cmd, stdin):
handle, gpg_stderr = stderr_handle()
LOGGER.debug("GPG command %s", ' '.join(cmd))
try:
gpg_proc = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
... | Runs a command, passing something in stdin, and returning
whatever came out from stdout |
def resolve_all(self, import_items):
    """Resolve a list of imports, yielding filenames.

    Unknown modules are logged and skipped rather than raised.
    """
    for item in import_items:
        try:
            resolved = self.resolve_import(item)
        except ImportException as err:
            logging.info('unknown module %s', err.module_name)
        else:
            yield resolved
def transfers_complete(self):
    """Check that every transfer has completed.

    Reports each incomplete transfer through ``hellraiser``.
    """
    for transfer in self.transfers:
        if transfer.is_complete:
            continue
        hellraiser({
            'errorcode': 4003,
            'errormessage': 'You must complete transfer before logout.'
        })
def kill(self, dwExitCode = 0):
hThread = self.get_handle(win32.THREAD_TERMINATE)
win32.TerminateThread(hThread, dwExitCode)
if self.pInjectedMemory is not None:
try:
self.get_process().free(self.pInjectedMemory)
self.pInjectedMemory = None
... | Terminates the thread execution.
@note: If the C{lpInjectedMemory} member contains a valid pointer,
the memory is freed.
@type dwExitCode: int
@param dwExitCode: (Optional) Thread exit code. |
def prune(self, whole=False, keys=[], names=[], filters=[]):
for node in self.climb(whole):
if not all([key in node.data for key in keys]):
continue
if names and not any(
[re.search(name, node.name) for name in names]):
continue
... | Filter tree nodes based on given criteria |
def transformer_image_decoder(targets,
encoder_output,
ed_attention_bias,
hparams,
name=None):
with tf.variable_scope(name, default_name="transformer_dec"):
batch_size = common_layers.shape_list... | Transformer image decoder over targets with local attention.
Args:
targets: Tensor of shape [batch, ...], and whose size is batch * height *
width * hparams.num_channels * hparams.hidden_size.
encoder_output: Tensor of shape [batch, length_kv, hparams.hidden_size].
ed_attention_bias: Tensor which b... |
def s_supply(self, bus):
    """Return the total complex power generation capacity at ``bus``.

    Only non-load generators attached to ``bus`` contribute; with no
    matching generators the result is ``0+0j``.
    """
    supplies = array([complex(g.p, g.q)
                      for g in self.generators
                      if g.bus == bus and not g.is_load], dtype=complex64)
    if len(supplies) == 0:
        return 0 + 0j
    return sum(supplies)
def from_string(contents):
lines = contents.split("\n")
num_sites = int(lines[0])
coords = []
sp = []
prop = []
coord_patt = re.compile(
r"(\w+)\s+([0-9\-\.]+)\s+([0-9\-\.]+)\s+([0-9\-\.]+)\s+" +
r"([0-9\-\.]+)"
)
for i in range(2, ... | Creates Zeo++ Voronoi XYZ object from a string.
from_string method of XYZ class is being redefined.
Args:
contents: String representing Zeo++ Voronoi XYZ file.
Returns:
ZeoVoronoiXYZ object |
def disconnect_network_gateway(self, gateway_id, body=None):
    """Disconnect a network from the specified gateway."""
    base = self.network_gateway_path % gateway_id
    url = "%s/disconnect_network" % base
    return self.put(url, body=body)
def process_data_config_section(config, data_config):
if 'connectors' in data_config:
for connector in data_config['connectors']:
config.data['connectors'][
connector['name']] = get_config_from_package(
connector['class'])
if 'sources' in data_config:
... | Processes the data configuration section from the configuration
data dict.
:param config: The config reference of the object that will hold the
configuration data from the config_data.
:param data_config: Data configuration section from a config data dict. |
def set_status(self, status):
if self._json_state['control_url']:
url = CONST.BASE_URL + self._json_state['control_url']
status_data = {
'status': str(status)
}
response = self._abode.send_request(
method="put", url=url, data=status... | Set device status. |
def statistics(self, start=None, end=None, namespace=None):
    """Get write statistics for the specified namespace and date range."""
    ctx = self.make_context(start=start, end=end, namespace=namespace)
    return ctx.statistics()
def main():
description = 'Letter - a commandline interface'
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--gmail', action='store_true', help='Send via Gmail', )
args = parser.parse_args()
to = raw_input('To address > ')
subject = raw_input('Subject > ')
... | Do the things!
Return: 0
Exceptions: |
def _check_underflow(self, n):
if self._pos + n > self._end_pos:
raise self.BufferUnderflow() | Raise BufferUnderflow if there's not enough bytes to satisfy
the request. |
def create_folder_structure(self):
    """Create Project/Batch-name/Raw-data folders for this batch.

    The info_df JSON-file is stored in the Project folder; summary files
    go in the Batch-name folder; raw data in the raw-data dir.
    """
    # Delegates to the module-level create_folder_structure helper.
    self.info_file, dirs = create_folder_structure(self.project, self.name)
    self.project_dir, self.batch_dir, self.raw_dir = dirs
    logger.debug("create folders:" + str(dirs))
def stop_artifact_creation(self, id_or_uri, task_uri):
    """Stop creation of the selected Artifact Bundle.

    Args:
        id_or_uri: ID or URI of the Artifact Bundle.
        task_uri: Task URI associated with the Artifact Bundle.

    Returns:
        string:
    """
    uri = self.URI + '/' + extract_id_from_uri(id_or_uri) + self.STOP_CREATION_PATH
    return self._client.update({"taskUri": task_uri}, uri=uri)
def exists(self, queue_name, timeout=None):
    """Return whether the queue exists.

    :param str queue_name: name of the queue to check for existence.
    :param int timeout: server timeout, expressed in seconds.
    :rtype: bool
    """
    try:
        self.get_queue_metadata(queue_name, timeout=timeout)
    except AzureHttpError as ex:
        # Not-found errors are expected here; anything else re-raises.
        _dont_fail_not_exist(ex)
        return False
    return True
def get_municipalities(self):
    """Return the unique municipality names, sorted alphabetically.

    A set comprehension replaces the redundant
    ``sorted(list(set([...])))`` chain: ``sorted`` accepts any iterable
    and already returns a list.
    """
    return sorted({loc.municipality for loc in self.get_locations().values()})
def setpurpose(self, purpose):
if isinstance(purpose, str):
purp_no = libcrypto.X509_PURPOSE_get_by_sname(purpose)
if purp_no <= 0:
raise X509Error("Invalid certificate purpose '%s'" % purpose)
elif isinstance(purpose, int):
purp_no = purpose
i... | Sets certificate purpose which verified certificate should match
@param purpose - number from 1 to 9 or standard strind defined
in Openssl
possible strings - sslcient,sslserver, nssslserver, smimesign,i
smimeencrypt, crlsign, any, ocsphelper |
def _create(self, cache_file):
conn = sqlite3.connect(cache_file)
cur = conn.cursor()
cur.execute("PRAGMA foreign_keys = ON")
cur.execute(
)
cur.execute(
)
conn.commit()
conn.close() | Create the tables needed to store the information. |
def intersection(a, b, scale=1):
try:
a1, a2 = a
except TypeError:
a1 = a.start
a2 = a.stop
try:
b1, b2 = b
except TypeError:
b1 = b.start
b2 = b.stop
if a2 <= b1:
return None
if a1 >= b2:
return None
if a2 <= b2:
if a1 ... | Intersection between two segments. |
def get_calendar(self, name):
canonical_name = self.resolve_alias(name)
try:
return self._calendars[canonical_name]
except KeyError:
pass
try:
factory = self._calendar_factories[canonical_name]
except KeyError:
raise InvalidCalendar... | Retrieves an instance of an TradingCalendar whose name is given.
Parameters
----------
name : str
The name of the TradingCalendar to be retrieved.
Returns
-------
calendar : calendars.TradingCalendar
The desired calendar. |
def trsm(self,B,trans='N'):
r
if trans=='N':
cp.trsm(self._L0,B)
pftrsm(self._V,self._L,self._B,B,trans='N')
elif trans=='T':
pftrsm(self._V,self._L,self._B,B,trans='T')
cp.trsm(self._L0,B,trans='T')
elif type(trans) is str:
rai... | r"""
Solves a triangular system of equations with multiple righthand
sides. Computes
.. math::
B &:= L^{-1} B \text{ if trans is 'N'}
B &:= L^{-T} B \text{ if trans is 'T'} |
def symlink(self, source, dest):
    """Create a symbolic link to the ``source`` path at ``destination``.

    :param str source: path of the original file
    :param str dest: path of the newly created symlink
    """
    dest = self._adjust_cwd(dest)
    self._log(DEBUG, "symlink({!r}, {!r})".format(source, dest))
    self._request(CMD_SYMLINK, b(source), dest)
def parse_html(html):
    """Attempt to convert html to plain text while keeping line breaks.

    Returns a list of paragraphs, each being a list of lines.
    """
    chunks = [c for c in re.split("</?p[^>]*>", html) if c]
    return [[get_text(line) for line in re.split("<br */?>", chunk)]
            for chunk in chunks]
def send_confirm_password_email(person):
url = '%s/profile/login/%s/' % (
settings.REGISTRATION_BASE_URL, person.username)
context = CONTEXT.copy()
context.update({
'url': url,
'receiver': person,
})
to_email = person.email
subject, body = render_email('confirm_password',... | Sends an email to user allowing them to confirm their password. |
def to_pandas_series_rdd(self):
    """Return an RDD of Pandas Series indexed with Pandas DatetimeIndexes."""
    pd_index = self.index().to_pandas_index()

    def as_series(pair):
        key, values = pair
        return (key, pd.Series(values, pd_index))

    return self.map(as_series)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.