code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def _destroy(cls, cdata):
    """Destroy some cdata.

    Wraps *cdata* in a freshly allocated ``<TYPENAME> **`` so the
    Leptonica destroy function can null the caller's pointer.
    """
    pointer_to_pointer = ffi.new('{} **'.format(cls.LEPTONICA_TYPENAME), cdata)
    cls.cdata_destroy(pointer_to_pointer)
def augmented_dickey_fuller(x, param):
res = None
try:
res = adfuller(x)
except LinAlgError:
res = np.NaN, np.NaN, np.NaN
except ValueError:
res = np.NaN, np.NaN, np.NaN
except MissingDataError:
res = np.NaN, np.NaN, np.NaN
return [('attr_"{}"'.format(config["attr... | The Augmented Dickey-Fuller test is a hypothesis test which checks whether a unit root is present in a time
series sample. This feature calculator returns the value of the respective test statistic.
See the statsmodels implementation for references and more details.
:param x: the time series to calculate ... |
def move(self, u_function):
if self.mesh:
self.u = u_function
delta = [u_function(p) for p in self.mesh.coordinates()]
movedpts = self.mesh.coordinates() + delta
self.polydata(False).GetPoints().SetData(numpy_to_vtk(movedpts))
self.poly.GetPoints().Mod... | Move a mesh by using an external function which prescribes the displacement
at any point in space.
Useful for manipulating ``dolfin`` meshes. |
def render_profile_data(self, as_parsed):
    """Render the chosen profile entry, as it was parsed.

    Raises DbtProfileError when the profile input contains a reference
    to itself (deep_map signals this with RecursionException).
    """
    try:
        rendered = deep_map(self._render_profile_data, as_parsed)
    except RecursionException:
        raise DbtProfileError(
            'Cycle detected: Profile input has a reference to itself',
            project=as_parsed
        )
    return rendered
def add_cell_footer(self):
logging.info('Adding footer cell')
for cell in self.nb['cells']:
if cell.cell_type == 'markdown':
if 'pynb_footer_tag' in cell.source:
logging.debug('Footer cell already present')
return
m =
se... | Add footer cell |
def _getDecoratorsName(node):
decorators = []
if not node.decorators:
return decorators
for decorator in node.decorators.nodes:
decorators.append(decorator.as_string())
return decorators | Return a list with names of decorators attached to this node.
@param node: current node of pylint |
def complex_randn(*args):
    """Return a complex array of samples drawn from a standard normal
    distribution.

    Parameters
    ----------
    d0, d1, ..., dn : int
        Dimensions of the random array

    Returns
    -------
    a : ndarray
        Random array of shape (d0, d1, ..., dn)
    """
    # Draw real part first, imaginary part second, to keep the RNG
    # consumption order identical to the historical behaviour.
    real_part = np.random.randn(*args)
    imag_part = np.random.randn(*args)
    return real_part + 1j * imag_part
distribution.
Parameters
----------
d0, d1, ..., dn : int
Dimensions of the random array
Returns
-------
a : ndarray
Random array of shape (d0, d1, ..., dn) |
def add_callback(obj, callback, args=()):
callbacks = obj._callbacks
node = Node(callback, args)
if callbacks is None:
obj._callbacks = node
return node
if not isinstance(callbacks, dllist):
obj._callbacks = dllist()
obj._callbacks.insert(callbacks)
callbacks = ob... | Add a callback to an object. |
def register_token_getter(self, provider):
    """Register callback to retrieve token from session."""
    remote_app = oauth.remote_apps[provider]

    def getter(token=None):
        return self.token_getter(provider, token)

    # Apply the app's 'tokengetter' decorator to install the callback.
    getattr(remote_app, 'tokengetter')(getter)
def get_property(self, index, doctype, name):
    """Return a mapped property *name* of *doctype* within *index*.

    :return: a mapped property
    """
    doctype_mapping = self.indices[index][doctype]
    return doctype_mapping.properties[name]
:return a mapped property |
def user_entry(entry_int, num_inst, command):
valid_entry = False
if not entry_int:
print("{}aborting{} - {} instance\n".
format(C_ERR, C_NORM, command))
sys.exit()
elif entry_int >= 1 and entry_int <= num_inst:
entry_idx = entry_int - 1
valid_entry = True
e... | Validate user entry and returns index and validity flag.
Processes the user entry and take the appropriate action: abort
if '0' entered, set validity flag and index is valid entry, else
return invalid index and the still unset validity flag.
Args:
entry_int (int): a number entered or 999 if a ... |
def uv_to_color(uv, image):
if image is None or uv is None:
return None
uv = np.asanyarray(uv, dtype=np.float64)
x = (uv[:, 0] * (image.width - 1))
y = ((1 - uv[:, 1]) * (image.height - 1))
x = x.round().astype(np.int64) % image.width
y = y.round().astype(np.int64) % image.height
col... | Get the color in a texture image.
Parameters
-------------
uv : (n, 2) float
UV coordinates on texture image
image : PIL.Image
Texture image
Returns
----------
colors : (n, 4) float
RGBA color at each of the UV coordinates |
def delete_objective_bank(self, objective_bank_id=None):
from dlkit.abstract_osid.id.primitives import Id as ABCId
if objective_bank_id is None:
raise NullArgument()
if not isinstance(objective_bank_id, ABCId):
raise InvalidArgument('argument type is not an osid Id')
... | Deletes an ObjectiveBank.
arg: objectiveBankId (osid.id.Id): the Id of the
ObjectiveBank to remove
raise: NotFound - objectiveBankId not found
raise: NullArgument - objectiveBankId is null
raise: OperationFailed - unable to complete request
raise: Permissi... |
def compile_regex_from_str(self, ft_str):
sequence = []
for m in re.finditer(r'\[([^]]+)\]', ft_str):
ft_mask = fts(m.group(1))
segs = self.all_segs_matching_fts(ft_mask)
sub_pat = '({})'.format('|'.join(segs))
sequence.append(sub_pat)
pattern = ''... | Given a string describing features masks for a sequence of segments,
return a regex matching the corresponding strings.
Args:
ft_str (str): feature masks, each enclosed in square brackets, in
which the features are delimited by any standard delimiter.
Returns:
... |
def itemgetters(*args):
    """Get a handful of items from each element of an iterable.

    Equivalent to mapping ``itemgetter(*args)`` over the iterable and
    materialising the result as a list.
    """
    getter = itemgetter(*args)

    def inner(iterable):
        return list(map(getter, iterable))

    return inner
This is just map(itemgetter(...), iterable) with a list comprehension. |
def activate(self, user):
    """Update the ``invitee`` value and save the instance.

    Provided as a way of extending the behavior.

    Args:
        user: the newly created user
    Returns:
        the linking organization user
    """
    linking_user = self.organization.add_user(user, **self.activation_kwargs())
    self.invitee = user
    self.save()
    return linking_user
Provided as a way of extending the behavior.
Args:
user: the newly created user
Returns:
the linking organization user |
def RestrictFeedItemToAdGroup(client, feed_item, adgroup_id):
feed_item_target_service = client.GetService(
'FeedItemTargetService', 'v201809')
ad_group_target = {
'xsi_type': 'FeedItemAdGroupTarget',
'feedId': feed_item['feedId'],
'feedItemId': feed_item['feedItemId'],
'adGroupId': ad... | Restricts the feed item to an ad group.
Args:
client: an AdWordsClient instance.
feed_item: The feed item.
adgroup_id: The ad group ID. |
def build_transform(self):
cfg = self.cfg
if cfg.INPUT.TO_BGR255:
to_bgr_transform = T.Lambda(lambda x: x * 255)
else:
to_bgr_transform = T.Lambda(lambda x: x[[2, 1, 0]])
normalize_transform = T.Normalize(
mean=cfg.INPUT.PIXEL_MEAN, std=cfg.INPUT.PIXEL... | Creates a basic transformation that was used to train the models |
def log_print_request(method, url, query_params=None, headers=None, body=None):
log_msg = '\n>>>>>>>>>>>>>>>>>>>>> Request >>>>>>>>>>>>>>>>>>> \n'
log_msg += '\t> Method: %s\n' % method
log_msg += '\t> Url: %s\n' % url
if query_params is not None:
log_msg += '\t> Query params: {}\n'.format(str(q... | Log an HTTP request data in a user-friendly representation.
:param method: HTTP method
:param url: URL
:param query_params: Query parameters in the URL
:param headers: Headers (dict)
:param body: Body (raw body, string)
:return: None |
def mkpassword(length=16, chars=None, punctuation=None):
if chars is None:
chars = string.ascii_letters + string.digits
data = [random.choice(chars) for _ in range(length)]
if punctuation:
data = data[:-punctuation]
for _ in range(punctuation):
data.append(random.choice(P... | Generates a random ascii string - useful to generate authinfos
:param length: string wanted length
:type length: ``int``
:param chars: character population,
defaults to alphabet (lower & upper) + numbers
:type chars: ``str``, ``list``, ``set`` (sequence)
:param punctuation: numb... |
def stop_global_driver(force=False):
address, pid = _read_driver()
if address is None:
return
if not force:
try:
Client(address=address)
except ConnectionError:
if pid_exists(pid):
raise
try:
... | Stops the global driver if running.
No-op if no global driver is running.
Parameters
----------
force : bool, optional
By default skein will check that the process associated with the
driver PID is actually a skein driver. Setting ``force`` to
``True... |
def checkIfRemoteIsNewer(self, localfile, remote_size, remote_modify):
is_remote_newer = False
status = os.stat(localfile)
LOG.info(
"\nLocal file size: %i"
"\nLocal Timestamp: %s",
status[ST_SIZE], datetime.fromtimestamp(status.st_mtime))
remote_dt = ... | Overrides checkIfRemoteIsNewer in Source class
:param localfile: str file path
:param remote_size: str bytes
:param remote_modify: str last modify date in the form 20160705042714
:return: boolean True if remote file is newer else False |
def _create_inbound_thread(self):
inbound_thread = threading.Thread(target=self._process_incoming_data,
name=__name__)
inbound_thread.daemon = True
inbound_thread.start()
return inbound_thread | Internal Thread that handles all incoming traffic.
:rtype: threading.Thread |
def load(self, format=None, *, kwargs=None):
    """Deserialize an object from the file.

    Auto-detects the format from the file extension name when `format`
    is None; for example, `.json` will be detected as `json`.

    * raise `FormatNotFoundError` on unknown format.
    * raise `SerializeError` on any serialize exceptions.
    """
    # A mutable default ({}) would be a single dict shared by every
    # call; use None as the sentinel and build a fresh dict per call.
    if kwargs is None:
        kwargs = {}
    # NOTE(review): `load` here is expected to resolve to the
    # module-level deserializer function, not this method — confirm
    # against the defining module.
    return load(self, format=format, kwargs=kwargs)
auto detect format by file extension name if `format` is None.
for example, `.json` will detect as `json`.
* raise `FormatNotFoundError` on unknown format.
* raise `SerializeError` on any serialize exceptions. |
def _get_dependencies_from_json(ireq, sources):
if os.environ.get("PASSA_IGNORE_JSON_API"):
return
if ireq.extras:
return
try:
version = get_pinned_version(ireq)
except ValueError:
return
url_prefixes = [
proc_url[:-7]
for proc_url in (
raw... | Retrieves dependencies for the install requirement from the JSON API.
:param ireq: A single InstallRequirement
:type ireq: :class:`~pip._internal.req.req_install.InstallRequirement`
:return: A set of dependency lines for generating new InstallRequirements.
:rtype: set(str) or None |
def waveform_to_examples(data, sample_rate):
import resampy
if len(data.shape) > 1:
data = np.mean(data, axis=1)
if sample_rate != vggish_params.SAMPLE_RATE:
data = resampy.resample(data, sample_rate, vggish_params.SAMPLE_RATE)
log_mel = mel_features.log_mel_spectrogram(
data,
audio_sample_r... | Converts audio waveform into an array of examples for VGGish.
Args:
data: np.array of either one dimension (mono) or two dimensions
(multi-channel, with the outer dimension representing channels).
Each sample is generally expected to lie in the range [-1.0, +1.0],
although this is not required.... |
def guessoffset(args):
p = OptionParser(guessoffset.__doc__)
opts, args = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help())
fastqfile, = args
ai = iter_fastq(fastqfile)
rec = next(ai)
offset = 64
while rec:
quality = rec.quality
lowcounts = len([x... | %prog guessoffset fastqfile
Guess the quality offset of the fastqfile, whether 33 or 64.
See encoding schemes: <http://en.wikipedia.org/wiki/FASTQ_format>
SSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS...............................
..........................XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX... |
def _expect_method(self, command):
child = pexpect.spawn(self._ipmitool_path, self.args + command)
i = child.expect([pexpect.TIMEOUT, 'Password: '], timeout=10)
if i == 0:
child.terminate()
self.error = 'ipmitool command timed out'
self.status = 1
else... | Use the expect module to execute ipmitool commands
and set status |
def sel_list_pres(ds_sfc_x):
p_min, p_max = ds_sfc_x.sp.min().values, ds_sfc_x.sp.max().values
list_pres_level = [
'1', '2', '3',
'5', '7', '10',
'20', '30', '50',
'70', '100', '125',
'150', '175', '200',
'225', '250', '300',
'350', '400', '450',
'... | select proper levels for model level data download |
def _read_data_type_rpl(self, length):
_cmpr = self._read_binary(1)
_padr = self._read_binary(1)
_resv = self._read_fileng(2)
_inti = int(_cmpr[:4], base=2)
_inte = int(_cmpr[4:], base=2)
_plen = int(_padr[:4], base=2)
_ilen = 16 - _inti
_elen = 16 - _inte... | Read IPv6-Route RPL Source data.
Structure of IPv6-Route RPL Source data [RFC 6554]:
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-... |
def get_cursor_left_position(self, count=1):
    """Relative position for cursor left.

    Negative counts delegate to the mirror-image right movement.
    """
    if count < 0:
        return self.get_cursor_right_position(-count)
    # Never move further left than the current column.
    steps = min(self.cursor_position_col, count)
    return -steps
def make_annotation(self):
annotation = dict()
for item in dir(self):
if len(item) > 0 and item[0] != '_' and \
not inspect.ismethod(getattr(self, item)):
annotation[item] = getattr(self, item)
annotation['action_mentions'] = list()
for act... | Returns a dictionary with all properties of the action
and each of its action mentions. |
def check_url(url):
    """Check if the resource at URL is fetchable (by trying to fetch it
    and checking for a success status).

    Args:
        url (str): Url to check.
    Returns:
        Returns a tuple of {True/False, response code}
    """
    try:
        response = urlopen(urllib2.Request(url))
        return True, response.code
    except urllib2.HTTPError as err:
        return False, err.code
Args:
url (str): Url to check.
Returns:
Returns a tuple of {True/False, response code} |
def _valid_folder(self, base, name):
valid = True
fullpath = os.path.join(base, name)
if (
not self.recursive or
(
self.folder_exclude_check is not None and
not self.compare_directory(fullpath[self._base_len:] if self.dir_pathname else name... | Return whether a folder can be searched. |
def intervals_to_boundaries(intervals, q=5):
    """Convert interval times into boundaries.

    Parameters
    ----------
    intervals : np.ndarray, shape=(n_events, 2)
        Array of interval start and end-times
    q : int
        Number of decimals to round to. (Default value = 5)

    Returns
    -------
    boundaries : np.ndarray
        Sorted unique interval boundary times.
    """
    flattened = np.ravel(intervals)
    rounded = np.round(flattened, decimals=q)
    return np.unique(rounded)
Parameters
----------
intervals : np.ndarray, shape=(n_events, 2)
Array of interval start and end-times
q : int
Number of decimals to round to. (Default value = 5)
Returns
-------
boundaries : np.ndarray
Interval boundary time... |
def hookable(cls):
assert isinstance(cls, type)
hook_definitions = []
if not issubclass(cls, Hookable):
for k, v in list(cls.__dict__.items()):
if isinstance(v, (ClassHook, InstanceHook)):
delattr(cls, k)
if v.name is None:
v.name = k
... | Initialise hookery in a class that declares hooks by decorating it with this decorator.
This replaces the class with another one which has the same name, but also inherits Hookable
which has HookableMeta set as metaclass so that sub-classes of cls will have hook descriptors
initialised properly.
When ... |
def DomainTokensCreate(self, domain_id, amount):
    """Create tokens that can be used by users who want to join the domain.

    Tokens are automatically deleted after usage.
    Only domain managers can create tokens.
    """
    endpoint = '/domains/{0}/tokens.json'.format(domain_id)
    if self.__SenseApiCall__(endpoint, 'POST', parameters={"amount": amount}):
        return True
    self.__error__ = "api call unsuccessful"
    return False
def _configure_root_logger(self):
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
if self.args.verbose:
handler = logging.StreamHandler(sys.stdout)
else:
handler = logging.handlers.RotatingFileHandler(
common.LOG_FILE,
... | Initialise logging system |
def _get_servers_deque(servers, database):
    """Return the cached deque of servers for the given (servers, database) pair.

    The deque keeps the active server at the beginning; callers rotate it
    when the first server becomes unreachable.
    """
    key = (servers, database)
    try:
        return _servers_deques[key]
    except KeyError:
        fresh = deque(servers)
        _servers_deques[key] = fresh
        return fresh
database name.
This deque have active server at the begining, if first server
is not accessible at the moment the deque will be rotated,
second server will be moved to the first position, thirt to the
second position etc, and previously first s... |
def icetea_main():
    """Main function for running Icetea.

    Calls sys.exit with the run's return code; does not return.
    """
    from icetea_lib import IceteaManager
    sys.exit(IceteaManager.IceteaManager().run())
:return: Nothing. |
def get_sections_2d_nts(self, sortby=None):
    """Return (section_name, namedtuples) pairs for the high GO IDs
    actually used to group the current set of GO IDs."""
    return [
        (section_name, self.gosubdag.get_nts(hdrgos_actual, sortby=sortby))
        for section_name, hdrgos_actual in self.get_sections_2d()
    ]
def _patch(self, uri, data):
headers = self._get_headers()
response = self.session.patch(uri, headers=headers,
data=json.dumps(data))
if response.status_code == 204:
return response
else:
logging.error(response.content)
response.raise_f... | Simple PATCH operation for a given path.
The body is expected to list operations to perform to update
the data. Operations include:
- add
- remove
- replace
- move
- copy
- test
[
{ "op": "test", "path": "/a/... |
def send_http_error(self, http_code, cim_error=None,
cim_error_details=None, headers=None):
self.send_response(http_code, http_client.responses.get(http_code, ''))
self.send_header("CIMExport", "MethodResponse")
if cim_error is not None:
self.send_header("CIME... | Send an HTTP response back to the WBEM server that indicates
an error at the HTTP level. |
def _dump(self, tree):
schema = []
if tree.tables:
for table in tree.tables:
desc = self.describe(table, refresh=True, require=True)
schema.append(desc.schema)
else:
for table in self.describe_all():
schema.append(table.sche... | Run a DUMP statement |
def create_partition(self, org_name, part_name, dci_id, vrf_prof,
service_node_ip=None, desc=None):
desc = desc or org_name
res = self._create_or_update_partition(org_name, part_name,
desc, dci_id=dci_id,
... | Create partition on the DCNM.
:param org_name: name of organization to be created
:param part_name: name of partition to be created
:param dci_id: DCI ID
:vrf_prof: VRF profile for the partition
:param service_node_ip: Specifies the Default route IP address.
:param desc:... |
def fsdecode(path, os_name=os.name, fs_encoding=FS_ENCODING, errors=None):
    """Decode given path.

    :param path: path will be decoded if using bytes
    :type path: bytes or str
    :param os_name: operative system name, defaults to os.name
    :type os_name: str
    :param fs_encoding: current filesystem encoding, defaults to autodetected
    :type fs_encoding: str
    :return: decoded path
    """
    if isinstance(path, bytes):
        if not errors:
            # Windows and legacy Python lack surrogateescape support.
            strict_mode = PY_LEGACY or os_name == 'nt'
            errors = 'strict' if strict_mode else 'surrogateescape'
        return path.decode(fs_encoding, errors=errors)
    return path
:param path: path will be decoded if using bytes
:type path: bytes or str
:param os_name: operative system name, defaults to os.name
:type os_name: str
:param fs_encoding: current filesystem encoding, defaults to autodetected
:type fs_encoding: str
:return: decoded path
... |
def _general_approximating_model(self, beta, T, Z, R, Q, h_approx):
H = np.ones(self.data_length)*h_approx
mu = np.zeros(self.data_length)
return H, mu | Creates simplest kind of approximating Gaussian model
Parameters
----------
beta : np.array
Contains untransformed starting values for latent variables
T, Z, R, Q : np.array
State space matrices used in KFS algorithm
h_approx : float
Value t... |
def histpath(self):
    """Returns the full path to the console history file."""
    import os
    from fortpy import settings
    return os.path.join(settings.cache_directory, "history")
def get_action_cache_key(name, argument):
    """Get an action cache key string.

    The key is ``name`` alone, or ``name::argument`` when a truthy
    argument is supplied.
    """
    key = str(name)
    if argument:
        key = key + '::' + str(argument)
    return key
def remove_aliases(self_or_cls, aliases):
    """Remove every alias whose target value is in *aliases*.

    Args:
        self_or_cls: object (or class) carrying an ``aliases`` dict.
        aliases: collection of alias target values to drop.
    """
    # Snapshot the items first: popping from a dict while iterating its
    # live items() view raises "RuntimeError: dictionary changed size
    # during iteration" on Python 3.
    for key, value in list(self_or_cls.aliases.items()):
        if value in aliases:
            self_or_cls.aliases.pop(key)
def ChangeUserStatus(self, Status):
if self.CurrentUserStatus.upper() == Status.upper():
return
self._ChangeUserStatus_Event = threading.Event()
self._ChangeUserStatus_Status = Status.upper()
self.RegisterEventHandler('UserStatus', self._ChangeUserStatus_UserStatus)
s... | Changes the online status for the current user.
:Parameters:
Status : `enums`.cus*
New online status for the user.
:note: This function waits until the online status changes. Alternatively, use the
`CurrentUserStatus` property to perform an immediate change of stat... |
def sync_streams(self):
    """Access the sync_streams.

    :returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamList
    :rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamList
    """
    # Lazily build the list resource once and cache it on the instance.
    cached = self._sync_streams
    if cached is None:
        cached = SyncStreamList(self._version, service_sid=self._solution['sid'])
        self._sync_streams = cached
    return cached
:returns: twilio.rest.sync.v1.service.sync_stream.SyncStreamList
:rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamList |
def prune(self):
    """Prune the branch of empty nodes."""
    survivors = []
    for child in self.children:
        child.prune()
        if not child.isempty(False):
            survivors.append(child)
    # Slice-assign so the children list object keeps its identity.
    self.children[:] = survivors
def energy_ratio_by_chunks(x, param):
res_data = []
res_index = []
full_series_energy = np.sum(x ** 2)
for parameter_combination in param:
num_segments = parameter_combination["num_segments"]
segment_focus = parameter_combination["segment_focus"]
assert segment_focus < num_segmen... | Calculates the sum of squares of chunk i out of N chunks expressed as a ratio with the sum of squares over the whole
series.
Takes as input parameters the number num_segments of segments to divide the series into and segment_focus
which is the segment number (starting at zero) to return a feature on.
... |
def _handle_heading(self, token):
level = token.level
self._push()
while self._tokens:
token = self._tokens.pop()
if isinstance(token, tokens.HeadingEnd):
title = self._pop()
return Heading(title, level)
else:
se... | Handle a case where a heading is at the head of the tokens. |
def quantity(*args):
if len(args) == 1:
if isinstance(args[0], str):
return Quantity(from_string(args[0]))
elif isinstance(args[0], dict):
if hasattr(args[0]["value"], "__len__"):
return QuantVec(from_dict_v(args[0]))
else:
return Q... | Create a quantity. This can be from a scalar or vector.
Example::
q1 = quantity(1.0, "km/s")
q2 = quantity("1km/s")
q1 = quantity([1.0,2.0], "km/s") |
def connect_container_to_network(self, container, net_id,
ipv4_address=None, ipv6_address=None,
aliases=None, links=None,
link_local_ips=None):
data = {
"Container": container,
... | Connect a container to a network.
Args:
container (str): container-id/name to be connected to the network
net_id (str): network id
aliases (:py:class:`list`): A list of aliases for this endpoint.
Names in that list can be used within the network to reach the
... |
def _Moran_BV_Matrix_array(variables, w, permutations=0, varnames=None):
if varnames is None:
varnames = ['x{}'.format(i) for i in range(k)]
k = len(variables)
rk = list(range(0, k - 1))
results = {}
for i in rk:
for j in range(i + 1, k):
y1 = variables[i]
y2 ... | Base calculation for MORAN_BV_Matrix |
def isCode(self, block, column):
    """Check if the character at *column* of *block* is code."""
    user_data = block.userData()
    block_data = None if user_data is None else user_data.data
    return self._syntax.isCode(block_data, column)
def safe_round(self, x):
val = x[self.col_name]
if np.isposinf(val):
val = sys.maxsize
elif np.isneginf(val):
val = -sys.maxsize
if np.isnan(val):
val = self.default_val
if self.subtype == 'integer':
return int(round(val))
r... | Returns a converter that takes in a value and turns it into an integer, if necessary.
Args:
col_name(str): Name of the column.
subtype(str): Numeric subtype of the values.
Returns:
function |
def load_irac_psf(channel, show_progress=False):
channel = int(channel)
if channel < 1 or channel > 4:
raise ValueError('channel must be 1, 2, 3, or 4')
fn = 'irac_ch{0}_flight.fits'.format(channel)
path = get_path(fn, location='remote', show_progress=show_progress)
hdu = fits.open(path)[0]
... | Load a Spitzer IRAC PSF image.
Parameters
----------
channel : int (1-4)
The IRAC channel number:
* Channel 1: 3.6 microns
* Channel 2: 4.5 microns
* Channel 3: 5.8 microns
* Channel 4: 8.0 microns
show_progress : bool, optional
Whether to d... |
def release(self, forceRelease=False):
if not self.held:
if forceRelease is False:
return False
else:
self.held = True
if not os.path.exists(self.lockPath):
self.held = False
self.acquiredAt = None
return True
... | release - Release the lock.
@param forceRelease <bool> default False - If True, will release the lock even if we don't hold it.
@return - True if lock is released, otherwise False |
def derive_single_object_url_pattern(slug_url_kwarg, path, action):
    """Utility function called by class methods for single object views.

    Builds ``^<path>/<action>/<capture>/$`` where the capture group is a
    named slug when *slug_url_kwarg* is set, else a numeric ``pk``.
    """
    if slug_url_kwarg:
        capture = r'(?P<%s>[^/]+)' % slug_url_kwarg
    else:
        capture = r'(?P<pk>\d+)'
    return r'^%s/%s/%s/$' % (path, action, capture)
def python_job(self, function, parameters=None):
if not callable(function):
raise utils.StimelaCabRuntimeError('Object given as function is not callable')
if self.name is None:
self.name = function.__name__
self.job = {
'function' : function,
... | Run python function
function : Python callable to execute
name : Name of function (if not given, will used function.__name__)
parameters : Parameters to parse to function
label : Function label; for logging purposes |
def list_builds(self, field_selector=None, koji_task_id=None, running=None,
labels=None):
if running:
running_fs = ",".join(["status!={status}".format(status=status.capitalize())
for status in BUILD_FINISHED_STATES])
if not field_sele... | List builds with matching fields
:param field_selector: str, field selector for Builds
:param koji_task_id: str, only list builds for Koji Task ID
:return: BuildResponse list |
def unicode_to_hex(unicode_string):
    """Return a string containing the Unicode hexadecimal codepoint
    of each Unicode character in the given Unicode string.

    Return ``None`` if ``unicode_string`` is ``None``.

    Example::

        a  => U+0061
        ab => U+0061 U+0062

    :param str unicode_string: the Unicode string to convert
    :rtype: str
    """
    if unicode_string is None:
        return None
    # %04X zero-pads to at least four uppercase hex digits and keeps
    # longer codepoints (e.g. U+1F600) intact.
    return u" ".join(u"U+%04X" % ord(ch) for ch in unicode_string)
of each Unicode character in the given Unicode string.
Return ``None`` if ``unicode_string`` is ``None``.
Example::
a => U+0061
ab => U+0061 U+0062
:param str unicode_string: the Unicode string to convert
:rtype: (Unico... |
def edit_securitygroup(self, group_id, name=None, description=None):
successful = False
obj = {}
if name:
obj['name'] = name
if description:
obj['description'] = description
if obj:
successful = self.security_group.editObject(obj, id=group_id)
... | Edit security group details.
:param int group_id: The ID of the security group
:param string name: The name of the security group
:param string description: The description of the security group |
def _charlist(self, data) -> list:
char_string =
nosub = self.sas.nosub
self.sas.nosub = False
ll = self.sas.submit(char_string.format(data.libref, data.table + data._dsopts()))
self.sas.nosub = nosub
l2 = ll['LOG'].partition("VARLIST=\n")
l2 = l2[2].rpartition("V... | Private method to return the variables in a SAS Data set that are of type char
:param data: SAS Data object to process
:return: list of character variables
:rtype: list |
def delete(self):
    """Delete the object remotely.

    Returns without doing anything if the object is new (no id yet).
    Reloads first when not loaded, so the current etag is sent.
    """
    if not self.id:
        return None
    if not self._loaded:
        self.reload()
    return self.http_delete(self.id, etag=self.etag)
new. |
def generate(env):
global PDFLaTeXAction
if PDFLaTeXAction is None:
PDFLaTeXAction = SCons.Action.Action('$PDFLATEXCOM', '$PDFLATEXCOMSTR')
global PDFLaTeXAuxAction
if PDFLaTeXAuxAction is None:
PDFLaTeXAuxAction = SCons.Action.Action(PDFLaTeXAuxFunction,
st... | Add Builders and construction variables for pdflatex to an Environment. |
def _set(self):
self.__event.set()
if self._complete_func:
self.__run_completion_func(self._complete_func, self.id_) | Called internally by Client to indicate this request has finished |
def from_config(cls, cfg, **kwargs):
cfg = dict(cfg, **kwargs)
pythonpath = cfg.get('pythonpath', [])
if 'here' in cfg:
pythonpath.append(cfg['here'])
for path in pythonpath:
sys.path.append(os.path.expanduser(path))
prog = cls.server and 'irc3d' or 'irc3'... | return an instance configured with the ``cfg`` dict |
def dloglikarray(self):
assert self.dparamscurrent, "dloglikarray requires paramscurrent == True"
nparams = len(self._index_to_param)
dloglikarray = scipy.ndarray(shape=(nparams,), dtype='float')
for (i, param) in self._index_to_param.items():
if isinstance(param, str):
... | Derivative of `loglik` with respect to `paramsarray`. |
def require_editable(f):
    """Decorator: make sure the registry key is editable before editing it.

    Raises:
        RegistryKeyNotEditable: if the key was not opened as editable.
    """
    import functools

    # functools.wraps preserves the wrapped function's __name__ and
    # docstring; the original decorator silently replaced them with
    # the wrapper's.
    @functools.wraps(f)
    def wrapper(self, *args, **kwargs):
        if not self._edit:
            raise RegistryKeyNotEditable("The key is not set as editable.")
        return f(self, *args, **kwargs)
    return wrapper
def lookup_hlr(self, phonenumber, params=None):
    """Retrieve the information of a specific HLR lookup."""
    if params is None:
        params = {}
    endpoint = 'lookup/' + str(phonenumber) + '/hlr'
    return HLR().load(self.request(endpoint, 'GET', params))
def IsErrorSuppressedByNolint(category, linenum):
    """Returns true if the specified error category is suppressed on this line.

    Consults the global error_suppressions map populated by
    ParseNolintSuppressions/ResetNolintSuppressions.

    Args:
      category: str, the category of the error.
      linenum: int, the current line number.
    Returns:
      bool, True iff the error is suppressed for this line.
    """
    category_lines = _error_suppressions.get(category, set())
    global_lines = _error_suppressions.get(None, set())
    return linenum in category_lines or linenum in global_lines
Consults the global error_suppressions map populated by
ParseNolintSuppressions/ResetNolintSuppressions.
Args:
category: str, the category of the error.
linenum: int, the current line number.
Returns:
bool, True iff the error... |
def markdown(iterable, renderer=HTMLRenderer):
    """Output HTML with default settings.

    Enables inline and block-level HTML tags.
    """
    with renderer() as active_renderer:
        document = Document(iterable)
        return active_renderer.render(document)
Enables inline and block-level HTML tags. |
def connect(self):
if not getattr(self._local, 'conn', None):
try:
server = self._servers.get()
logger.debug('Connecting to %s', server)
self._local.conn = ClientTransport(server, self._framed_transport,
self._timeout, self._recycle... | Create new connection unless we already have one. |
def _EnforceProcessMemoryLimit(self, memory_limit):
    """Enforces a process memory limit.

    Args:
      memory_limit (int): maximum number of bytes the process is allowed
          to allocate, where 0 represents no limit and None a default of
          4 GiB.
    """
    # No-op when the resource module is unavailable (e.g. Windows).
    if not resource:
        return
    if memory_limit is None:
        memory_limit = 4 * 1024 * 1024 * 1024
    elif memory_limit == 0:
        memory_limit = resource.RLIM_INFINITY
    resource.setrlimit(resource.RLIMIT_DATA, (memory_limit, memory_limit))
Args:
memory_limit (int): maximum number of bytes the process is allowed
to allocate, where 0 represents no limit and None a default of
4 GiB. |
def _check_array(self, X, **kwargs):
    """Validate the data argument X.

    By default, NumPy arrays are converted to 1-block dask arrays.

    Parameters
    ----------
    X : array-like
    """
    if isinstance(X, np.ndarray):
        X = da.from_array(X, X.shape)
    return check_array(X, **kwargs)
By default, NumPy arrays are converted to 1-block dask arrays.
Parameters
----------
X, y : array-like |
def astype(self, dtype, copy=True):
    """Return a copy of the array after casting to a specified type.

    Parameters
    ----------
    dtype : numpy.dtype or str
        The type of the returned array.
    copy : bool
        Default `True`. By default, astype always returns a newly
        allocated ndarray on the same context; when False and the dtype
        already matches, the array itself is returned.
    """
    if not copy and np.dtype(dtype) == self.dtype:
        return self
    casted = zeros(shape=self.shape, ctx=self.context,
                   dtype=dtype, stype=self.stype)
    self.copyto(casted)
    return casted
Parameters
----------
dtype : numpy.dtype or str
The type of the returned array.
copy : bool
Default `True`. By default, astype always returns a newly
allocated ndarray on the same context.... |
def construct_asset_path(self, asset_path, css_path, output_filename, variant=None):
public_path = self.absolute_path(asset_path, os.path.dirname(css_path).replace('\\', '/'))
if self.embeddable(public_path, variant):
return "__EMBED__%s" % public_path
if not posixpath.isabs(asset_pa... | Return a rewritten asset URL for a stylesheet |
def URLRabbitmqBroker(url, *, middleware=None):
    """Alias for the RabbitMQ broker that takes a connection URL as a
    positional argument.

    Parameters:
      url(str): A connection string.
      middleware(list[Middleware]): The middleware to add to this broker.
    """
    deprecation_message = (
        "Use RabbitmqBroker with the 'url' parameter instead of URLRabbitmqBroker."
    )
    warnings.warn(deprecation_message, DeprecationWarning, stacklevel=2)
    return RabbitmqBroker(url=url, middleware=middleware)
positional argument.
Parameters:
url(str): A connection string.
middleware(list[Middleware]): The middleware to add to this
broker. |
def _format_description(ctx):
help_string = ctx.command.help or ctx.command.short_help
if not help_string:
return
bar_enabled = False
for line in statemachine.string2lines(
help_string, tab_width=4, convert_whitespace=True):
if line == '\b':
bar_enabled = True
... | Format the description for a given `click.Command`.
We parse this as reStructuredText, allowing users to embed rich
information in their help messages if they so choose. |
def latrec(radius, longitude, latitude):
    """Convert from latitudinal coordinates to rectangular coordinates.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/latrec_c.html

    :param radius: Distance of a point from the origin.
    :type radius: float
    :param longitude: Longitude of point in radians.
    :type longitude: float
    :param latitude: Latitude of point in radians.
    :return: rectangular coordinates of the point.
    """
    c_radius = ctypes.c_double(radius)
    c_longitude = ctypes.c_double(longitude)
    c_latitude = ctypes.c_double(latitude)
    rectan = stypes.emptyDoubleVector(3)
    libspice.latrec_c(c_radius, c_longitude, c_latitude, rectan)
    return stypes.cVectorToPython(rectan)
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/latrec_c.html
:param radius: Distance of a point from the origin.
:type radius: float
:param longitude: Longitude of point in radians.
:type longitude: float
:param latitude... |
def get_choice(prompt, choices):
print()
checker = []
for offset, choice in enumerate(choices):
number = offset + 1
print("\t{}): '{}'\n".format(number, choice))
checker.append(str(number))
response = get_input(prompt, tuple(checker) + ('',))
if not response:
print("E... | Asks for a single choice out of multiple items.
Given those items, and a prompt to ask the user with |
def _check_download_dir(link, download_dir, hashes):
download_path = os.path.join(download_dir, link.filename)
if os.path.exists(download_path):
logger.info('File was already downloaded %s', download_path)
if hashes:
try:
hashes.check_against_path(download_path)
... | Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None |
def getOutputElementCount(self, name):
    """Return the number of elements for the given output.

    All cell-level outputs share the same size: one element per cell,
    i.e. cellsPerColumn * columnCount.
    """
    valid_outputs = ("activeCells", "predictedCells",
                     "predictedActiveCells", "winnerCells")
    if name not in valid_outputs:
        raise Exception("Invalid output name specified: %s" % name)
    return self.cellsPerColumn * self.columnCount
def inspect_node(self, node_id):
    """Retrieve low-level information about a swarm node.

    Args:
        node_id (string): ID of the node to be inspected.

    Returns:
        A dictionary containing data about this node.

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    """
    # GET /nodes/{id}; True asks _result to decode the JSON body.
    return self._result(
        self._get(self._url('/nodes/{0}', node_id)), True)
Args:
node_id (string): ID of the node to be inspected.
Returns:
A dictionary containing data about this node.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error. |
def __type_check_attributes(self, node: yaml.Node, mapping: CommentedMap,
argspec: inspect.FullArgSpec) -> None:
logger.debug('Checking for extraneous attributes')
logger.debug('Constructor arguments: {}, mapping: {}'.format(
argspec.args, list(mapping.keys())... | Ensure all attributes have a matching constructor argument.
This checks that there is a constructor argument with a \
matching type for each existing attribute.
If the class has a yatiml_extra attribute, then extra \
attributes are okay and no error will be raised if they exist.
... |
def get_node(self, goid, goobj):
    """Return a rounded, filled pydot box node for a GO term."""
    # Fill color falls back to white for unmapped GO IDs; the border
    # color is delegated to the objcolor helper.
    node_attrs = {
        "shape": "box",
        "style": "rounded, filled",
        "fillcolor": self.go2color.get(goid, "white"),
        "color": self.objcolor.get_bordercolor(goid),
    }
    return pydot.Node(self.get_node_text(goid, goobj), **node_attrs)
def minimum_eigen_vector(x, num_steps, learning_rate, vector_prod_fn):
  """Computes eigenvector which corresponds to minimum eigenvalue.

  Args:
    x: initial value of eigenvector.
    num_steps: number of optimization steps.
    learning_rate: learning rate.
    vector_prod_fn: function which takes x and returns product H*x.

  Returns:
    approximate value of eigenvector.
  """
  # Start from a unit vector, then refine it one power-iteration-style
  # step at a time.
  vec = tf.nn.l2_normalize(x)
  step = 0
  while step < num_steps:
    vec = eig_one_step(vec, learning_rate, vector_prod_fn)
    step += 1
  return vec
Args:
x: initial value of eigenvector.
num_steps: number of optimization steps.
learning_rate: learning rate.
vector_prod_fn: function which takes x and returns product H*x.
Returns:
approximate value of eigenvector.
This functio... |
def show_xticklabels_for_all(self, row_column_list=None):
    """Show the x-axis tick labels for all specified subplots.

    :param row_column_list: a list containing (row, column) tuples to
        specify the subplots, or None to indicate *all* subplots.
    :type row_column_list: list or None
    """
    if row_column_list is not None:
        # Only the explicitly requested (row, column) subplots.
        for row, column in row_column_list:
            self.show_xticklabels(row, column)
        return
    # No selection given: apply to every subplot.
    for subplot in self.subplots:
        subplot.show_xticklabels()
:param row_column_list: a list containing (row, column) tuples to
specify the subplots, or None to indicate *all* subplots.
:type row_column_list: list or None |
def walk(self, dirpath):
    """Perform an os.walk on a local or SSH filepath."""
    # Local paths take the plain os.walk fast path.
    if not self.is_ssh(dirpath):
        return os.walk(dirpath)
    # SSH path: ensure the SFTP connection is live, then walk remotely.
    self._check_ftp()
    return self._sftp_walk(self._get_remote(dirpath))
def as_view(cls, *class_args, **class_kwargs):
def view(*args, **kwargs):
self = view.view_class(*class_args, **class_kwargs)
return self.dispatch_request(*args, **kwargs)
if cls.decorators:
view.__module__ = cls.__module__
for decorator in cls.decorators:... | Return view function for use with the routing system, that
dispatches request to appropriate handler method. |
def hierarchical_map_vals(func, node, max_depth=None, depth=0):
if not hasattr(node, 'items'):
return func(node)
elif max_depth is not None and depth >= max_depth:
return map_dict_vals(func, node)
else:
keyval_list = [(key, hierarchical_map_vals(func, val, max_depth, depth + 1)) for ... | node is a dict tree like structure with leaves of type list
TODO: move to util_dict
CommandLine:
python -m utool.util_dict --exec-hierarchical_map_vals
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_dict import * # NOQA
>>> import utool as ut
>>> item_list ... |
def log(self, level, msg):
self._check_session()
level = level.upper()
allowed_levels = ('INFO', 'WARN', 'ERROR', 'FATAL')
if level not in allowed_levels:
raise ValueError('level must be one of: ' +
', '.join(allowed_levels))
self._rest.po... | Write a diagnostic message to a log file or to standard output.
Arguments:
level -- Severity level of entry. One of: INFO, WARN, ERROR, FATAL.
msg -- Message to write to log. |
def get_rmse(self, data_x=None, data_y=None):
if data_x is None:
data_x = np.array(self.args["x"])
if data_y is None:
data_y = np.array(self.args["y"])
if len(data_x) != len(data_y):
raise ValueError("Lengths of data_x and data_y are different")
rmse_y... | Get Root Mean Square Error using
self.bestfit_func
args:
x_min: scalar, default=min(x)
minimum x value of the line
x_max: scalar, default=max(x)
maximum x value of the line
resolution: int, default=1000
how many steps b... |
def notify(self, n: int = 1) -> None:
    """Wake ``n`` waiters."""
    # Collect up to n still-pending waiters first; already-done
    # (e.g. timed-out/cancelled) futures are discarded without
    # consuming the wake budget.
    to_wake = []
    while self._waiters and n:
        candidate = self._waiters.popleft()
        if not candidate.done():
            to_wake.append(candidate)
            n -= 1
    # Resolve outside the scan so the waiter list is in a consistent
    # state before any callbacks run.
    for fut in to_wake:
        future_set_result_unless_cancelled(fut, True)
def loading(self):
    """Context manager for when you need to instantiate entities upon
    unpacking.

    While the managed block runs, ``_initialized`` is False; it is set
    to True only after the block completes. Re-entering while already
    initialized raises ValueError.
    """
    already_initialized = getattr(self, '_initialized', False)
    if already_initialized:
        raise ValueError("Already loading")
    self._initialized = False
    yield
    self._initialized = True
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.