code | docstring |
|---|---|
def session_list(consul_url=None, token=None, return_list=False, **kwargs):
ret = {}
if not consul_url:
consul_url = _get_config()
if not consul_url:
log.error('No Consul URL found.')
ret['message'] = 'No Consul URL found.'
ret['res'] = False
retur... | Used to list sessions.
:param consul_url: The Consul server URL.
:param dc: By default, the datacenter of the agent is queried;
however, the dc can be provided using the "dc" parameter.
:param return_list: By default, all information about the sessions is
returned, us... |
def get_product_string(self):
self._check_device_status()
str_p = ffi.new("wchar_t[]", 255)
rv = hidapi.hid_get_product_string(self._device, str_p, 255)
if rv == -1:
raise IOError("Failed to read product string from HID device: {0}"
.format(self._get... | Get the Product String from the HID device.
:return: The Product String
:rtype: unicode |
def register_actor(name, actor_handle):
if not isinstance(name, str):
raise TypeError("The name argument must be a string.")
if not isinstance(actor_handle, ray.actor.ActorHandle):
raise TypeError("The actor_handle argument must be an ActorHandle "
"object.")
actor_na... | Register a named actor under a string key.
Args:
name: The name of the named actor.
actor_handle: The actor object to be associated with this name |
def ls(manager: Manager, url: Optional[str], namespace_id: Optional[int]):
if url:
n = manager.get_or_create_namespace(url)
if isinstance(n, Namespace):
_page(n.entries)
else:
click.echo('uncachable namespace')
elif namespace_id is not None:
_ls(manager, N... | List cached namespaces. |
def assertEqual(first, second, message=None):
if not first == second:
raise TestStepFail(
format_message(message) if message is not None else "Assert: %s != %s" % (str(first),
str(second))) | Assert that first equals second.
:param first: First part to evaluate
:param second: Second part to evaluate
:param message: Failure message
:raises: TestStepFail if not first == second |
def open(self) -> bool:
return self.state is State.OPEN and not self.transfer_data_task.done() | This property is ``True`` when the connection is usable.
It may be used to detect disconnections but this is discouraged per
the EAFP_ principle. When ``open`` is ``False``, using the connection
raises a :exc:`~websockets.exceptions.ConnectionClosed` exception.
.. _EAFP: https://docs.p... |
def isUrl(urlString):
parsed = urlparse.urlparse(urlString)
urlparseValid = parsed.netloc != '' and parsed.scheme != ''
regex = re.compile(
r'^(?:http|ftp)s?://'
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)'
r'+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|'
r'localhost|'
r'\... | Attempts to return whether a given URL string is valid by checking
for the presence of the URL scheme and netloc using the urlparse
module, and then using a regex.
From http://stackoverflow.com/questions/7160737/ |
def modver(self, *args):
g = get_root(self).globals
if self.ok():
tname = self.val.get()
if tname in self.successes:
self.verify.config(bg=g.COL['start'])
elif tname in self.failures:
self.verify.config(bg=g.COL['stop'])
els... | Switches colour of verify button |
def get_direct_band_gap(self):
if self.is_metal():
return 0.0
dg = self.get_direct_band_gap_dict()
return min(v['value'] for v in dg.values()) | Returns the direct band gap.
Returns:
the value of the direct band gap |
def find_fields(self, classname=".*", fieldname=".*", fieldtype=".*", accessflags=".*"):
for cname, c in self.classes.items():
if re.match(classname, cname):
for f in c.get_fields():
z = f.get_field()
if re.match(fieldname, z.get_name()) and \
... | find fields by regex
:param classname: regular expression of the classname
:param fieldname: regular expression of the fieldname
:param fieldtype: regular expression of the fieldtype
:param accessflags: regular expression of the access flags
:rtype: generator of `FieldClassAnaly... |
def CallNtpdate(logger):
ntpd_inactive = subprocess.call(['service', 'ntpd', 'status'])
try:
if not ntpd_inactive:
subprocess.check_call(['service', 'ntpd', 'stop'])
subprocess.check_call(
'ntpdate `awk \'$1=="server" {print $2}\' /etc/ntp.conf`', shell=True)
if not ntpd_inactive:
su... | Sync clock using ntpdate.
Args:
logger: logger object, used to write to SysLog and serial port. |
def from_row(row):
subject = (row[5][0].upper() + row[5][1:]) if row[5] else row[5]
return Advice.objects.create(
id=row[0],
administration=cleanup(row[1]),
type=row[2],
session=datetime.strptime(row[4], '%d/%m/%Y'),
subject=cleanup(subject),
topics=[t.title() for... | Create an advice from a CSV row |
def monitors(self):
import ns1.rest.monitoring
return ns1.rest.monitoring.Monitors(self.config) | Return a new raw REST interface to monitors resources
:rtype: :py:class:`ns1.rest.monitoring.Monitors` |
def isConnected(self, fromName, toName):
for c in self.connections:
if (c.fromLayer.name == fromName and
c.toLayer.name == toName):
return 1
return 0 | Are these two layers connected this way? |
def find_needed_input(input_format):
needed_inputs = [re.cls for re in registry if re.category==RegistryCategories.inputs and re.cls.input_format == input_format]
if len(needed_inputs)>0:
return needed_inputs[0]
return None | Find a needed input class
input_format - needed input format, see utils.input.dataformats |
def register_instances(name, instances, region=None, key=None, keyid=None,
profile=None):
if isinstance(instances, six.string_types) or isinstance(instances, six.text_type):
instances = [instances]
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
... | Register instances with an ELB. Instances is either a string
instance id or a list of string instance ids.
Returns:
- ``True``: instance(s) registered successfully
- ``False``: instance(s) failed to be registered
CLI example:
.. code-block:: bash
salt myminion boto_elb.register_in... |
def GetSystemConfigurationArtifact(self, session_identifier=CURRENT_SESSION):
system_configuration = artifacts.SystemConfigurationArtifact()
system_configuration.code_page = self.GetValue(
'codepage', default_value=self._codepage)
system_configuration.hostname = self._hostnames.get(
session_... | Retrieves the knowledge base as a system configuration artifact.
Args:
session_identifier (Optional[str]): session identifier, where
CURRENT_SESSION represents the active session.
Returns:
SystemConfigurationArtifact: system configuration artifact. |
def get_crop_size(crop_w, crop_h, image_w, image_h):
scale1 = float(crop_w) / float(image_w)
scale2 = float(crop_h) / float(image_h)
scale1_w = crop_w
scale1_h = int(round(image_h * scale1))
scale2_w = int(round(image_w * scale2))
scale2_h = crop_h
if scale1_h > crop_h:
return (scale... | Determines the correct scale size for the image
when img w == crop w and img h > crop h
Use these dimensions
when img h == crop h and img w > crop w
Use these dimensions |
def galprop_gasmap(self, **kwargs):
kwargs_copy = self.base_dict.copy()
kwargs_copy.update(**kwargs)
self._replace_none(kwargs_copy)
localpath = NameFactory.galprop_gasmap_format.format(**kwargs_copy)
if kwargs.get('fullpath', False):
return self.fullpath(loca... | return the file name for Galprop input gasmaps |
def reload(self):
self.restarted_adapter = False
self.data.clear()
if conf.use_winpcapy:
from scapy.arch.pcapdnet import load_winpcapy
load_winpcapy()
self.load()
conf.iface = get_working_if() | Reload interface list |
def to_json(self):
result = {
'sys': {}
}
for k, v in self.sys.items():
if k in ['space', 'content_type', 'created_by',
'updated_by', 'published_by']:
v = v.to_json()
if k in ['created_at', 'updated_at', 'deleted_at',
... | Returns the JSON representation of the resource. |
def deleteVertex(self, document, waitForSync = False) :
url = "%s/vertex/%s" % (self.URL, document._id)
r = self.connection.session.delete(url, params = {'waitForSync' : waitForSync})
data = r.json()
if r.status_code == 200 or r.status_code == 202 :
return True
raise ... | deletes a vertex from the graph as well as all linked edges |
def add_middleware(self, middleware, *, before=None, after=None):
assert not (before and after), \
"provide either 'before' or 'after', but not both"
if before or after:
for i, m in enumerate(self.middleware):
if isinstance(m, before or after):
... | Add a middleware object to this broker. The middleware is
appended to the end of the middleware list by default.
You can specify another middleware (by class) as a reference
point for where the new middleware should be added.
Parameters:
middleware(Middleware): The middlewar... |
def new(namespace, name, protected=False,
attributes=dict(), api_url=fapi.PROD_API_ROOT):
r = fapi.create_workspace(namespace, name, protected, attributes, api_url)
fapi._check_response_code(r, 201)
return Workspace(namespace, name, api_url) | Create a new FireCloud workspace.
Returns:
Workspace: A new FireCloud workspace
Raises:
FireCloudServerError: API call failed. |
def _insert_new_layers(self, new_layers, start_node_id, end_node_id):
new_node_id = self._add_node(deepcopy(self.node_list[end_node_id]))
temp_output_id = new_node_id
for layer in new_layers[:-1]:
temp_output_id = self.add_layer(layer, temp_output_id)
self._add_edge(new_layer... | Insert the new_layers after the node with start_node_id. |
def bleu_score(predictions, labels, **unused_kwargs):
outputs = tf.to_int32(tf.argmax(predictions, axis=-1))
outputs = tf.squeeze(outputs, axis=[-1, -2])
labels = tf.squeeze(labels, axis=[-1, -2])
bleu = tf.py_func(compute_bleu, (labels, outputs), tf.float32)
return bleu, tf.constant(1.0) | BLEU score computation between labels and predictions.
An approximate BLEU scoring method since we do not glue word pieces or
decode the ids and tokenize the output. By default, we use ngram order of 4
and use brevity penalty. Also, this does not have beam search.
Args:
predictions: tensor, model predicti... |
def _corrupt(self, data, dpos):
ws = list(self._BLK_BE.unpack_from(data, dpos))
for t in range(16, 80):
tmp = ws[(t - 3) & 15] ^ ws[(t - 8) & 15] ^ ws[(t - 14) & 15] ^ ws[(t - 16) & 15]
ws[t & 15] = ((tmp << 1) | (tmp >> (32 - 1))) & 0xFFFFFFFF
self._BLK_LE.pack_into(data... | Corruption from SHA1 core. |
def incident(self, name, owner=None, **kwargs):
return Incident(self.tcex, name, owner=owner, **kwargs) | Create the Incident TI object.
Args:
owner:
name:
**kwargs:
Return: |
def open(self):
self._connection = sqlite3.connect(self._dbname)
self._cursor = self._connection.cursor()
self._session_info = SessionInfoTable(self._connection, self._cursor)
self._reports = ReportsTable(self._connection, self._cursor) | open the database |
def run(self):
self.init_run()
if self.debug: self.dump("AfterInit: ")
while self.step():
pass | Runs the simulation. |
def fencekml(self, layername):
if layername.startswith('"') and layername.endswith('"'):
layername = layername[1:-1]
for layer in self.allayers:
if layer.key == layername:
self.fenceloader.clear()
if len(layer.points) < 3:
retur... | set a layer as the geofence |
def override(self, obj):
for field in obj.__class__.export_fields:
setattr(self, field, getattr(obj, field)) | Overrides the plain fields of the dashboard. |
def _spec_to_globs(address_mapper, specs):
patterns = set()
for spec in specs:
patterns.update(spec.make_glob_patterns(address_mapper))
return PathGlobs(include=patterns, exclude=address_mapper.build_ignore_patterns) | Given a Specs object, return a PathGlobs object for the build files that it matches. |
def edit(self, state):
if state and state.lower() == 'active':
data = dumps({'state': state.lower()})
json = self._json(self._patch(self._api, data=data))
self._update_attributes(json)
return self | Edit the user's membership.
:param str state: (required), the state the membership should be in.
Only accepts ``"active"``.
:returns: itself |
def open_netcdf_writer(self, flatten=False, isolate=False, timeaxis=1):
self._netcdf_writer = netcdftools.NetCDFInterface(
flatten=bool(flatten),
isolate=bool(isolate),
timeaxis=int(timeaxis)) | Prepare a new |NetCDFInterface| object for writing data. |
def vmotion_disable(host, username, password, protocol=None, port=None, host_names=None):
service_instance = salt.utils.vmware.get_service_instance(host=host,
username=username,
password=passw... | Disable vMotion for a given host or list of host_names.
host
The location of the host.
username
The username used to login to the host, such as ``root``.
password
The password used to login to the host.
protocol
Optionally set to alternate protocol if the host is not ... |
def _parse_coroutine(self):
while True:
d = yield
if d == int2byte(0):
pass
elif d == IAC:
d2 = yield
if d2 == IAC:
self.received_data(d2)
elif d2 in (NOP, DM, BRK, IP, AO, AYT, EC, EL, GA):
... | Parser state machine.
Every 'yield' expression returns the next byte. |
def frmnam(frcode, lenout=_default_len_out):
frcode = ctypes.c_int(frcode)
lenout = ctypes.c_int(lenout)
frname = stypes.stringToCharP(lenout)
libspice.frmnam_c(frcode, lenout, frname)
return stypes.toPythonString(frname) | Retrieve the name of a reference frame associated with a SPICE ID code.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/frmnam_c.html
:param frcode: an integer code for a reference frame
:type frcode: int
:param lenout: Maximum length of output string.
:type lenout: int
:return: the na... |
def rename_command(source, destination):
source_ep, source_path = source
dest_ep, dest_path = destination
if source_ep != dest_ep:
raise click.UsageError(
(
"rename requires that the source and dest "
"endpoints are the same, {} != {}"
).format... | Executor for `globus rename` |
def label_set(self):
label_set = list()
for class_ in self.class_set:
samples_in_class = self.sample_ids_in_class(class_)
label_set.append(self.labels[samples_in_class[0]])
return label_set | Set of labels in the dataset corresponding to class_set. |
def _format_object(obj, format_type=None):
if json_api_settings.FORMAT_KEYS is not None:
return format_keys(obj, format_type)
return format_field_names(obj, format_type) | Depending on settings calls either `format_keys` or `format_field_names` |
def train_model(model_folder):
os.chdir(model_folder)
training = generate_training_command(model_folder)
if training is None:
return -1
logging.info(training)
os.chdir(model_folder)
os.system(training) | Train the model in ``model_folder``. |
def utime_delta(days=0, hours=0, minutes=0, seconds=0):
return (days * DAY) + (hours * HOUR) + (minutes * MINUTE) + (seconds * SECOND) | Gets time delta in microseconds.
Note: Do NOT use this function without keyword arguments.
It will become much-much harder to add extra time ranges later if positional arguments are used. |
def on_to_position(self, speed, position, brake=True, block=True):
speed = self._speed_native_units(speed)
self.speed_sp = int(round(speed))
self.position_sp = position
self._set_brake(brake)
self.run_to_abs_pos()
if block:
self.wait_until('running', timeout=W... | Rotate the motor at ``speed`` to ``position``
``speed`` can be a percentage or a :class:`ev3dev2.motor.SpeedValue`
object, enabling use of other units. |
async def reconnect(self):
_LOGGER.debug('starting Connection.reconnect')
await self._connect()
while self._closed:
await self._retry_connection()
_LOGGER.debug('ending Connection.reconnect') | Reconnect to the modem. |
def get_cursor_vertical_diff(self):
if self.in_get_cursor_diff:
self.another_sigwinch = True
return 0
cursor_dy = 0
while True:
self.in_get_cursor_diff = True
self.another_sigwinch = False
cursor_dy += self._get_cursor_vertical_diff_onc... | Returns how far down the cursor moved since the last render.
Note:
If another get_cursor_vertical_diff call is already in progress,
immediately returns zero. (This situation is likely if
get_cursor_vertical_diff is called from a SIGWINCH signal
handler, since sig... |
def make_trajectory(first, filename, restart=False):
mode = 'w'
if restart:
mode = 'a'
return Trajectory(first, filename, mode) | Factory function to easily create a trajectory object |
def read_passive_target(self, card_baud=PN532_MIFARE_ISO14443A, timeout_sec=1):
response = self.call_function(PN532_COMMAND_INLISTPASSIVETARGET,
params=[0x01, card_baud],
response_length=17)
if response is None:
retu... | Wait for a MiFare card to be available and return its UID when found.
Will wait up to timeout_sec seconds and return None if no card is found,
otherwise a bytearray with the UID of the found card is returned. |
def _get_dopants(substitutions, num_dopants, match_oxi_sign):
n_type = [pred for pred in substitutions
if pred['dopant_species'].oxi_state >
pred['original_species'].oxi_state
and (not match_oxi_sign or
np.sign(pred['dopant_species'].oxi_state) ==
... | Utility method to get n- and p-type dopants from a list of substitutions. |
def create_mapping(self, mapped_class, configuration=None):
cfg = self.__configuration.copy()
if not configuration is None:
cfg.update(configuration)
provided_ifcs = provided_by(object.__new__(mapped_class))
if IMemberResource in provided_ifcs:
base_data_element_c... | Creates a new mapping for the given mapped class and representer
configuration.
:param configuration: configuration for the new data element class.
:type configuration: :class:`RepresenterConfiguration`
:returns: newly created instance of :class:`Mapping` |
def QueryService(svc_name):
hscm = win32service.OpenSCManager(None, None,
win32service.SC_MANAGER_ALL_ACCESS)
result = None
try:
hs = win32serviceutil.SmartOpenService(hscm, svc_name,
win32service.SERVICE_ALL_ACCESS)
result = w... | Query service and get its config. |
def evaluate_forward(
distribution,
x_data,
parameters=None,
cache=None,
):
assert len(x_data) == len(distribution), (
"distribution %s is not of length %d" % (distribution, len(x_data)))
assert hasattr(distribution, "_cdf"), (
"distribution require the `_cdf` met... | Evaluate forward Rosenblatt transformation.
Args:
distribution (Dist):
Distribution to evaluate.
x_data (numpy.ndarray):
Locations for where evaluate forward transformation at.
parameters (:py:data:typing.Any):
Collection of parameters to override the def... |
def _ready_gzip_fastq(in_files, data, require_bgzip=False):
all_gzipped = all([not x or x.endswith(".gz") for x in in_files])
if require_bgzip and all_gzipped:
all_gzipped = all([not x or not _check_gzipped_input(x, data)[0] for x in in_files])
needs_convert = dd.get_quality_format(data).lower() == ... | Check if we have gzipped fastq and don't need format conversion or splitting.
Avoid forcing bgzip if we don't need indexed files. |
def _start_dev_proc(self,
device_os,
device_config):
log.info('Starting the child process for %s', device_os)
dos = NapalmLogsDeviceProc(device_os,
self.opts,
device_config)
os_p... | Start the device worker process. |
def soft_break(self, el, text):
if el.name == 'p' and el.namespace and el.namespace == self.namespaces["text"]:
text.append('\n') | Apply soft break if needed. |
def add_size_scaled_points(
self, longitude, latitude, data, shape='o',
logplot=False, alpha=1.0, colour='b', smin=2.0, sscale=2.0,
overlay=False):
if logplot:
data = np.log10(data.copy())
x, y, = self.m(longitude, latitude)
self.m.scatter(x, y,
... | Plots a set of points with size scaled according to the data
:param bool logplot:
Choose to scale according to the logarithm (base 10) of the data
:param float smin:
Minimum scale size
:param float sscale:
Scaling factor |
def get_agents(self, addr=True, agent_cls=None, as_coro=False):
return self.env.get_agents(addr=addr, agent_cls=agent_cls) | Get agents from the managed environment.
This is a managing function for the
:py:meth:`~creamas.environment.Environment.get_agents`. Returned
agent list excludes the environment's manager agent (this agent) by
design. |
def check_local() -> None:
to_check = ['./replay', './replay/toDo', './replay/archive']
for i in to_check:
if not os.path.exists(i):
os.makedirs(i) | Verify required directories exist.
This function checks the current working directory to ensure that
the required directories exist. If they do not exist, it will create them. |
def to_utf8(value):
if isinstance(value, unicode):
return value.encode('utf-8')
assert isinstance(value, str)
return value | Returns a string encoded using UTF-8.
This function comes from `Tornado`_.
:param value:
A unicode or string to be encoded.
:returns:
The encoded string. |
def set_mypy_args(self, mypy_args=None):
if mypy_args is None:
self.mypy_args = None
else:
self.mypy_errs = []
self.mypy_args = list(mypy_args)
if not any(arg.startswith("--python-version") for arg in mypy_args):
self.mypy_args += [
... | Set MyPy arguments. |
def final_spin_from_f0_tau(f0, tau, l=2, m=2):
f0, tau, input_is_array = ensurearray(f0, tau)
a, b, c = _berti_spin_constants[l,m]
origshape = f0.shape
f0 = f0.ravel()
tau = tau.ravel()
spins = numpy.zeros(f0.size)
for ii in range(spins.size):
Q = f0[ii] * tau[ii] * numpy.pi
... | Returns the final spin based on the given frequency and damping time.
.. note::
Currently, only l = m = 2 is supported. Any other indices will raise
a ``KeyError``.
Parameters
----------
f0 : float or array
Frequency of the QNM (in Hz).
tau : float or array
Damping ... |
def create(self, set):
target_url = self.client.get_url('SET', 'POST', 'create')
r = self.client.request('POST', target_url, json=set._serialize())
return set._deserialize(r.json(), self) | Creates a new Set. |
def backward(self, out_grads=None, is_train=True):
if out_grads is None:
out_grads = []
elif isinstance(out_grads, NDArray):
out_grads = [out_grads]
elif isinstance(out_grads, dict):
out_grads = [out_grads[k] for k in self._symbol.list_outputs()]
for o... | Do backward pass to get the gradient of arguments.
Parameters
----------
out_grads : NDArray or list of NDArray or dict of str to NDArray, optional
Gradient on the outputs to be propagated back.
This parameter is only needed when bind is called
on outputs tha... |
def age(self):
if self.date_range is None:
return
dob = self.date_range.middle
today = datetime.date.today()
if (today.month, today.day) < (dob.month, dob.day):
return today.year - dob.year - 1
else:
return today.year - dob.year | int, the estimated age of the person.
Note that a DOB object is based on a date-range and the exact date is
usually unknown, so for age calculation the middle of the range is
assumed to be the real date-of-birth. |
def wait_ready(self, name, timeout=5.0, sleep_interval=0.2):
end = time() + timeout
while True:
try:
info = self.bucket_info(name).value
for node in info['nodes']:
if node['status'] != 'healthy':
raise NotReadyError.... | Wait for a newly created bucket to be ready.
:param string name: the name to wait for
:param seconds timeout: the maximum amount of time to wait
:param seconds sleep_interval: the amount of time to sleep
between each probe
:raise: :exc:`.CouchbaseError` on internal HTTP erro... |
def load(path=None, root=None, db=None, load_user=True):
"Load all of the config files. "
config = load_config(path, load_user=load_user)
remotes = load_remotes(path, load_user=load_user)
if remotes:
if not 'remotes' in config:
config.remotes = AttrDict()
for k, v in remotes.... | Load all of the config files. |
def uninstall_packages():
p = server_state('packages_installed')
if p: installed = set(p)
else: return
env.uninstalled_packages[env.host] = []
packages = set(get_packages())
uninstall = installed - packages
if uninstall and env.verbosity:
print env.host,'UNINSTALLING HOST PACKAGES'
... | Uninstall unwanted packages |
def _is_ctype(self, ctype):
if not self.valid:
return False
mime = self.content_type
return self.ContentMimetypes.get(mime) == ctype | Return True iff content is valid and of the given type. |
def protocol_names(self):
l = self.protocols()
retval = [str(k.name) for k in l]
return retval | Returns all registered protocol names |
def get_file(fname, datapath=datapath):
datapath = pathlib.Path(datapath)
datapath.mkdir(parents=True, exist_ok=True)
dlfile = datapath / fname
if not dlfile.exists():
print("Attempting to download file {} from {} to {}.".
format(fname, webloc, datapath))
try:
d... | Return path of an example data file
Return the full path to an example data file name.
If the file does not exist in the `datapath` directory,
tries to download it from the ODTbrain GitHub repository. |
def set_lim(min, max, name):
scale = _context['scales'][_get_attribute_dimension(name)]
scale.min = min
scale.max = max
return scale | Set the domain bounds of the scale associated with the provided key.
Parameters
----------
name: hashable
Any variable that can be used as a key for a dictionary
Raises
------
KeyError
When no context figure is associated with the provided key. |
def _read_snc(snc_file):
snc_raw_dtype = dtype([('sampleStamp', '<i'),
('sampleTime', '<q')])
with snc_file.open('rb') as f:
f.seek(352)
snc_raw = fromfile(f, dtype=snc_raw_dtype)
sampleStamp = snc_raw['sampleStamp']
sampleTime = asarray([_filetime_to_dt(x) for... | Read Synchronization File and return sample stamp and time
Returns
-------
sampleStamp : list of int
Sample number from start of study
sampleTime : list of datetime.datetime
File time representation of sampleStamp
Notes
-----
The synchronization file is used to calculate a ... |
def make_client(instance):
neutron_client = utils.get_client_class(
API_NAME,
instance._api_version[API_NAME],
API_VERSIONS,
)
instance.initialize()
url = instance._url
url = url.rstrip("/")
client = neutron_client(username=instance._username,
... | Returns a neutron client. |
def make_parent_dirs(path, mode=0o777):
parent = os.path.dirname(path)
if parent:
make_all_dirs(parent, mode)
return path | Ensure parent directories of a file are created as needed. |
def create_dir_rec(path: Path):
if not path.exists():
Path.mkdir(path, parents=True, exist_ok=True) | Create a folder recursively.
:param path: path
:type path: ~pathlib.Path |
def main():
logging.basicConfig(format=LOGGING_FORMAT)
log = logging.getLogger(__name__)
parser = argparse.ArgumentParser()
add_debug(parser)
add_app(parser)
add_env(parser)
add_properties(parser)
args = parser.parse_args()
logging.getLogger(__package__.split(".")[0]).setLevel(args.d... | Send Slack notification to a configured channel. |
def merge_rdf_list(rdf_list):
if isinstance(rdf_list, list):
rdf_list = rdf_list[0]
rtn_list = []
item = rdf_list
if item.get('rdf_rest') and item.get('rdf_rest',[1])[0] != 'rdf_nil':
rtn_list += merge_rdf_list(item['rdf_rest'][0])
if item.get('rdf_first'):
rtn_list += item['... | takes an rdf list and merges it into a python list
args:
rdf_list: the RdfDataset object with the list values
returns:
list of values |
def inverse_transform(self, Y, columns=None):
try:
if not hasattr(self, "data_pca"):
try:
if Y.shape[1] != self.data_nu.shape[1]:
raise ValueError
except IndexError:
raise ValueError
if co... | Transform input data `Y` to ambient data space defined by `self.data`
Takes data in the same reduced space as `self.data_nu` and transforms
it to be in the same ambient space as `self.data`.
Parameters
----------
Y : array-like, shape=[n_samples_y, n_pca]
n_features... |
def encode_numpy(array):
return {'data' : base64.b64encode(array.data).decode('utf8'),
'type' : array.dtype.name,
'shape': array.shape} | Encode a numpy array as a base64 encoded string, to be JSON serialized.
:return: a dictionary containing the fields:
- *data*: the base64 string
- *type*: the array type
- *shape*: the array shape |
def frameify(self, state, data):
try:
yield state.recv_buf + data
except FrameSwitch:
pass
finally:
state.recv_buf = '' | Yield the data as a single frame. |
def from_dict(cls, d):
def _from_dict(_d):
return AdfKey.from_dict(_d) if _d is not None else None
operation = d.get("operation")
title = d.get("title")
basis_set = _from_dict(d.get("basis_set"))
xc = _from_dict(d.get("xc"))
units = _from_dict(d.get("units"))
... | Construct a MSONable AdfTask object from the JSON dict.
Parameters
----------
d : dict
A dict of saved attributes.
Returns
-------
task : AdfTask
An AdfTask object recovered from the JSON dict ``d``. |
def _compile(self, source, filename):
if filename == '<template>':
filename = 'dbt-{}'.format(
codecs.encode(os.urandom(12), 'hex').decode('ascii')
)
filename = jinja2._compat.encode_filename(filename)
linecache.cache[filename] = (
... | Override jinja's compilation to stash the rendered source inside
the python linecache for debugging. |
def _get_opus_maximum(self):
label = "opus maximum"  # string literal missing in the source; assumed from context
opmax = self.session.get_resource(
BASE_URI_TYPES % "opmax",
self.session.get_class(surf.ns.ECRM['E55_Type'])
)
if opmax.is_present():
return opmax
else:
opmax.rdfs_label.append(Literal(label, "e... | Instantiate an opus maximum type. |
def wsgi_proxyfix(factory=None):
def create_wsgi(app, **kwargs):
wsgi_app = factory(app, **kwargs) if factory else app.wsgi_app
if app.config.get('WSGI_PROXIES'):
return ProxyFix(wsgi_app, num_proxies=app.config['WSGI_PROXIES'])
return wsgi_app
return create_wsgi | Fix ``REMOTE_ADDR`` based on ``X-Forwarded-For`` headers.
.. note::
You must set ``WSGI_PROXIES`` to the correct number of proxies,
otherwise your application is susceptible to malicious attacks.
.. versionadded:: 1.0.0 |
def fit(self, train_set, test_set):
with tf.Graph().as_default(), tf.Session() as self.tf_session:
self.build_model()
tf.global_variables_initializer().run()
third = self.num_epochs // 3
for i in range(self.num_epochs):
lr_decay = self.lr_decay ** ... | Fit the model to the given data.
:param train_set: training data
:param test_set: test data |
def write(self, filename):
if not filename.endswith(('.mid', '.midi', '.MID', '.MIDI')):
filename = filename + '.mid'
pm = self.to_pretty_midi()
pm.write(filename) | Write the multitrack pianoroll to a MIDI file.
Parameters
----------
filename : str
The name of the MIDI file to which the multitrack pianoroll is
written. |
def _run_parallel_multiprocess(self):
_log.debug("run.parallel.multiprocess.start")
processes = []
ProcRunner.instance = self
for i in range(self._ncores):
self._status.running(i)
proc = multiprocessing.Process(target=ProcRunner.run, args=(i,))
proc.st... | Run processes from queue |
def google_storage_url(self, sat):
filename = sat['scene'] + '.tar.bz'
return url_builder([self.google, sat['sat'], sat['path'], sat['row'], filename]) | Returns a google storage url that contains the scene provided.
:param sat:
Expects an object created by scene_interpreter method
:type sat:
dict
:returns:
(String) The URL to a google storage file |
def stringify(data):
ret = []
for item in data:
if six.PY2 and isinstance(item, str):
item = salt.utils.stringutils.to_unicode(item)
elif not isinstance(item, six.string_types):
item = six.text_type(item)
ret.append(item)
return ret | Given an iterable, returns its items as a list, with any non-string items
converted to unicode strings. |
def get_eventhub_host(self):
for protocol in self.service.settings.data['publish']['protocol_details']:
if protocol['protocol'] == 'grpc':
return protocol['uri'][0:protocol['uri'].index(':')] | returns the publish grpc endpoint for ingestion. |
def edge_cost(self, node_a, node_b):
cost = float('inf')
node_object_a = self.get_node(node_a)
for edge_id in node_object_a['edges']:
edge = self.get_edge(edge_id)
tpl = (node_a, node_b)
if edge['vertices'] == tpl:
cost = edge['cost']
... | Returns the cost of traversing the edge that connects node_a to node_b.
Returns +inf if no such edge exists. |
def get_default_property_values(self, classname):
schema_element = self.get_element_by_class_name(classname)
result = {
property_name: property_descriptor.default
for property_name, property_descriptor in six.iteritems(schema_element.properties)
}
if schema_elemen... | Return a dict with default values for all properties declared on this class. |
def after_loop(self, coro):
if not (inspect.iscoroutinefunction(coro) or inspect.isawaitable(coro)):
raise TypeError('Expected coroutine or awaitable, received {0.__name__!r}.'.format(type(coro)))
self._after_loop = coro | A function that also acts as a decorator to register a coroutine to be
called after the loop finished running.
Parameters
------------
coro: :term:`py:awaitable`
The coroutine to register after the loop finishes.
Raises
-------
TypeError
... |
def add_serviceListener(self, type, listener):
self.remove_service_listener(listener)
self.browsers.append(ServiceBrowser(self, type, listener)) | Adds a listener for a particular service type. This object
will then have its update_record method called when information
arrives for that type. |
def getRequiredNodes(self):
return {nodeShape:len(self.nodeReservations[nodeShape]) for nodeShape in self.nodeShapes} | Returns a dict from node shape to number of nodes required to run the packed jobs. |
def get_status(self):
status = self.get('status')
if status == Report.PASSED:
for sr_name in self._sub_reports:
sr = self._sub_reports[sr_name]
sr_status = sr.get_status()
reason = sr.get('reason')
if sr_status == Report.ERROR:
... | Get the status of the report and its sub-reports.
:rtype: str
:return: report status ('passed', 'failed' or 'error') |
def _lease_owned(self, lease, current_uuid_path):
prev_uuid_path, prev_uuid = lease.metadata
with open(current_uuid_path) as f:
current_uuid = f.read()
return \
current_uuid_path == prev_uuid_path and \
prev_uuid == current_uuid | Checks if the given lease is owned by the prefix whose uuid is in
the given path
Note:
The prefix must be also in the same path it was when it took the
lease
Args:
path (str): Path to the lease
current_uuid_path (str): Path to the uuid to check o... |
def build(obj: Any, *applicators: Callable[..., Any]) -> Any:
if isinstance(obj, BaseChain):
return pipe(obj, copy(), *applicators)
else:
return pipe(obj, *applicators) | Run the provided object through the series of applicator functions.
If ``obj`` is an instance of :class:`~eth.chains.base.BaseChain` the
applicators will be run on a copy of the chain and thus will not mutate the
provided chain instance. |
def _has_sj_index(ref_file):
return (file_exists(os.path.join(ref_file, "sjdbInfo.txt")) and
(file_exists(os.path.join(ref_file, "transcriptInfo.tab")))) | this file won't exist if we can do on the fly splice junction indexing |
def recover(self, requeue=False, cb=None):
args = Writer()
args.write_bit(requeue)
self._recover_cb.append(cb)
self.send_frame(MethodFrame(self.channel_id, 60, 110, args))
self.channel.add_synchronous_cb(self._recv_recover_ok) | Ask server to redeliver all unacknowledged messages. |
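The rows above follow a simple two-column schema (`code`, `docstring`). As a minimal sketch of how a dump like this could be consumed, the snippet below loads and previews such pairs with the `datasets` library; the repository id `user/code-docstring-pairs` is hypothetical, since the source page does not name the dataset.

```python
# Minimal sketch: loading a code/docstring pair dataset with the
# Hugging Face `datasets` library. The repository id below is
# hypothetical; substitute the real id from the hosting page.
from datasets import load_dataset

ds = load_dataset("user/code-docstring-pairs", split="train")
for row in ds.select(range(3)):
    # Each row mirrors the two-column schema shown above.
    print(row["docstring"].splitlines()[0])  # first line of the docstring
    print(row["code"][:60] + "...")          # short preview of the paired code
```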