code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def call_ping(*args, **kwargs):
errors = dict()
for dev_id, dev_status in call_blink().items():
if not dev_status['result']:
errors[dev_id] = False
return errors or True | Ping the lamps by issuing a short inversion blink to all available devices.
CLI Example:
.. code-block:: bash
salt '*' hue.ping |
def do_chunked_gzip(infh, outfh, filename):
import gzip
gzfh = gzip.GzipFile('rawlogs', mode='wb', fileobj=outfh)
if infh.closed:
infh = open(infh.name, 'r')
else:
infh.seek(0)
readsize = 0
sys.stdout.write('Gzipping {0}: '.format(filename))
if os.stat(infh.name).st_size:
... | A memory-friendly way of compressing the data. |
def generate_notes(notes):
new_notes = []
for note in notes:
tmp_note = {}
for note_item in notes[note]:
tmp_note[note_item] = notes[note][note_item]
new_notes.append(tmp_note)
return new_notes | Generate the notes list
:param dict notes: A dict of converted notes from the old topology
:return: List of notes for the topology
:rtype: list |
def delete(self, expected_value=None, return_values=None):
return self.table.layer2.delete_item(self, expected_value,
return_values) | Delete the item from DynamoDB.
:type expected_value: dict
:param expected_value: A dictionary of name/value pairs that you expect.
This dictionary should have name/value pairs where the name
is the name of the attribute and the value is either the value
you are expec... |
def bulk_update_resourcedata(scenario_ids, resource_scenarios,**kwargs):
user_id = kwargs.get('user_id')
res = None
res = {}
net_ids = db.DBSession.query(Scenario.network_id).filter(Scenario.id.in_(scenario_ids)).all()
if len(set(net_ids)) != 1:
raise HydraError("Scenario IDS are not in the ... | Update the data associated with a list of scenarios. |
async def export_wallet(handle: int,
export_config_json: str) -> None:
logger = logging.getLogger(__name__)
logger.debug("export_wallet: >>> handle: %r, export_config_json: %r",
handle,
export_config_json)
if not hasattr(export_wallet, "cb"):
... | Exports opened wallet to the file.
:param handle: wallet handle returned by indy_open_wallet.
:param export_config_json: JSON containing settings for input operation.
{
"path": path of the file that contains exported wallet content
"key": string, Key or passphrase used for wallet exp... |
def get_initial_broks_from_satellites(self):
for satellites in [self.conf.brokers, self.conf.schedulers,
self.conf.pollers, self.conf.reactionners, self.conf.receivers]:
for satellite in satellites:
if not satellite.reachable:
continue
... | Get initial broks from my internal satellite links
:return: None |
def record_span(self, span):
if instana.singletons.agent.can_send() or "INSTANA_TEST" in os.environ:
json_span = None
if span.operation_name in self.registered_spans:
json_span = self.build_registered_span(span)
else:
            json_span = self.build_sdk... | Convert the passed BasicSpan into a JsonSpan and
add it to the span queue |
def revisions_diff(self, doc_id, *revisions):
url = '/'.join((self.database_url, '_revs_diff'))
data = {doc_id: list(revisions)}
resp = self.r_session.post(
url,
headers={'Content-Type': 'application/json'},
data=json.dumps(data, cls=self.client.encoder)
... | Returns the differences in the current remote database for the specified
document id and specified list of revision values.
:param str doc_id: Document id to check for revision differences
against.
:param list revisions: List of document revisions values to check
against... |
def _get_verts_and_connect(self, paths):
verts = np.vstack(paths)
gaps = np.add.accumulate(np.array([len(x) for x in paths])) - 1
connect = np.ones(gaps[-1], dtype=bool)
connect[gaps[:-1]] = False
return verts, connect | retrieve vertices and connects from given paths-list |
def uid_something_colon(self, node):
node.op_pos = [
NodeWithPosition(node.uid, (node.first_line, node.first_col))
]
position = (node.body[0].first_line, node.body[0].first_col)
last, first = self.operators[':'].find_previous(position)
node.op_pos.append(NodeWithPosit... | Creates op_pos for node from uid to colon |
def _disjoint_qubits(op1: ops.Operation, op2: ops.Operation) -> bool:
    return not set(op1.qubits) & set(op2.qubits) | Returns true only if the operations have no qubits in common. |
def check_namespace(namespace_id):
if type(namespace_id) not in [str, unicode]:
return False
if not is_namespace_valid(namespace_id):
return False
return True | Verify that a namespace ID is well-formed
>>> check_namespace(123)
False
>>> check_namespace(None)
False
>>> check_namespace('')
False
>>> check_namespace('abcd')
True
>>> check_namespace('Abcd')
False
>>> check_namespace('a+bcd')
False
>>> check_namespace('.abcd')
... |
def predict_task(self, X, t=0, break_ties="random", **kwargs):
Y_tp = self.predict_task_proba(X, t=t, **kwargs)
Y_tph = self._break_ties(Y_tp, break_ties)
return Y_tph | Predicts int labels for an input X on task t
Args:
X: The input for the predict_task_proba method
t: The task index to predict
Returns:
An n-dim tensor of int predictions for the specified task |
def get_historical_minute_data(self, ticker: str):
start = self._start
stop = self._stop
if len(stop) > 4:
stop = stop[:4]
if len(start) > 4:
start = start[:4]
for year in range(int(start), int(stop) + 1):
beg_time = ('%s0101000000' % year)
... | Request historical 5 minute data from DTN. |
def main(argv=None):
try:
colorama.init()
if argv is None:
argv = sys.argv[1:]
_main(argv)
except RuntimeError as e:
print(colorama.Fore.RED + 'ERROR: ' +
str(e) + colorama.Style.RESET_ALL)
sys.exit(1)
else:
sys.exit(0) | Main entry point when the user runs the `trytravis` command. |
def parse_text_urls(mesg):
rval = []
loc = 0
for match in URLRE.finditer(mesg):
if loc < match.start():
rval.append(Chunk(mesg[loc:match.start()], None))
email = match.group("email")
if email and "mailto" not in email:
mailto = "mailto:{}".format(email)
... | Parse a block of text, splitting it into its url and non-url
components. |
def retrieve(self, *args, **kwargs):
lookup, key = self._lookup(*args, **kwargs)
        return lookup[key] | Retrieve the permission function for the provided things. |
def create_contour_metadata(contour_path):
metadata = {
'title': tr('Earthquake Contour'),
'layer_purpose': layer_purpose_earthquake_contour['key'],
'layer_geometry': layer_geometry_line['key'],
'layer_mode': layer_mode_classified['key'],
'inasafe_fields': {}
}
for co... | Create metadata file for contour layer.
:param contour_path: Path where the contour is located.
:type contour_path: basestring |
def locate(self, point, _verify=True):
r
if _verify:
if self._dimension != 2:
raise NotImplementedError("Only 2D surfaces supported.")
if point.shape != (self._dimension, 1):
point_dimensions = " x ".join(
str(dimension) for dim... | r"""Find a point on the current surface.
Solves for :math:`s` and :math:`t` in :math:`B(s, t) = p`.
This method acts as a (partial) inverse to :meth:`evaluate_cartesian`.
.. warning::
A unique solution is only guaranteed if the current surface is
valid. This code assume... |
def axis_to_data_points(ax, points_axis):
axis_to_data = ax.transAxes + ax.transData.inverted()
return axis_to_data.transform(points_axis) | Map points in axis coordinates to data coordinates.
Uses matplotlib.transform.
Parameters
----------
ax : matplotlib.axis
Axis object from matplotlib.
points_axis : np.array
Points in axis coordinates. |
def search(self, q, **kw):
url = '{base_url}/search/{stream}'.format(**vars(self))
params = {
'q': q,
}
params.update(self.params)
params.update(kw)
response = self.session.get(url, params=params)
response.raise_for_status()
return response.jso... | Search Gnip for given query, returning deserialized response. |
def coerce(self, value):
if isinstance(value, dict):
value = [value]
if not isiterable_notstring(value):
value = [value]
return [coerce_single_instance(self.lookup_field, v) for v in value] | Convert from whatever is given to a list of scalars for the lookup_field. |
def replace_u_start_day(day):
day = day.lstrip('-')
if day == 'uu' or day == '0u':
return '01'
if day == 'u0':
return '10'
return day.replace('u', '0') | Find the earliest legitimate day. |
def generate_modules_cache(self, modules, underlined=None,
task_handle=taskhandle.NullTaskHandle()):
job_set = task_handle.create_jobset(
'Generatig autoimport cache for modules', len(modules))
for modname in modules:
job_set.started_job('Working on... | Generate global name cache for modules listed in `modules` |
def start(name, quiet=False, path=None):
data = _do_names(name, 'start', path=path)
if data and not quiet:
__jid_event__.fire_event(
{'data': data, 'outputter': 'lxc_start'}, 'progress')
return data | Start the named container.
path
path to the container parent
default: /var/lib/lxc (system default)
.. versionadded:: 2015.8.0
.. code-block:: bash
salt-run lxc.start name |
def _set_attributes(self):
config = obj(self._config_dict)
for k, v in self._config_dict.items():
setattr(self, k, getattr(config, k)) | Recursively transforms config dictionaries into instance attrs to make
for easy dot attribute access instead of dictionary access. |
def value(self, new_value):
if self.unit != units.Undefined and new_value.unit != self.unit:
raise AttributeError("%s must be in %s" % (
self.__class__, self.unit))
self._value = new_value | Set the value of this measurement.
Raises:
AttributeError: if the new value isn't of the correct units. |
def setup_logging(namespace):
loglevel = {
0: logging.ERROR,
1: logging.WARNING,
2: logging.INFO,
3: logging.DEBUG,
}.get(namespace.verbosity, logging.DEBUG)
if namespace.verbosity > 1:
logformat = '%(levelname)s csvpandas %(lineno)s %(message)s'
else:
log... | setup global logging |
def interval(coro, interval=1, times=None, loop=None):
assert_corofunction(coro=coro)
times = int(times or 0) or float('inf')
@asyncio.coroutine
def schedule(times, *args, **kw):
while times > 0:
times -= 1
yield from coro(*args, **kw)
yield from asyncio.sleep... | Schedules the execution of a coroutine function every `x` amount of
seconds.
The function returns an `asyncio.Task`, which implements also an
`asyncio.Future` interface, allowing the user to cancel the execution
cycle.
This function can be used as decorator.
Arguments:
coro (coroutine... |
def MessageToDict(message,
including_default_value_fields=False,
preserving_proto_field_name=False):
printer = _Printer(including_default_value_fields,
preserving_proto_field_name)
return printer._MessageToJsonObject(message) | Converts protobuf message to a JSON dictionary.
Args:
message: The protocol buffers message instance to serialize.
including_default_value_fields: If True, singular primitive fields,
repeated fields, and map fields will always be serialized. If
False, only serialize non-empty fields. Singul... |
def get_vendor(self, mac):
data = {
self._SEARCH_F: mac,
self._FORMAT_F: self._VERBOSE_T
}
response = self.__decode_str(self.__call_api(self.__url, data), 'utf-8')
return response | Get vendor company name.
Keyword arguments:
mac -- MAC address or OUI for searching |
def _sinusoid(x, p, L, y):
N = int(len(p)/2)
n = np.linspace(0, N, N+1)
k = n*np.pi/L
func = 0
for n in range(0, N):
func += p[2*n]*np.sin(k[n]*x)+p[2*n+1]*np.cos(k[n]*x)
return func | Return the sinusoid cont func evaluated at input x for the continuum.
Parameters
----------
x: float or np.array
data, input to function
p: ndarray
coefficients of fitting function
L: float
width of x data
y: float or np.array
output data corresponding to input ... |
def make_function_arguments(args,
kwonly,
varargs,
varkwargs,
defaults,
kw_defaults,
annotations):
return ast.arguments(
args=[ast.arg(arg=a... | Make an ast.arguments from the args parsed out of a code object. |
def tokenize(text):
stem = PorterStemmer().stem
tokens = re.finditer('[a-z]+', text.lower())
for offset, match in enumerate(tokens):
unstemmed = match.group(0)
yield {
'stemmed': stem(unstemmed),
'unstemmed': unstemmed,
'offset': offset
... | Yield tokens.
Args:
text (str): The original text.
Yields:
dict: The next token. |
def start_http_server(self, port, host='0.0.0.0', endpoint=None):
if self.should_start_http_server():
pc_start_http_server(port, host, registry=self.registry) | Start an HTTP server for exposing the metrics, if the
`should_start_http_server` function says we should, otherwise just return.
Uses the implementation from `prometheus_client` rather than a Flask app.
:param port: the HTTP port to expose the metrics endpoint on
:param host: the HTTP h... |
def parse_csv_header(line):
units = {}
names = []
for var in line.split(','):
start = var.find('[')
if start < 0:
names.append(str(var))
continue
else:
names.append(str(var[:start]))
end = var.find(']', start)
unitstr = var[start + ... | Parse the CSV header returned by TDS. |
def ledger(self, start=None, end=None):
DEBIT_IN_DB = self._DEBIT_IN_DB()
flip = 1
if self._positive_credit():
flip *= -1
qs = self._entries_range(start=start, end=end)
qs = qs.order_by("transaction__t_stamp", "transaction__tid")
balance = Decimal("0.00")
... | Returns a list of entries for this account.
Ledger returns a sequence of LedgerEntry's matching the criteria
in chronological order. The returned sequence can be boolean-tested
(ie. test that nothing was returned).
If 'start' is given, only entries on or after that datetime are
... |
def find_hass_config():
if "HASSIO_TOKEN" in os.environ:
return "/config"
config_dir = default_hass_config_dir()
if os.path.isdir(config_dir):
return config_dir
raise ValueError(
"Unable to automatically find the location of Home Assistant "
"config. Please pass it in."
... | Try to find HASS config. |
def get_nt_7z_dir ():
try:
import _winreg as winreg
except ImportError:
import winreg
try:
key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r"SOFTWARE\7-Zip")
try:
return winreg.QueryValueEx(key, "Path")[0]
finally:
winreg.CloseKey(key)
exce... | Return 7-Zip directory from registry, or an empty string. |
def get_month(datestring):
convert_written = re.compile(r"jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec", re.IGNORECASE)
month = convert_written.search(datestring)
month_number = None
if month:
month_number = strptime(month.group(), "%b").tm_mon
if month_number < 10:
month_... | Transforms a written month into corresponding month number.
E.g. November -> 11, or May -> 05.
Keyword arguments:
datestring -- a string
Returns:
String, or None if the transformation fails |
def salt_ssh_create_dirs(self):
logger.debug('Creating salt-ssh dirs into: %s', self.settings_dir)
utils.create_dir(os.path.join(self.settings_dir, 'salt'))
utils.create_dir(os.path.join(self.settings_dir, 'pillar'))
utils.create_dir(os.path.join(self.settings_dir, 'etc', 'salt'))
... | Creates the `salt-ssh` required directory structure |
def color(string, status=True, warning=False, bold=True):
attr = []
if status:
attr.append('32')
if warning:
attr.append('31')
if bold:
attr.append('1')
return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), string) | Change text color for the linux terminal, defaults to green.
Set "warning=True" for red. |
def get_url_distribution(self, params=None):
params = params or {}
all_responses = {}
api_name = 'virustotal-url-distribution'
response_chunks = self._request_reports(list(params.keys()), list(params.values()), 'url/distribution')
self._extract_response_chunks(all_responses, resp... | Retrieves a live feed with the latest URLs submitted to VT.
Args:
resources: a dictionary with name and value for optional arguments
Returns:
A dict with the VT report. |
def membuf_tempfile(memfile):
memfile.seek(0, 0)
tmpfd, tmpname = mkstemp(suffix='.rar')
tmpf = os.fdopen(tmpfd, "wb")
try:
while True:
buf = memfile.read(BSIZE)
if not buf:
break
tmpf.write(buf)
tmpf.close()
except:
tmpf.cl... | Write in-memory file object to real file. |
def to_image(self, shape):
if len(shape) != 2:
raise ValueError('input shape must have 2 elements.')
image = np.zeros(shape)
if self.bbox.ixmin < 0 or self.bbox.iymin < 0:
return self._to_image_partial_overlap(image)
try:
image[self.bbox.slices] = self... | Return an image of the mask in a 2D array of the given shape,
taking any edge effects into account.
Parameters
----------
shape : tuple of int
The ``(ny, nx)`` shape of the output array.
Returns
-------
result : `~numpy.ndarray`
A 2D arra... |
def http_reply(self):
data = {
'status': self.status,
'error': self.code.upper(),
'error_description': str(self)
}
if self.error_caught:
data['error_caught'] = pformat(self.error_caught)
if self.error_id:
data['error_id'] = self... | Return a Flask reply object describing this error |
def date_to_datetime(date, fraction=0.0):
day_seconds = (60 * 60 * 24) - 1
total_seconds = int(day_seconds * fraction)
delta = datetime.timedelta(seconds=total_seconds)
time = datetime.time()
dt = datetime.datetime.combine(date, time) + delta
return dt | fraction is how much through the day you are. 0=start of the day, 1=end of the day. |
def filebrowser(request):
try:
fb_url = reverse('fb_browse')
except:
fb_url = reverse('filebrowser:fb_browse')
return HttpResponse(jsmin(render_to_string('tinymce/filebrowser.js',
context={'fb_url': fb_url},
... | JavaScript callback function for `django-filebrowser`_
:param request: Django http request
:type request: django.http.request.HttpRequest
:return: Django http response with filebrowser JavaScript code for for TinyMCE 4
:rtype: django.http.HttpResponse
.. _django-filebrowser: https://github.com/seh... |
def _taskify(func):
if not isinstance(func, _Task):
func = _Task(func)
spec = inspect.getargspec(func.func)
if spec.args:
num_args = len(spec.args)
num_kwargs = len(spec.defaults or [])
isflag = lambda x, y: '' if x.defaults[y] is False else '='
func.args = spec.args[:(num_args - num_kwargs)]
func... | Convert a function into a task. |
def step_prev(self):
window_start = around(self.parent.value('window_start') -
self.parent.value('window_length') /
self.parent.value('window_step'), 2)
if window_start < 0:
return
self.parent.overview.update_position(window... | Go to the previous step. |
def image_summary(predictions, targets, hparams):
del hparams
results = tf.cast(tf.argmax(predictions, axis=-1), tf.uint8)
gold = tf.cast(targets, tf.uint8)
summary1 = tf.summary.image("prediction", results, max_outputs=2)
summary2 = tf.summary.image("data", gold, max_outputs=2)
summary = tf.summary.merge([... | Reshapes predictions and passes it to tensorboard.
Args:
predictions : The predicted image (logits).
targets : The ground truth.
hparams: model hparams.
Returns:
summary_proto: containing the summary images.
weights: A Tensor of zeros of the same shape as predictions. |
def _pp(dict_data):
for key, val in dict_data.items():
print('{0:<11}: {1}'.format(key, val)) | Pretty print. |
def cs_axis_mapping(cls,
part_info,
axes_to_move
):
cs_ports = set()
axis_mapping = {}
for motor_info in cls.filter_values(part_info):
if motor_info.scannable in axes_to_move:
assert motor_info.cs... | Given the motor infos for the parts, filter those with scannable
names in axes_to_move, check they are all in the same CS, and return
the cs_port and mapping of cs_axis to MotorInfo |
def _parseIsTag(self):
el = self._element
self._istag = el and el[0] == "<" and el[-1] == ">" | Detect whether the element is HTML tag or not.
Result is saved to the :attr:`_istag` property. |
def _contiguous_slices(self):
k = j = None
for i in self._sorted():
if k is None:
k = j = i
if i - j > 1:
yield slice(k, j + 1, 1)
k = i
j = i
if k is not None:
yield slice(k, j + 1, 1) | Internal iterator over contiguous slices in RangeSet. |
def get_mount_points():
def decode_path(path):
return path.replace(br"\011", b"\011").replace(br"\040", b"\040").replace(br"\012", b"\012").replace(br"\134", b"\134")
with open("/proc/self/mounts", "rb") as mounts:
for mount in mounts:
source, target, fstype, options, unused1, unused... | Get all current mount points of the system.
Changes to the mount points during iteration may be reflected in the result.
@return a generator of (source, target, fstype, options),
where options is a list of bytes instances, and the others are bytes instances
(this avoids encoding problems with mount poin... |
def minimum_valid_values_in_any_group(df, levels=None, n=1, invalid=np.nan):
df = df.copy()
if levels is None:
if 'Group' in df.columns.names:
levels = [df.columns.names.index('Group')]
if invalid is np.nan:
dfx = ~np.isnan(df)
else:
dfx = df != invalid
dfc = dfx.... | Filter ``DataFrame`` by at least n valid values in at least one group.
Taking a Pandas ``DataFrame`` with a ``MultiIndex`` column index, filters rows to remove
rows where there are less than `n` valid values per group. Groups are defined by the `levels` parameter indexing
into the column index. For example... |
def check(self):
return programs.is_module_installed(self.modname,
self.required_version,
self.installed_version) | Check if dependency is installed |
def _call_brew(cmd, failhard=True):
user = __salt__['file.get_user'](_homebrew_bin())
runas = user if user != __opts__['user'] else None
cmd = '{} {}'.format(salt.utils.path.which('brew'), cmd)
result = __salt__['cmd.run_all'](cmd,
runas=runas,
... | Calls the brew command with the user account of brew |
def describe(value):
if isinstance(value, types.ModuleType):
return describe_file(value)
elif isinstance(value, messages.Field):
return describe_field(value)
elif isinstance(value, messages.Enum):
return describe_enum_value(value)
elif isinstance(value, type):
if issubcla... | Describe any value as a descriptor.
Helper function for describing any object with an appropriate descriptor
object.
Args:
value: Value to describe as a descriptor.
Returns:
Descriptor message class if object is describable as a descriptor, else
None. |
def run_total_dos(self,
sigma=None,
freq_min=None,
freq_max=None,
freq_pitch=None,
use_tetrahedron_method=True):
if self._mesh is None:
msg = "run_mesh has to be done before DOS calculation.... | Calculate total DOS from phonons on sampling mesh.
Parameters
----------
sigma : float, optional
Smearing width for smearing method. Default is None
freq_min, freq_max, freq_pitch : float, optional
Minimum and maximum frequencies in which range DOS is computed
... |
def debug(self, value):
self.__debug = value
if self.__debug:
for _, logger in iteritems(self.logger):
logger.setLevel(logging.DEBUG)
httplib.HTTPConnection.debuglevel = 1
else:
for _, logger in iteritems(self.logger):
logger.se... | Sets the debug status.
:param value: The debug status, True or False.
:type: bool |
def plot_posterior_marginal(self, idx_param=0, res=100, smoothing=0,
range_min=None, range_max=None, label_xaxis=True,
other_plot_args={}, true_model=None
):
res = plt.plot(*self.posterior_marginal(
idx_param, res, smoothing,
range_min, range_max
)... | Plots a marginal of the requested parameter.
:param int idx_param: Index of parameter to be marginalized.
    :param int res: Resolution of the axis.
:param float smoothing: Standard deviation of the Gaussian kernel
used to smooth; same units as parameter.
:param float range... |
def is_equal_strings_ignore_case(first, second):
if first and second:
return first.upper() == second.upper()
else:
return not (first or second) | The function compares strings ignoring case |
def _init_kws(self):
if 'fmtgo' not in self.kws:
self.kws['fmtgo'] = self.grprdflt.gosubdag.prt_attr['fmt'] + "\n"
if 'fmtgo2' not in self.kws:
self.kws['fmtgo2'] = self.grprdflt.gosubdag.prt_attr['fmt'] + "\n"
if 'fmtgene' not in self.kws:
if 'itemid2name' no... | Fill default values for keyword args, if necessary. |
def get_current_user(self):
from google.appengine.api import users
if _IS_DEVELOPMENT_SERVER:
return users.get_current_user()
else:
from google.appengine.api import oauth
try:
user = oauth.get_current_user()
except oauth.OAuthReques... | Override get_current_user for Google AppEngine
Checks for oauth capable request first, if this fails fall back to standard users API |
def _Insert(cursor, table, values):
precondition.AssertIterableType(values, dict)
if not values:
return
column_names = list(sorted(values[0]))
for value_dict in values:
if set(column_names) != set(value_dict):
raise ValueError("Given value dictionaries must have identical keys. "
... | Inserts one or multiple rows into the given table.
Args:
cursor: The MySQL cursor to perform the insertion.
table: The table name, where rows should be inserted.
values: A list of dicts, associating column names to values. |
def option_hook(self, function):
sig = Signature(function)
if "options" not in sig.arguments:
raise KeyError("option_hook functions must have an argument called"
" 'options', but got {}".format(sig.arguments))
self.option_hooks.append(function)
retu... | Decorator for adding an option hook function.
An option hook is a function that is called right before a run
is created. It receives (and potentially modifies) the options
dictionary. That is, the dictionary of commandline options used for
this run.
.. note::
The de... |
def options(self, new):
options = self._create_options(new)
if self.widget.value:
self.widget.set_param(options=options, value=list(options.values())[:1])
else:
self.widget.options = options
self.widget.value = list(options.values())[:1] | Set options from list, or instance of named item
Over-writes old options |
def validateAQLQuery(self, query, bindVars = None, options = None) :
    "returns the server answer if the query is valid. Raises an AQLQueryError if not"
if bindVars is None :
bindVars = {}
if options is None :
options = {}
    payload = {'query' : query, 'bindVars' : bi... | returns the server answer if the query is valid. Raises an AQLQueryError if not |
def extensions(self):
_tmp_extensions = self.mimes.encodings_map.keys() + \
self.mimes.suffix_map.keys() + \
self.mimes.types_map[1].keys() + \
cfg['CFG_BIBDOCFILE_ADDITIONAL_KNOWN_FILE_EXTENSIONS']
extensions = []
for ext in _tmp_extensions:
if ex... | Generate the regular expression to match all the known extensions.
@return: the regular expression.
@rtype: regular expression object |
def load(self, instance, xblock):
if djpyfs:
return djpyfs.get_filesystem(scope_key(instance, xblock))
else:
raise NotImplementedError("djpyfs not available") | Get the filesystem for the field specified in 'instance' and the
xblock in 'xblock' It is locally scoped. |
def recipe_create(backend, kitchen, name):
err_str, use_kitchen = Backend.get_kitchen_from_user(kitchen)
if use_kitchen is None:
raise click.ClickException(err_str)
click.secho("%s - Creating Recipe %s for Kitchen '%s'" % (get_datetime(), name, use_kitchen), fg='green')
check_and_print(DKCloudCo... | Create a new Recipe |
def writeBinaryItemContainer(filelike, binaryItemContainer, compress=True):
allMetadata = dict()
binarydatafile = io.BytesIO()
for index, binaryItem in enumerate(viewvalues(binaryItemContainer)):
metadataList = _dumpArrayDictToFile(binarydatafile, binaryItem.arrays)
allMetadata[index] = [bin... | Serializes the binaryItems contained in binaryItemContainer and writes
them into a zipfile archive.
Examples of binaryItem classes are :class:`maspy.core.Ci` and
:class:`maspy.core.Sai`. A binaryItem class has to define the function
``_reprJSON()`` which returns a JSON formated string representation of... |
def describe_topic_rule(ruleName,
region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
rule = conn.get_topic_rule(ruleName=ruleName)
if rule and 'rule' in rule:
rule = rule['rule']
k... | Given a topic rule name describe its properties.
Returns a dictionary of interesting properties.
CLI Example:
.. code-block:: bash
salt myminion boto_iot.describe_topic_rule myrule |
def create_pars_from_dict(name, pars_dict, rescale=True, update_bounds=False):
o = get_function_defaults(name)
pars_dict = pars_dict.copy()
for k in o.keys():
if not k in pars_dict:
continue
v = pars_dict[k]
if not isinstance(v, dict):
v = {'name': k, 'value':... | Create a dictionary for the parameters of a function.
Parameters
----------
name : str
Name of the function.
pars_dict : dict
Existing parameter dict that will be merged with the
default dictionary created by this method.
rescale : bool
Rescale parameter values... |
def get_changes_since(self, change_number, app_changes=True, package_changes=False):
return self.send_job_and_wait(MsgProto(EMsg.ClientPICSChangesSinceRequest),
{
'since_change_number': change_number,
... | Get changes since a change number
    :param change_number: change number to use as starting point
:type change_number: :class:`int`
    :param app_changes: whether to include app changes
:type app_changes: :class:`bool`
    :param package_changes: whether to include package changes
... |
def _dy(self):
min_y = max_y = self._start_y
for drawing_operation in self:
if hasattr(drawing_operation, 'y'):
min_y = min(min_y, drawing_operation.y)
max_y = max(max_y, drawing_operation.y)
return max_y - min_y | Return integer height of this shape's path in local units. |
def merge_ownership_periods(mappings):
return valmap(
lambda v: tuple(
OwnershipPeriod(
a.start,
b.start,
a.sid,
a.value,
) for a, b in sliding_window(
2,
concatv(
sort... | Given a dict of mappings where the values are lists of
OwnershipPeriod objects, returns a dict with the same structure with
new OwnershipPeriod objects adjusted so that the periods have no
gaps.
Orders the periods chronologically, and pushes forward the end date
of each period to match the start da... |
def _combine_attr_fast_update(self, attr, typ):
values = dict(getattr(self, attr, {}))
for base in self._class_data.bases:
vals = dict(getattr(base, attr, {}))
preserve_attr_data(vals, values)
values = combine(vals, values)
setattr(self, attr, typ(values)) | Avoids having to call _update for each intermediate base. Only
works for class attr of type UpdateDict. |
def draw_image(self, ax, image):
self.renderer.draw_image(imdata=utils.image_to_base64(image),
extent=image.get_extent(),
coordinates="data",
style={"alpha": image.get_alpha(),
... | Process a matplotlib image object and call renderer.draw_image |
def add_styles(self, **styles):
for stylename in sorted(styles):
self._doc.styles.addElement(styles[stylename]) | Add ODF styles to the current document. |
def mogrify(self, sql, params):
conn = self.engine.raw_connection()
cursor = conn.cursor()
return cursor.mogrify(sql, params) | Return the query string with parameters added |
def _score_macro_average(self, n_classes):
all_fpr = np.unique(np.concatenate([self.fpr[i] for i in range(n_classes)]))
avg_tpr = np.zeros_like(all_fpr)
for i in range(n_classes):
avg_tpr += interp(all_fpr, self.fpr[i], self.tpr[i])
avg_tpr /= n_classes
self.fpr[MACRO... | Compute the macro average scores for the ROCAUC curves. |
def readBuffer(self, newLength):
result = Buffer(self.buf, self.offset, newLength)
self.skip(newLength)
return result | Read next chunk as another buffer. |
def get_chart(chart_type, time_span=None, rolling_average=None, api_code=None):
resource = 'charts/' + chart_type + '?format=json'
if time_span is not None:
        resource += '&timespan=' + time_span
if rolling_average is not None:
resource += '&rollingAverage=' + rolling_average
if api_code i... | Get chart data of a specific chart type.
:param str chart_type: type of chart
:param str time_span: duration of the chart.
Default is 1 year for most charts, 1 week for mempool charts (optional)
(Example: 5weeks)
:param str rolling_average: duration over which the data should be averaged (optional)... |
def listflat(path, ext=None):
if os.path.isdir(path):
if ext:
if ext == 'tif' or ext == 'tiff':
files = glob.glob(os.path.join(path, '*.tif'))
files = files + glob.glob(os.path.join(path, '*.tiff'))
else:
files = glob.glob(os.path.join(... | List files without recursion |
def recover(self, requeue=False):
    """Ask the server to redeliver all unacknowledged messages.

    :param bool requeue: when True the messages are re-queued; when False
        they are redelivered as-is.
    :raises AMQPInvalidArgument: if ``requeue`` is not a boolean.
    :raises AMQPChannelError: Raises if the channel encountered an error.
    :raises AMQPConnectionError: Raises if the connection
        encountered an error.
    """
    if not isinstance(requeue, bool):
        raise AMQPInvalidArgument('requeue should be a boolean')
    frame_out = specification.Basic.Recover(requeue=requeue)
    return self._channel.rpc_request(frame_out)
:param bool requeue: Re-queue the messages
:raises AMQPInvalidArgument: Invalid Parameters
:raises AMQPChannelError: Raises if the channel encountered an error.
:raises AMQPConnectionError: Raises if the connection
... |
def _needs_region_update(out_file, samples):
nblock_files = [x["regions"]["nblock"] for x in samples if "regions" in x]
for nblock_file in nblock_files:
test_old = nblock_file.replace("-nblocks", "-analysisblocks")
if os.path.exists(test_old):
return False
for noblock_file in nbl... | Check if we need to update BED file of regions, supporting back compatibility. |
def status(self, **kwargs):
    """Fetch this geo node's status from the server.

    Args:
        **kwargs: Extra options to send to the server (e.g. sudo)

    Raises:
        GitlabAuthenticationError: If authentication is not correct
        GitlabGetError: If the server failed to perform the request

    Returns:
        dict: The status of the geo node
    """
    endpoint = '/geo_nodes/%s/status' % self.get_id()
    return self.manager.gitlab.http_get(endpoint, **kwargs)
Args:
**kwargs: Extra options to send to the server (e.g. sudo)
Raises:
GitlabAuthenticationError: If authentication is not correct
GitlabGetError: If the server failed to perform the request
Returns:
dict: The st... |
def get_build_platform():
from sysconfig import get_platform
plat = get_platform()
if sys.platform == "darwin" and not plat.startswith('macosx-'):
try:
version = _macosx_vers()
machine = os.uname()[4].replace(" ", "_")
return "macosx-%d.%d-%s" % (
... | Return this platform's string for platform-specific distributions
XXX Currently this is the same as ``distutils.util.get_platform()``, but it
needs some hacks for Linux and Mac OS X. |
def min(self, expr, extra_constraints=(), solver=None, model_callback=None):
if self._solver_required and solver is None:
raise BackendError("%s requires a solver for evaluation" % self.__class__.__name__)
return self._min(self.convert(expr), extra_constraints=self.convert_list(extra_constra... | Return the minimum value of `expr`.
:param expr: expression (an AST) to evaluate
:param solver: a solver object, native to the backend, to assist in
the evaluation (for example, a z3.Solver)
:param extra_constraints: extra constraints (as ASTs) to add to the solver for th... |
def load_mldataset(filename):
user = []
item = []
score = []
with open(filename) as f:
for line in f:
tks = line.strip().split('\t')
if len(tks) != 4:
continue
user.append(int(tks[0]))
item.append(int(tks[1]))
score.appe... | Not particularly fast code to parse the text file and load it into three NDArray's
and product an NDArrayIter |
def showAddColumnDialog(self, triggered):
    """Open the add-column dialog when the corresponding button fires.

    This method is also a slot. Nothing happens unless ``triggered``
    is truthy.

    Args:
        triggered (bool): If the corresponding button was
            activated, the dialog will be created and shown.
    """
    if not triggered:
        return
    dialog = AddAttributesDialog(self)
    dialog.accepted.connect(self.addColumn)
    dialog.rejected.connect(self.uncheckButton)
    dialog.show()
This method is also a slot.
Args:
triggered (bool): If the corresponding button was
activated, the dialog will be created and shown. |
def _extend_settings(settings, configurator_config, prefix=None):
for key in configurator_config:
settings_key = '.'.join([prefix, key]) if prefix else key
if hasattr(configurator_config[key], 'keys') and\
hasattr(configurator_config[key], '__getitem__'):
_extend_settings... | Extend settings dictionary with content of yaml's configurator key.
.. note::
This methods changes multilayered subkeys defined
within **configurator** into dotted keys in settings dictionary:
.. code-block:: yaml
configurator:
sqlalchemy:
... |
def cfg_lldp_interface(self, protocol_interface, phy_interface=None):
if phy_interface is None:
phy_interface = protocol_interface
self.create_attr_obj(protocol_interface, phy_interface)
ret = self.pub_lldp.enable_lldp(protocol_interface)
attr_obj = self.get_attr_obj(protocol... | Cfg LLDP on interface and create object. |
def load_data(self, table_name, obj, database=None, **kwargs):
    """Wraps the LOAD DATA DDL statement. Loads data into an MapD table
    by physically moving data files.

    The currently active database is remembered before switching and is
    always restored afterwards — including when the load fails — so the
    connection is never left pointing at ``database``.

    Parameters
    ----------
    table_name : string
    obj : pandas.DataFrame or pyarrow.Table
    database : string, default None (optional)
    **kwargs : forwarded to the underlying ``load_table`` call
    """
    original_db = self.db_name
    self.set_database(database)
    try:
        self.con.load_table(table_name, obj, **kwargs)
    finally:
        # Restore the previous database even if load_table raised.
        self.set_database(original_db)
def get_induced_subhypergraph(self, nodes):
    """Build the subhypergraph induced by ``nodes``.

    The result is a copy of this hypergraph from which every node outside
    the provided set has been removed (removal of the corresponding
    hyperedges is delegated to ``remove_nodes``); the original hypergraph
    is left untouched.
    """
    keep = set(nodes)
    induced = self.copy()
    induced.remove_nodes(induced.get_node_set() - keep)
    return induced
hypergraph induced by the provided set of nodes. That is, the induced
subhypergraph's node set corresponds precisely to the nodes provided,
and the coressponding hyperedges in the subhypergraph are only those
from the origin... |
def _get_args_to_parse(args, sys_argv):
    """Pick the argument list to hand to the parser.

    Args:
        args: explicit argument list, or None to fall back on the
            command line.
        sys_argv: arguments of the command line i.e. sys.argv.

    Returns:
        ``args`` when it is not None, otherwise everything after the
        program name in ``sys_argv``.
    """
    if args is None:
        to_parse = sys_argv[1:]
    else:
        to_parse = args
    _LOG.debug("Parsing arguments: %s", to_parse)
    return to_parse
something, an empty list otherwise.
Args:
args: argument to be parsed
sys_argv: arguments of the command line i.e. sys.argv |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.