code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def validate(self, value):
    """Validate ``value`` against the configured type, brute-casting if needed.

    Args:
        value (object): A value for validation.

    Returns:
        The value unchanged when it already matches ``cast_type``,
        otherwise the result of casting it.

    Raises:
        NodeTypeError: If validation or type casting fails.
    """
    # Prefer an explicit cast callback; fall back to the type itself.
    caster = self.cast_callback or self.cast_type
    try:
        if isinstance(value, self.cast_type):
            return value
        return caster(value)
    except Exception:
        raise NodeTypeError('Invalid value `{}` for {}.'.format(value, self.cast_type))
def connect_entry_signals():
    """Connect all the signals on Entry model."""
    post_save_handlers = (
        (ping_directories_handler, ENTRY_PS_PING_DIRECTORIES),
        (ping_external_urls_handler, ENTRY_PS_PING_EXTERNAL_URLS),
        (flush_similar_cache_handler, ENTRY_PS_FLUSH_SIMILAR_CACHE),
    )
    for handler, uid in post_save_handlers:
        post_save.connect(handler, sender=Entry, dispatch_uid=uid)
    # The similar-entries cache must also be flushed when an entry is deleted.
    post_delete.connect(flush_similar_cache_handler, sender=Entry,
                        dispatch_uid=ENTRY_PD_FLUSH_SIMILAR_CACHE)
def update_idxs(self):
    """Set root idx highest, tip idxs lowest ordered as ladderized."""
    def _assign(node, idx):
        # Record the idx feature; unnamed nodes get the idx as their name.
        node.add_feature("idx", idx)
        if not node.name:
            node.name = str(idx)

    counter = self.ttree.nnodes - 1
    # Internal nodes first (root downward) so the root gets the highest idx.
    for node in self.ttree.treenode.traverse("levelorder"):
        if not node.is_leaf():
            _assign(node, counter)
            counter -= 1
    # Tips receive the remaining, lowest idx values.
    for node in self.ttree.treenode.get_leaves():
        _assign(node, counter)
        counter -= 1
def updateIncomeProcess(self):
    """Construct the income process for the infinite horizon model.

    In the infinite-horizon case (``cycles == 0``) a discrete transitory
    shock distribution is built that includes an unemployment state and a
    flat labor-income tax financing the unemployment benefit; otherwise
    the parent class construction is used.

    Parameters
    ----------
    none

    Returns
    -------
    none
    """
    if self.cycles != 0:
        # Lifecycle specification: defer to the parent class method.
        EstimationAgentClass.updateIncomeProcess(self)
        return
    # Tax rate that pays for unemployment benefits in expectation.
    tax_rate = (self.IncUnemp * self.UnempPrb) / ((1.0 - self.UnempPrb) * self.IndL)
    TranShkDstn = deepcopy(approxMeanOneLognormal(
        self.TranShkCount, sigma=self.TranShkStd[0], tail_N=0))
    # Prepend the unemployment state: probability UnempPrb, income IncUnemp.
    TranShkDstn[0] = np.insert(TranShkDstn[0] * (1.0 - self.UnempPrb), 0, self.UnempPrb)
    TranShkDstn[1] = np.insert(TranShkDstn[1] * (1.0 - tax_rate) * self.IndL, 0, self.IncUnemp)
    PermShkDstn = approxMeanOneLognormal(self.PermShkCount, sigma=self.PermShkStd[0], tail_N=0)
    self.IncomeDstn = [combineIndepDstns(PermShkDstn, TranShkDstn)]
    self.TranShkDstn = TranShkDstn
    self.PermShkDstn = PermShkDstn
    self.addToTimeVary('IncomeDstn')
def _load_resources(self):
    """Load all the native goldman resources.

    The route (API endpoint) is derived automatically from the concrete
    resource type; only goldman's Model based resources are supported.
    """
    route_templates = (
        (goldman.ModelsResource, '/%s'),
        (goldman.ModelResource, '/%s/{rid}'),
        (goldman.RelatedResource, '/%s/{rid}/{related}'),
    )
    for resource in self.RESOURCES:
        for resource_cls, template in route_templates:
            if isinstance(resource, resource_cls):
                self.add_route(template % resource.rtype, resource)
                break
        else:
            raise TypeError('unsupported resource type')
def min_row_dist_sum_idx(dists):
    """Find the index of the row with the minimum row distance sum.

    This returns the index of the row with the least overall distance to
    all other rows.  Sums are taken along axis 0; for the square
    (symmetric) distance matrices this expects, column sums equal row
    sums — TODO confirm symmetry is always guaranteed by callers.

    Args:
        dists (np.array): must be square distance matrix

    Returns:
        int: index of row with min dist row sum
    """
    totals = dists.sum(axis=0)
    return totals.argmin()
def _generate_tokens(self, text):
    """Generate Token objects for the given code."""
    readline = io.StringIO(text).readline
    for index, tok in enumerate(tokenize.generate_tokens(readline)):
        tok_type, tok_str, start, end, line = tok
        # Translate (row, col) positions into absolute text offsets.
        start_offset = self._line_numbers.line_to_offset(start[0], start[1])
        end_offset = self._line_numbers.line_to_offset(end[0], end[1])
        yield Token(tok_type, tok_str, start, end, line, index,
                    start_offset, end_offset)
def like_button_js_tag(context):
    """Check that FACEBOOK_APP_ID is set up correctly in the Django
    settings and, if so, pass the data along to the template so the like
    button javascript can be rendered.

    If anything is missing, returns LIKE_BUTTON_IS_VALID=False so the
    javascript is not installed.
    """
    if FACEBOOK_APP_ID is None:
        log.warning("FACEBOOK_APP_ID isn't setup correctly in your settings")
    request = context.get('request', None) if FACEBOOK_APP_ID else None
    if not request:
        return {"LIKE_BUTTON_IS_VALID": False}
    return {
        "LIKE_BUTTON_IS_VALID": True,
        "facebook_app_id": FACEBOOK_APP_ID,
        "channel_base_url": request.get_host(),
    }
def _subtract(summary, o):
    """Remove object ``o`` from the summary by subtracting its size."""
    key = _repr(o)
    size = _getsizeof(o)
    matched = False
    for entry in summary:
        if entry[0] == key:
            # Subtract one occurrence and its size from every matching row.
            entry[1] -= 1
            entry[2] -= size
            matched = True
    if not matched:
        # No such row yet: record a negative entry instead.
        summary.append([key, -1, -size])
    return summary
def _schedule_ad(self, delay=None, response_future=None):
    """Schedule an ``ad`` request.

    :param delay:
        Time in seconds to wait before making the ``ad`` request;
        defaults to ``self.interval_secs``.  A random jitter of up to
        ``self.interval_max_jitter_secs`` is always added.
    :param response_future:
        If non-None, the result of the advertise request is filled into
        this future.
    """
    if not self.running:
        return
    base_delay = self.interval_secs if delay is None else delay
    # Jitter prevents many clients from advertising in lockstep.
    wait = base_delay + random.uniform(0, self.interval_max_jitter_secs)
    self._next_ad = self.io_loop.call_later(wait, self._ad, response_future)
def has_child_objective_banks(self, objective_bank_id):
    """Tests if an objective bank has any children.

    arg:    objective_bank_id (osid.id.Id): the ``Id`` of an objective bank
    return: (boolean) - ``true`` if the ``objective_bank_id`` has
            children, ``false`` otherwise
    raise:  NotFound - ``objective_bank_id`` is not found
    raise:  NullArgument - ``objective_bank_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Prefer the catalog session when one is available.
    if self._catalog_session is None:
        return self._hierarchy_session.has_children(id_=objective_bank_id)
    return self._catalog_session.has_child_catalogs(catalog_id=objective_bank_id)
def safe_mongocall(call):
    """Decorator for automatic handling of AutoReconnect-exceptions."""
    def _safe_mongocall(*args, **kwargs):
        # Retry with exponential backoff: 1, 2, 4, 8 seconds.
        for attempt in range(4):
            try:
                return call(*args, **kwargs)
            except pymongo.errors.AutoReconnect:
                print('AutoReconnecting, try %d' % attempt)
                time.sleep(2 ** attempt)
        # Last attempt: any AutoReconnect now propagates to the caller.
        return call(*args, **kwargs)
    return _safe_mongocall
def _get_interfaces(self):
    """Get a list of GigabitEthernet interfaces on this hosting device.

    :return: List of the interface names
    """
    parser = HTParser(self._get_running_config())
    raw_lines = parser.find_lines("^interface GigabitEthernet")
    # The second whitespace-separated token is the interface name.
    interfaces = [line.strip().split(' ')[1] for line in raw_lines]
    LOG.debug("Interfaces on hosting device: %s", interfaces)
    return interfaces
def executable_script(src_file, gallery_conf):
    """Validate if script has to be run according to gallery configuration.

    Parameters
    ----------
    src_file : str
        path to python script
    gallery_conf : dict
        Contains the configuration of Sphinx-Gallery

    Returns
    -------
    bool
        True if script has to be executed
    """
    filename_pattern = gallery_conf.get('filename_pattern')
    # bool() so callers get a real boolean, not a re.Match object or
    # whatever truthy/falsy value 'plot_gallery' happens to hold.
    return bool(re.search(filename_pattern, src_file) and
                gallery_conf['plot_gallery'])
def delete_taskset(self, taskset_id):
    """Delete a saved taskset result."""
    taskset = self.restore_taskset(taskset_id)
    # Nothing to do when the taskset was never saved / already gone.
    if taskset:
        taskset.delete()
def update_GUI_with_new_interpretation(self):
    """Update statistics boxes and figures with a new interpretation,
    e.g. when selecting new temperature bounds."""
    # Order matters: statistics feed the figures drawn afterwards.
    steps = (
        self.update_fit_bounds_and_statistics,
        self.draw_interpretations,
        self.calculate_high_levels_data,
        self.plot_high_levels_data,
    )
    for step in steps:
        step()
def get_auto_allocated_topology(self, project_id, **_params):
    """Fetch information about a project's auto-allocated topology."""
    url = self.auto_allocated_topology_path % project_id
    return self.get(url, params=_params)
def load_mode(node):
    """Load one observing mode from ``node``."""
    obs_mode = ObservingMode()
    # Copy the raw node attributes onto the mode, then run the
    # validator / builder / tagger pipeline over it.
    vars(obs_mode).update(node)
    for stage in (load_mode_validator, load_mode_builder, load_mode_tagger):
        stage(obs_mode, node)
    return obs_mode
def createConnection(self):
    """Return a card connection thru the reader."""
    remote = RemoteCardConnection(self.readerobj.createConnection())
    # Expose the connection over Pyro and hand back its URI.
    daemon = PyroDaemon.PyroDaemon()
    return daemon.connect(remote)
def create_dispatcher(self):
    """Return a dispatcher for configured channels."""
    before = max(self.args.before_context, self.args.context)
    after = max(self.args.after_context, self.args.context)
    # Modes that never print context lines can use the unbuffered path.
    no_context_needed = (
        self.args.files_with_match is not None
        or self.args.count
        or self.args.only_matching
        or self.args.quiet
        or (before == 0 and after == 0)
    )
    if no_context_needed:
        return UnbufferedDispatcher(self._channels)
    if self.args.thread:
        return ThreadedDispatcher(self._channels, before, after)
    return LineBufferDispatcher(self._channels, before, after)
def async_call(func, *args, callback=None):
    """Call `func` in a background thread, then call `callback` in the Gtk
    main thread.

    If an error occurs in `func`, the formatted traceback is passed to
    `callback` as the second parameter.  Always check `error` is not None.
    """
    def worker():
        result, error = None, None
        try:
            result = func(*args)
        except Exception:
            error = traceback.format_exc()
            logger.error(error)
        if callback:
            # idle_add marshals the callback onto the Gtk main loop.
            GLib.idle_add(callback, result, error)

    threading.Thread(target=worker, daemon=True).start()
def users_text(self):
    """Return connected users information, collecting it if not available."""
    if self._users_text is None:
        log = self.chain.connection.log
        log("Getting connected users text")
        self._users_text = self.driver.get_users_text()
        log("Users text collected" if self._users_text
            else "Users text not collected")
    return self._users_text
def export_args(subparsers):
    """Add command line options for the export operation."""
    parser = subparsers.add_parser('export')
    parser.add_argument('directory',
                        help='Path where secrets will be exported into')
    # Shared option groups used by several subcommands.
    for add_common in (secretfile_args, vars_args, base_args):
        add_common(parser)
def add_annotation(self, description, **attrs):
    """Add an annotation to span.

    :type description: str
    :param description: A user-supplied message describing the event.
        The maximum length for the description is 256 bytes.
    :type attrs: kwargs
    :param attrs: keyworded arguments e.g. failed=True, name='Caching'
    """
    annotation = time_event_module.Annotation(
        description, attributes.Attributes(attrs))
    self.add_time_event(
        time_event_module.TimeEvent(datetime.utcnow(), annotation))
def filter_keys_by_dataset_id(did, key_container):
    """Filter provided key iterable by the provided `DatasetID`.

    Note: The `modifiers` attribute of `did` should be `None` to allow for
    **any** modifier in the results.

    Args:
        did (DatasetID): Query parameters to match in the `key_container`.
        key_container (iterable): Set, list, tuple, or dict of `DatasetID`
                                  keys.

    Returns (list): List of keys matching the provided parameters in no
                    specific order.
    """
    # BUG FIX: the original seeded `keys` with iter(key_container) and
    # only converted it to a list inside the filter branches, so a query
    # with no non-None attributes returned a raw iterator instead of the
    # documented list.  Materialize once, up front.
    keys = list(key_container)
    for attr in DATASET_KEYS:
        wanted = getattr(did, attr)
        if wanted is None:
            # None in the query matches anything for this attribute.
            continue
        if attr == "wavelength":
            # Wavelengths match within a range, not by strict equality.
            keys = [k for k in keys
                    if (getattr(k, attr) is not None and
                        DatasetID.wavelength_match(getattr(k, attr),
                                                   wanted))]
        else:
            keys = [k for k in keys
                    if getattr(k, attr) is not None and
                    getattr(k, attr) == wanted]
    return keys
def _get_function_id(self):
    """Calculate the function id of current function descriptor.

    The id is a SHA1 digest over all fields of the function descriptor.

    Returns:
        ray.FunctionID to represent the function descriptor.
    """
    if self.is_for_driver_task:
        return ray.FunctionID.nil()
    digest = hashlib.sha1()
    for field in (self.module_name.encode("ascii"),
                  self.function_name.encode("ascii"),
                  self.class_name.encode("ascii"),
                  self._function_source_hash):
        digest.update(field)
    return ray.FunctionID(digest.digest())
def get_palette(samples, options, return_mask=False, kmeans_iter=40):
    """Extract the palette for the set of sampled RGB values.

    The first palette entry is always the background color; the rest are
    determined from foreground pixels by running K-means clustering.
    Returns the palette, and optionally the foreground pixel mask.
    """
    if not options.quiet:
        print(' getting palette...')
    bg_color = get_bg_color(samples, 6)
    fg_mask = get_fg_mask(bg_color, samples, options)
    # Cluster foreground pixels only; the background gets palette slot 0.
    centers, _ = kmeans(samples[fg_mask].astype(np.float32),
                        options.num_colors - 1,
                        iter=kmeans_iter)
    palette = np.vstack((bg_color, centers)).astype(np.uint8)
    return (palette, fg_mask) if return_mask else palette
def run(self):
    """Thread entry point: pump queued commands to the core agent.

    Called by the threading system.
    """
    try:
        self._connect()
        self._register()
        while True:
            try:
                payload = self.command_queue.get(block=True,
                                                 timeout=1 * SECOND)
            except queue.Empty:
                payload = None
            if payload is not None:
                if self._send(payload):
                    self.command_queue.task_done()
                else:
                    # Send failed: tear down and re-establish the session.
                    self._disconnect()
                    self._connect()
                    self._register()
            if self._stop_event.is_set():
                logger.debug("CoreAgentSocket thread stopping.")
                break
    except Exception:
        logger.debug("CoreAgentSocket thread exception.")
    finally:
        self._started_event.clear()
        self._stop_event.clear()
        self._stopped_event.set()
        logger.debug("CoreAgentSocket thread stopped.")
def get_permissions(self):
    """Get permissions needed by this policy."""
    manager = self.resource_manager
    permissions = set(manager.get_permissions())
    # Filters and actions may each require extra permissions.
    for group in (manager.filters, manager.actions):
        for member in group:
            permissions.update(member.get_permissions())
    return permissions
def authorized_connect_apps(self):
    """Access the authorized_connect_apps.

    :returns: twilio.rest.api.v2010.account.authorized_connect_app.AuthorizedConnectAppList
    :rtype: twilio.rest.api.v2010.account.authorized_connect_app.AuthorizedConnectAppList
    """
    # Build the list resource lazily and cache it on the instance.
    if self._authorized_connect_apps is None:
        self._authorized_connect_apps = AuthorizedConnectAppList(
            self._version, account_sid=self._solution['sid'])
    return self._authorized_connect_apps
def check_call(self, cmd):
    """Run ``cmd`` through SSH, raising on a non-zero exit status."""
    status, _ = self._call(cmd, False)
    if status != 0:
        raise RemoteCommandFailure(command=cmd, ret=status)
def last_year(date_):
    """Return the same date one year earlier.

    Feb 29 maps to Feb 28, since the previous year is never a leap year
    in the cases where Feb 29 exists.

    Args:
        date_ (datetime or datetime.date)

    Returns:
        datetime.date
    """
    if date_.month == 2 and date_.day == 29:
        day = 28
    else:
        day = date_.day
    return datetime.date(date_.year - 1, date_.month, day)
def get_uniquely_named_objects_by_name(object_list):
    """Return dict of name -> object pairs from objects with unique names.

    Args:
        object_list: list of objects, each X of which has a unique name
                     accessible as X.name.value

    Returns:
        dict, { X.name.value: X for x in object_list }
        If the list is empty or None, returns an empty dict.
    """
    result = dict()
    if not object_list:
        return result
    for obj in object_list:
        name = obj.name.value
        if name in result:
            raise GraphQLCompilationError(u'Found duplicate object key: '
                                          u'{} {}'.format(name, object_list))
        result[name] = obj
    return result
def run(self):
    """Thread body; overrides `threading.Thread.run` so exceptions thrown
    by the thread can be handled from within the main app."""
    self.exc = None
    try:
        self.task()
    except BaseException:
        # Preserve the full (type, value, traceback) triple for later.
        self.exc = sys.exc_info()
async def untilTrue(condition, *args, timeout=5) -> bool:
    """Keep checking the condition till it is true or a timeout is reached.

    :param condition: the condition to check (a function that returns bool)
    :param args: the arguments to the condition
    :param timeout: seconds to keep polling (0.1s between checks)
    :return: True if the condition is met in the given timeout, else False
    """
    start = time.perf_counter()
    met = False
    while time.perf_counter() - start < timeout:
        met = condition(*args)
        if met:
            break
        await asyncio.sleep(.1)
    return met
def get(self, mac):
    """Get data from the API as an instance of ResponseModel.

    Keyword arguments:
    mac -- MAC address or OUI for searching
    """
    payload = {
        self._FORMAT_F: 'json',
        self._SEARCH_F: mac
    }
    text = self.__decode_str(self.__call_api(self.__url, payload), 'utf-8')
    # An empty body means the service had nothing for this MAC/OUI.
    if not text:
        raise EmptyResponseException()
    return self.__parse(text)
def show_message(self, message, duration=2500):
    """Shows given message.

    :param message: Message.
    :type message: unicode
    :param duration: Notification duration in milliseconds.
    :type duration: int
    :return: Method success.
    :rtype: bool
    """
    self.setText(message)
    self.__duration = duration
    self.__set_position()
    # Fade in for a real message, fade out when clearing with "".
    (self.__fade_in if message else self.__fade_out)()
    return True
def insert(self, data):
    """Insert 1 into each bit position produced by the local hashes."""
    if not data:
        return
    digest = self._compress_by_md5(data)
    # Shard across block_num keys using the first byte of the digest.
    name = self.key + str(int(digest[0:2], 16) % self.block_num)
    for hasher in self.hash_function:
        self.server.setbit(name, hasher.hash(digest), 1)
def _getsie(self):
    """Return data as signed interleaved exponential-Golomb code.

    Raises InterpretError if the bitstring is not a single
    exponential-Golomb code.
    """
    try:
        value, end_pos = self._readsie(0)
        # The code must decode and consume the entire bitstring.
        if value is None or end_pos != self.len:
            raise ReadError
    except ReadError:
        raise InterpretError("Bitstring is not a single interleaved exponential-Golomb code.")
    return value
def save(self):
    """Save settings to ``self.path`` as YAML."""
    # Drop template entries that have no URL before persisting.
    templates = [t for t in self.config["project_templates"] if t.get("url")]
    self.config["project_templates"] = templates
    with open(self.path, "w") as fh:
        yaml.dump(self.config, fh, default_flow_style=False)
def get_doc(project, source_code, offset, resource=None, maxfixes=1):
    """Get the pydoc for the name at ``offset``; None if unresolved."""
    fixer = fixsyntax.FixSyntax(project, source_code, resource, maxfixes)
    name = fixer.pyname_at(offset)
    if name is None:
        return None
    return PyDocExtractor().get_doc(name.get_object())
def update_trial_stats(self, trial, result):
    """Update result for trial. Called after trial has finished
    an iteration - will decrement iteration count.

    TODO(rliaw): The other alternative is to keep the trials
    in and make sure they're not set as pending later.
    """
    assert trial in self._live_trials
    new_time = self._get_result_time(result)
    assert new_time >= 0
    # Progress advances by the time elapsed since the last stored result.
    delta = new_time - self._get_result_time(self._live_trials[trial])
    assert delta >= 0
    self._completed_progress += delta
    self._live_trials[trial] = result
def convert(self, value, view):
    """Check that the value is a string and matches the pattern."""
    if not isinstance(value, BASESTRING):
        # fatal flag set: a non-string can never be made to match.
        self.fail(u'must be a string', view, True)
        return
    if self.pattern and not self.regex.match(value):
        self.fail(
            u"must match the pattern {0}".format(self.pattern),
            view
        )
    return value
def download_file_insecure(url, target):
    """Use Python to download the file, without connection authentication."""
    src = urlopen(url)
    try:
        payload = src.read()
    finally:
        # Always release the connection, even if the read fails.
        src.close()
    with open(target, "wb") as dst:
        dst.write(payload)
def memberships_assignable(self, group, include=None):
    """Return memberships that are assignable for this group.

    :param group: Group object or id
    :param include: list of objects to sideload. `Side-loading API Docs
        <https://developer.zendesk.com/rest_api/docs/core/side_loading>`__.
    """
    endpoint_path = self.endpoint.memberships_assignable(
        id=group, include=include)
    return self._get(self._build_url(endpoint_path))
def _auth_req_callback_func(self, context, internal_request):
    """Called by a frontend module when an authorization request has been
    processed.

    :type context: satosa.context.Context
    :type internal_request: satosa.internal.InternalData
    :rtype: satosa.response.Response

    :param context: The request context
    :param internal_request: request processed by the frontend
    :return: response
    """
    state = context.state
    state[STATE_KEY] = {"requester": internal_request.requester}
    # Ensure a consent-state dict exists, then record filter/requester info.
    try:
        consent_state = context.state[consent.STATE_KEY]
    except KeyError:
        consent_state = context.state[consent.STATE_KEY] = {}
    finally:
        consent_state.update({
            "filter": internal_request.attributes or [],
            "requester_name": internal_request.requester_name,
        })
    satosa_logging(logger, logging.INFO,
                   "Requesting provider: {}".format(internal_request.requester), state)
    if self.request_micro_services:
        # Hand off to the first request micro-service in the chain.
        return self.request_micro_services[0].process(context, internal_request)
    return self._auth_req_finish(context, internal_request)
def cg(output, show, verbose, classname, methodname, descriptor,
       accessflag, no_isolated, apk):
    """Create a call graph and export it into a graph format.

    classnames are found in the type "Lfoo/bar/bla;".

    Example:

    \b
        $ androguard cg APK
    """
    androcg_main(verbose=verbose,
                 APK=apk,
                 classname=classname,
                 methodname=methodname,
                 descriptor=descriptor,
                 accessflag=accessflag,
                 no_isolated=no_isolated,
                 show=show,
                 output=output)
def get_aliases(self, includename=True):
    """Retrieve the aliases of this object as a list of strings.

    Arguments
    ---------
    includename : bool
        Include the 'name' parameter in the list of aliases.
    """
    aliases = [quantum[QUANTITY.VALUE]
               for quantum in self.get(self._KEYS.ALIAS, [])]
    if includename:
        name = self[self._KEYS.NAME]
        # The object's own name leads the list unless already present.
        if name not in aliases:
            aliases.insert(0, name)
    return aliases
def child_url_record(self, url: str, inline: bool=False,
                     link_type: Optional[LinkType]=None,
                     post_data: Optional[str]=None,
                     level: Optional[int]=None):
    """Return a child URLRecord.

    This function is useful for testing filters before adding to table.
    """
    parent = self.url_record
    record = URLRecord()
    record.url = url
    record.status = Status.todo
    record.try_count = 0
    record.level = parent.level + 1 if level is None else level
    # The root URL propagates; the first record becomes its own root.
    record.root_url = parent.root_url or parent.url
    record.parent_url = parent.url
    record.inline_level = (parent.inline_level or 0) + 1 if inline else 0
    record.link_type = link_type
    record.post_data = post_data
    return record
def delete(self):
    """Returns a response after attempting to delete the list."""
    if not self.email_enabled:
        raise EmailNotEnabledError("See settings.EMAIL_ENABLED")
    endpoint = f"{self.api_url}/{self.address}"
    return requests.delete(endpoint, auth=("api", self.api_key))
def _start_remaining_containers(self, containers_remaining, tool_d):
    """Start the remaining containers that had no start priority.

    Returns a tuple of (started, failed) container lists.
    """
    started = []
    failed = []
    for container in containers_remaining:
        # _start_container threads the accumulator lists through each call.
        started, failed = self._start_container(
            container, tool_d, started, failed)
    return (started, failed)
def find_blocked_biomass_precursors(reaction, model):
    """Return a list of all biomass precursors that cannot be produced.

    Parameters
    ----------
    reaction : cobra.core.reaction.Reaction
        The biomass reaction of the model under investigation.
    model : cobra.Model
        The metabolic model under investigation.

    Returns
    -------
    list
        Metabolite objects that are reactants of the biomass reaction
        excluding ATP and H2O that cannot be produced by flux balance
        analysis.
    """
    LOGGER.debug("Finding blocked biomass precursors")
    precursors = find_biomass_precursors(model, reaction)
    blocked = list()
    _, upper_bound = helpers.find_bounds(model)
    for metabolite in precursors:
        # Probe each precursor with a temporary demand reaction; the
        # model context manager reverts the change afterwards.
        with model:
            demand_rxn = model.add_boundary(
                metabolite,
                type="safe-demand",
                reaction_id="safe_demand",
                lb=0,
                ub=upper_bound
            )
            flux = helpers.run_fba(model, demand_rxn.id, direction='max')
            if np.isnan(flux) or abs(flux) < 1E-08:
                blocked.append(metabolite)
    return blocked
def get_vars(self):
    """Parse request path and return GET-vars.

    :return: None or dictionary of names and tuples of values
    """
    if self.method() != 'GET':
        raise RuntimeError('Unable to return get vars for non-get method')
    match = WWebRequestProto.get_vars_re.search(self.path())
    if match is None:
        return None
    return urllib.parse.parse_qs(match.group(1), keep_blank_values=1)
def launch_keyword_wizard(self):
    """Launch keyword creation wizard."""
    # Only act when the output layer is the currently active layer.
    if self.iface.activeLayer() != self.output_layer:
        return
    wizard = WizardDialog(
        self.iface.mainWindow(), self.iface, self.dock_widget)
    wizard.set_keywords_creation_mode(self.output_layer)
    wizard.exec_()
def gen_colors(img):
    """Ask ColorThief for palettes of increasing size until at least 8
    colors come back, exiting with an error if that never happens.

    Returns the palette as a list of hex color strings.
    """
    color_cmd = ColorThief(img).get_palette
    for i in range(0, 11):
        raw_colors = color_cmd(color_count=8 + i)
        if len(raw_colors) >= 8:
            break
        logging.warning("ColorThief couldn't generate a palette.")
        logging.warning("Trying a larger palette size %s", 8 + i)
    else:
        # BUG FIX: the original tested `i == 10` inside `range(0, 10, 1)`,
        # whose last value is 9, so the failure branch was unreachable and
        # a short palette could leak through to the return below.
        logging.error("ColorThief couldn't generate a suitable palette.")
        sys.exit(1)
    return [util.rgb_to_hex(color) for color in raw_colors]
def get_func(func, aliasing, implementations):
    """Return the key of a found implementation or the func itself.

    Args:
        func: a function string (possibly an alias) or a callable.
        aliasing: dict mapping known names/callables to canonical strings.
        implementations: dict of available implementation keys.

    Raises:
        ValueError: if ``func`` is neither a known string nor callable,
            or names a nan-variant that does not exist.
        NotImplementedError: if the aliased name has no implementation.
    """
    try:
        func_str = aliasing[func]
    except KeyError:
        if callable(func):
            return func
    else:
        if func_str in implementations:
            return func_str
        if func_str.startswith('nan') and \
                func_str[3:] in funcs_no_separate_nan:
            # BUG FIX: was "%s ...".format(...), which never substituted
            # the argument into the message.
            raise ValueError("{} does not have a nan-version".format(func_str[3:]))
        else:
            raise NotImplementedError("No such function available")
    raise ValueError("func {} is neither a valid function string nor a "
                     "callable object".format(func))
def log_with_color(level):
    """Return a function that logs its text at ``level`` with the
    configured color for that level."""
    def wrapper(text):
        # Resolve the color and logger method at call time.
        colored = coloring(text, log_colors_config[level.upper()])
        getattr(logger, level.lower())(colored)
    return wrapper
def _metaconfigure(self, argv=None):
    """Initialize metaconfig for provisioning self."""
    metaconfig_cls = self._get_metaconfig_class()
    # Nothing to do without a metaconfig, or when self *is* the metaconfig.
    if not metaconfig_cls or self.__class__ is metaconfig_cls:
        return
    override = {
        'conflict_handler': 'resolve',
        'add_help': False,
        'prog': self._parser_kwargs.get('prog'),
    }
    self._metaconf = metaconfig_cls(**override)
    metaparser = self._metaconf.build_parser(
        options=self._metaconf._options, permissive=False, **override)
    # Chain the metaparser in as a parent of our own argument parser.
    self._parser_kwargs.setdefault('parents', [])
    self._parser_kwargs['parents'].append(metaparser)
    self._metaconf._values = self._metaconf.load_options(argv=argv)
    self._metaconf.provision(self)
def aggregate_key(self, aggregate_key):
    """Aggregate values from ``aggregate_key`` into the top-level dict by
    averaging, one entry per inner key found in the aggregated dicts."""
    aggregation = self.data_dict[aggregate_key]
    inner_keys = {k for d in aggregation for k in d.keys()}
    for key in inner_keys:
        # Stack the per-dict arrays and average across the new axis.
        stacked = np.stack([d[key] for d in aggregation], axis=0)
        self.data_dict[key] = np.mean(stacked, axis=0)
def LookupNamespace(self, prefix):
    """Resolve a namespace prefix in the scope of the current element."""
    return libxml2mod.xmlTextReaderLookupNamespace(self._o, prefix)
def available_backends(self, hub=None, group=None, project=None, access_token=None, user_id=None):
    """Get the backends available to use in the QX Platform."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    url = get_backend_url(self.config, hub, group, project)
    ret = self.req.get(url)
    # NOTE(review): a dict response is treated as an error payload and
    # yields []; a None response would raise in the comprehension below
    # — confirm that the API never returns None here.
    if isinstance(ret, dict):
        return []
    return [backend for backend in ret
            if backend.get('status') == 'on']
def ListLanguageIdentifiers(self):
    """Lists the language identifiers."""
    table_view = views.ViewsFactory.GetTableView(
        self._views_format_type, column_names=['Identifier', 'Language'],
        title='Language identifiers')
    rows = sorted(language_ids.LANGUAGE_IDENTIFIERS.items())
    for identifier, values in rows:
        # values[1] holds the human-readable language name.
        table_view.AddRow([identifier, values[1]])
    table_view.Write(self._output_writer)
def kill_tweens(self, obj=None):
    """Stop tweening an object, without completing the motion or firing
    the on_complete callback.

    :param obj: the tweened object to stop, or None to stop all tweens.
    """
    if obj is None:
        self.current_tweens = collections.defaultdict(set)
    else:
        try:
            del self.current_tweens[obj]
        except KeyError:
            # BUG FIX: the original bare `except: pass` also swallowed
            # unrelated errors (e.g. an unhashable obj); only a missing
            # entry should be ignored.
            pass
def _to_numeric(val):
    """Helper converting various data types into numeric representation.

    ints, floats, datetimes and timedeltas pass through unchanged;
    anything else must be convertible via ``float()``.
    """
    passthrough = (int, float, datetime.datetime, datetime.timedelta)
    return val if isinstance(val, passthrough) else float(val)
def delete_edge_by_nodes(self, node_a, node_b):
    """Removes all the edges from node_a to node_b from the graph."""
    source = self.get_node(node_a)
    # Collect matching edge ids first: deleting while iterating the
    # node's edge list would mutate it underneath us.
    doomed = [edge_id for edge_id in source['edges']
              if self.get_edge(edge_id)['vertices'][1] == node_b]
    for edge_id in doomed:
        self.delete_edge_by_id(edge_id)
def scores(self, result, add_new_line=True):
    """Prints out the scores in a pretty, color-coded format."""
    home_goals = result.goalsHomeTeam
    away_goals = result.goalsAwayTeam
    if home_goals > away_goals:
        home_color, away_color = self.colors.WIN, self.colors.LOSE
    elif home_goals < away_goals:
        home_color, away_color = self.colors.LOSE, self.colors.WIN
    else:
        home_color = away_color = self.colors.TIE
    click.secho('%-25s %2s' % (result.homeTeam, home_goals),
                fg=home_color, nl=False)
    click.secho(" vs ", nl=False)
    click.secho('%2s %s' % (away_goals, result.awayTeam.rjust(25)),
                fg=away_color, nl=add_new_line)
def _parse_state(self, config):
    """Scan the provided configuration block and extract the vlan state.

    The config block is expected to always contain the vlan state config;
    the returned dict is intended to be merged into the response dict.

    Args:
        config (str): The vlan configuration block from the node's
            running configuration

    Returns:
        dict: resource dict attribute
    """
    return dict(state=STATE_RE.search(config).group('value'))
def settle_timeout(self) -> int:
    """Return the channel's settle_timeout, read from the most recent
    ChannelOpened event on chain."""
    filter_args = get_filter_args_for_specific_event_from_channel(
        token_network_address=self.token_network.address,
        channel_identifier=self.channel_identifier,
        event_name=ChannelEvent.OPENED,
        contract_manager=self.contract_manager,
    )
    events = self.token_network.proxy.contract.web3.eth.getLogs(filter_args)
    assert len(events) > 0, 'No matching ChannelOpen event found.'
    # Use the latest open event in case the channel was reopened.
    latest_event = decode_event(
        self.contract_manager.get_contract_abi(CONTRACT_TOKEN_NETWORK),
        events[-1],
    )
    return latest_event['args']['settle_timeout']
def compute_discounts(self, precision=None):
    """Returns the total amount of discounts for this line with a
    specific number of decimals.

    @param precision:int number of decimal places
    @return: Decimal
    """
    gross = self.compute_gross(precision)
    total = sum(d.compute(gross, precision) for d in self.__discounts)
    # A discount can never exceed the gross amount itself.
    return min(gross, total)
def getBigIndexFromIndices(self, indices):
    """Get the big (flat) index from a given set of per-dimension indices.

    @param indices per-dimension index sequence
    @return big index
    @note no checks are performed to ensure that the returned
          indices are valid
    """
    return sum(self.dimProd[dim] * indices[dim]
               for dim in range(self.ndims))
def clear_hidden(self, iso_path=None, rr_path=None, joliet_path=None):
    """Clear the ISO9660 hidden attribute on a file or directory.

    This causes the file or directory to show up when listing entries on
    the ISO.  Exactly one of iso_path, rr_path, or joliet_path must be
    specified.

    Parameters:
     iso_path - The path on the ISO to clear the hidden bit from.
     rr_path - The Rock Ridge path on the ISO to clear the hidden bit from.
     joliet_path - The Joliet path on the ISO to clear the hidden bit from.
    Returns:
     Nothing.
    """
    if not self._initialized:
        raise pycdlibexception.PyCdlibInvalidInput('This object is not yet initialized; call either open() or new() to create an ISO')
    provided = [p for p in (iso_path, rr_path, joliet_path) if p is not None]
    if len(provided) != 1:
        raise pycdlibexception.PyCdlibInvalidInput('Must provide exactly one of iso_path, rr_path, or joliet_path')
    if iso_path is not None:
        rec = self._find_iso_record(utils.normpath(iso_path))
    elif rr_path is not None:
        rec = self._find_rr_record(utils.normpath(rr_path))
    else:
        joliet_path_bytes = self._normalize_joliet_path(joliet_path)
        rec = self._find_joliet_record(joliet_path_bytes)
    # False -> record exists/visible: clears the hidden bit.
    rec.change_existence(False)
def _enable_lock(func):
    """Decorator ensuring thread-safety when the cache instance is in
    concurrent mode; a transparent pass-through otherwise."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        self = args[0]
        if not self.is_concurrent:
            return func(*args, **kwargs)
        only_read = kwargs.get('only_read')
        if only_read is None or only_read:
            # Reader path: the lock's context manager takes a read lock.
            with self._rwlock:
                return func(*args, **kwargs)
        # Writer path: acquire/release explicitly around the call.
        self._rwlock.acquire_writer()
        try:
            return func(*args, **kwargs)
        finally:
            self._rwlock.release()
    return wrapper
def selected_display_item(self) -> typing.Optional[DisplayItem.DisplayItem]:
    """Return the selected display item.

    The selected display item is the one that has keyboard focus in the
    data panel, falling back to the selected display panel's item.
    """
    display_item = self.focused_display_item
    if display_item:
        return display_item
    panel = self.selected_display_panel
    return panel.display_item if panel else None
def _get_column_width(self, complete_state):
    """Return the width of each column: the widest completion display plus one."""
    widest = max(get_cwidth(completion.display)
                 for completion in complete_state.current_completions)
    return widest + 1
async def parse_response(response: ClientResponse, schema: dict) -> Any:
    """Validate and parse the BMA answer.

    :param response: Response of aiohttp request.
    :param schema: The expected response structure (JSON schema), or None
        to skip validation.
    :return: the parsed json data.
    :raises jsonschema.ValidationError: if the body is not parseable JSON
        (TypeError/JSONDecodeError are converted) or fails the schema.
    """
    try:
        payload = await response.json()
        response.close()
        if schema is not None:
            jsonschema.validate(payload, schema)
        return payload
    except (TypeError, json.decoder.JSONDecodeError) as err:
        raise jsonschema.ValidationError("Could not parse json : {0}".format(str(err)))
:param response: Response of aiohttp request
:param schema: The expected response structure
:return: the json data |
def template_cycles(self) -> int:
    """The number of cycles dedicated to template.

    Strips all non-digit characters from every token in
    ``self.template_tokens`` and sums the remaining integers.
    """
    counts = (int(re.sub(r'\D', '', token)) for token in self.template_tokens)
    return sum(counts)
def get_list_class(context, list):
    """
    Returns the class to use for the passed in list. We just build something
    up from the model's app label and model name.
    """
    meta = list.model._meta
    return "list_%s_%s" % (meta.app_label, meta.model_name)
from the object type for the list. |
def get_oa_policy(doi):
    """Get OA policy for a given DOI.

    .. note::
        Uses beta.dissem.in API.

    :param doi: A canonical DOI.
    :returns: The OpenAccess policy for the associated publications, or
        ``None`` if unknown (any lookup/parse failure maps to ``None``).
    """
    try:
        response = requests.get("%s%s" % (DISSEMIN_API, doi))
        response.raise_for_status()
        payload = response.json()
        assert payload["status"] == "ok"
        matching = [pub for pub in payload["paper"]["publications"]
                    if pub["doi"] == doi]
        return matching[0]["policy"]
    except (AssertionError, ValueError,
            KeyError, RequestException, IndexError):
        # deliberate best-effort: unknown policy rather than an exception
        return None
.. note::
Uses beta.dissem.in API.
:param doi: A canonical DOI.
:returns: The OpenAccess policy for the associated publications, or \
``None`` if unknown.
>>> tmp = get_oa_policy('10.1209/0295-5075/111/40005'); (tmp["published"], tmp["preprint"], tmp["postprint"], tmp["romeo_id"])
('can', 'can', 'can', '1896')
>>> get_oa_policy('10.1215/9780822387268') is None
True |
def draw_roundedrect(self, x, y, w, h, r=10):
    """Draws a rounded rectangle.

    :param x: left edge of the bounding box.
    :param y: top edge of the bounding box.
    :param w: width of the rectangle.
    :param h: height of the rectangle.
    :param r: corner radius (default 10).
    """
    context = self.context
    # bug fix: `context.save` / `context.restore` were bare attribute
    # accesses and were never actually called, so the graphics state was
    # not saved/restored around the path construction.
    context.save()
    context.move_to(x+r, y)
    context.line_to(x+w-r, y)
    context.curve_to(x+w, y, x+w, y, x+w, y+r)
    context.line_to(x+w, y+h-r)
    context.curve_to(x+w, y+h, x+w, y+h, x+w-r, y+h)
    context.line_to(x+r, y+h)
    context.curve_to(x, y+h, x, y+h, x, y+h-r)
    context.line_to(x, y+r)
    context.curve_to(x, y, x, y, x+r, y)
    context.restore()
def create(self, check, notification_plan, criteria=None,
        disabled=False, label=None, name=None, metadata=None):
    """Creates an alarm that binds the check on the given entity with a
    notification plan.

    The optional 'criteria' parameter is a string in the alarm DSL
    describing alerting conditions and their output states; it is passed
    through unvalidated (see the Cloud Monitoring docs for the language).
    Returns the created alarm on HTTP 201, otherwise None.
    """
    body = {
        "check_id": utils.get_id(check),
        "notification_plan_id": utils.get_id(notification_plan),
    }
    if criteria:
        body["criteria"] = criteria
    if disabled is not None:
        body["disabled"] = disabled
    display_label = label or name
    if display_label:
        body["label"] = display_label
    if metadata:
        body["metadata"] = metadata
    resp, resp_body = self.api.method_post("/%s" % self.uri_base, body=body)
    if resp.status_code == 201:
        return self.get(resp.headers["x-object-id"])
notification plan.
Note that the 'criteria' parameter, if supplied, should be a string
representing the DSL for describing alerting conditions and their
output states. Pyrax does not do any validation of these criteria
statements; it is up to you as the developer to understand the language
and correctly form the statement. This alarm language is documented
online in the Cloud Monitoring section of http://docs.rackspace.com. |
def paginator(limit, offset, record_count, base_uri, page_nav_tpl='&limit={}&offset={}'):
    """Compute pagination info for collection filtering.

    Args:
        limit (int): Collection filter limit (page size, must be > 0).
        offset (int): Collection filter offset (records already skipped).
        record_count (int): Collection filter total record count.
        base_uri (str): Collection filter base uri (without limit, offset).
        page_nav_tpl (str): Pagination template.

    Returns:
        An OrderedDict with total_count, total_pages, next_page and
        prev_page; the page links are None when no next/previous page exists.
    """
    total_pages = int(math.ceil(record_count / limit))
    # bug fix: a next page only exists when records remain BEYOND the current
    # page. The previous `limit + offset <= record_count` produced a link to
    # an empty page whenever offset + limit == record_count.
    has_next = offset + limit < record_count
    has_prev = offset >= limit
    next_page = base_uri + page_nav_tpl.format(limit, offset + limit) if has_next else None
    prev_page = base_uri + page_nav_tpl.format(limit, offset - limit) if has_prev else None
    return OrderedDict([
        ('total_count', record_count),
        ('total_pages', total_pages),
        ('next_page', next_page),
        ('prev_page', prev_page),
    ])
Args:
limit (int): Collection filter limit.
offset (int): Collection filter offset.
record_count (int): Collection filter total record count.
base_uri (str): Collection filter base uri (without limit, offset)
page_nav_tpl (str): Pagination template.
Returns:
A mapping of pagination info. |
def run_checks(collector):
    """Just run the checks for our modules.

    Honours the configured ``artifact``: when set, only the module with that
    name is registered with the scheduler; otherwise every active module is.
    """
    config = collector.configuration
    dashmat = config["dashmat"]
    chosen = dashmat.artifact
    if chosen in (None, "", NotSpecified):
        chosen = None
    modules = config["__active_modules__"]
    module_options = config["modules"]
    config_root = config["config_root"]
    # default to the JSON file store; replaced by redis when a host is set
    datastore = JsonDataStore(os.path.join(config_root, "data.json"))
    if dashmat.redis_host:
        datastore = RedisDataStore(redis.Redis(dashmat.redis_host))
    scheduler = Scheduler(datastore)
    for name, module in modules.items():
        if chosen is not None and name != chosen:
            continue
        server = module.make_server(module_options[name].server_options)
        scheduler.register(module, server, name)
    scheduler.twitch(force=True)
def CreateDevice(self, device_address):
    """Create a new device.

    Emits DeviceCreated on the adapter and returns the device object path.
    Raises org.bluez.Error.Failed when the mock does not know the device.
    """
    device_name = 'dev_' + device_address.replace(':', '_').upper()
    path = self.path + '/' + device_name
    if path not in mockobject.objects:
        raise dbus.exceptions.DBusException(
            'Could not create device for %s.' % device_address,
            name='org.bluez.Error.Failed')
    device_path = dbus.ObjectPath(path, variant_level=1)
    mockobject.objects[self.path].EmitSignal(
        ADAPTER_IFACE, 'DeviceCreated', 'o', [device_path])
    return device_path
async def gantry_position(
        self,
        mount: top_types.Mount,
        critical_point: CriticalPoint = None) -> top_types.Point:
    """Return the position of the critical point as pertains to the gantry.

    This ignores the plunger position and gives the Z-axis a predictable
    name (as ``Point.z``). ``critical_point`` optionally overrides the
    current critical point (see ``current_position``).
    """
    position = await self.current_position(mount, critical_point)
    return top_types.Point(
        x=position[Axis.X],
        y=position[Axis.Y],
        z=position[Axis.by_mount(mount)])
This ignores the plunger position and gives the Z-axis a predictable
name (as :py:attr:`.Point.z`).
`critical_point` specifies an override to the current critical point to
use (see :py:meth:`current_position`). |
def _send_msg(self, header, payload):
if self.verbose:
print('->', repr(header))
print('..', repr(payload))
assert header.payload == len(payload)
try:
sent = self.socket.send(header + payload)
except IOError as err:
raise ConnError(*err.args)
if sent < len(header + payload):
raise ShortWrite(sent, len(header + payload))
assert sent == len(header + payload), sent | send message to server |
def _GetVal(self, obj, key):
if "." in key:
lhs, rhs = key.split(".", 1)
obj2 = getattr(obj, lhs, None)
if obj2 is None:
return None
return self._GetVal(obj2, rhs)
else:
return getattr(obj, key, None) | Recurse down an attribute chain to the actual result data. |
def done(self):
    """Creates an empty marker file flagging the task as `done`."""
    logger.info('Marking %s as done', self)
    marker = self.get_path()
    try:
        # best-effort: the directory may already exist
        os.makedirs(os.path.dirname(marker))
    except OSError:
        pass
    with open(marker, 'w'):
        pass
def update_display(cb, pool, params, plane, qwertz):
    """Draws everything: panel, position and menu, then refreshes the screen.

    :param cb: Cursebox instance used for all drawing.
    :type cb: cursebox.Cursebox
    :param pool: worker pool forwarded to draw_panel.
    :param params: current application parameters.
    :type params: params.Params
    :param plane: plane containing the current Mandelbrot values.
    :type plane: plane.Plane
    :param qwertz: forwarded to draw_menu — presumably selects QWERTZ key
        labels; confirm against draw_menu.
    :return: None
    """
    cb.clear()
    draw_panel(cb, pool, params, plane)
    # presumably mutates params in place before the menu is drawn — confirm
    update_position(params)
    draw_menu(cb, params, qwertz)
    cb.refresh() | Draws everything.
:param cb: Cursebox instance.
:type cb: cursebox.Cursebox
:param params: Current application parameters.
:type params: params.Params
:param plane: Plane containing the current Mandelbrot values.
:type plane: plane.Plane
:return: |
def url_report(self, scan_url, apikey):
    """Fetch the report for a previously submitted URL from the scanning API.

    :param scan_url: the URL whose report should be fetched.
    :param apikey: API key sent with the request.
    :return: parsed JSON report on HTTP_OK; otherwise None (implicitly),
        including when the local rate limiter blocks the call, or when the
        remote rate limit is exceeded (sleeps 20s and gives up — no retry).
    """
    url = self.base_url + "url/report"
    params = {"apikey": apikey, 'resource': scan_url}
    rate_limit_clear = self.rate_limit()
    if rate_limit_clear:
        response = requests.post(url, params=params, headers=self.headers)
        if response.status_code == self.HTTP_OK:
            json_response = response.json()
            return json_response
        elif response.status_code == self.HTTP_RATE_EXCEEDED:
            # back off, but do NOT retry this URL
            time.sleep(20)
        else:
            self.logger.error("sent: %s, HTTP: %d", scan_url, response.status_code)
        # throttle successive public-API calls (only reached on non-OK paths)
        time.sleep(self.public_api_sleep_time) | Send URLS for list of past malicous associations
def extract(json_object, args, csv_writer):
    """Extract the requested attributes from one JSON object and write CSV rows.

    Builds the cartesian product of all values found per attribute: each
    existing partial row is duplicated once per value of the current
    attribute. Attributes with no value contribute "NA".

    :param json_object: parsed JSON object to extract from.
    :param args: namespace with an ``attributes`` list; each attribute
        exposes ``getElement(json_object)`` returning a list of values.
    :param csv_writer: csv.writer-like object receiving one row per
        combination.
    :return: the number of rows written.
    """
    found = [[]]
    for attribute in args.attributes:
        item = attribute.getElement(json_object)
        if len(item) == 0:
            # no value: extend every partial row with the "NA" placeholder
            for row in found:
                row.append("NA")
        else:
            # fan out: copy the current rows once per value of this attribute
            found1 = []
            for value in item:
                if value is None:
                    value = "NA"
                new = copy.deepcopy(found)
                for row in new:
                    row.append(value)
                found1.extend(new)
            found = found1
    for row in found:
        csv_writer.writerow(row)
    return len(found) | Extract and write found attributes.
def _resolve_guake(obj):
    # Walk the known attribute paths from *obj* (the wrapped callable's
    # first argument) to the Guake application instance, or None.
    if obj.__class__.__name__ == 'Guake':
        return obj
    if getattr(obj, 'get_guake', None):
        return obj.get_guake()
    if getattr(obj, 'get_notebook', None):
        return obj.get_notebook().guake
    if getattr(obj, 'guake', None):
        return obj.guake
    if getattr(obj, 'notebook', None):
        return obj.notebook.guake
    return None


def save_tabs_when_changed(func):
    """Decorator for save-tabs-when-changed.

    After the wrapped callable runs, locate the Guake instance reachable
    from its first argument and persist the current tabs when the
    'save-tabs-when-changed' setting is enabled.

    Fixes: the wrapped callable's return value is now passed through
    (it was previously dropped), and wrapper metadata is preserved
    via functools.wraps.
    """
    import functools

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        result = func(*args, **kwargs)
        log.debug("mom, I've been called: %s %s", func.__name__, func)
        g = _resolve_guake(args[0])
        if g and g.settings.general.get_boolean('save-tabs-when-changed'):
            g.save_tabs()
        return result
    return wrapper
def hash(self, algorithm: Algorithm = None) -> str:
    """Generate random hash.

    To change the hashing algorithm, pass a member of the enum
    :class:`~mimesis.enums.Algorithm` as ``algorithm``.

    :param algorithm: Enum object :class:`~mimesis.enums.Algorithm`.
    :return: Hex digest of a random UUID (None if the validated name is
        not provided by hashlib).
    :raises NonEnumerableError: if algorithm is not supported.
    """
    digest_name = self._validate_enum(algorithm, Algorithm)
    hasher = getattr(hashlib, digest_name, None)
    if hasher is not None:
        return hasher(self.uuid().encode()).hexdigest()
To change hashing algorithm, pass parameter ``algorithm``
with needed value of the enum object :class:`~mimesis.enums.Algorithm`
:param algorithm: Enum object :class:`~mimesis.enums.Algorithm`.
:return: Hash.
:raises NonEnumerableError: if algorithm is not supported. |
def flash_spi_attach(self, hspi_arg):
    """Send SPI attach command to enable the SPI flash pins.

    ESP8266 ROM does this when you send flash_begin; ESP32 ROM has it as
    a separate SPI command.
    """
    payload = struct.pack('<I', hspi_arg)
    if not self.IS_STUB:
        # ROM loader expects 4 extra bytes: is_legacy flag (0) plus padding
        payload += struct.pack('BBBB', 0, 0, 0, 0)
    self.check_command("configure SPI flash pins", ESP32ROM.ESP_SPI_ATTACH, payload)
ESP8266 ROM does this when you send flash_begin, ESP32 ROM
has it as a SPI command. |
def min_date(self, symbol):
    """Return the minimum datetime stored for a particular symbol.

    Parameters
    ----------
    symbol : `str`
        symbol name for the item

    Raises
    ------
    NoDataFoundException
        when no document exists for ``symbol``.
    """
    doc = self._collection.find_one(
        {SYMBOL: symbol},
        projection={ID: 0, START: 1},
        sort=[(START, pymongo.ASCENDING)],
    )
    if doc is None:
        raise NoDataFoundException("No Data found for {}".format(symbol))
    return utc_dt_to_local_dt(doc[START])
Parameters
----------
symbol : `str`
symbol name for the item |
def from_sequence(cls, sequence, phos_3_prime=False):
    """Creates a DNA duplex from a nucleotide sequence.

    Parameters
    ----------
    sequence: str
        Nucleotide sequence.
    phos_3_prime: bool, optional
        If False the 5' and the 3' phosphate will be omitted.
    """
    return cls(NucleicAcidStrand(sequence, phos_3_prime=phos_3_prime))
Parameters
----------
sequence: str
Nucleotide sequence.
phos_3_prime: bool, optional
If False, the 5' and the 3' phosphate will be omitted.
def html(text, lazy_images=False):
    """Render a markdown format text into HTML.

    If you also want a Table of Contents inside the markdown, add the tag
    [TOC]; it is rendered as a <ul><li>...</ul> of all <h*>.

    :param text: markdown source text.
    :param lazy_images: bool - if True, activates the LazyImageExtension.
    :return: HTML string.
    """
    base_extensions = [
        'markdown.extensions.nl2br',
        'markdown.extensions.sane_lists',
        'markdown.extensions.toc',
        'markdown.extensions.tables',
        OEmbedExtension(),
    ]
    extra = [LazyImageExtension()] if lazy_images else []
    return markdown.markdown(text, extensions=base_extensions + extra)
- If you want to also build a Table of Contents inside of the markdown,
add the tags: [TOC]
It will include a <ul><li>...</ul> of all <h*>
:param text:
:param lazy_images: bool - If true, it will activate the LazyImageExtension
:return: |
def _set_content(self, value, oktypes):
    """Similar to the content setter, but used when there are no existing
    oktypes; ``None`` is treated as an empty content list."""
    items = value if value is not None else []
    self._content = ListContainer(*items, oktypes=oktypes, parent=self)
def p(value, bits=None, endian=None, target=None):
    """Pack a signed pointer for a given target.

    Dispatches to the width-specific packer (e.g. ``p32``/``p64``) chosen
    from ``bits`` or the target's word size.

    Args:
        value(int): The value to pack.
        bits: Override the default word size; ``None`` uses the target's.
        endian: Override the default byte order; ``None`` uses the target's.
        target: Override the default target; ``None`` uses the global one.
    """
    packer = globals()['p%d' % _get_bits(bits, target)]
    return packer(value, endian=endian, target=target)
Args:
value(int): The value to pack.
bits(:class:`pwnypack.target.Target.Bits`): Override the default
word size. If ``None`` it will look at the word size of
``target``.
endian(:class:`~pwnypack.target.Target.Endian`): Override the default
byte order. If ``None``, it will look at the byte order of
the ``target`` argument.
target(:class:`~pwnypack.target.Target`): Override the default byte
order. If ``None``, it will look at the byte order of
the global :data:`~pwnypack.target.target`. |
def open_sciobj_file_by_pid_ctx(pid, write=False):
    """Context manager yielding the Science Object file for ``pid`` at its
    default location within the local SciObj store tree.

    If ``write`` is True the file is opened for writing (missing directories
    are handled by the underlying path context manager).
    """
    sciobj_path = get_abs_sciobj_file_path_by_pid(pid)
    with open_sciobj_file_by_path_ctx(sciobj_path, write) as sciobj_file:
        yield sciobj_file
location within the tree of the local SciObj store.
If ``write`` is True, the file is opened for writing and any missing directories are
created. Return the file handle and file_url with the file location in a suitable
form for storing in the DB.
If nothing was written to the file, it is deleted. |
def start_notebook(self, name, context: dict, fg=False):
    """Start a new IPython Notebook daemon.

    :param name: the owner of the Notebook; used to address the daemon
        process and its content folder.
    :param context: extra context passed to the started Notebook; must
        contain ``context_hash`` (int) identifying the launch parameters.
        A copy is augmented with ``http_port`` and ``websocket_url``.
    :param fg: if True, run the daemon command in the foreground.
    """
    # NOTE(review): input validation via assert — stripped under -O
    assert context
    assert type(context) == dict
    assert "context_hash" in context
    assert type(context["context_hash"]) == int
    http_port = self.pick_port()
    assert http_port
    # work on a copy so the caller's dict is not mutated
    context = context.copy()
    context["http_port"] = http_port
    if "websocket_url" not in context:
        context["websocket_url"] = "ws://localhost:{port}".format(port=http_port)
    if "{port}" in context["websocket_url"]:
        # caller-supplied template: substitute the freshly picked port
        context["websocket_url"] = context["websocket_url"].format(port=http_port)
    pid = self.get_pid(name)
    assert "terminated" not in context
    comm.set_context(pid, context)
    if fg:
        self.exec_notebook_daemon_command(name, "fg", port=http_port)
    else:
        self.exec_notebook_daemon_command(name, "start", port=http_port) | Start new IPython Notebook daemon.
:param name: The owner of the Notebook will be *name*. He/she gets a new Notebook content folder created where all files are placed.
:param context: Extra context information passed to the started Notebook. This must contain {context_hash:int} parameter used to identify the launch parameters for the notebook |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.