docstring stringlengths 52 499 | function stringlengths 67 35.2k | __index_level_0__ int64 52.6k 1.16M |
|---|---|---|
Deletes the pipeline specified by the key
Args:
returns (status code for the DELETE request, success message dict)
def delete_pipeline(self, pipeline_key):
    """Delete the pipeline identified by *pipeline_key*.

    Args:
        pipeline_key: key of the pipeline to remove.
    Returns:
        tuple: (HTTP status code, response dict) of the DELETE request —
        expect (200, {'success': 'true'}) on success — or
        (requests.codes.bad_request, None) when no key was supplied.
    """
    # Guard clause: an empty/None key never reaches the server.
    if not pipeline_key:
        return requests.codes.bad_request, None
    uri = '/'.join([self.api_uri, self.pipelines_suffix, pipeline_key])
    return self._req('delete', uri)
Deletes all pipelines
Args:
def delete_all_pipelines(self):
    """Delete every pipeline visible to this client.

    Returns:
        tuple: (requests.codes.ok on overall success, otherwise the last
        failing status code; data from the last request performed).
    """
    code, data = self.get_pipeline()
    if code != requests.codes.ok:
        # Could not even list the pipelines; report that failure as-is.
        return code, data
    for pipeline in data:
        status, resp = self.delete_pipeline(pipeline['pipelineKey'])
        if status != requests.codes.ok:
            # Keep deleting, but remember the most recent failure.
            code, data = status, resp
    return code, data
Creates a pipeline with the provided attributes.
Args:
name required name string
kwargs {name, description, orgWide, aclEntries} user
specifiable ones only
return (status code, pipeline_dict) (as created) | def create_pipeline(self, name, description, **kwargs):
#req sanity check
if not (name and description):
return requests.codes.bad_request, None
kwargs.update({'name':name, 'description':description})
new_pl = StreakPipeline(**kwargs)
uri = '/'.join([
self.api_uri,
self.pipelines_suffix
... | 1,109,415 |
Updates a pipeline with the provided attributes.
Args:
key required identifier for the pipeline
pipeline StreakPipeline object
return (status code, pipeline_dict) | def update_pipeline(self, pipeline):
#req sanity check
payload = None
if type(pipeline) is not StreakPipeline:
return requests.codes.bad_request, None
payload = pipeline.to_dict(rw = True)
try:
uri = '/'.join([
self.api_uri,
self.pipelines_suffix,
pipeline.attributes['pipelineKey... | 1,109,416 |
Gets a list of one/all box objects. Performs a single GET.
To go deeper individual boxes need to be polled for their contents.
This is a directory for what we could ask for.
Args:
box_key key for the target box (default: None i.e. ALL)
sort_by in desc order by 'creationTimestamp' or 'lastUpdatedTimestamp'... | def get_box(self, box_key = None, sort_by = None):
uri = '/'.join([
self.api_uri,
self.boxes_suffix
])
if box_key:
uri = '/'.join([
uri,
box_key
])
if sort_by:
if sort_by in ['creationTimestamp', 'lastUpdatedTimestamp']:
uri += self.sort_by_postfix + sort_by
... | 1,109,417 |
Gets a list of all box objects in a pipeline. Performs a single GET.
Args:
pipeline_key key for pipeline
sort_by in desc order by 'creationTimestamp' or 'lastUpdatedTimestamp'
Not sure if it is supported
returns (status code for the GET request, dict of boxes) | def get_pipeline_boxes(self, pipeline_key, sort_by = None):
if not pipeline_key:
return requests.codes.bad_request, None
uri = '/'.join([
self.api_uri,
self.pipelines_suffix,
pipeline_key
])
if sort_by:
if sort_by in ['creationTimestamp', 'lastUpdatedTimestamp']:
uri += ... | 1,109,418 |
Deletes the box specified by the key
Args:
def delete_box(self, key):
    """Delete the box identified by *key*.

    Args:
        key: key of the box to remove.
    Returns:
        tuple: (HTTP status code, success message dict) of the DELETE
        request, or (requests.codes.bad_request, None) for an empty key.
    """
    if not key:
        return requests.codes.bad_request, None
    return self._req('delete', '/'.join([self.box_root_uri, key]))
Creates a box in the pipeline specified with the provided attributes.
Args:
name required name string
kwargs {...} see StreakBox object for details
return (status code, box dict) | def create_pipeline_box(self, pipeline_key, name, **kwargs):
#req sanity check
if not (pipeline_key and name):
return requests.codes.bad_request, None
uri = '/'.join([
self.api_uri,
self.pipelines_suffix,
pipeline_key,
self.boxes_suffix
])
kwargs.update({'name':name})
n... | 1,109,420 |
Updates a box with the provided attributes.
Args:
box StreakBox object with updated info
return (status code, box in dict form) | def update_box(self, box):
#req sanity check
payload = None
if type(box) is not StreakBox:
return requests.codes.bad_request, None
payload = box.to_dict(rw = True)
try:
uri = self.box_root_uri + '/' + box.attributes['boxKey']
except KeyError:
return requests.codes.bad_request, None
code, ... | 1,109,421 |
Takes a keyword and returns the search results.
Works for boxes only?
Args:
kw keyword (str) to search for.
def search(self, kw):
    """Run a keyword search and return the results (boxes only?).

    Args:
        kw (str): keyword to search for.
    Returns:
        tuple: (HTTP status code, list of result dicts), or
        (requests.codes.bad_request, None) for an empty keyword.
    """
    if not kw:
        return requests.codes.bad_request, None
    # NOTE(review): kw is appended verbatim — presumably callers pass
    # URL-safe text; confirm special characters are escaped upstream.
    return self._req('get', self.search_uri + kw)
Get all/one specific snippet by its key
Args:
key snippet key (default: None i.e. ALL)
def get_snippet(self, snippet_key=None):
    """Fetch one snippet by key, or all snippets when no key is given.

    Args:
        snippet_key: snippet key (default: None, i.e. ALL).
    Returns:
        tuple: (HTTP status code, snippet dict or list thereof).
    """
    uri = '/'.join([self.api_uri, self.snippets_suffix])
    if snippet_key:
        uri = '/'.join([uri, snippet_key])
    return self._req('get', uri)
Gets a list of one/all stage objects in a pipeline. Performs a single GET.
Args:
pipeline_key key for pipeline
stage_key key for stage (default: None i.e. ALL)
sort_by in desc order by 'creationTimestamp' or 'lastUpdatedTimestamp'
may or may not be supported
returns (status code for the GET r... | def get_pipeline_stage(self, pipeline_key, stage_key = None, sort_by = None):
if not pipeline_key:
return requests.codes.bad_request, None
uri = '/'.join([
self.api_uri,
self.pipelines_suffix,
pipeline_key,
self.stages_suffix
])
if stage_key:
uri = '/'.join([
uri,
... | 1,109,424 |
Creates a pipeline stage with the provided attributes.
Args:
name required name string
kwargs {..} see StreakStage object for details
return (status code, stage dict) | def create_pipeline_stage(self, pipeline_key, name, **kwargs):
#req sanity check
if not (pipeline_key and name):
return requests.codes.bad_request, None
uri = '/'.join([
self.api_uri,
self.pipelines_suffix,
pipeline_key,
self.stages_suffix])
kwargs.update({'name':name})
new_... | 1,109,425 |
Deletes a stage in the pipeline by stage key and pipeline key
Args:
pipeline_key key for pipeline
stage_key key for stage
sort_by in desc order by 'creationTimestamp' or 'lastUpdatedTimestamp'
returns (status code for the GET request, dict of op report) | def delete_pipeline_stage(self, pipeline_key, stage_key, sort_by = None):
if not (pipeline_key and stage_key):
return requests.codes.bad_request, None
uri = '/'.join([
self.api_uri,
self.pipelines_suffix,
pipeline_key,
self.stages_suffix,
stage_key
])
code, data = sel... | 1,109,426 |
Updates a pipeline stage with the provided attributes.
Args:
pipeline_key required identifier for the pipeline
stage StreakStage object
kwargs {name}
return (status code, stage dict) | def update_pipeline_stage(self, stage):
#req sanity check
payload = None
if type(stage) is not StreakStage:
return requests.codes.bad_request, None
payload = stage.to_dict(rw = True)
#print(new_pl.attributes)
#print(new_pl.to_dict())
#raw_input()
try:
uri = '/'.join([self.api_uri,
se... | 1,109,427 |
Creates a field with the provided attributes.
Args:
uri base uri for the field (pipeline or box uri)
name required name string
field_type required type string [TEXT_INPUT, DATE or PERSON]
kwargs {}
return (status code, field dict) | def _create_field(self, uri , name, field_type, **kwargs):
#req sanity check
if not (name and (field_type in ['TEXT_INPUT', 'DATE', 'PERSON'])):
return requests.codes.bad_request, {'success' : 'False',
'error': 'name needs to be provided and field_type needs to be \'TEXT_INPUT\', \'DATE\' or \'PE... | 1,109,428 |
Updates a field with the provided attributes.
Args:
key required identifier for the pipeline or box
field StreakField object
kwargs {name, type} see StreakField for details
return (status code, field dict) | def _update_field(self, uri, field):
#req sanity check
payload = None
if type(field) is not StreakField:
return requests.codes.bad_request, None
payload = field.to_dict(rw = True)
#print(new_pl.attributes)
#print(new_pl.to_dict())
#raw_input()
try:
uri = '/'.join([
uri,
field... | 1,109,429 |
Gets one/all field in a pipeline
Args:
pipeline_key key for pipeline
field_key key for field (default: None i.e. ALL)
def get_pipeline_field(self, pipeline_key, field_key=None):
    """Fetch one field of a pipeline, or all of them.

    Args:
        pipeline_key: key of the pipeline to inspect.
        field_key: key of a single field (default: None, i.e. ALL).
    Returns:
        tuple: (HTTP status code, field dict or list thereof).
    """
    base = '/'.join([
        self.api_uri,
        self.pipelines_suffix,
        pipeline_key,
        self.fields_suffix,
    ])
    uri = '/'.join([base, field_key]) if field_key else base
    return self._req('get', uri)
Creates a pipeline field with the provided attributes.
Args:
pipeline_key specifying the pipeline to add the field to
name required name string
field_type required type string [TEXT_INPUT, DATE or PERSON]
kwargs {}
def create_pipeline_field(self, pipeline_key, name, field_type, **kwargs):
    """Create a field on a pipeline with the provided attributes.

    Args:
        pipeline_key: pipeline the field is added to.
        name (str): required field name.
        field_type (str): required type — TEXT_INPUT, DATE or PERSON.
        kwargs: extra field attributes.
    Returns:
        tuple: (HTTP status code, field dict).
    """
    uri = '/'.join([
        self.api_uri,
        self.pipelines_suffix,
        pipeline_key,
        self.fields_suffix,
    ])
    # Validation of name/field_type happens in the shared helper.
    return self._create_field(uri, name, field_type, **kwargs)
Updates pipeline field as specified
Args:
pipeline_key key for pipeline where the fields lives
field StreakField object with fresh data
def update_pipeline_field(self, pipeline_key, field):
    """Update a field that lives on a pipeline.

    Args:
        pipeline_key: pipeline that owns the field.
        field: StreakField object carrying the fresh data.
    Returns:
        tuple: (HTTP status code, updated field dict).
    """
    uri = '/'.join([
        self.api_uri,
        self.pipelines_suffix,
        pipeline_key,
        self.fields_suffix,
    ])
    return self._update_field(uri, field)
Gets one/all field in a box
Args:
box_key key for pipeline
field_key key for field (default: None i.e. ALL)
def get_box_field(self, box_key, field_key=None):
    """Fetch one field of a box, or all of them.

    Args:
        box_key: key of the box to inspect.
        field_key: key of a single field (default: None, i.e. ALL).
    Returns:
        tuple: (HTTP status code, field dict or list thereof).
    """
    # Endpoint known not to work — bail out before building the request.
    self._raise_unimplemented_error()
    base = '/'.join([
        self.api_uri,
        self.boxes_suffix,
        box_key,
        self.fields_suffix,
    ])
    uri = '/'.join([base, field_key]) if field_key else base
    return self._req('get', uri)
Creates a box field with the provided attributes.
Args:
box_key specifying the box to add the field to
name required name string
field_type required type string [TEXT_INPUT, DATE or PERSON]
kwargs {}
return (status code, field dict) | def create_box_field(self, box_key, name, field_type, **kwargs):
#does not work
self._raise_unimplemented_error()
uri = '/'.join([self.api_uri,
self.boxes_suffix,
box_key,
self.fields_suffix
])
code, data = self._create_field(uri, name, field_type, **kwargs)
return code, d... | 1,109,434 |
Updates box field as specified
Args:
box_key key for pipeline where the fields lives
field StreakField object with fresh data
def update_box_field(self, box_key, field):
    """Update a field that lives on a box.

    Args:
        box_key: box that owns the field.
        field: StreakField object carrying the fresh data.
    Returns:
        tuple: (HTTP status code, updated field dict).
    """
    # Endpoint known not to work — bail out before building the request.
    self._raise_unimplemented_error()
    uri = '/'.join([
        self.api_uri,
        self.boxes_suffix,
        box_key,
        self.fields_suffix,
    ])
    return self._update_field(uri, field)
General purpose function to get newsfeeds
Args:
uri uri for the feed base
detail_level arguments for req str ['ALL', 'CONDENSED']
return list of feed dicts parse at your convenience | def _get_newsfeeds(self, uri, detail_level = None):
if detail_level:
if detail_level not in ['ALL', 'CONDENSED']:
return requests.codes.bad_request, {'success' : 'False',
'error': 'detailLevel needs to be provided and field_type needs to be \'ALL\' or \'CONDENSED\''}
uri += self.detail_leve... | 1,109,436 |
Function to get newsfeed for a pipeline
Args:
pipeline_key pipeline key
detail_level arguments for req str ['ALL', 'CONDENSED']
def get_pipeline_newsfeeds(self, pipeline_key, detail_level=None):
    """Fetch the newsfeed of a pipeline.

    Args:
        pipeline_key: key of the pipeline.
        detail_level (str): 'ALL' or 'CONDENSED' (default: None).
    Returns:
        list of feed dicts — parse at your convenience.
    """
    uri = '/'.join([
        self.api_uri,
        self.pipelines_suffix,
        pipeline_key,
        self.newsfeed_suffix,
    ])
    return self._get_newsfeeds(uri, detail_level)
Function to get newsfeed for a pipeline
Args:
box pipeline key
detail_level arguments for req str ['ALL', 'CONDENSED']
def get_box_newsfeeds(self, box_key, detail_level=None):
    """Fetch the newsfeed of a box.

    Args:
        box_key: key of the box.
        detail_level (str): 'ALL' or 'CONDENSED' (default: None).
    Returns:
        list of feed dicts — parse at your convenience.
    """
    uri = '/'.join([
        self.api_uri,
        self.boxes_suffix,
        box_key,
        self.newsfeed_suffix,
    ])
    return self._get_newsfeeds(uri, detail_level)
Gets a thread specified by thread_key
Args:
thread_key thread to get
def get_thread(self, thread_key):
    """Fetch the thread identified by *thread_key*.

    Args:
        thread_key: thread to get.
    Returns:
        tuple: (HTTP status code, thread dict).
    """
    uri = '/'.join([self.api_uri, self.threads_suffix, thread_key])
    return self._req('get', uri)
Gets all threads in a specified box
Args:
box_key box to look in
def get_box_threads(self, box_key):
    """Fetch every thread inside the given box.

    Args:
        box_key: box to look in.
    Returns:
        tuple: (HTTP status code, list of thread dicts).
    """
    uri = '/'.join([
        self.api_uri,
        self.boxes_suffix,
        box_key,
        self.threads_suffix,
    ])
    return self._req('get', uri)
Creates a comments in a box with the provided attributes.
Args:
box_key key for box
message message string
kwargs {} see StreakComment object for more information
return (status code, comment dict) | def create_box_comments(self, box_key, message, **kwargs):
uri = '/'.join([
self.api_uri,
self.boxes_suffix,
box_key,
self.comments_suffix
])
if not (box_key and message):
return requests.codes.bad_request, None
kwargs.update({'message':message})
new_cmt = StreakComment(**... | 1,109,441 |
Gets comments in a box with the provided attributes.
Args:
box_key key for box
def get_box_comments(self, box_key):
    """Fetch the comments attached to a box.

    Args:
        box_key: key of the box.
    Returns:
        tuple: (HTTP status code, list of comment dicts).
    """
    uri = '/'.join([
        self.api_uri,
        self.boxes_suffix,
        box_key,
        self.comments_suffix,
    ])
    return self._req('get', uri)
Deletes comment in a box with the comment_key
Args:
box_key key for box
def delete_box_comment(self, box_key, comment_key):
    """Delete a comment from a box by its comment key.

    Args:
        box_key: key of the box.
        comment_key: key of the comment to delete.
    Returns:
        tuple: (HTTP status code, response dict).
    """
    # Endpoint known not to work — bail out before building the request.
    self._raise_unimplemented_error()
    uri = '/'.join([
        self.api_uri,
        self.boxes_suffix,
        box_key,
        self.comments_suffix,
        comment_key,
    ])
    return self._req('delete', uri)
Creates a reminder with the provided attributes.
Args:
box_key specifying the box to add the field to
message message for the reminder
remind_date date to remind on in ticks.
remind_followers true/false
kwargs {..} see StreakReminder object for details
return (status code, reminder dic... | def create_box_reminder(self, box_key, message, remind_date, remind_follwers, **kwargs):
uri = '/'.join([
self.api_uri,
self.boxes_suffix,
box_key,
self.reminders_suffix
])
kwargs.update({ 'message':message,
'remindDate':remind_date,
'remindFollowers': remind_follwers... | 1,109,444 |
Creates a reminder with the provided attributes.
Args:
reminder updated reminder of StreakReminder type
return (status code, reminder dict) | def update_reminder(self, reminder):
uri = '/'.join([self.api_uri,
self.reminders_suffix,
])
#req sanity check
payload = None
if type(reminder) is not StreakReminder:
return requests.codes.bad_request, None
payload = reminder.to_dict(rw = True)
try:
uri = '/'.join([uri, reminder.att... | 1,109,445 |
Gets all reminders for a box
Args:
reminder updated reminder of StreakReminder type
def get_box_reminders(self, box_key):
    """Fetch every reminder attached to a box.

    Args:
        box_key: key of the box whose reminders are wanted.
    Returns:
        tuple: (HTTP status code, list of reminder dicts), or
        (requests.codes.bad_request, None) when no key is supplied.
    """
    # Bug fix: the sanity check was inverted (`if box_key:` returned
    # bad_request), which rejected every valid key and let empty ones
    # through to the request builder.
    if not box_key:
        return requests.codes.bad_request, None
    uri = '/'.join([self.api_uri,
                    self.boxes_suffix,
                    box_key,
                    self.reminders_suffix
                    ])
    return self._req('get', uri)
Gets one reminder
Args:
reminder_key key for the reminder to get
def get_reminder(self, reminder_key):
    """Fetch a single reminder.

    Args:
        reminder_key: key of the reminder to get.
    Returns:
        tuple: (HTTP status code, reminder dict), or
        (requests.codes.bad_request, None) when no key is supplied.
    """
    # Bug fix: the sanity check was inverted (`if reminder_key:` returned
    # bad_request), which rejected every valid key and let empty ones
    # through to the request builder.
    if not reminder_key:
        return requests.codes.bad_request, None
    uri = '/'.join([
        self.api_uri,
        self.reminders_suffix,
        reminder_key
        ])
    return self._req('get', uri)
Gets file information
Args:
file_key key for the file to get
def get_file(self, file_key):
    """Fetch metadata for a single file.

    Args:
        file_key: key of the file to get.
    Returns:
        tuple: (HTTP status code, dict of file info).
    """
    uri = '/'.join([self.api_uri, self.files_suffix, file_key])
    return self._req('get', uri)
Gets file contents
Args:
file_key key for the file
def get_file_contents(self, file_key):
    """Fetch the contents of a file.

    Args:
        file_key: key of the file.
    Returns:
        tuple: (HTTP status code, response payload).
    """
    # Endpoint known not to work — bail out before building the request.
    self._raise_unimplemented_error()
    uri = '/'.join([
        self.api_uri,
        self.files_suffix,
        file_key,
        self.file_contents_suffix,
    ])
    return self._req('get', uri)
Gets link to file
Args:
file_key key for the file
def get_file_link(self, file_key):
    """Fetch a link to a file.

    Args:
        file_key: key of the file.
    Returns:
        tuple: (HTTP status code, response payload).
    """
    # Endpoint known not to work — bail out before building the request.
    self._raise_unimplemented_error()
    uri = '/'.join([
        self.api_uri,
        self.files_suffix,
        file_key,
        self.file_link_suffix,
    ])
    return self._req('get', uri)
Gets the file infos in a single box.
Args:
box_key key for the file
def get_box_files(self, box_key):
    """Fetch info for every file attached to a single box.

    Args:
        box_key: key of the box.
    Returns:
        tuple: (HTTP status code, list of file info dicts).
    """
    uri = '/'.join([
        self.api_uri,
        self.boxes_suffix,
        box_key,
        self.files_suffix,
    ])
    return self._req('get', uri)
Dips the brush in paint.
Arguments:
index - an integer between 0 and 7, inclusive. Tells the bot which color you want. | def get_color(index):
if index in range(0, 8):
# Send the turtle to the top-left corner of the window to imitate the position of the WCB's brush.
state['turtle'].goto(-WCB_WIDTH / 2, -WCB_HEIGHT / 2)
_make_cnc_request("tool.color./" + str(index))
# This is the order of the col... | 1,109,511 |
Moves the brush to a particular position.
Arguments:
x - a number between -250 and 250.
def move_to(x, y):
    """Move the brush to an absolute position.

    Arguments:
        x - a number between -250 and 250.
        y - a number between -180 and 180.
    """
    # Drive the bot first, then mirror the move on the on-screen turtle.
    _make_cnc_request("coord/{0}/{1}".format(x, y))
    state['turtle'].goto(x, y)
Turns the brush's "turtle" to the left.
Arguments:
relative_angle - a number like 10.
def turn_left(relative_angle):
    """Turn the brush's "turtle" to the left.

    Arguments:
        relative_angle - a number like 10; bigger numbers turn farther left.
    """
    # Whole numbers only (10.0 passes too, since it equals int(10.0)).
    assert relative_angle == int(relative_angle), \
        "turn_left() only accepts integers, but you gave it " + str(relative_angle)
    _make_cnc_request("move.left./" + str(relative_angle))
    state['turtle'].left(relative_angle)
Turns the brush's "turtle" to the right.
Arguments:
relative_angle - a number like 10.
def turn_right(relative_angle):
    """Turn the brush's "turtle" to the right.

    Arguments:
        relative_angle - a number like 10; bigger numbers turn farther right.
    """
    # Whole numbers only (10.0 passes too, since it equals int(10.0)).
    assert relative_angle == int(relative_angle), \
        "turn_right() only accepts integers, but you gave it " + str(relative_angle)
    _make_cnc_request("move.right./" + str(relative_angle))
    state['turtle'].right(relative_angle)
Applies an instance method with name `fqdn` to `o`.
Args:
fqdn (str): fully-qualified domain name of the object.
def _instance_transform(fqdn, o, *args, **kwargs):
    """Apply the instance method named by *fqdn* to *o*.

    Args:
        fqdn (str): fully-qualified domain name of the method.
        o: object the instance method is applied to.
    """
    # Delegate to the package-level transform with the search anchored
    # at the start of the name (start=0).
    return _package_transform(o, fqdn, *args, start=0, **kwargs)
Enables/disables logs to be written to files
Arguments:
set_file (:obj:`bool`): False disables, True enables | def set_log_rotate_handler(self, set_file):
if hasattr(self, 'debug_handler'):
if set_file:
self.log.addHandler(self.debug_handler)
self.log.addHandler(self.error_handler)
else:
try:
self.log.removeHandler(self.... | 1,109,813 |
Configures class log level
Arguments:
log_level (:obj:`str`): log level ('NOTSET','DEBUG','INFO' 'WARNING',
'ERROR', 'CRITICAL') | def set_log_level(self, log_level):
if log_level == 'DEBUG':
self.log.setLevel(logging.DEBUG)
self.log.debug("Changing log level to "+log_level)
elif log_level == 'INFO':
self.log.setLevel(logging.INFO)
self.log.info("Changing log level to "+log_l... | 1,109,814 |
Configures log format
Arguments:
log_type (:obj:`str`): log type (error, debug or stream)
log_format (:obj:`str`): log format (ex:"Log: %(message)s | Log level:%(levelname)s |
Date:%(asctime)s',datefmt='%m/%d/%Y %I:%M:%S") | def set_log_format(self, log_type, log_format):
if not (log_type == 'error' or log_type == 'stream' or log_type == 'debug'):
self.log.debug('Log type must be error, stream, or debug')
else:
self.default_formatter = logging.Formatter(log_format)
if log_type ==... | 1,109,815 |
Returns a list of all the projects and tasks available in the `acorn`
database directory.
Args:
target (str): directory to list the projects for. Defaults to the configured
database directory.
Returns:
dict: keys are project names; values are lists of tasks associated with th... | def list_tasks(target=None):
from os import getcwd, chdir
from glob import glob
original = getcwd()
if target is None:# pragma: no cover
target = _dbdir()
chdir(target)
result = {}
for filename in glob("*.*.json"):
project, task = filename.split('.')[0:2]
... | 1,110,069 |
Sets the active project and task. All subsequent logging will be saved to
the database with that project and task.
Args:
project_ (str): active project name; a project can have multiple tasks.
task_ (str): active task name. Logging is separated at the project and task
def set_task(project_, task_):
    """Set the active project and task for all subsequent logging.

    Args:
        project_ (str): active project name; a project can hold many tasks.
        task_ (str): active task name; logging is separated per project/task.
    """
    global project, task
    project, task = project_, task_
    msg.okay("Set project name to {}.{}".format(project, task), 2)
Returns the :class:`Instance` of the specified object if it is one that
we track by default.
Args:
obj (object): any python object passed as an argument to a method.
Returns:
Instance: if the object is trackable, the Instance instance of
that object; else None. | def tracker(obj):
import types as typ
global oids, uuids
import six
from inspect import isclass
untracked = (six.string_types, six.integer_types, float,
complex, six.text_type)
semitrack = (list, dict, set, tuple)
if six.PY3: # pragma: no cover
semitrack = semi... | 1,110,072 |
Cleans the specified python `dict` by converting any tuple keys to
strings so that they can be serialized by JSON.
Args:
d (dict): python dictionary to clean up.
Returns:
dict: cleaned-up dictionary. | def _json_clean(d):
result = {}
compkeys = {}
for k, v in d.items():
if not isinstance(k, tuple):
result[k] = v
else:
#v is a list of entries for instance methods/constructors on the
#UUID of the key. Instead of using the composite tuple keys, we
... | 1,110,074 |
Saves the specified image to disk.
Args:
byteio (bytes): image bytes to save to disk.
imgfmt (str): used as the extension of the saved file.
Returns:
str: a uuid for the saved image that can be added to the database entry. | def save_image(byteio, imgfmt):
from os import path, mkdir
ptdir = "{}.{}".format(project, task)
uuid = str(uuid4())
#Save the image within the project/task specific folder.
idir = path.join(dbdir, ptdir)
if not path.isdir(idir):
mkdir(idir)
ipath = path.join(idir, "{}... | 1,110,075 |
Records the specified entry to the key-value store under the specified
entity key.
Args:
ekey (str): fqdn/uuid of the method/object to store the entry for.
entry (dict): attributes and values gleaned from the execution.
diff (bool): when True, the "c" element of `entry` will be diffed
def record(ekey, entry, diff=False):
    """Store *entry* in the key-value store under entity key *ekey*.

    Args:
        ekey (str): fqdn/uuid of the method/object to store the entry for.
        entry (dict): attributes and values gleaned from the execution.
        diff (bool): when True, the "c" element of *entry* is diffed.
    """
    db = active_db()
    db.record(ekey, entry, diff)
    # save() throttles itself to the frequency configured in the
    # settings file, so calling it on every record is cheap.
    db.save()
Logs the object with the specified `uuid` to `self.uuids` if
possible.
Args:
uuid (str): string value of :meth:`uuid.uuid4` value for the
def log_uuid(self, uuid):
    """Log the object with the given *uuid* to ``self.uuids`` if possible.

    Args:
        uuid (str): string value of :meth:`uuid.uuid4` for the object.
    """
    # Describe each object at most once: skip anything we already hold,
    # and anything the global registry doesn't know about.
    if uuid in self.uuids or uuid not in uuids:
        return
    self.uuids[uuid] = uuids[uuid].describe()
Records the specified entry to the key-value store under the specified
entity key.
Args:
ekey (str): fqdn/uuid of the method/object to store the entry for.
entry (dict): attributes and values gleaned from the execution.
diff (bool): when True, the "c" element of `ent... | def record(self, ekey, entry, diff=False):
if ekey not in self.entities:
self.entities[ekey] = []
#See if we need to diff the code to compress it.
if diff and len(self.entities[ekey]) > 0:
#Compress the code element of the current entry that we are s... | 1,110,080 |
Serializes the database file to disk.
Args:
force (bool): when True, the elapsed time since last save is ignored
and the database is saved anyway (subject to global
:data:`writeable` setting). | def save(self, force=False):
from time import time
# Since the DBs can get rather large, we don't want to save them every
# single time a method is called. Instead, we only save them at the
# frequency specified in the global settings file.
from datetime import datetime... | 1,110,083 |
Stops all active threads and rejects new tasks to be added
Args:
block (bool): If True, block until all threads are closed | def stop(self, block=True):
self._stop = True
# Removing tasks in queue
self.empty_queue()
# All active threads
# With the DoNothing function
# Because self._stop is True each thread will process at most one of the DoNothing functions
# Hence it is ensu... | 1,110,163 |
Get the appropriate supervisor to use and pre-apply the function.
Args:
def get_supervisor(func: types.AnyFunction) -> types.Supervisor:
    """Pick the matching supervisor for *func* and pre-apply it.

    Args:
        func: A function (sync or coroutine).
    Raises:
        TypeError: if *func* is not callable.
    """
    if not callable(func):
        raise TypeError("func is not callable")
    # Coroutine functions get the async supervisor, everything else sync.
    chosen = (_async_supervisor if asyncio.iscoroutinefunction(func)
              else _sync_supervisor)
    return functools.partial(chosen, func)
Supervisor for running an animation with an asynchronous function.
Args:
func: A function to be run alongside an animation.
animation_: An infinite generator that produces
strings for the animation.
step: Seconds between each animation frame.
*args: Arguments for func.
async def _async_supervisor(func, animation_, step, *args, **kwargs):
    """Run *func* (a coroutine function) alongside a CLI animation.

    Args:
        func: coroutine function to run while the animation plays.
        animation_: infinite generator producing animation frames.
        step: seconds between animation frames.
        *args: positional arguments for func.
        **kwargs: keyword arguments for func.
    Returns:
        whatever *func* returns.
    """
    # The animation runs on a worker thread; the terminating event stops
    # it when the context exits after func completes.
    with ThreadPoolExecutor(max_workers=2) as pool, \
            _terminating_event() as event:
        pool.submit(animate_cli, animation_, step, event)
        return await func(*args, **kwargs)
Analyzes the result of a generic fit operation performed by `sklearn`.
Args:
fqdn (str): full-qualified name of the method that was called.
result: result of calling the method with `fqdn`.
argl (tuple): positional arguments passed to the method call.
argd (dict): keyword arguments ... | def fit(fqdn, result, *argl, **argd):
#Check the arguments to see what kind of data we are working with, then
#choose the appropriate function below to return the analysis dictionary.
#The first positional argument will be the instance of the machine that was
#used. Check its name against a list.
... | 1,111,456 |
Analyzes the result of a generic predict operation performed by
`sklearn`.
Args:
fqdn (str): full-qualified name of the method that was called.
result: result of calling the method with `fqdn`.
argl (tuple): positional arguments passed to the method call.
argd (dict): keyword ar... | def predict(fqdn, result, *argl, **argd):
#Check the arguments to see what kind of data we are working with, then
#choose the appropriate function below to return the analysis dictionary.
out = None
if len(argl) > 0:
machine = argl[0]
if isclassifier(machine):
out = clas... | 1,111,457 |
Performs the generic fit tests that are common to both classifier and
regressor; uses `scorer` to score the predicted values given by the machine
when tested against its training set.
Args:
scorer (function): called on the result of `machine.predict(Xtrain,
ytrain)`. | def _generic_fit(fqdn, result, scorer, yP=None, *argl, **argd):
out = None
if len(argl) > 0:
machine = argl[0]
out = {}
if hasattr(machine, "best_score_"):
out["score"] = machine.best_score_
#With fitting it is often useful to know how well the fitti... | 1,111,459 |
Returns the percent match for the specified prediction call; requires
that the data was split before using an analyzed method.
Args:
out (dict): output dictionary to save the result to. | def _percent_match(result, out, yP=None, *argl):
if len(argl) > 1:
if yP is None:
Xt = argl[1]
key = id(Xt)
if key in _splits:
yP = _splits[key][3]
if yP is not None:
import math
out["%"] = round(1.-sum... | 1,111,461 |
Register a new Subscription on this collection's parent object.
Args:
callback_url (str): URI of an active endpoint which can receive
notifications.
Returns:
def create(self, callback_url):
    """Register a new Subscription on this collection's parent object.

    Args:
        callback_url (str): URI of an active endpoint that can receive
            notifications.
    Returns:
        A round.Subscription object if successful.
    """
    resource = self.resource.create(
        {'callback_url': callback_url, 'subscribed_to': 'address'})
    wrapped = self.wrap(resource)
    self.add(wrapped)
    return wrapped
Convert one line from the extended log to dict.
Args:
line (str): Line which will be converted.
Returns:
dict: dict with ``timestamp``, ``command``, ``username`` and ``path`` \
keys.
Note:
Typical line looks like this::
/home/ftp/xex/asd bsd.dat, xex, ST... | def _parse_line(line):
line, timestamp = line.rsplit(",", 1)
line, command = line.rsplit(",", 1)
path, username = line.rsplit(",", 1)
return {
"timestamp": timestamp.strip(),
"command": command.strip(),
"username": username.strip(),
"path": path,
} | 1,111,683 |
Process the extended ProFTPD log.
Args:
file_iterator (file): any file-like iterator for reading the log or
stdin (see :func:`_read_stdin`).
Yields:
ImportRequest: with each import. | def process_log(file_iterator):
for line in file_iterator:
if "," not in line:
continue
parsed = _parse_line(line)
if not parsed["command"].upper() in ["DELE", "DEL"]:
continue
# don't react to anything else, than trigger in form of deleted
# "... | 1,111,684 |
resolve prefix to a namespaceURI. If None or
empty str, return default namespace or None.
Parameters:
celt -- element node
prefix -- xmlns:prefix, or empty str or None | def _resolve_prefix(celt, prefix):
namespace = None
while _is_element(celt):
if prefix:
namespaceURI = _find_xmlns_prefix(celt, prefix)
else:
namespaceURI = _find_default_namespace(celt)
if namespaceURI: break
celt = celt.parentNode
else:
... | 1,112,027 |
Return the index of a fieldset in the ``fieldsets`` list.
Args:
fieldsets (list): The original ``fieldsets`` list.
index_or_name (int or str): The value of the reference element, or directly its numeric index.
Returns:
def get_fieldset_index(fieldsets, index_or_name):
    """Return the index of a fieldset in the ``fieldsets`` list.

    Args:
        fieldsets (list): the original ``fieldsets`` list.
        index_or_name (int or str): name of the reference fieldset, or
            directly its numeric index (returned unchanged).
    Returns:
        int: index of the fieldset in the list.
    Raises:
        KeyError: if no fieldset carries that name.
    """
    if isinstance(index_or_name, six.integer_types):
        return index_or_name
    for position, fieldset in enumerate(fieldsets):
        # Each fieldset is a (name, options) pair; match on the name.
        if fieldset[0] == index_or_name:
            return position
    raise KeyError("Key not found: '{}'.".format(index_or_name))
Return the index of an element in the list.
Args:
lst (list): The list.
index_or_name (int or str): The value of the reference element, or directly its numeric index.
Returns:
def get_list_index(lst, index_or_name):
    """Return the index of an element in the list.

    Args:
        lst (list): the list.
        index_or_name (int or str): value of the reference element, or
            directly its numeric index (returned unchanged).
    Returns:
        int: index of the element in the list.
    """
    if isinstance(index_or_name, six.integer_types):
        return index_or_name
    # Raises ValueError when the element is missing, as list.index does.
    return lst.index(index_or_name)
Returns n unique and "evenly" spaced colors for the backgrounds
of the projects.
Args:
n (int): The number of unique colors wanted.
Returns:
def _get_colors(n):
    """Return *n* unique, "evenly" spaced project background colors.

    Args:
        n (int): number of unique colors wanted.
    Returns:
        list of str: the colors in hex form.
    """
    import matplotlib.pyplot as plt
    from matplotlib.colors import rgb2hex
    from numpy import linspace
    # Sample the spectral colormap away from its washed-out endpoints.
    spectral = plt.get_cmap('nipy_spectral')
    return [rgb2hex(spectral(pos)) for pos in linspace(0.05, .95, n)]
Returns a dictionaries in which each project is a key and the
tasks are stored as a list within that dictionaly element.
Args:
path (str): The path to the folder containing the *.json files.
Returns:
projects (list of dict): A dictionary in which each project is a key
co... | def _make_projcet_list(path):
from collections import OrderedDict
from matplotlib.colors import LinearSegmentedColormap
from matplotlib.colors import rgb2hex as r2h
from numpy import linspace
proj = []
projects = OrderedDict()
file_list = os.listdir(path)
for files in file_list... | 1,112,452 |
Internal function takes a list of prefix, namespace uri tuples and
generates a SPARQL PREFIX string.
Args:
namespaces(list): List of tuples, defaults to BIBFRAME and
Schema.org
Returns:
string | def build_prefixes(namespaces=None):
if namespaces is None:
namespaces = [
('bf', str(BIBFRAME)),
('schema', str(SCHEMA_ORG))
]
output = "PREFIX {}: <{}>\n".format(
namespaces[0][0],
namespaces[0][1])
if len(namespaces) == 1:
return output... | 1,112,456 |
Function takes a subject and an existing graph, returns a new graph with
all predicates and objects of the existing graph copied to the new_graph with
subject as the new subject
Args:
subject(rdflib.URIRef): A URIRef subject
existing_graph(rdflib.Graph): A rdflib.Graph
Returns:
def copy_graph(subject, existing_graph):
    """Copy every predicate/object pair of ``existing_graph`` to a new
    graph, re-homed under ``subject``.

    Args:
        subject (rdflib.URIRef): Subject used for every copied triple.
        existing_graph (rdflib.Graph): Graph whose predicate/object
            pairs are copied.

    Returns:
        rdflib.Graph: New graph with ``subject`` as the sole subject.
    """
    duplicate = rdflib.Graph()
    for predicate, object_ in existing_graph.predicate_objects():
        duplicate.add((subject, predicate, object_))
    return duplicate
Initializes a Repository object
Args:
app(Flask): Flask app, default is None
base_url(str): Base url for Fedora Commons, defaults to
localhost:8080.
namespaces(list): List of namespace tuples of prefix, uri for
each na... | def __init__(
self,
app=None,
base_url='http://localhost:8080',
namespaces=DEFAULT_NAMESPACES):
self.app = app
self.namespaces = namespaces
self.base_url = None
if app is not None:
self.init_app(app)
if 'FEDORA_BASE_URL' in... | 1,112,458 |
Internal method takes a RDF graph, cycles through the RDFS
label and BIBFRAME authorizedAccessPoint triples to see if the graph's
entity already exists in Fedora. As other searchable unique triples are
added from other vocabularies, they should be added to this method.
Args:
... | def __dedup__(self,
subject,
graph):
if graph is None:
return
for uri in Repository.DEFAULT_ID_URIS:
# Checks for duplicates
for obj_uri in graph.objects(subject=subject, predicate=uri):
sparql_url = urllib.... | 1,112,459 |
Initializes a Flask app object for the extension.
Args:
def init_app(self, app):
    """Configure a Flask app for this extension.

    Sets the ``FEDORA_BASE_URL`` config default and registers
    ``self.teardown`` as a teardown callback, preferring the modern
    ``teardown_appcontext`` hook over ``teardown_request``.

    Args:
        app (Flask): Flask application instance.
    """
    app.config.setdefault('FEDORA_BASE_URL', 'http://localhost:8080')
    register = getattr(app, 'teardown_appcontext', None)
    if register is None:
        register = app.teardown_request
    register(self.teardown)
Method attempts to connect to REST servers of the Fedora
Commons repository using optional data parameter.
Args:
fedora_url(string): Fedora URL
data(dict): Data to through to REST endpoint
method(str): REST Method, defaults to GET
Returns:
result... | def connect(self,
fedora_url,
data=None,
method='Get'):
if data is None:
data = {}
if not fedora_url.startswith("http"):
fedora_url = urllib.parse.urljoin(self.base_url, fedora_url)
request = urllib.request.Request(... | 1,112,462 |
Method takes a entity uri and attempts to return the Fedora Object
as a JSON-LD.
Args:
entity_url(str): Fedora Commons URL of Entity
context(None): Returns JSON-LD with Context, default is None
Returns:
str: JSON-LD of Fedora Object | def as_json(self,
entity_url,
context=None):
try:
urllib.request.urlopen(entity_url)
except urllib.error.HTTPError:
raise ValueError("Cannot open {}".format(entity_url))
entity_graph = self.read(entity_url)
entity_json = js... | 1,112,463 |
Method deletes a Fedora Object in the repository
Args:
def delete(self, uri):
    """Delete a Fedora object from the repository.

    Args:
        uri (str): URI of the Fedora object.

    Returns:
        bool: True when the DELETE request went through, False when the
        server answered with an HTTP error.
    """
    try:
        self.connect(uri, method='DELETE')
    except urllib.error.HTTPError:
        return False
    return True
Method returns true if the entity exists in the Repository,
false, otherwise
Args:
uri(str): Entity URI
Returns:
def exists(self, uri):
    """Report whether the entity exists in the repository.

    Args:
        uri (str): Entity URI.

    Returns:
        bool: True if the URI can be opened, False on HTTP error.
        NOTE(review): non-HTTP failures (e.g. ``URLError`` for an
        unreachable host) still propagate, as in the original.
    """
    try:
        urllib.request.urlopen(uri)
    except urllib.error.HTTPError:
        return False
    return True
Method inserts a new entity's property in Fedora4 Repository
Args:
entity_id(string): Unique ID of Fedora object
property_uri(string): URI of property
value: Value of the property, can be literal or URI reference
Returns:
boolean: True if successful chan... | def insert(self,
entity_id,
property_uri,
value):
if not entity_id.startswith("http"):
entity_uri = urllib.parse.urljoin(self.base_url, entity_id)
else:
entity_uri = entity_id
if entity_uri.endswith("/"):
e... | 1,112,468 |
Method takes uri and creates a RDF graph from Fedora Repository
Args:
uri(str): URI of Fedora URI
Returns:
def read(self, uri):
    """Build an RDF graph from a Fedora resource.

    Args:
        uri (str): URI of the Fedora resource.

    Returns:
        rdflib.Graph: Graph parsed from the resource's Turtle payload.
    """
    response = self.connect(uri)
    graph = rdflib.Graph()
    graph.parse(data=response.read(), format='turtle')
    return graph
Method removes a triple for the given/subject.
Args:
entity_id(string): Fedora Object ID, ideally URI of the subject
property_uri(string):
value(string):
Return:
boolean: True if triple was removed from the object | def remove(self,
entity_id,
property_uri,
value):
if not entity_id.startswith("http"):
entity_uri = urllib.parse.urljoin(self.base_url, entity_id)
else:
entity_uri = entity_id
sparql_template = Template()
sparq... | 1,112,470 |
Method replaces a triple for the given entity/subject. Property
name is from the schema.org vocabulary.
Args:
entity_id(string): Unique ID of Fedora object
property_name(string): Prefix and property name i.e. schema:name
old_value(string): Literal or URI of old value... | def replace(self,
entity_id,
property_name,
old_value,
value):
if not entity_id.startswith("http"):
entity_uri = '/'.join([self.base_url, self.transaction, entity_id])
else:
entity_uri = entity_id
sp... | 1,112,471 |
DEPRECATED
Method takes a query term and searches Fedora Repository using SPARQL
search endpoint and returns a RDF graph of the search results.
Args:
query_term(str): String to search repository
Returns:
rdflib.Graph() | def search(self, query_term):
fedora_search_url = "/".join([self.base_url, 'rest', 'fcr:search'])
fedora_search_url = "{}?{}".format(
fedora_search_url,
urllib.parse.urlencode({"q": query_term}))
search_request = urllib.request.Request(
fedora_search_... | 1,112,472 |
Make sure that 0 < heading < 360
Args:
heading: base heading
Returns: corrected heading | def _normalize_direction(heading: int) -> int:
while heading > 359:
heading = int(heading - 359)
while heading < 0:
heading = int(heading + 359)
return heading | 1,112,477 |
Creates a variation from a base value
Args:
mean: base value
sigma: gaussian sigma
Returns: random value | def _gauss(mean: int, sigma: int) -> int:
return int(random.gauss(mean, sigma)) | 1,112,478 |
Creates a variation in wind speed
Args:
base_speed: base wind speed
sigma: sigma value for gaussian variation
def _randomize_speed(base_speed: int, sigma: int = None) -> int:
    """Create a random wind-speed variation around ``base_speed``.

    Args:
        base_speed: base wind speed
        sigma: sigma for the gaussian variation; when omitted, a
            quarter of ``base_speed`` is used

    Returns:
        int: randomized speed, clamped to the range 0-50
    """
    spread = int(base_speed / 4) if sigma is None else sigma
    sample = MissionWeather._gauss(base_speed, spread)
    # Equivalent to the original "floor at 0, cap at 50" branches.
    return max(0, min(sample, 50))
Creates a variation in direction
Args:
base_heading: base direction
sigma: sigma value for gaussian variation
def _randomize_direction(base_heading, sigma) -> int:
    """Create a random variation of a wind direction.

    Args:
        base_heading: base direction in degrees
        sigma: sigma for the gaussian variation

    Returns:
        int: randomized direction, normalized by
        ``MissionWeather._normalize_direction``
    """
    sample = MissionWeather._gauss(base_heading, sigma)
    return MissionWeather._normalize_direction(sample)
Applies weather to an opened Miz file (the mission will be mutated)
Args:
miz: source miz
Returns: True | def apply_to_miz(self, miz):
report = ['Building mission with weather:']
miz.mission.weather.wind_at_ground_level_dir = self.wind_at_ground_level_dir
miz.mission.weather.wind_at_ground_level_speed = self.wind_at_ground_level_speed
miz.mission.weather.wind_at2000_dir = self._ra... | 1,112,489 |
Edit an opened MIZ file and sets the time and date and the weather
Args:
infile: source file
outfile: output file (will default to source file)
metar: metar string, ICAO or object to apply
time: time string to apply (YYYYMMDDHHMMSS)
min_wind: minimum wind
max_wind: m... | def edit_miz( # noqa: C901
infile: str,
outfile: str = None,
metar: typing.Union[str, Metar] = None,
time: str = None,
min_wind: int = 0,
max_wind: int = 40
) -> str:
# noinspection SpellCheckingInspection
if outfile is None:
LOGGER.debug('editing in... | 1,112,613 |
Grab a type definition, returns a typecode class definition
because the facets (name, minOccurs, maxOccurs) must be provided.
Parameters:
namespaceURI --
def getTypeDefinition(cls, namespaceURI, name, lazy=False):
    """Grab a type definition; returns a typecode class definition
    because the facets (name, minOccurs, maxOccurs) must be provided.

    Parameters:
        namespaceURI -- namespace of the registered type
        name -- name of the registered type
        lazy -- when True, wrap a found class in a ``_Mirage`` proxy
    """
    klass = cls.types.get((namespaceURI, name))
    if lazy and klass is not None:
        return _Mirage(klass)
    return klass
Grab an element declaration, returns a typecode instance
representation or a typecode class definition. An element
reference has its own facets, and is local so it will not be
cached.
Parameters:
namespaceURI --
name --
isref -- if element referen... | def getElementDeclaration(cls, namespaceURI, name, isref=False, lazy=False):
key = (namespaceURI, name)
if isref:
klass = cls.elements.get(key,None)
if klass is not None and lazy is True:
return _Mirage(klass)
return klass
typecode =... | 1,112,749 |
if xsi:type does not match the instance type attr,
check to see if it is a derived type substitution.
DONT Return the element's type.
Parameters:
elt -- the DOM element being parsed
ps -- the ParsedSoap object. | def getSubstituteType(self, elt, ps):
pyclass = SchemaInstanceType.getTypeDefinition(*self.type)
if pyclass is None:
raise EvaluateException(
'No Type registed for xsi:type=(%s, %s)' %
(self.type[0], self.type[1]), ps.Backtrace(elt))
... | 1,112,752 |
return a wrapper for pyobj, with typecode attribute set.
Parameters:
pyobj -- instance of builtin type (immutable)
what -- typecode describing the data | def WrapImmutable(cls, pyobj, what):
d = cls.types_dict
if type(pyobj) is bool:
pyclass = d[int]
elif d.has_key(type(pyobj)) is True:
pyclass = d[type(pyobj)]
else:
raise TypeError,\
'Expecting a built-in type in %s (got %s).'... | 1,112,761 |
Checks parameters such as codon_positions, aminoacids... to return the
required sequence as string.
Parameters:
seq_record (SeqRecordExpanded object):
codon_positions (str):
aminoacids (boolean):
Returns:
Namedtuple containing ``seq (str)`` and ``warning (str)``. | def get_seq(seq_record, codon_positions, aminoacids=False, degenerate=None):
Sequence = namedtuple('Sequence', ['seq', 'warning'])
if codon_positions not in [None, '1st', '2nd', '3rd', '1st-2nd', 'ALL']:
raise WrongParameterFormat("`codon_positions` argument should be any of the following"
... | 1,112,786 |
Creates the dataset header for NEXUS files from ``#NEXUS`` to ``MATRIX``.
Parameters:
data (namedtuple): with necessary info for dataset creation.
file_format (str): TNT, PHYLIP, NEXUS, FASTA
aminoacids (boolean): If ``aminoacids is True`` the header will show
... | def make_dataset_header(data, file_format, aminoacids):
if aminoacids:
datatype = 'PROTEIN'
else:
datatype = 'DNA'
if file_format in ['NEXUS', 'PHYLIP', 'FASTA']:
header = .format(data.number_taxa, data.number_chars, datatype)
elif file_format == 'MEGA':
return "#M... | 1,112,789 |
Handles decoding of the CSV `data`.
Args:
data (str): Data which will be decoded.
Returns:
dict: Dictionary with decoded data. | def decode(data):
# try to guess dialect of the csv file
dialect = None
try:
dialect = csv.Sniffer().sniff(data)
except Exception:
pass
# parse data with csv parser
handler = None
try:
data = data.splitlines() # used later
handler = csv.reader(data, dia... | 1,113,086 |
Guess strategy type to use for file by extension.
Args:
file_name_or_ext: Either a file name with an extension or just
an extension
Returns:
Strategy: Type corresponding to extension or None if there's no
def guess_strategy_type(file_name_or_ext):
    """Guess the strategy type for a file from its extension.

    Args:
        file_name_or_ext: either a full file name carrying an
            extension, or a bare extension

    Returns:
        Strategy: type registered for the extension, or None when no
        strategy corresponds to it
    """
    if '.' in file_name_or_ext:
        ext = os.path.splitext(file_name_or_ext)[1].lstrip('.')
    else:
        ext = file_name_or_ext
    return get_file_type_map().get(ext)
Search for events with the provided title
Args:
event_title: The title of the event
Returns:
An event JSON object returned from the server with the following:
{
"meta":{
"limit": 20, "next": null, "offset": 0,
... | def get_events(self, event_title, regex=False):
regex_val = 0
if regex:
regex_val = 1
r = requests.get('{0}/events/?api_key={1}&username={2}&c-title='
'{3}®ex={4}'.format(self.url, self.api_key,
self... | 1,113,217 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.