text_prompt (string, lengths 157–13.1k) | code_prompt (string, lengths 7–19.8k, nullable) |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def remove_file(profile, branch, file_path, commit_message=None):
    """Remove a file from a branch.

    Args:
        profile: profile from ``simplygithub.authentication.profile``;
            carries the ``repo`` to connect to and the ``token`` to use.
        branch: the name of a branch.
        file_path: the path of the file to delete.
        commit_message: optional message for the commit; a default is
            generated when omitted.

    Returns:
        dict: data about the branch's new ref, including the new SHA the
        branch's HEAD points to after the commit.
    """
    head_sha = get_branch_sha(profile, branch)
    current_tree = get_files_in_branch(profile, head_sha)
    pruned_tree = remove_file_from_tree(current_tree, file_path)
    tree_data = trees.create_tree(profile, pruned_tree)
    new_tree_sha = tree_data.get("sha")
    if not commit_message:
        commit_message = "Deleted " + file_path + "."
    commit_data = commits.create_commit(profile, commit_message,
                                        new_tree_sha, [head_sha])
    new_commit_sha = commit_data.get("sha")
    return refs.update_ref(profile, "heads/" + branch, new_commit_sha)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_file(profile, branch, file_path):
    """Get a file from a branch.

    Args:
        profile: profile from ``simplygithub.authentication.profile``;
            carries the ``repo`` to connect to and the ``token`` to use.
        branch: the name of a branch.
        file_path: the path of the file to fetch.

    Returns:
        str: the UTF-8 decoded content of the file.

    Raises:
        IOError: if ``file_path`` is not present in the branch.
    """
    branch_sha = get_branch_sha(profile, branch)
    tree = get_files_in_branch(profile, branch_sha)
    match = None
    for item in tree:
        if item.get("path") == file_path:
            match = item
            break
    # BUG FIX: previously a missing file fell through to
    # ``None.get("sha")`` and raised an opaque AttributeError.
    if match is None:
        raise IOError("file not found in branch: " + file_path)
    file_sha = match.get("sha")
    blob = blobs.get_blob(profile, file_sha)
    content = blob.get("content")
    decoded_content = b64decode(content)
    return decoded_content.decode("utf-8")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def make(self):
    """Create the lock file, aborting via self.die() on any failure."""
    try:
        self.mkfile(self.lock_file)
    except Exception as err:
        # Any failure here is fatal for the caller.
        self.die('Failed to generate lock file: {}'.format(str(err)))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _short_circuit(value=None):
""" Add the `value` to the `collection` by modifying the collection to be either a dict or list depending on what is already in the collection and value. Returns the collection with the value added to it. Clean up by removing single item array and single key dict. ['abc'] -> 'abc' [['abc']] -> 'abc' [{'abc':123},{'def':456}] -> {'abc':123,'def':456} [{'abc':123},{'abc':456}] -> [{'abc':123,'abc':456}] # skip for same set keys [[{'abc':123},{'abc':456}]] -> [{'abc':123,'abc':456}] """ |
if not isinstance(value, list):
return value
if len(value) == 0:
return value
if len(value) == 1:
if not isinstance(value[0], list):
return value[0]
else:
if len(value[0]) == 1:
return value[0][0]
else:
return value[0]
else:
value = filter(None, value)
# Only checking first item and assumin all others are same type
if isinstance(value[0], dict):
if set(value[0].keys()) == set(value[1].keys()):
return value
elif max([len(x.keys()) for x in value]) == 1:
newvalue = {}
for v in value:
key = v.keys()[0]
newvalue[key] = v[key]
return newvalue
else:
return value
else:
return value |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _query(_node_id, value=None, **kw):
    """Look up a value by running the queries attached to a node.

    Each query found for the node is executed with the request keywords;
    row results are rowified and collected.  Returns `value` unchanged on
    database error or when no queries match.
    """
    query_result = []
    try:
        # NOTE(review): kw is assumed to contain the bind params the query
        # expects — confirm against fetch_query_string callers.
        query_result = db.execute(text(fetch_query_string('select_query_from_node.sql')), **kw).fetchall()
    except DatabaseError as err:
        current_app.logger.error("DatabaseError: %s, %s", err, kw)
        return value
    current_app.logger.debug("queries: %s", query_result)
    if query_result:
        values = []
        for query_name in [x['name'] for x in query_result]:
            if query_name:
                result = []
                try:
                    current_app.logger.debug("query_name: %s", query_name)
                    # Query string can be insert or select here.
                    result = db.execute(text(fetch_query_string(query_name)), **kw)
                    current_app.logger.debug("result query: %s", result.keys())
                except (DatabaseError, StatementError) as err:
                    # Log and fall through; `result` stays empty.
                    current_app.logger.error("DatabaseError (%s) %s: %s", query_name, kw, err)
                if result and result.returns_rows:
                    result = result.fetchall()
                    if len(result) == 0:
                        # Row-returning query with no rows: keep the shape.
                        values.append(([], []))
                    else:
                        current_app.logger.debug("result: %s", result)
                        # There may be more results, but only interested in
                        # the first one.  Use the older rowify method for now.
                        # TODO: use case for rowify?
                        values.append(rowify(result, [(x, None) for x in result[0].keys()]))
        value = values
    return value
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _template(node_id, value=None):
    """Render the node's assigned template with `value`, if it has one.

    Falls back to returning `value` unchanged when no template is
    assigned or when the lookup fails with a database error.
    """
    select_template_from_node = fetch_query_string('select_template_from_node.sql')
    try:
        lookup = db.execute(text(select_template_from_node), node_id=node_id)
        template_result = lookup.fetchone()
        lookup.close()
        if template_result and template_result['name']:
            template = template_result['name']
            if isinstance(value, dict):
                return render_template(template, **value)
            return render_template(template, value=value)
    except DatabaseError as err:
        current_app.logger.error("DatabaseError: %s", err)
    # No template assigned to this node so just return the value.
    return value
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def render_node(_node_id, value=None, noderequest={}, **kw):
    """Recursively render a node's value.

    NOTE(review): the mutable default `noderequest={}` is only read here
    (get / update-from), never mutated, so the shared-default footgun is
    latent — confirm before relying on it.
    """
    if value == None:
        kw.update(noderequest)
        results = _query(_node_id, **kw)
        current_app.logger.debug("results: %s", results)
        if results:
            values = []
            for (result, cols) in results:
                if set(cols) == set(['node_id', 'name', 'value']):
                    for subresult in result:
                        # This is a link node: recurse into the child node.
                        current_app.logger.debug("sub: %s", subresult)
                        name = subresult['name']
                        if noderequest.get('_no_template'):
                            # For debugging or just simply viewing with the
                            # operate script we append the node_id to the name
                            # of each.  This doesn't work with templates.
                            name = "{0} ({1})".format(name, subresult['node_id'])
                        values.append( {name: render_node( subresult['node_id'], noderequest=noderequest, **subresult )} )
                else:
                    values.append( result )
            value = values
        # Collapse trivially-nested results (see _short_circuit).
        value = _short_circuit(value)
    if not noderequest.get('_no_template'):
        value = _template(_node_id, value)
    return value
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create(url, name, subject_id, image_group_id, properties):
    """Create a new experiment via the SCO-API create-experiment Url.

    Parameters
    ----------
    url : string
        Url to POST experiment create request
    name : string
        User-defined name for experiment
    subject_id : string
        Unique identifier for subject at given SCO-API
    image_group_id : string
        Unique identifier for image group at given SCO-API
    properties : Dictionary
        Additional properties; may be None.  The given ``name`` overrides
        any 'name' key present in this set.

    Returns
    -------
    string
        Url of created experiment resource

    Raises
    ------
    ValueError
        If properties is not dict-like or the HTTP request fails.
    """
    # Key/value pairs representing experiment properties for the request.
    obj_props = [{'key': 'name', 'value': name}]
    if properties is not None:
        # Catch TypeErrors if properties is not a dict-like object.
        try:
            for key in properties:
                if key != 'name':
                    obj_props.append({'key': key, 'value': properties[key]})
        except TypeError:
            raise ValueError('invalid property set')
    # Create request body and send POST request to given Url.
    body = {
        'subject': subject_id,
        'images': image_group_id,
        'properties': obj_props
    }
    try:
        req = urllib2.Request(url)
        req.add_header('Content-Type', 'application/json')
        response = urllib2.urlopen(req, json.dumps(body))
    except urllib2.URLError as ex:
        raise ValueError(str(ex))
    # Get experiment self reference from successful response.
    return references_to_dict(json.load(response)['links'])[REF_SELF]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def runs(self, offset=0, limit=-1, properties=None):
    """Get a list of run descriptors associated with this experiment.

    Parameters
    ----------
    offset : int, optional
        Starting offset for returned list items
    limit : int, optional
        Limit the number of items in the result (default -1; presumably
        "no limit" — confirm against get_run_listing)
    properties : List(string)
        Additional object properties to include for items in the result

    Returns
    -------
    List(scoserv.ModelRunDescriptor)
        List of model run descriptors
    """
    # Thin delegation; self.runs_url identifies this experiment's runs.
    return get_run_listing(
        self.runs_url,
        offset=offset,
        limit=limit,
        properties=properties
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def imapchain(*a, **kwa):
    """Like map(), but flattens the mapped iterables into a single chain."""
    return itertools.chain(*map(*a, **kwa))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def iskip(value, iterable):
    """Yield items from `iterable`, omitting those matching `value`.

    A None `value` is matched by identity (only items that *are* None
    are skipped); any other value is matched by equality.
    """
    if value is None:
        for item in iterable:
            if item is not None:
                yield item
    else:
        for item in iterable:
            if item != value:
                yield item
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def format(self, password: str = '') -> str:
    """Format command along with any arguments, ready to be sent."""
    parts = (MARKER_START, self.name, self.action, self.args,
             password, MARKER_END)
    return ''.join(parts)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def lint(to_lint):
""" Run all linters against a list of files. :param to_lint: a list of files to lint. """ |
exit_code = 0
for linter, options in (('pyflakes', []), ('pep8', [])):
try:
output = local[linter](*(options + to_lint))
except commands.ProcessExecutionError as e:
output = e.stdout
if output:
exit_code = 1
print "{0} Errors:".format(linter)
print output
output = hacked_pep257(to_lint)
if output:
exit_code = 1
print "Docstring Errors:".format(linter.upper())
print output
sys.exit(exit_code) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def hacked_pep257(to_lint):
    """Check for the presence of docstrings, but ignore some of the options.

    Monkeypatches the pep257 module in place: several checks are replaced
    with no-ops and check_return_type is wrapped to skip private methods.
    Returns the remaining errors as a newline-joined string.
    """
    def ignore(*args, **kwargs):
        # No-op replacement for checks we do not care about.
        pass
    pep257.check_blank_before_after_class = ignore
    pep257.check_blank_after_last_paragraph = ignore
    pep257.check_blank_after_summary = ignore
    pep257.check_ends_with_period = ignore
    pep257.check_one_liners = ignore
    pep257.check_imperative_mood = ignore
    original_check_return_type = pep257.check_return_type
    def better_check_return_type(def_docstring, context, is_script):
        """Run the original check, but only for private methods."""
        def_name = context.split()[1]
        if def_name.startswith('_') and not def_name.endswith('__'):
            original_check_return_type(def_docstring, context, is_script)
    pep257.check_return_type = better_check_return_type
    errors = []
    for filename in to_lint:
        with open(filename) as f:
            source = f.read()
            if source:
                errors.extend(pep257.check_source(source, filename))
    return '\n'.join([str(error) for error in sorted(errors)])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main(self, *directories):
""" The actual logic that runs the linters """ |
if not self.git and len(directories) == 0:
print ("ERROR: At least one directory must be provided (or the "
"--git-precommit flag must be passed.\n")
self.help()
return
if len(directories) > 0:
find = local['find']
files = []
for directory in directories:
real = os.path.expanduser(directory)
if not os.path.exists(real):
raise ValueError("{0} does not exist".format(directory))
files.extend(find(real, '-name', '*.py').strip().split('\n'))
if len(files) > 0:
print "Linting {0} python files.\n".format(len(files))
lint(files)
else:
print "No python files found to lint.\n"
else:
status = local['git']('status', '--porcelain', '-uno')
root = local['git']('rev-parse', '--show-toplevel').strip()
# get all modified or added python files
modified = re.findall(r"^[AM]\s+\S+\.py$", status, re.MULTILINE)
# now just get the path part, which all should be relative to the
# root
files = [os.path.join(root, line.split(' ', 1)[-1].strip())
for line in modified]
if len(files) > 0:
lint(files) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_meta(request):
""" This context processor returns meta informations contained in cached files. If there aren't cache it calculates dictionary to return """ |
context_extras = {}
if not request.is_ajax() and hasattr(request, 'upy_context') and request.upy_context['PAGE']:
context_extras['PAGE'] = request.upy_context['PAGE']
context_extras['NODE'] = request.upy_context['NODE']
return context_extras |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def download_virtualenv(version, dldir=None):
    '''
    Download the virtualenv package from pypi and write it to disk.

    :param str version: version to download
    :param str dldir: directory to download into or None for cwd
    :return: path of the downloaded file (docstring previously claimed a
        response object was returned, which was wrong)
    '''
    dl_url = PYPI_DL_URL.format(VER=version)
    filename = basename(dl_url)
    if dldir:
        dl_path = join(dldir, filename)
    else:
        dl_path = filename
    # BUG FIX: reuse the already-formatted dl_url instead of formatting
    # the template a second time.
    data = urlopen(dl_url)
    with open(dl_path, 'wb') as fh:
        fh.write(data.read())
    return dl_path
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_virtualenv(venvpath, venvargs=None):
    '''
    Run virtualenv from downloaded venvpath using venvargs.

    If venvargs is falsy, 'venv' is used as the virtualenv directory.

    :param str venvpath: Path to root of the downloaded virtualenv
        package (must contain virtualenv.py)
    :param list venvargs: Virtualenv arguments to pass to virtualenv.py
    '''
    cmd = [join(venvpath, 'virtualenv.py')]
    if venvargs:
        cmd += venvargs
    else:
        cmd += ['venv']
    # NOTE: removed the dead `venv_path` local that was computed from
    # venvargs[-1] but never used anywhere.
    proc = subprocess.Popen(cmd)
    proc.communicate()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def bootstrap_vi(version=None, venvargs=None):
    '''
    Bootstrap virtualenv into the current directory.

    :param str version: Virtualenv version like 13.1.0 or None for latest
    :param list venvargs: argv list for virtualenv.py or None for default
    '''
    if not version:
        version = get_latest_virtualenv_version()
    tarball = download_virtualenv(version)
    # Use an argument list without a shell: avoids quoting problems with
    # the tarball name.  (Previously ran via shell=True.)
    untar = subprocess.Popen(['tar', 'xzvf', tarball])
    untar.wait()
    # BUG FIX (readability): `p` was reused for both the Popen handle and
    # the package directory string.
    pkgdir = 'virtualenv-{0}'.format(version)
    create_virtualenv(pkgdir, venvargs)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compose_path(pub, uuid_url=False):
    """Compose the absolute url-path for `pub` (no scheme/host).

    Args:
        pub (obj): :class:`.DBPublication` instance.
        uuid_url (bool, default False): compose the URL using UUID.

    Returns:
        str: absolute url-path of the publication.

    Raises:
        PrivatePublicationError: when `pub` is a private publication.
    """
    if uuid_url:
        return join("/", UUID_DOWNLOAD_KEY, str(pub.uuid))
    return join(
        "/",
        DOWNLOAD_KEY,
        basename(pub.file_pointer),
        basename(pub.filename),
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compose_tree_path(tree, issn=False):
    """Compose the absolute path for `tree` (no scheme/host).

    Args:
        tree (obj): :class:`.Tree` instance.
        issn (bool, default False): compose the URL using ISSN.

    Returns:
        str: absolute path of the tree.
    """
    if issn:
        return join("/", ISSN_DOWNLOAD_KEY, basename(tree.issn))
    # Re-expose path separators that quote_plus escaped.
    quoted = quote_plus(tree.path).replace("%2F", "/")
    return join("/", PATH_DOWNLOAD_KEY, quoted)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compose_full_url(pub, uuid_url=False):
    """Compose the full URL (protocol, host, port) for publication `pub`.

    Args:
        pub (obj): :class:`.DBPublication` instance.
        uuid_url (bool, default False): compose the URL using UUID.

    Returns:
        str: absolute url of the publication.

    Raises:
        PrivatePublicationError: when `pub` is a private publication.
    """
    path = compose_path(pub, uuid_url)
    if WEB_PORT == 80:
        # Default HTTP port: leave it out of the URL.
        return "%s://%s%s" % (_PROTOCOL, WEB_ADDR, path)
    return "%s://%s:%d%s" % (_PROTOCOL, WEB_ADDR, WEB_PORT, path)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compose_tree_url(tree, issn_url=False):
    """Compose the full URL (protocol, host, port) for `tree`.

    Args:
        tree (obj): :class:`.Tree` instance.
        issn_url (bool, default False): compose the URL using ISSN.

    Returns:
        str: URL of the tree.
    """
    path = compose_tree_path(tree, issn_url)
    if WEB_PORT == 80:
        # Default HTTP port: leave it out of the URL.
        return "%s://%s%s" % (_PROTOCOL, WEB_ADDR, path)
    return "%s://%s:%d%s" % (_PROTOCOL, WEB_ADDR, WEB_PORT, path)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def profile(func):
    """Decorator: log how long the wrapped callable takes to run.

    Uses Twisted-style inlineCallbacks, so `func` is expected to return
    a Deferred (or be a generator-based coroutine).
    """
    @inlineCallbacks
    def callme(*args, **kwargs):
        start = time.time()
        # Wait for the wrapped Deferred to fire before stopping the clock.
        ret = yield func(*args, **kwargs)
        time_to_execute = time.time() - start
        log.msg('%s executed in %.3f seconds' % (func.__name__, time_to_execute))
        returnValue(ret)
    return callme
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def prepareClasses(locals):
    """Fix _userClasses and some stuff in classes.

    Traverses `locals` (a locals() dict from the namespace where Forgetter
    subclasses are defined) and resolves string names in _userClasses to
    real class references.  Call forgetSQL.prepareClasses(locals()) after
    defining all classes in your module.  Only Forgetter subclasses in the
    namespace are touched.
    """
    for (name, forgetter) in locals.items():
        if not (type(forgetter) is types.TypeType and
                issubclass(forgetter, Forgetter)):
            # Only care about Forgetter objects
            continue
        # Resolve classes: string entries refer to names in `locals`.
        for (key, userclass) in forgetter._userClasses.items():
            if type(userclass) is types.StringType:
                # resolve from locals
                resolved = locals[userclass]
                forgetter._userClasses[key] = resolved
        forgetter._tables = {}
        # Update all fields with fully qualified table.column names.
        for (field, sqlfield) in forgetter._sqlFields.items():
            forgetter._sqlFields[field] = forgetter._checkTable(sqlfield)
        # Qualify both sides of every join link the same way.
        newLinks = []
        for linkpair in forgetter._sqlLinks:
            (link1, link2) = linkpair
            link1 = forgetter._checkTable(link1)
            link2 = forgetter._checkTable(link2)
            newLinks.append((link1, link2))
        forgetter._sqlLinks = newLinks
        forgetter._prepared = True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _setID(self, id):
"""Set the ID, ie. the values for primary keys. id can be either a list, following the _sqlPrimary, or some other type, that will be set as the singleton ID (requires 1-length sqlPrimary). """ |
if type(id) in (types.ListType, types.TupleType):
try:
for key in self._sqlPrimary:
value = id[0]
self.__dict__[key] = value
id = id[1:] # rest, go revursive
except IndexError:
raise 'Not enough id fields, required: %s' % len(self._sqlPrimary)
elif len(self._sqlPrimary) <= 1:
# It's a simple value
key = self._sqlPrimary[0]
self.__dict__[key] = id
else:
raise 'Not enough id fields, required: %s' % len(self._sqlPrimary)
self._new = False |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _getID(self):
    """Get the ID values as a list ordered by _sqlPrimary.

    Referenced Forgetter objects are saved first (if new) and collapsed
    to their single-column ID.

    Raises:
        ValueError: if a referenced object has a multi-column primary key.
    """
    id = []
    for key in self._sqlPrimary:
        value = self.__dict__[key]
        if isinstance(value, Forgetter):
            # It's another object; we store only its ID.
            if value._new:
                # It's a new object too, it must be saved first!
                value.save()
            # BUG FIX: was a bare except re-raising a string (invalid
            # exception type); unpacking fails with ValueError.
            try:
                (value,) = value._getID()
            except ValueError:
                raise ValueError(
                    "Unsupported: Part %s of %s primary key is a reference "
                    "to %s, with multiple-primary-key %s"
                    % (key, self.__class__, value.__class__, value))
        id.append(value)
    return id
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _resetID(self):
    """Reset all ID fields.

    Overwrites every primary key column with None via _setID and then
    flags the object as new (unsaved).
    """
    # _setID sets _new = False as a side effect, so re-set it after.
    self._setID((None,) * len(self._sqlPrimary))
    self._new = True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _checkTable(cls, field):
"""Split a field from _sqlFields into table, column. Registers the table in cls._tables, and returns a fully qualified table.column (default table: cls._sqlTable) """ |
# Get table part
try:
(table, field) = field.split('.')
except ValueError:
table = cls._sqlTable
# clean away white space
table = table.strip()
field = field.strip()
# register table
cls._tables[table] = None
# and return in proper shape
return table + '.' + field |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def reset(self):
    """Reset all fields, almost like creating a new object.

    Note: forgets changes not yet saved to the database!  (Others might
    reference this object already, expecting something else.)  Override
    if you add properties not defined in _sqlFields.
    """
    self._resetID()
    self._new = None
    self._updated = None
    self._changed = None
    # One empty slot per known SQL field.
    self._values = dict.fromkeys(self._sqlFields)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load(self, id=None):
    """(Re)load this object from the database; old values are discarded.

    With an `id` argument, the object first adopts that identity.
    """
    if id is not None:
        # Switch identity: wipe state, then adopt the new ID.
        self.reset()
        self._setID(id)
    should_fetch = not self._new and self._validID()
    if should_fetch:
        self._loadDB()
    self._updated = time.time()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def save(self):
    """Save to the database if anything has changed since last load.

    Returns True when a write happened, False otherwise.
    """
    # Mirror the original short-circuit order: _validID() is only
    # consulted when the object is not new.
    needs_save = self._new
    if not needs_save:
        needs_save = self._validID() and self._changed
    if not needs_save:
        # Don't save if we have not loaded existing data!
        needs_save = self._updated and self._changed > self._updated
    if needs_save:
        self._saveDB()
        return True
    return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delete(self):
    """Delete this object's row in the database.

    The object is then reset and ready for reuse with a new id.
    """
    (sql,) = self._prepareSQL("DELETE")
    connection_cursor = self.cursor()
    connection_cursor.execute(sql, self._getID())
    connection_cursor.close()
    self.reset()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _nextSequence(cls, name=None):
"""Return a new sequence number for insertion in self._sqlTable. Note that if your sequences are not named tablename_primarykey_seq (ie. for table 'blapp' with primary key 'john_id', sequence name blapp_john_id_seq) you must give the full sequence name as an optional argument to _nextSequence) """ |
if not name:
name = cls._sqlSequence
if not name:
# Assume it's tablename_primarykey_seq
if len(cls._sqlPrimary) <> 1:
raise "Could not guess sequence name for multi-primary-key"
primary = cls._sqlPrimary[0]
name = '%s_%s_seq' % (cls._sqlTable, primary.replace('.','_'))
# Don't have . as a tablename or column name! =)
curs = cls.cursor()
curs.execute("SELECT nextval('%s')" % name)
value = curs.fetchone()[0]
curs.close()
return value |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _loadFromRow(self, result, fields, cursor):
"""Load from a database row, described by fields. ``fields`` should be the attribute names that will be set. Note that userclasses will be created (but not loaded). """ |
position = 0
for elem in fields:
value = result[position]
valueType = cursor.description[position][1]
if hasattr(self._dbModule, 'BOOLEAN') and \
valueType == self._dbModule.BOOLEAN and \
(value is not True or value is not False):
# convert to a python boolean
value = value and True or False
if value and self._userClasses.has_key(elem):
userClass = self._userClasses[elem]
# create an instance
value = userClass(value)
self._values[elem] = value
position += 1 |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _loadDB(self):
"""Connect to the database to load myself""" |
if not self._validID():
raise NotFound, self._getID()
(sql, fields) = self._prepareSQL("SELECT")
curs = self.cursor()
curs.execute(sql, self._getID())
result = curs.fetchone()
if not result:
curs.close()
raise NotFound, self._getID()
self._loadFromRow(result, fields, curs)
curs.close()
self._updated = time.time() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _saveDB(self):
    """Insert or update into the database.

    Note that every field will be updated, not just the changed ones.
    """
    # We're a "fresh" copy now
    self._updated = time.time()
    if self._new:
        operation = 'INSERT'
        if not self._validID():
            self._setID(self._nextSequence())
            # Note that we assign this ID to our self
            # BEFORE possibly saving any of our attribute
            # objects that might be new as well.  This means
            # that they might have references to us, as long
            # as the database does not require our existence
            # yet.
            #
            # Since mysql does not have Sequences, this will
            # not work as smoothly there.  See class
            # MysqlForgetter below.
    else:
        operation = 'UPDATE'
    (sql, fields) = self._prepareSQL(operation)
    values = []
    for field in fields:
        value = getattr(self, field)
        # First some dirty datatype hacks
        if DateTime and type(value) == DateTime.DateTimeType:
            # stupid psycopg does not support it's own return type..
            # lovely..
            value = str(value)
        if DateTime and type(value) == DateTime.DateTimeDeltaType:
            # Format delta as days, hours, minutes, seconds.
            # NOTE: includes value.second directly to get the
            # whole floating number
            value = value.strftime("%d %H:%M:") + str(value.second)
        if value is True or value is False:
            # We must store booleans as 't' and 'f' ...
            value = value and 't' or 'f'
        if isinstance(value, Forgetter):
            # It's another object, we store only the ID
            if value._new:
                # It's a new object too, it must be saved!
                value.save()
            try:
                (value,) = value._getID()
            except:
                raise "Unsupported: Can't reference multiple-primary-key: %s" % value
        values.append(value)
    cursor = self.cursor()
    cursor.execute(sql, values)
    cursor.close()
    self._new = False
    self._changed = None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getAll(cls, where=None, orderBy=None):
    """Retrieve all the objects, optionally limited by `where` clauses.

    The clauses are AND-ed.  Only the IDs are fetched here; each returned
    instance carries just its ID, and full data is loaded lazily via the
    regular load()-autocall.
    """
    ids = cls.getAllIDs(where, orderBy=orderBy)
    multi_key = len(cls._sqlPrimary) > 1
    if multi_key:
        return [cls(*current) for current in ids]
    return [cls(current) for current in ids]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getAllIterator(cls, where=None, buffer=100, useObject=None, orderBy=None):
    """Retrieve every object as an iterator.

    Possibly limited by the `where` list of clauses (AND-ed).  Only
    `buffer` rows are fetched from the database at a time.  If
    `useObject` is given, that same object is returned each time with
    new data, avoiding per-row allocations.
    """
    (sql, fields) = cls._prepareSQL("SELECTALL", where, orderBy=orderBy)
    curs = cls.cursor()
    fetchedAt = time.time()
    curs.execute(sql)
    # We might start eating memory at this point
    def getNext(rows=[]):
        # NOTE: the mutable default argument is deliberate here -- it is
        # the shared fetch buffer that persists across calls.  Do not
        # "fix" it to None.
        forgetter = cls
        if not rows:
            rows += curs.fetchmany(buffer)
            if not rows:
                # Exhausted: returning None terminates iter(getNext, None).
                curs.close()
                return None
        row = rows[0]
        del rows[0]
        try:
            idPositions = [fields.index(key) for key in cls._sqlPrimary]
        except ValueError:
            raise "Bad sqlPrimary, should be a list or tuple: %s" % cls._sqlPrimary
        ids = [row[pos] for pos in idPositions]
        if useObject:
            result = useObject
            result.reset()
            result._setID(ids)
        else:
            result = forgetter(*ids)
        result._loadFromRow(row, fields, curs)
        result._updated = fetchedAt
        return result
    return iter(getNext, None)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getAllIDs(cls, where=None, orderBy=None):
    """Retrieve the IDs of every row matching the ``where`` clauses.

    The clauses, if given, are AND-ed together.  For tables with a
    multi-column ``_sqlPrimary`` each entry in the returned list is a
    tuple of values; otherwise each entry is a single value.
    """
    (sql, fields) = cls._prepareSQL("SELECTALL", where,
                                    cls._sqlPrimary, orderBy=orderBy)
    cursor = cls.cursor()
    cursor.execute(sql)
    # Everything is materialized here; may be memory-heavy on big tables.
    rows = cursor.fetchall()
    cursor.close()
    key_positions = [fields.index(key) for key in cls._sqlPrimary]
    result = []
    for row in rows:
        values = [row[position] for position in key_positions]
        if len(key_positions) > 1:
            result.append(tuple(values))
        else:
            result.append(values[0])
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getAllText(cls, where=None, SEPERATOR=' ', orderBy=None):
    """Return ``(id, description)`` tuples for all matching instances.

    ``description`` is built by joining the ``_shortView`` fields with
    ``SEPERATOR`` (sic -- parameter name kept for backwards
    compatibility).  ``where`` clauses, if given, are AND-ed together.
    """
    (sql, fields) = cls._prepareSQL("SELECTALL", where, orderBy=orderBy)
    cursor = cls.cursor()
    cursor.execute(sql)
    rows = cursor.fetchall()
    cursor.close()
    key_positions = [fields.index(key) for key in cls._sqlPrimary]
    short_positions = [fields.index(name) for name in cls._shortView]
    result = []
    for row in rows:
        values = [row[position] for position in key_positions]
        if len(key_positions) > 1:
            ids = tuple(values)
        else:
            ids = values[0]
        text = SEPERATOR.join([str(row[position])
                               for position in short_positions])
        result.append((ids, text))
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getChildren(self, forgetter, field=None, where=None, orderBy=None):
    """Return the children (instances of ``forgetter``) that link to me.

    My class must be listed in the child's ``_userClasses``.  If
    ``field`` is given, that field in the children is used as the
    pointer to me; use it when several fields refer to my class.
    Additional ``where`` clauses are AND-ed onto the lookup.

    Raises:
        ValueError: if no field referring to my class can be found.
    """
    if type(where) in (types.StringType, types.UnicodeType):
        where = (where,)
    if not field:
        # Pick the first child field whose registered user class matches.
        for (i_field, i_class) in forgetter._userClasses.items():
            if isinstance(self, i_class):
                field = i_field
                break  # first one found is ok :=)
    if not field:
        # BUG FIX: previously raised a plain string, which is not a
        # valid exception; raise a real exception type instead.
        raise ValueError("No field found, check forgetter's _userClasses")
    sqlname = forgetter._sqlFields[field]
    myID = self._getID()[0]  # assuming single-primary !
    # NOTE(review): myID is interpolated directly into SQL -- safe only
    # for trusted primary-key values; consider parameterized queries.
    whereList = ["%s='%s'" % (sqlname, myID)]
    if where:
        whereList.extend(where)
    return forgetter.getAll(whereList, orderBy=orderBy)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def hide_address(func):
    """Decorate ``Serializer.to_representation`` to hide the address field.

    If the Project instance has ``hidden_address == True``, the
    ``address`` field is only serialized for the project owner or for
    members of the project's organization; everyone else gets
    ``address = None``.
    """
    @wraps(func)
    def _impl(self, instance):
        # We pop address field to avoid AttributeError on default
        # Serializer.to_representation
        if instance.hidden_address:
            # Temporarily remove the "address" field, run the wrapped
            # to_representation, then put the field back in place.
            for i, field in enumerate(self._readable_fields):
                if field.field_name == "address":
                    address = self._readable_fields.pop(i)
                    ret = func(self, instance)
                    self._readable_fields.insert(i, address)  # Put address back
            # NOTE(review): if no field is named "address", ``ret`` is
            # never bound and the code below raises NameError -- this
            # presumably relies on the serializer always declaring an
            # address field; confirm.
            request = self.context["request"]
            # Check if user is organization member
            is_organization_member = False
            try:
                if instance.organization is not None:
                    is_organization_member = (request.user in instance.organization.members.all())
            except Organization.DoesNotExist:  # pragma: no cover
                pass
            # Add address representation only for owner/members.
            if request.user == instance.owner or is_organization_member:
                ret["address"] = self.fields["address"].to_representation(instance.address)
            else:
                ret["address"] = None
        else:
            ret = func(self, instance)
        return ret
    return _impl
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_current_user_is_applied_representation(func):
    """Decorate ``Serializer.to_representation``.

    Adds a ``current_user_is_applied`` key to the representation, True
    when the requesting user has applied to the project instance.
    Anonymous users (and any lookup failure) yield False.
    """
    @wraps(func)
    def _impl(self, instance):
        ret = func(self, instance)
        user = self.context["request"].user
        applied = False
        if not user.is_anonymous():
            try:
                applied = models.Apply.objects.filter(user=user, project=instance).count() > 0
            except Exception:
                # BUG FIX: was a bare ``except:``, which also swallows
                # SystemExit/KeyboardInterrupt.  Keep the best-effort
                # behaviour but catch only real errors.
                pass
        ret["current_user_is_applied"] = applied
        return ret
    return _impl
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def configure(self, debug=None, quiet=None, verbosity=None, compile=None, compiler_factory=None, **kwargs):
    """Update the managed arguments.

    Each named keyword that is not None replaces the corresponding
    attribute; any extra keyword arguments are forwarded to
    ``self.command.update``.
    """
    updates = (
        ('arg_debug', debug),
        ('arg_quiet', quiet),
        ('arg_verbosity', verbosity),
        ('compile', compile),
        ('compiler_factory', compiler_factory),
    )
    for attribute, value in updates:
        if value is not None:
            setattr(self, attribute, value)
    if kwargs:
        # Remaining keyword arguments update the command attribute.
        self.command.update(**kwargs)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def uninstall_bash_completion(self, script_name=None, dest="~/.bashrc"):
    '''Remove the bash-completion activation line for ``script_name``
    from ``dest``.

    Comment lines are always kept; any other line mentioning
    ``register-python-argcomplete <script_name>`` is dropped.
    ``script_name`` defaults to ``sys.argv[0]``.
    '''
    # On Windows HOME may be missing; expanduser() needs it.
    if 'USERPROFILE' in os.environ and 'HOME' not in os.environ:
        os.environ['HOME'] = os.environ['USERPROFILE']
    dest = expanduser(dest)
    if script_name is None:
        script_name = sys.argv[0]
    needle = 'register-python-argcomplete %s' % script_name
    kept = []
    with open(dest, 'r') as handle:
        for line in handle:
            # Keep comments untouched; drop only active matching lines.
            if line.strip().startswith('#') or needle not in line:
                kept.append(line)
    with open(dest, 'w') as handle:
        handle.write(''.join(kept))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def install_bash_completion(self, script_name=None, dest="~/.bashrc"):
    '''Append the bash-completion activation line for ``script_name``
    to ``dest``.

    Any previously installed activation line is removed first, so the
    operation is idempotent.  ``script_name`` defaults to
    ``sys.argv[0]``.
    '''
    # expanduser() needs HOME; fall back to USERPROFILE on Windows.
    if 'USERPROFILE' in os.environ and 'HOME' not in os.environ:
        os.environ['HOME'] = os.environ['USERPROFILE']
    dest = expanduser(dest)
    if script_name is None:
        script_name = sys.argv[0]
    self.uninstall_bash_completion(script_name=script_name, dest=dest)
    with open(dest, 'a') as handle:
        handle.write('eval "$(register-python-argcomplete %s)"\n' % script_name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _fetch_arguments(handler, method):
    """Collect request arguments appropriately for the HTTP method.

    GET parameters arrive from Tornado as lists; these are re-joined
    into comma-separated strings (a later validation step may split
    them again).  All other methods read the parsed request body.
    """
    if method.__name__ != 'get':
        # post, put, patch, delete?
        return handler.get_post_arguments()
    arguments = {}
    for key, value in six.iteritems(handler.request.arguments):
        # Tornado supports comma-separated lists of values in
        # parameters.  Undo that here; if a list is expected, the
        # _validate step can re-split it.
        arguments[key] = ','.join(value) if isinstance(value, list) else value
    return arguments
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _apply_validator_chain(chain, value, handler):
    """Run ``value`` through each validator in ``chain`` in order.

    A single validator (anything with a ``validate`` attribute) is
    treated as a one-element chain.  Any entry lacking ``validate``
    aborts with an HTTP 500.
    """
    validator_list = [chain] if hasattr(chain, 'validate') else chain
    for validator in validator_list:
        if not hasattr(validator, 'validate'):
            raise web.HTTPError(500)
        value = validator.validate(value, handler)
    return value
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse_arguments(self, method, parameters):
    """Parse and validate request arguments, returning a dictionary.

    ``parameters`` is a sequence of ``(name, properties)`` tuples where
    ``properties`` may define ``validators``, ``required`` and
    ``default``.  Raises HTTP 400 when a required parameter is missing
    or any validator fails.
    """
    # TODO: Consider raising an exception if there are extra arguments.
    arguments = _fetch_arguments(self, method)
    parsed = {}
    errors = []
    for name, properties in parameters:
        if name in arguments:
            try:
                parsed[name] = _apply_validator_chain(
                    properties.get('validators', []), arguments[name], self)
            except validators.ValidationError as err:
                errors.append(err)
        elif properties.get('required', False):
            raise web.HTTPError(
                400,
                ('Missing required parameter: %s'
                 % (name, ))
            )
        else:
            # Absent and optional: fall back to the declared default
            # (None when no default is declared).
            parsed[name] = properties.get('default', None)
    if errors:
        raise web.HTTPError(400, 'There were %s errors' % len(errors))
    return parsed
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse(parameters):
    """Decorator factory: validate request parameters as keyword args.

    ``parameters`` is a list of ``(name, properties)`` tuples (see
    ``_parse_arguments``).  The decorated handler method receives each
    parsed parameter as a keyword argument.

    Usage::

        @parameters.parse([
            ('email', {'validators': [validators.Email], 'required': True}),
            ('password', {'validators': [validators.Password], 'required': True}),
        ])
        def post(self, email=None, password=None):
            self.render_json({'email': email, 'password': password})
    """
    # pylint: disable=protected-access
    @decorators.include_original
    def decorate(method):
        """Setup returns this decorator, which is called on the method."""
        def call(self, *args):
            """Invoke ``method`` with validated keyword arguments."""
            parsed = _parse_arguments(self, method, parameters)
            return method(self, *args, **parsed)
        # TODO: Autogenerate documentation data for parameters.
        return call
    return decorate
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_dict(parameters):
    """Decorator factory: validate request parameters into one dict.

    Like ``parse`` but the decorated handler receives a single ``data``
    keyword argument containing every parsed parameter.

    Usage::

        @parameters.parse_dict([
            ('email', {'validators': [validators.Email], 'required': True}),
            ('password', {'validators': [validators.Password], 'required': True}),
        ])
        def post(self, data):
            self.render_json({'email': data['email'],
                              'password': data['password']})
    """
    # pylint: disable=protected-access
    @decorators.include_original
    def decorate(method):
        """Setup returns this decorator, which is called on the method."""
        def call(self, *args):
            """Invoke ``method`` with the validated parameter dict."""
            parsed = _parse_arguments(self, method, parameters)
            return method(self, *args, data=parsed)
        # TODO: Autogenerate documentation data for parameters.
        return call
    return decorate
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def mode_assignment(arg):
    """Translate ``arg`` to its canonical, upper-cased mode name.

    'STREAM', 'CONSOLE' and 'STDOUT' (case-insensitive) all map to
    'STREAM'; any other string is simply upper-cased.  Returns None when
    ``arg`` cannot be processed (e.g. it is not a string).
    """
    stream_args = ('STREAM', 'CONSOLE', 'STDOUT')
    try:
        # BUG FIX: upper() used to run *outside* the try block, so a
        # non-string argument raised instead of returning None as the
        # except handler intended.
        arg = arg.upper()
    except Exception:
        return None
    if arg in stream_args:
        return 'STREAM'
    return arg
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def save_object(self, obj):
    """Save a Django object under Discipline tracking.

    Persists ``obj`` (whether it already exists or not) and registers
    the change with Discipline, creating a new Action attributed to this
    editor.  Use this instead of ``obj.save()``.
    """
    obj.save()
    try:
        # This calls the *module-level* save_object(), which records the
        # Action; a DisciplineException means no tracking was needed.
        save_object(obj, editor=self)
    except DisciplineException:
        pass
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delete_object(self, obj, post_delete=False):
    """Delete a Django object (and its cascade) under Discipline tracking.

    Related objects that Django would remove by cascading are deleted
    first, recursively, so each deletion gets recorded.  Analogous to
    ``Editor.save_object``.
    """
    # Accessor names of every reverse relation that would cascade.
    accessors = [relation.get_accessor_name()
                 for relation in obj._meta.get_all_related_objects()]
    for accessor in accessors:
        for related in getattr(obj, accessor).all():
            self.delete_object(related, post_delete)
    # Finally delete the object itself.
    self._delete_object(obj, post_delete)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _description(self):
    """Return a concise HTML explanation of this Action."""
    instance = self.timemachine.presently
    verbs = {"dl": "Deleted", "cr": "Created"}
    verb = verbs.get(self.action_type, "Modified")
    if self.action_type == "dl":
        # Deleted objects have no page to link to; use the plain name.
        return "%s %s" % (verb, instance.content_type.name)
    return "%s %s" % (verb, instance._object_type_html())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __get_timemachine(self):
    """Return a TimeMachine for this action's object, at this action.

    The TimeMachine is created lazily and cached on the instance; the
    cached machine is re-anchored to this action's id on every access
    via ``at()``.
    """
    if not self.__timemachine:
        self.__timemachine = TimeMachine(
            self.object_uid,
            step = self.id,
        )
    return self.__timemachine.at(self.id)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __get_is_revertible(self):
    """Return whether this Action can be undone.

    Side effect: stores human-readable reasons in ``self.__undo_errors``
    when the action cannot be reverted.
    """
    # If it was already reverted
    if self.reverted:
        return False
    errors = []
    inst = self.timemachine
    # A schema change since this action makes restoring unsafe.
    if inst.fields != inst.presently.fields or \
            inst.foreignkeys != inst.presently.foreignkeys:
        self.__undo_errors = [
            "Cannot undo action %s. The database schema"
            " for %s has changed"
            % (self.id,
               inst.content_type.name,)]
        return False
    if self.action_type in ["dl", "md"]:
        # If undoing deletion, make sure it actually doesn't exist
        if self.action_type == "dl" and inst.presently.exists:
            errors.append(
                "Cannot undo action %d: the %s you are trying to"
                " recreate already exists"
                % (self.id,
                   inst.content_type.name,))
        # The only problem we can have by reversing this action
        # is that some of its foreignkeys could be pointing to
        # objects that have since been deleted.
        check_here = inst.at_previous_action
        for field in inst.foreignkeys:
            fk = check_here.get_timemachine_instance(field)
            # If the ForeignKey doesn't have a value
            if not fk: continue
            if not fk.exists:
                errors.append(
                    "Cannot undo action %s: the %s used to link to"
                    " a %s that has since been deleted"
                    % (self.id,
                       inst.content_type.name,
                       fk.content_type.name,))
    else:  # self.action_type == "cr"
        # Make sure it actually exists
        if not self.timemachine.presently.exists:
            errors.append(
                "Cannot undo action %s: the %s you are trying"
                " to delete doesn't currently exist"
                % (self.id, inst.content_type.name,))
        # The only problem we can have by undoing this action is
        # that it could have foreignkeys pointed to it, so deleting
        # it will cause deletion of other objects
        else:
            links = [rel.get_accessor_name()
                     for rel in \
                     inst.get_object()._meta.get_all_related_objects()]
            for link in links:
                objects = getattr(inst.get_object(), link).all()
                for rel in objects:
                    errors.append(
                        "Cannot undo action %s: you are trying to"
                        " delete a %s that has a %s pointing to it" %
                        (self.id,
                         inst.content_type.name,
                         ContentType.objects.get_for_model(rel.__class__),))
    self.__undo_errors = errors
    return (len(errors) == 0)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def undo(self, editor):
    """Create a new Action that undoes the effects of this one.

    More accurately, reverts the object of this Action to the state it
    was in right before the Action took place, attributing the new
    Action to ``editor``.

    Raises:
        DisciplineException: if the action is not revertible; check
            ``is_revertible`` and ``undo_errors`` first.
    """
    inst = self.timemachine
    if not self.is_revertible:
        raise DisciplineException("You tried to undo a non-revertible action! "
            "Check action.is_revertible and action.undo_errors"
            " before trying to undo.")
    if self.action_type == "dl":
        # Undo a deletion: recreate the object as it was.
        obj = inst.restore()
        self.reverted = save_object(obj, editor)
        self.save()
    elif self.action_type == "md":
        # Restore as it was *before* the modification
        obj = inst.at_previous_action.restore()
        self.reverted = save_object(obj, editor)
        self.save()
    else:
        # Undo a creation: delete the object.
        editor.delete_object(inst.get_object())
        # This is safe from race conditions but still a pretty inelegant
        # solution. I can't figure out a different way to find the last action
        # for now
        self.reverted = DeletionCommit.objects.filter(
            object_uid = self.object_uid
        ).order_by("-action__id")[0].action
        self.save()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _status(self):
    """Return HTML noting whether this Action reverts or was reverted.

    If a nullable related field is unset, Django omits the attribute
    entirely (see Django ticket #11920), hence the hasattr() check.
    """
    parts = []
    if hasattr(self, "reverts"):
        parts.append('(reverts <a href="%s">%s</a>)<br/>' % (
            self.reverts.get_absolute_url(),
            self.reverts.id
        ))
    if self.reverted:
        parts.append('(reverted in <a href="%s">%s</a>)<br/>' % (
            self.reverted.get_absolute_url(),
            self.reverted.id
        ))
    return "".join(parts)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __summary(self):
    """Return a plaintext summary of the Action, useful for debugging."""
    inst = self.timemachine.presently
    verbs = {"dl": "Deleted", "cr": "Created"}
    verb = verbs.get(self.action_type, "Modified")
    lines = [
        "Time: %s\n" % self.when,
        "Comitter: %s\n" % self.editor,
        "%s %s\n" % (verb, inst._object_type_text()),
        self._details(nohtml=True),
    ]
    return "".join(lines)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _details(self, nohtml=False):
    """Return a field-by-field representation of the Action.

    HTML by default; pass ``nohtml=True`` for plain text.  For create
    and delete actions every field is listed; for modifications only
    the changed fields, each shown as ``old -> new``.
    """
    text = ""
    inst = self.timemachine
    # If deleted or created, show every field, otherwise only
    # the modified
    if self.action_type in ("dl","cr",):
        fields = inst.fields + inst.foreignkeys
    else: fields = [i.key for i in self.modification_commits.all()]
    for field in fields:
        if not nohtml:
            text += "<strong>%s</strong>: " % field
        else:
            text += "%s: " % field
        # If modified, show what it was like one step earlier
        if self.action_type == "md":
            if not nohtml:
                text += "%s → " % \
                    inst.at_previous_action._field_value_html(field)
            else:
                text += "%s -> " % \
                    inst.at_previous_action._field_value_text(field)
        if not nohtml:
            text += "%s<br/>" % inst._field_value_html(field)
        else:
            text += "%s\n" % inst._field_value_text(field)
    return text
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __update_information(self):
    """Gather information that doesn't change at different points in time.

    Populates ``self.info`` (and mirrors each key as an attribute) with
    the total action count, the sorted creation/deletion action ids for
    this object, and its content type.

    Raises:
        DisciplineException: if the object was never created (no
            CreationCommit exists for this uid).
    """
    info = {}
    info["actions_count"] = Action.objects.count()
    info["creation_times"] = []
    info["deletion_times"] = []
    info["content_type"] = None
    # Find object type and when it was created
    for ccommit in CreationCommit.objects.filter(object_uid=self.uid):
        info["creation_times"].append(ccommit.action.id)
    info["creation_times"].sort()
    for dcommit in DeletionCommit.objects.filter(object_uid=self.uid):
        info["deletion_times"].append(dcommit.action.id)
    info["deletion_times"].sort()
    try:
        # ``ccommit`` deliberately leaks from the loop above: if there
        # were no CreationCommits at all the name is unbound, and the
        # NameError signals an object that never existed.
        info["content_type"] = ccommit.content_type
    except NameError:
        raise DisciplineException("You tried to make a TimeMachine out of"
                                  " an object that doesn't exist!")
    self.info = info
    for key in info.keys():
        setattr(self, key, info[key])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def at(self, step):
    """Return a TimeMachine for the same object at another point in time.

    ``step`` is the id of an Action; the returned TimeMachine reflects
    the object's state immediately after that Action.
    """
    # Hand over a deep copy of the cached info so the clone is
    # independent of this instance.
    return TimeMachine(self.uid, step=step, info=copy.deepcopy(self.info))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get(self, key):
    """Return the value of field ``key`` at this TimeMachine's time.

    Plain fields are unpickled and returned directly; ForeignKey fields
    are resolved to the related Django object.

    Raises:
        DisciplineException: when a ForeignKey points at an object that
            no longer exists.
    """
    modcommit = self._get_modcommit(key)
    # No modification commit: the field never had a value at this time.
    if not modcommit: return None
    # If this isn't a ForeignKey, then just return the value
    if key not in self.foreignkeys:
        return cPickle.loads(str(modcommit.value))
    # If it is, then return the object instance
    try:
        return TimeMachine(uid = modcommit.value).get_object()
    except self.content_type.DoesNotExist:
        raise DisciplineException("When restoring a ForeignKey, the " \
            "%s %s was not found." % (self.content_type.name, self.uid))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_timemachine_instance(self, key):
    """Return a TimeMachine for the object a ForeignKey pointed to.

    ``key`` names a ForeignKey field; the related object is looked up as
    it was at the time of this TimeMachine.  Returns None when the field
    had no value.
    """
    modcommit = self._get_modcommit(key)
    return TimeMachine(uid=modcommit.value) if modcommit else None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_object(self):
    """Return the live Django object tracked by this TimeMachine."""
    model = self.content_type.model_class()
    return model.objects.get(uid=self.uid)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def restore(self, nosave=False):
    """Restore the object's attributes to this TimeMachine's state.

    Recreates the object if it no longer exists.  Returns the Django
    object; pass ``nosave=True`` to skip saving it.
    """
    model = self.content_type.model_class()
    if self.exists:
        obj = model.objects.get(uid=self.uid)
    else:
        obj = model(uid=self.uid)
    for field in self.fields + self.foreignkeys:
        setattr(obj, field, self.get(field))
    if not nosave:
        obj.save()
    return obj
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def url(self):
    """Return the admin change-page URL for the object."""
    view = "admin:%s_%s_change" % (self.content_type.app_label,
                                   self.content_type.model)
    return urlresolvers.reverse(view, args=(self.get_object().uid,))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _object_type_html(self):
    """Return an admin link labelled with the object's type name.

    If the object no longer exists, the type name is shown struck out
    instead of linked.
    """
    name = self.content_type.name
    if not self.exists:
        return "<s>%s</s>" % name
    return "<a href=\"%s\">%s</a>" % (self.url(), name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_for_content_type(self, ct):
    """Return the stored schema for the model of ContentType ``ct``.

    Returns None when no schema was recorded for that app/model pair.
    """
    state = json.loads(self.state)
    try:
        return state[ct.app_label][ct.model]
    except KeyError:
        return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def html_state(self):
    """Render the stored schema state as HTML for the admin form."""
    state = json.loads(self.state)
    chunks = []
    for (app, appstate) in state.items():
        for (model, modelstate) in appstate.items():
            chunks.append("<p>%s.models.%s</p>" % (app, model,))
            chunks.append("<ul>")
            # Plain fields first ("uid" is always tracked) ...
            for field in modelstate["fields"] + ["uid"]:
                chunks.append("<li>%s</li>" % field)
            # ... then foreign keys, marked as such.
            for fk in modelstate["foreignkeys"]:
                chunks.append("<li>%s (foreign key)</li>" % fk)
            chunks.append("</ul>")
    return "".join(chunks)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def plot_prh_des_asc(p, r, h, asc, des):
    '''Plot pitch, roll, and heading during the descent and ascent dive phases

    Args
    ----
    p: ndarray
        Derived pitch data
    r: ndarray
        Derived roll data
    h: ndarray
        Derived heading data
    des: ndarray
        boolean mask for slicing descent phases of dives from tag data
    asc: ndarray
        boolean mask for slicing ascent phases of dives from tag data
    '''
    import matplotlib.pyplot as plt
    import numpy

    from . import plotutils

    # Convert boolean masks to indices
    des_ind = numpy.where(des)[0]
    asc_ind = numpy.where(asc)[0]

    fig, (ax1, ax2, ax3) = plt.subplots(3, 1, sharex='col')

    ax1.title.set_text('Pitch')
    ax1 = plotutils.plot_noncontiguous(ax1, p, des_ind, _colors[0], 'descents')
    ax1 = plotutils.plot_noncontiguous(ax1, p, asc_ind, _colors[1], 'ascents')

    # BUG FIX: the 'Roll' and 'Heading' titles were previously set on
    # ax1, overwriting the pitch title and leaving ax2/ax3 untitled.
    ax2.title.set_text('Roll')
    ax2 = plotutils.plot_noncontiguous(ax2, r, des_ind, _colors[0], 'descents')
    ax2 = plotutils.plot_noncontiguous(ax2, r, asc_ind, _colors[1], 'ascents')

    ax3.title.set_text('Heading')
    ax3 = plotutils.plot_noncontiguous(ax3, h, des_ind, _colors[0], 'descents')
    ax3 = plotutils.plot_noncontiguous(ax3, h, asc_ind, _colors[1], 'ascents')

    for ax in [ax1, ax2, ax3]:
        ax.legend(loc="upper right")

    plt.ylabel('Radians')
    plt.xlabel('Samples')
    plt.show()

    return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def plot_prh_filtered(p, r, h, p_lf, r_lf, h_lf):
    '''Plot original and low-pass filtered PRH data

    Args
    ----
    p, r, h: ndarray
        Derived pitch, roll and heading data
    p_lf, r_lf, h_lf: ndarray
        Low-pass filtered pitch, roll and heading data
    '''
    import numpy

    fig, (ax1, ax2, ax3) = plt.subplots(3, 1, sharex='col')

    #rad2deg = lambda x: x*180/numpy.pi

    # One panel per signal: (axis, title, raw, filtered, raw/filt colors)
    panels = [
        (ax1, 'Pitch', p, p_lf, _colors[0], _colors[1]),
        (ax2, 'Roll', r, r_lf, _colors[2], _colors[3]),
        (ax3, 'Heading', h, h_lf, _colors[4], _colors[5]),
    ]
    for ax, title, raw, filtered, c_raw, c_filt in panels:
        ax.title.set_text(title)
        ax.plot(range(len(raw)), raw, color=c_raw,
                linewidth=_linewidth, label='original')
        ax.plot(range(len(filtered)), filtered, color=c_filt,
                linewidth=_linewidth, label='filtered')

    plt.ylabel('Radians')
    plt.xlabel('Samples')

    for ax in [ax1, ax2, ax3]:
        ax.legend(loc="upper right")

    plt.show()

    return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def plot_swim_speed(exp_ind, swim_speed):
    '''Plot the swim speed during experimental indices

    Args
    ----
    exp_ind: ndarray
        Indices of tag data where experiment is active
    swim_speed: ndarray
        Swim speed data at sensor sampling rate

    Returns
    -------
    ax: matplotlib axis holding the swim-speed plot
    '''
    import numpy

    fig, ax = plt.subplots()

    ax.title.set_text('Swim speed from depth change and pitch angle (m/s^2')
    ax.plot(exp_ind, swim_speed, linewidth=_linewidth, label='speed')

    # Round the y-limit up to the next integer, ignoring NaN samples.
    finite = swim_speed[~numpy.isnan(swim_speed)]
    ax.set_ylim(0, numpy.ceil(finite.max()))

    ax.legend(loc='upper right')

    plt.show()

    return ax
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load(self, skey, sdesc,
         sdict=None, loaders=None, merge=False, writeback=False):
    '''Load a dictionary into the current settings

    :param skey:
        Type of data to load. Is used to reference the data
        in the files sections within settings
    :param sdesc:
        Either filename of yaml-file to load or further description of
        imported data when `sdict` is used
    :param dict sdict:
        Directly pass data as dictionary instead of loading
        it from a yaml-file.
        Make sure to set `skey` and `sdesc` accordingly
    :param list loaders:
        Append custom loaders to the YAML-loader.
    :param merge:
        Merge received data into current settings or
        place it under `skey` within meta
    :param writeback:
        Write back loaded (and merged/imported) result back
        to the original file.
        This is used to generate the summary files
    :returns:
        The loaded (or directly passed) content

    .. seealso:: |yaml_loaders|
    '''
    y = sdict if sdict else read_yaml(sdesc, add_constructor=loaders)
    # Only act on non-empty dictionaries; anything else is returned
    # unchanged so the caller can detect a failed/empty load.
    if y and isinstance(y, dict):
        if not sdict:
            # Remember which file this kind of data came from.
            self.__settings['files'].update({skey: sdesc})
        if merge:
            self.__settings = dict_merge(self.__settings, y)
        else:
            self.__settings[skey] = y
        shell_notify(
            'load %s data and %s it into settings' % (
                'got' if sdict else 'read',
                'merged' if merge else 'imported'
            ),
            more=dict(skey=skey, sdesc=sdesc,
                      merge=merge, writeback=writeback),
            verbose=self.__verbose
        )
        # Persist the combined result when requested and changed.
        if writeback and y != self.__settings:
            write_yaml(sdesc, self.__settings)
    return y
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def encode(name, values, strict=True, encoding=pyamf.AMF0):
    """Produce a SharedObject-encoded stream from ``name`` and ``values``.

    @param name: The root name of the SharedObject.
    @param values: A `dict` of name value pairs to be encoded in the stream.
    @param strict: Ensure that the SOL stream is as spec compatible as
        possible (the real body length is patched into the header).
    @return: A SharedObject encoded stream.
    @rtype: L{BufferedByteStream<pyamf.util.BufferedByteStream>}, a file
        like object.
    """
    encoder = pyamf.get_encoder(encoding)
    stream = encoder.stream

    # Header: version marker, then (in strict mode) a length field that
    # gets patched once the body size is known.
    stream.write(HEADER_VERSION)
    if strict:
        length_pos = stream.tell()
        stream.write_ulong(0)
    stream.write(HEADER_SIGNATURE)

    # Root name: UTF-8 encoded and length-prefixed.
    name = name.encode('utf-8')
    stream.write_ushort(len(name))
    stream.write(name)

    # Three padding bytes, then the AMF encoding marker.
    stream.write(PADDING_BYTE * 3)
    stream.write_uchar(encoding)

    # Body: every name/value pair, in dict order.
    for key, value in values.iteritems():
        encoder.serialiseString(key)
        encoder.writeElement(value)

    stream.write(PADDING_BYTE)

    if strict:
        # Patch the header with the actual body length.
        stream.seek(length_pos)
        stream.write_ulong(stream.remaining() - 4)

    stream.seek(0)
    return stream
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def mainloop(self):
    """Dispatch events to their handlers until told to stop."""
    while self.keep_going:
        with self.lock:
            # Fire the one-shot connect callback once nothing is
            # immediately readable, then drop the reference so it
            # never fires again.
            if self.on_connect and not self.readable(2):
                self.on_connect()
                self.on_connect = None
            if not self.keep_going:
                break
        self.process_once()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_cell(self, cell, coords, cell_mode=CellMode.cooked):
    """Parse a cell according to ``cell.ctype``.

    In cooked mode, blank/empty cells become None, date cells are
    converted via ``_parse_date`` (with an optional fallback for
    ambiguous dates), and every other known ctype yields the raw cell
    value.  In raw mode the cell object itself is returned untouched.
    """
    if cell_mode != CellMode.cooked:
        return cell

    ctype = cell.ctype
    if ctype in (xlrd.XL_CELL_BLANK, xlrd.XL_CELL_EMPTY):
        return None
    if ctype == xlrd.XL_CELL_DATE:
        if self.handle_ambiguous_date:
            try:
                return self._parse_date(cell.value)
            except xlrd.xldate.XLDateAmbiguous:
                # delegate undecidable dates to the configured handler
                return self.handle_ambiguous_date(cell.value)
        return self._parse_date(cell.value)
    if ctype in (
        xlrd.XL_CELL_BOOLEAN,
        xlrd.XL_CELL_ERROR,
        xlrd.XL_CELL_NUMBER,
        xlrd.XL_CELL_TEXT,
    ):
        return cell.value
    raise ValueError("Unhandled cell type {0}".format(ctype))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_note(self, coords):
    """Get the note for the cell at the given coordinates.

    coords is a tuple of (col, row)
    """
    col, row = coords
    # the underlying map is keyed (row, col), i.e. coords reversed
    match = self.raw_sheet.cell_note_map.get((row, col))
    if not match:
        return None
    return match.text
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse_date(self, cell_value):
    """Attempt to parse *cell_value* as a date via xlrd's date tuple."""
    book = self.raw_sheet.book
    return self.tuple_to_datetime(
        xlrd.xldate_as_tuple(cell_value, book.datemode)
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse_args(cls):
""" Method to parse command line arguments """ |
cls.parser = argparse.ArgumentParser()
cls.parser.add_argument(
"symbol", help="Symbol for horizontal line", nargs="*")
cls.parser.add_argument(
"--color", "-c", help="Color of the line", default=None, nargs=1)
cls.parser.add_argument(
"--version", "-v", action="version", version="0.13")
return cls.parser |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run_args(self):
    """Parse the command line arguments and draw the requested line."""
    self.arg_parser = self._parse_args()
    self.args = self.arg_parser.parse_args()

    color_name = self.args.color
    if color_name is not None:
        # argparse delivers --color as a single-element list
        color_name = color_name[0]

    try:
        self.tr(self.args.symbol, color_name)
    except InvalidColorException:
        print("Invalid Color Name!")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _term_size(self):
""" Method returns lines and columns according to terminal size """ |
for fd in (0, 1, 2):
try:
return self._ioctl_GWINSZ(fd)
except:
pass
# try os.ctermid()
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
try:
return self._ioctl_GWINSZ(fd)
finally:
os.close(fd)
except:
pass
# try `stty size`
try:
return tuple(int(x) for x in os.popen("stty size", "r").read().split())
except:
pass
# try environment variables
try:
return tuple(int(os.getenv(var)) for var in ("LINES", "COLUMNS"))
except:
pass
# i give up. return default.
return (25, 80) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def tr(self, args, color=None):
    """Print ASCII patterns that span the terminal width.

    With no symbols, a full-width line of '#' is printed (green unless a
    color is given).  Otherwise each symbol is repeated to fill the
    terminal width.
    """
    width = self._term_size()[1]
    if not args:
        # default: full-width '#' line, green unless a color was given
        line = "#" * width
        print(self._echo(line, color if color is not None else "green"))
        return
    for each_symbol in args:
        chars = len(each_symbol)
        if chars == 0:
            # fix: an empty symbol previously crashed with
            # ZeroDivisionError (width // 0); skip it instead
            continue
        number_chars = width // chars
        if color is not None:
            print(self._echo(each_symbol * number_chars, color))
        else:
            print(each_symbol * number_chars)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cache_add(self, resource_url, cache_id):
    """Add entry permanently to local cache.

    Parameters
    ----------
    resource_url : string
        Resource Url
    cache_id : string
        Unique cache identifier for resource
    """
    # Record the mapping, then persist the full index to the database file
    self.cache[resource_url] = cache_id
    with open(self.db_file, 'w') as f:
        for resource, identifier in self.cache.items():
            f.write(resource + '\t' + identifier + '\n')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cache_clear(self):
    """Clear local cache by deleting all cached resources and their
    downloaded files.
    """
    # Wipe everything inside the cache directory (files and subtrees)
    for name in os.listdir(self.directory):
        path = os.path.join(self.directory, name)
        if os.path.isdir(path):
            shutil.rmtree(path)
        elif os.path.isfile(path):
            os.remove(path)
    # Reset the in-memory index
    self.cache = {}
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_api_references(self, api_url=None):
    """Get the set of HATEOAS references for the given SCO-API.

    Uses the default SCO-API if none is given.  References are cached as
    they are not expected to change.

    Parameters
    ----------
    api_url : string, optional
        Base Url of the SCO-API; defaults to ``self.api_url``.

    Returns
    -------
    Dictionary
        Mapping of reference names to Urls.
    """
    # Resolve which API to query (explicit argument wins over default).
    # Idiom fix: was ``if not api_url is None``.
    url = api_url if api_url is not None else self.api_url
    # Fetch and cache the reference listing on first access.
    # Idiom fix: was ``if not url in self.apis``.
    if url not in self.apis:
        self.apis[url] = sco.references_to_dict(
            sco.JsonResource(url).json[sco.REF_LINKS]
        )
    return self.apis[url]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def experiments_create(self, name, subject_id, image_group_id, api_url=None, properties=None):
    """Create a new experiment at the given SCO-API.

    Subject and image group reference existing resources at the SCO-API.

    Parameters
    ----------
    name : string
        User-defined name for experiment
    subject_id : string
        Unique identifier for subject at given SCO-API
    image_group_id : string
        Unique identifier for image group at given SCO-API
    api_url : string, optional
        Base Url of SCO-API where experiment will be created
    properties : Dictionary, optional
        Set of additional properties for the created experiment.  The
        given experiment name overrides an existing name property.

    Returns
    -------
    scoserv.ExperimentHandle
        Handle for local copy of created experiment resource
    """
    # Resolve the create-experiment Url, POST the new experiment, then
    # fetch a handle for the created resource
    create_url = self.get_api_references(api_url)[sco.REF_EXPERIMENTS_CREATE]
    resource_url = ExperimentHandle.create(
        create_url,
        name,
        subject_id,
        image_group_id,
        properties=properties
    )
    return self.experiments_get(resource_url)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def experiments_get(self, resource_url):
    """Get handle for experiment resource at given Url.

    Parameters
    ----------
    resource_url : string
        Url for experiment resource at SCO-API

    Returns
    -------
    scoserv.ExperimentHandle
        Handle for local copy of experiment resource
    """
    # Get resource directory, Json representation, active flag, and cache id
    obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url)
    # Create experiment handle. Will raise an exception if resource is not
    # in cache and cannot be downloaded.
    experiment = ExperimentHandle(obj_json, self)
    # Add resource to cache if not there yet.
    # Idiom fix: was ``if not cache_id in self.cache``.
    if cache_id not in self.cache:
        self.cache_add(resource_url, cache_id)
    # Return experiment handle
    return experiment
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def experiments_fmri_create(self, experiment_url, data_file):
    """Upload given data file as fMRI for experiment with given Url.

    Parameters
    ----------
    experiment_url : string
        Url for experiment resource
    data_file : string
        Abs. path to functional data file on disk

    Returns
    -------
    scoserv.FunctionalDataHandle
        Handle to created fMRI resource
    """
    # Look up the experiment to find its fMRI upload Url
    experiment = self.experiments_get(experiment_url)
    upload_url = experiment.links[sco.REF_EXPERIMENTS_FMRI_CREATE]
    # Upload the data file
    FunctionalDataHandle.create(upload_url, data_file)
    # Re-fetch the experiment and return the fresh fMRI data handle
    refreshed = self.experiments_get(experiment_url)
    return refreshed.fmri_data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def experiments_fmri_get(self, resource_url):
    """Get handle for functional fMRI resource at given Url.

    Parameters
    ----------
    resource_url : string
        Url for fMRI resource at SCO-API

    Returns
    -------
    scoserv.FunctionalDataHandle
        Handle for functional MRI data resource
    """
    # Get resource directory, Json representation, active flag, and cache id
    obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url)
    # Create fMRI data handle. Will raise an exception if resource is not
    # in cache and cannot be downloaded.
    fmri_data = FunctionalDataHandle(obj_json, obj_dir)
    # Add resource to cache if not there yet.
    # Idiom fix: was ``if not cache_id in self.cache``.
    if cache_id not in self.cache:
        self.cache_add(resource_url, cache_id)
    # Return functional data handle
    return fmri_data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def experiments_predictions_create(self, model_id, name, api_url, arguments=None, properties=None):
    """Create a new model run at the given SCO-API.

    Parameters
    ----------
    model_id : string
        Unique model identifier
    name : string
        User-defined name for experiment
    api_url : string
        Url to POST create model run request
    arguments : Dictionary, optional
        Dictionary of arguments for the model run (defaults to empty)
    properties : Dictionary, optional
        Set of additional properties for the created model run.

    Returns
    -------
    scoserv.ModelRunHandle
        Handle for local copy of created model run resource
    """
    # Fix: the signature used the mutable default ``arguments={}``, which
    # is shared between calls; default to None and build a fresh dict.
    if arguments is None:
        arguments = {}
    # Create model run and return handle for created resource
    return self.experiments_predictions_get(
        ModelRunHandle.create(
            api_url,
            model_id,
            name,
            arguments,
            properties=properties
        )
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def experiments_predictions_get(self, resource_url):
    """Get handle for model run resource at given Url.

    Parameters
    ----------
    resource_url : string
        Url for model run resource at SCO-API

    Returns
    -------
    scoserv.ModelRunHandle
        Handle for local copy of model run resource
    """
    # Get resource directory, Json representation, active flag, and cache id
    obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url)
    # Create model run handle. Will raise an exception if resource is not
    # in cache and cannot be downloaded.
    run = ModelRunHandle(obj_json, obj_dir, self)
    # Add resource to cache if not there yet.
    # Idiom fix: was ``if not cache_id in self.cache``.
    if cache_id not in self.cache:
        self.cache_add(resource_url, cache_id)
    # Return model run handle
    return run
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_object(self, resource_url):
    """Get remote resource information.

    Creates a local directory for the resource if this is the first
    access to the resource.  Downloads the resource Json representation
    and writes it into a .json file in the cache directory.  Raises
    ValueError if the resource is not cached and does not exist.  If the
    resource no longer exists on the server but in the local cache, a
    reference to the local copy is returned and the value of the
    is_active flag is False.

    Parameters
    ----------
    resource_url : string
        Url of the resource

    Returns
    -------
    (string, Json, Boolean, string)
        4-tuple containing the local resource directory, the Json object
        representing the resource, an active flag indicating if the
        resource still exists on the remote server or only in the local
        cache, and the resource's unique cache identifier.
    """
    # Reuse the cache identifier if the resource was seen before;
    # otherwise mint a new one
    if resource_url in self.cache:
        cache_id = self.cache[resource_url]
    else:
        cache_id = str(uuid.uuid4())
    # The local cache directory for the resource is named by cache id
    obj_dir = os.path.join(self.directory, cache_id)
    # File for local copy of object's Json representation
    f_json = os.path.join(obj_dir, '.json')
    # Object active flag
    is_active = True
    try:
        # Read the remote resource representation
        obj_json = sco.JsonResource(resource_url).json
        # Save local copy of Json object. Create the local resource
        # directory if it doesn't exist yet.
        if not os.path.isdir(obj_dir):
            os.mkdir(obj_dir)
        with open(f_json, 'w') as f:
            json.dump(obj_json, f)
    except ValueError:
        # If the resource does not exist remotely but we have a local
        # copy then read the object from disk and mark it inactive.
        # Fix: re-raise with a bare ``raise`` (was ``raise ex``) so the
        # original traceback is preserved when no local copy exists.
        if os.path.isfile(f_json):
            with open(f_json, 'r') as f:
                obj_json = json.load(f)
            is_active = False
        else:
            raise
    # Return object directory, Json, active flag, and cache identifier
    return obj_dir, obj_json, is_active, cache_id
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def image_groups_get(self, resource_url):
    """Get handle for image group resource at given Url.

    Parameters
    ----------
    resource_url : string
        Url for image group resource at SCO-API

    Returns
    -------
    scoserv.ImageGroupHandle
        Handle for local copy of image group resource
    """
    # Get resource directory, Json representation, active flag, and cache id
    obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url)
    # Create image group handle. Will raise an exception if resource is not
    # in cache and cannot be downloaded.
    image_group = ImageGroupHandle(obj_json, obj_dir)
    # Add resource to cache if not there yet.
    # Idiom fix: was ``if not cache_id in self.cache``.
    if cache_id not in self.cache:
        self.cache_add(resource_url, cache_id)
    # Return image group handle
    return image_group
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def image_groups_list(self, api_url=None, offset=0, limit=-1, properties=None):
    """Get list of image group resources from a SCO-API.

    Parameters
    ----------
    api_url : string, optional
        Base Url of the SCO-API. Uses default API if argument not present.
    offset : int, optional
        Starting offset for returned list items
    limit : int, optional
        Limit the number of items in the result
    properties : List(string)
        List of additional object properties to be included for items in
        the result

    Returns
    -------
    List(scoserv.ResourceHandle)
        List of resource handles (one per image group in the listing)
    """
    # Resolve the image-group listing Url for the target API and fetch
    # the listing from it
    listing_url = self.get_api_references(api_url)[sco.REF_IMAGE_GROUPS_LIST]
    return sco.get_resource_listing(listing_url, offset, limit, properties)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def models_get(self, resource_url):
    """Get handle for model resource at given Url.

    Parameters
    ----------
    resource_url : string
        Url for model resource at SCO-API

    Returns
    -------
    models.ModelHandle
        Handle for local copy of model resource
    """
    # Get resource directory, Json representation, active flag, and cache id
    obj_dir, obj_json, is_active, cache_id = self.get_object(resource_url)
    # Create model handle.
    model = ModelHandle(obj_json)
    # Add resource to cache if not there yet.
    # Idiom fix: was ``if not cache_id in self.cache``.
    if cache_id not in self.cache:
        self.cache_add(resource_url, cache_id)
    # Return model handle
    return model
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def models_list(self, api_url=None, offset=0, limit=-1, properties=None):
    """Get list of model resources from a SCO-API.

    Parameters
    ----------
    api_url : string, optional
        Base Url of the SCO-API. Uses default API if argument not present.
    offset : int, optional
        Starting offset for returned list items
    limit : int, optional
        Limit the number of items in the result
    properties : List(string)
        List of additional object properties to be included for items in
        the result

    Returns
    -------
    List(scoserv.ResourceHandle)
        List of resource handles (one per model in the listing)
    """
    # Resolve the model listing Url for the target API and fetch the
    # listing from it
    listing_url = self.get_api_references(api_url)[sco.REF_MODELS_LIST]
    return sco.get_resource_listing(listing_url, offset, limit, properties)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def subjects_create(self, filename, api_url=None, properties=None):
    """Create new anatomy subject at given SCO-API by uploading local file.

    Expects a tar-archive containing a FreeSurfer anatomy.

    Parameters
    ----------
    filename : string
        Path to tar-archive on local disk
    api_url : string, optional
        Base Url of SCO-API where subject will be created
    properties : Dictionary, optional
        Set of additional properties for created subject

    Returns
    -------
    scoserv.SubjectHandle
        Handle for local copy of created subject resource
    """
    # Resolve the create-subject Url, upload the archive, then fetch a
    # handle for the created resource
    create_url = self.get_api_references(api_url)[sco.REF_SUBJECTS_CREATE]
    resource_url = SubjectHandle.create(create_url, filename, properties)
    return self.subjects_get(resource_url)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.