text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_status(self, status):
    """Store *status* on the worker and notify every registered callback.

    Each callable registered in ``self._update_status_callbacks`` is
    invoked with the worker itself as its single argument.
    """
    self.status = status
    for notify in self._update_status_callbacks:
        notify(self)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def wait_for_job(self):
    """Block on redis (BLPOP) until a job arrives, then return it.

    Returns ``None`` when the blocking call times out; otherwise switches
    the worker to 'running' and returns a ``(queue, job)`` tuple resolved
    from the popped redis key and job identifier.
    """
    popped = self.connection.blpop(self.keys, self.timeout)
    if popped is None:
        return None
    queue_key, job_ident = popped
    self.set_status('running')
    return self.get_queue(queue_key), self.get_job(job_ident)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_queue(self, queue_redis_key):
    """Return the queue object matching a redis list key.

    The queue primary key is expected to be the second-to-last
    colon-separated part of the key (e.g. ``prefix:<pk>:waiting``).

    :param queue_redis_key: redis key of the queue's waiting list
    :raises DoesNotExist: when no pk can be extracted from the key
    """
    try:
        queue_pk = int(queue_redis_key.split(':')[-2])
    # Only catch extraction failures. The previous bare "except:" also
    # swallowed KeyboardInterrupt/SystemExit and hid unrelated bugs.
    except (AttributeError, IndexError, ValueError):
        raise DoesNotExist('Unable to get the queue from the key %s' % queue_redis_key)
    return self.queue_model.get(queue_pk)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update_keys(self):
    """Refresh the redis keys watched for incoming jobs.

    Logs a warning when no queue exists yet, and records the refresh
    time so the main loop knows when to schedule the next one.
    """
    waiting_keys = self.queue_model.get_waiting_keys(self.queues)
    self.keys = waiting_keys
    if not waiting_keys:
        self.log('No queues yet', level='warning')
    self.last_update_keys = datetime.utcnow()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run(self):
    """Main method of the worker.

    Asks redis for list items via blocking calls, gets jobs from them,
    tries to execute these jobs, and ends when needed.

    Raises LimpydJobsException if called on a worker that already ran:
    a worker instance is single-use.
    """
    # if status is not None, we already had a run !
    if self.status:
        self.set_status('aborted')
        raise LimpydJobsException('This worker run is already terminated')
    self.set_status('starting')
    self.start_date = datetime.utcnow()
    if self.max_duration:
        # precompute the deadline checked by must_stop()
        self.wanted_end_date = self.start_date + self.max_duration
    must_stop = self.must_stop()
    if not must_stop:
        # get keys: poll until at least one queue exists, or we must stop
        while not self.keys and not must_stop:
            self.update_keys()
            if not self.keys:
                sleep(self.fetch_priorities_delay)
                must_stop = self.must_stop()
    if not must_stop:
        # queues are available: requeue due delayed jobs, then loop on jobs
        self.requeue_delayed_jobs()
        self.run_started()
        self._main_loop()
    self.set_status('terminated')
    self.end_date = datetime.utcnow()
    self.run_ended()
    if self.terminate_gracefuly:
        self.stop_handling_end_signal()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def elapsed(self):
    """Return how long the worker has been running, as a timedelta.

    ``None`` if the worker never started; while running, measured up to
    now; once finished, up to the recorded end date.
    """
    if not self.start_date:
        return None
    reference = self.end_date or datetime.utcnow()
    return reference - self.start_date
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def requeue_delayed_jobs(self):
    """Requeue every delayed job that is now ready to be executed.

    Requeue failures reported by the queues are logged but do not
    interrupt the process. Also records when this last ran, for the
    main-loop scheduling.
    """
    problems = []
    for queue in self.queue_model.get_all_by_priority(self.queues):
        problems += queue.requeue_delayed_jobs()
    self.last_requeue_delayed = datetime.utcnow()
    for job_ident, reason in problems:
        self.log('Unable to requeue %s: %s' % (job_ident, reason))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _main_loop(self):
    """Run jobs until must_stop returns True.

    Each iteration: refresh the watched keys / requeue due delayed jobs
    when their refresh delays have elapsed, block for a job, then run it
    while dispatching to job_delayed/job_skipped/job_error/job_success
    depending on the job's status before and after execution.
    """
    fetch_priorities_delay = timedelta(seconds=self.fetch_priorities_delay)
    fetch_delayed_delay = timedelta(seconds=self.fetch_delayed_delay)
    while not self.must_stop():
        self.set_status('waiting')
        # periodic maintenance, throttled by the two delays above
        if self.last_update_keys + fetch_priorities_delay < datetime.utcnow():
            self.update_keys()
        if self.last_requeue_delayed + fetch_delayed_delay < datetime.utcnow():
            self.requeue_delayed_jobs()
        try:
            queue_and_job = self.wait_for_job()
            if queue_and_job is None:
                # timeout for blpop
                continue
            queue, job = queue_and_job
        except Exception as e:
            self.log('Unable to get job: %s\n%s'
                     % (str(e), traceback.format_exc()), level='error')
        else:
            self.num_loops += 1
            # best-effort identifier for log messages, even if the job is broken
            try:
                identifier = 'pk:%s' % job.pk.get()
            except Exception as e:
                identifier = '??'
            try:
                self.set_status('running')
                identifier, status = job.hmget('identifier', 'status')
                # some cache, don't count on it on subclasses
                job._cached_identifier = identifier
                job._cached_status = status
                queue._cached_name = queue.name.hget()
                # dispatch on the job's status BEFORE execution
                if status == STATUSES.DELAYED:
                    self.job_delayed(job, queue)
                elif status != STATUSES.WAITING:
                    self.job_skipped(job, queue)
                else:
                    try:
                        self.job_started(job, queue)
                        job_result = self.callback(job, queue)
                    except Exception as e:
                        trace = None
                        if self.save_tracebacks:
                            trace = traceback.format_exc()
                        self.job_error(job, queue, e, trace)
                    else:
                        # the callback may have changed the status: re-read it
                        job._cached_status = job.status.hget()
                        if job._cached_status == STATUSES.DELAYED:
                            self.job_delayed(job, queue)
                        elif job._cached_status == STATUSES.CANCELED:
                            self.job_skipped(job, queue)
                        else:
                            self.job_success(job, queue, job_result)
            except Exception as e:
                # anything not handled above: log, and try to record the failure
                self.log('[%s] unexpected error: %s\n%s'
                         % (identifier, str(e), traceback.format_exc()), level='error')
                try:
                    queue.errors.rpush(job.ident)
                except Exception as e:
                    self.log('[%s] unable to add the error in the queue: %s\n%s'
                             % (identifier, str(e), traceback.format_exc()), level='error')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def job_error(self, job, queue, exception, trace=None):
    """Called when an exception was raised during the execute call for a job.

    Marks the job as errored, records the failure in the queue's errors
    list and (optionally) in the error model, fires the job's
    ``on_error`` hook, and finally requeues the job when requeueing is
    enabled and the try count allows it.
    """
    # NOTE(review): requeue only while tries stays within requeue_times —
    # presumably tries was already incremented by job_started; confirm.
    to_be_requeued = not job.must_be_cancelled_on_error and self.requeue_times and self.requeue_times >= int(job.tries.hget() or 0)
    if not to_be_requeued:
        job.queued.delete()
    job.hmset(end=str(datetime.utcnow()), status=STATUSES.ERROR)
    queue.errors.rpush(job.ident)
    if self.save_errors:
        additional_fields = self.additional_error_fields(job, queue, exception)
        self.error_model.add_error(queue_name=queue._cached_name,
                                   job=job,
                                   error=exception,
                                   trace=trace,
                                   **additional_fields)
    self.log(self.job_error_message(job, queue, to_be_requeued, exception, trace), level='error')
    if hasattr(job, 'on_error'):
        job.on_error(queue, exception, trace)
    # requeue the job if needed
    if to_be_requeued:
        priority = queue.priority.hget()
        if self.requeue_priority_delta:
            # a delta lets errors sink (or rise) in priority on each retry
            priority = int(priority) + self.requeue_priority_delta
        self.requeue_job(job, queue, priority, delayed_for=self.requeue_delay_delta)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def requeue_job(self, job, queue, priority, delayed_for=None):
    """Put *job* back into *queue* at *priority*, optionally delayed.

    Fires the job's ``on_requeued`` hook when defined, then logs the
    requeue message.
    """
    requeue_kwargs = dict(
        queue_name=queue._cached_name,
        priority=priority,
        delayed_for=delayed_for,
        queue_model=self.queue_model,
    )
    job.requeue(**requeue_kwargs)
    if hasattr(job, 'on_requeued'):
        job.on_requeued(queue)
    self.log(self.job_requeue_message(job, queue))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def job_error_message(self, job, queue, to_be_requeued, exception, trace=None):
    """Compose the log line for a job that raised an error."""
    requeue_note = 'requeued' if to_be_requeued else 'NOT requeued'
    return '[%s|%s|%s] error: %s [%s]' % (
        queue._cached_name,
        job.pk.get(),
        job._cached_identifier,
        str(exception),
        requeue_note,
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def job_requeue_message(self, job, queue):
    """Compose the log line for a requeued (possibly delayed) job."""
    priority, delayed_until = job.hmget('priority', 'delayed_until')
    template = '[%s|%s|%s] requeued with priority %s'
    parts = [queue._cached_name, job.pk.get(), job._cached_identifier, priority]
    if delayed_until:
        template += ', delayed until %s'
        parts.append(delayed_until)
    return template % tuple(parts)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def job_success(self, job, queue, job_result):
    """Finalize a job whose execute call succeeded.

    Marks the job finished with the SUCCESS status, records it in the
    queue's success list, logs, then fires the job's ``on_success`` hook
    when defined. *job_result* is the value returned by the callback.
    """
    job.queued.delete()
    end_time = str(datetime.utcnow())
    job.hmset(end=end_time, status=STATUSES.SUCCESS)
    queue.success.rpush(job.ident)
    message = self.job_success_message(job, queue, job_result)
    self.log(message)
    if hasattr(job, 'on_success'):
        job.on_success(queue, job_result)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def job_success_message(self, job, queue, job_result):
    """Compose the log line for a successful job."""
    return '[%s|%s|%s] success, in %s' % (
        queue._cached_name,
        job.pk.get(),
        job._cached_identifier,
        job.duration,
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def job_started(self, job, queue):
    """Mark *job* as running just before executing it.

    Records the start date and RUNNING status, bumps the try counter,
    logs, and fires the job's ``on_started`` hook when defined.
    """
    start_time = str(datetime.utcnow())
    job.hmset(start=start_time, status=STATUSES.RUNNING)
    job.tries.hincrby(1)
    self.log(self.job_started_message(job, queue))
    if hasattr(job, 'on_started'):
        job.on_started(queue)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def job_started_message(self, job, queue):
    """Compose the log line emitted just before a job is executed."""
    return '[%s|%s|%s] starting' % (
        queue._cached_name,
        job.pk.get(),
        job._cached_identifier,
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def job_skipped(self, job, queue):
    """Drop a job that must not run.

    Used when the job's status is not "waiting" before the run, or was
    set to "canceled" after it. Logs a warning and fires the job's
    ``on_skipped`` hook when defined.
    """
    job.queued.delete()
    warning = self.job_skipped_message(job, queue)
    self.log(warning, level='warning')
    if hasattr(job, 'on_skipped'):
        job.on_skipped(queue)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def job_skipped_message(self, job, queue):
    """Compose the log line for a skipped job."""
    status_name = STATUSES.by_value(job._cached_status, 'UNKNOWN')
    return '[%s|%s|%s] job skipped (current status: %s)' % (
        queue._cached_name,
        job.pk.get(),
        job._cached_identifier,
        status_name,
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def job_delayed(self, job, queue):
    """Re-enqueue (or keep delayed) a job flagged as delayed.

    Used when the job's status is "delayed" before the run, or was set
    to "delayed" after it. When the stored ``delayed_until`` is absent
    or unparseable, the job is delayed 60 seconds into the future.
    """
    delayed_until = job.delayed_until.hget()
    if delayed_until:
        try:
            delayed_until = compute_delayed_until(delayed_until=parse(delayed_until))
        except (ValueError, TypeError):
            # stored value is not a parseable date: fall through to default
            delayed_until = None
    if not delayed_until:
        # by default delay it for 60 seconds
        delayed_until = compute_delayed_until(delayed_for=60)
    job.enqueue_or_delay(
        queue_name=queue._cached_name,
        delayed_until=delayed_until,
        queue_model=queue.__class__,
    )
    self.log(self.job_delayed_message(job, queue), level='warning')
    if hasattr(job, 'on_delayed'):
        job.on_delayed(queue)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def job_delayed_message(self, job, queue):
    """Compose the log line for a job delayed before or during its run."""
    return '[%s|%s|%s] job delayed until %s' % (
        queue._cached_name,
        job.pk.get(),
        job._cached_identifier,
        job.delayed_until.hget(),
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_parser(self):
    """Build and return the ``OptionParser`` for the worker arguments."""
    version_string = '%%prog %s' % self.get_version()
    return OptionParser(
        prog=self.prog_name,
        usage=self.usage(),
        version=version_string,
        option_list=self.option_list,
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def manage_options(self):
    """Parse and validate the command-line arguments.

    Populates ``self.options``/``self.args`` from ``self.argv``, resolves
    the importable options, normalizes the logger level, validates the
    numeric bounds, and extracts the database configuration
    (``host:port:db``) when given.

    Calls ``self.parser.error`` (which exits the program) on any invalid
    value.
    """
    self.parser = self.create_parser()
    self.options, self.args = self.parser.parse_args(self.argv)
    self.do_imports()
    if self.options.callback and not callable(self.options.callback):
        self.parser.error('The callback is not callable')
    # NOTE(review): resets the worker-level logger_level before the parsed
    # value is normalized below — presumably intentional; confirm.
    self.logger_level = None
    if self.options.logger_level:
        if self.options.logger_level.isdigit():
            self.options.logger_level = int(self.options.logger_level)
        else:
            try:
                self.options.logger_level = getattr(logging, self.options.logger_level.upper())
            # was a bare "except:": only an unknown level name can fail here
            except AttributeError:
                self.parser.error('Invalid logger-level %s' % self.options.logger_level)
    if self.options.max_loops is not None and self.options.max_loops < 0:
        # message previously contained a stray "<positive></positive>" markup artifact
        self.parser.error('The max-loops argument (%s) must be a positive integer' % self.options.max_loops)
    if self.options.max_duration is not None and self.options.max_duration < 0:
        self.parser.error('The max-duration argument (%s) must be a positive integer' % self.options.max_duration)
    if self.options.timeout is not None and self.options.timeout < 0:
        self.parser.error('The timeout argument (%s) must be a positive integer (including 0)' % self.options.timeout)
    if self.options.fetch_priorities_delay is not None and self.options.fetch_priorities_delay <= 0:
        self.parser.error('The fetch-priorities-delay argument (%s) must be a positive integer' % self.options.fetch_priorities_delay)
    if self.options.fetch_delayed_delay is not None and self.options.fetch_delayed_delay <= 0:
        self.parser.error('The fetch-delayed-delay argument (%s) must be a positive integer' % self.options.fetch_delayed_delay)
    if self.options.requeue_times is not None and self.options.requeue_times < 0:
        self.parser.error('The requeue-times argument (%s) must be a positive integer (including 0)' % self.options.requeue_times)
    if self.options.requeue_delay_delta is not None and self.options.requeue_delay_delta < 0:
        # typo fix: the message previously said "rrequeue-delay-delta"
        self.parser.error('The requeue-delay-delta argument (%s) must be a positive integer (including 0)' % self.options.requeue_delay_delta)
    self.database_config = None
    if self.options.database:
        host, port, db = self.options.database.split(':')
        self.database_config = dict(host=host, port=int(port), db=int(db))
    self.update_title = self.options.update_title
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def do_imports(self):
    """Resolve every importable option, the worker class first.

    The worker class is imported before the others because it provides
    the defaults used for the remaining importable options.
    """
    self.do_import('worker_class', Worker)
    worker_class = self.options.worker_class
    self.do_import('queue_model', worker_class.queue_model)
    self.do_import('error_model', worker_class.error_model)
    self.do_import('callback', worker_class.callback)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def print_options(self):
    """Print the script options, then the worker's effective options."""
    print("The script is running with the following options:")
    script_options = [
        ("dry_run", self.options.dry_run),
        ("worker_config", self.__class__),
    ]
    database_config = self.database_config or \
        self.options.queue_model.database.connection_settings
    script_options.append(("database", '%s:%s:%s' % (database_config['host'],
                                                     database_config['port'],
                                                     database_config['db'])))
    if self.options.worker_class is not None:
        script_options.append(("worker-class", self.options.worker_class))
    for name, value in script_options:
        print(" - %s = %s" % (name.replace('_', '-'), value))
    print("The worker will run with the following options:")
    for name in self.options.worker_class.parameters:
        value = getattr(self.worker, name)
        # a default callback means jobs run their own "run" method
        if name == 'callback' and \
                self.options.worker_class.execute == Worker.execute:
            value = '<jobs "run" method>'
        elif isinstance(value, (list, tuple, set)):
            value = ','.join(value)
        print(" - %s = %s" % (name.replace('_', '-'), value))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def execute(self):
    """Entry point: prepare models and worker, optionally dump the
    effective options, then run the worker."""
    self.prepare_models()
    self.prepare_worker()
    # --print-options asks for a dump of the effective settings first
    if self.options.print_options:
        self.print_options()
    self.run()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def prepare_models(self):
    """Apply the database configuration, if one was given as argument,
    to both the queue and error models."""
    if not self.database_config:
        return
    for model in (self.options.queue_model, self.options.error_model):
        model.database.reset(**self.database_config)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def settable_options(doc, argv, ignore, options_first):
    """Determine which options we can set, which ones are boolean, and
    which ones are repeatable.

    All set items are option long names.

    :param str doc: Docstring from docoptcfg().
    :param iter argv: CLI arguments from docoptcfg().
    :param iter ignore: Options to ignore from docoptcfg().
    :param bool options_first: docopt argument from docoptcfg().
    :return: Settable options, boolean options, repeatable options, and
        short to long option name mapping.
    :rtype: tuple
    """
    settable, booleans, repeatable, short_map = set(), set(), set(), dict()
    # Determine which options are settable by docoptcfg and which ones are flags/booleans.
    options = docopt.parse_defaults(doc)
    short_map.update((o.short, o.long) for o in options)
    # options given on the command line always win, so they are not settable
    parsed_argv = docopt.parse_argv(docopt.TokenStream(argv, docopt.DocoptExit), list(options), options_first)
    overridden = [o.long for o in parsed_argv if hasattr(o, 'long')]
    for option in options:
        if option.long in overridden or (option.long in ignore or option.short in ignore):
            continue
        # argcount == 0 means the option takes no value: it is a flag
        if option.argcount == 0:
            booleans.add(option.long)
        settable.add(option.long)
    # Determine which options are repeatable.
    if settable and '...' in doc:
        pattern = docopt.parse_pattern(docopt.formal_usage(docopt.DocoptExit.usage), options)
        for option in pattern.fix().flat():
            if not hasattr(option, 'long'):
                continue  # Positional argument or sub-command.
            if getattr(option, 'long') not in settable:
                continue  # Don't care about this if we can't set it.
            # repeatable flags count occurrences; repeatable values collect lists
            if getattr(option, 'long') in booleans and getattr(option, 'value') == 0:
                repeatable.add(getattr(option, 'long'))
            elif hasattr(getattr(option, 'value'), '__iter__'):
                repeatable.add(getattr(option, 'long'))
    return settable, booleans, repeatable, short_map
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def values_from_env(env_prefix, settable, booleans, repeatable):
    """Collect option values defined through environment variables.

    :param str env_prefix: Argument from docoptcfg().
    :param iter settable: Option long names available to set by config file.
    :param iter booleans: Option long names of boolean/flag types.
    :param iter repeatable: Option long names of repeatable options.
    :return: Settable values.
    :rtype: dict
    """
    defaults = dict()
    for option in settable:
        try:
            value = get_env(option, env_prefix, option in booleans, option in repeatable)
        except KeyError:
            # no matching environment variable: leave the option unset
            continue
        defaults[option] = value
    return defaults
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_opt(key, config, section, booleans, repeatable):
    """Read one option value from the config file.

    :raise DocoptcfgFileError: If an option is the wrong type.
    :param str key: Option long name (e.g. --config).
    :param ConfigParser config: ConfigParser instance with config file data already loaded.
    :param str section: Section in config file to focus on.
    :param iter booleans: Option long names of boolean/flag types.
    :param iter repeatable: Option long names of repeatable options.
    :return: Value to set in the defaults dict.
    """
    option = key[2:]  # strip the leading "--"
    is_boolean = key in booleans
    if key in repeatable:
        if not is_boolean:
            # repeatable values (e.g. --file=a --file=b): one value per line
            return config.get(section, option).strip('\n').splitlines()
        # repeatable booleans are stored as an occurrence count
        try:
            return config.getint(section, option)
        except ValueError as exc:
            raise DocoptcfgFileError('Repeatable boolean option "{0}" invalid.'.format(option), str(exc))
    if is_boolean:
        try:
            return config.getboolean(section, option)
        except ValueError as exc:
            raise DocoptcfgFileError('Boolean option "{0}" invalid.'.format(option), str(exc))
    # plain single-value option
    return str(config.get(section, option))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def values_from_file(docopt_dict, config_option, settable, booleans, repeatable):
    """Parse config file and read settable values.

    Can be overridden by both command line arguments and environment
    variables.

    :raise DocoptcfgError: If `config_option` isn't found in docstring.
    :raise DocoptcfgFileError: On any error while trying to read and parse config file.
    :param dict docopt_dict: Dictionary from docopt with environment variable defaults merged in by docoptcfg().
    :param str config_option: Config option long name with file path as its value.
    :param iter settable: Option long names available to set by config file.
    :param iter booleans: Option long names of boolean/flag types.
    :param iter repeatable: Option long names of repeatable options.
    :return: Settable values.
    :rtype: dict
    """
    # the config-file section is named after the program (2nd word of usage)
    section = docopt.DocoptExit.usage.split()[1]
    # the config option itself can never be set from the file it names
    settable = set(o for o in settable if o != config_option)
    config = ConfigParser()
    defaults = dict()
    # Sanity checks.
    if config_option not in docopt_dict:
        raise DocoptcfgError
    if docopt_dict[config_option] is None or not settable:
        return defaults
    # Read config file.
    path = DocoptcfgFileError.FILE_PATH = docopt_dict[config_option]
    try:
        with open(path) as handle:
            # read_file() is the py3 name; readfp() the deprecated py2 one
            if hasattr(config, 'read_file'):
                config.read_file(handle)
            else:
                getattr(config, 'readfp')(handle)
    except Error as exc:
        raise DocoptcfgFileError('Unable to parse config file.', str(exc))
    except IOError as exc:
        raise DocoptcfgFileError('Unable to read config file.', str(exc))
    # Make sure section is in config file.
    if not config.has_section(section):
        raise DocoptcfgFileError('Section [{0}] not in config file.'.format(section))
    # Parse config file.
    for key in settable:
        if config.has_option(section, key[2:]):
            defaults[key] = get_opt(key, config, section, booleans, repeatable)
    return defaults
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def setRepoData(self, searchString, category="", extension="", math=False, game=False, searchFiles=False):
    """Record the settings used by all future operations on this
    repository. Must be called FIRST, before any other operation."""
    self.searchString = searchString
    self.category = category
    self.extension = extension
    self.math = math
    self.game = game
    self.searchFiles = searchFiles
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def setOutputObject(self, newOutput=None):
    """Set the object that receives all calcpkg output for this repository.

    Defaults to a fresh ``output.CalcpkgOutput(True, True)``. The previous
    signature evaluated that default at import time, so every repository
    silently shared ONE output instance created when the module loaded;
    building the default lazily gives each call its own object.

    :param newOutput: output sink, or None to use the default
    """
    if newOutput is None:
        newOutput = output.CalcpkgOutput(True, True)
    self.output = newOutput
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def searchIndex(self, printData=True):
    """Search the index using the repository's stored parameters.

    Temporarily overrides the output's printData flag with *printData*,
    restoring the previous value after the search.
    """
    saved_print_flag = copy.deepcopy(self.output.printData)
    self.output.printData = printData
    self.data = self.index.search(self.searchString, self.category,
                                  self.math, self.game,
                                  self.searchFiles, self.extension)
    self.output.printData = saved_print_flag
    return self.data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def countIndex(self):
    """Count matching entries; wraps the count function in calcrepo.index
    using this repository's stored parameters."""
    self.data = self.index.count(self.searchString, self.category,
                                 self.math, self.game,
                                 self.searchFiles, self.extension)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getDownloadUrls(self):
    """Return the list of download urls for the current search results."""
    results = self.searchIndex(False)
    return [self.formatDownloadUrl(entry[0]) for entry in results]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getFileInfos(self):
    """Return a list of FileInfo objects for the current search results.

    Prints a notice and returns an empty list when this repository does
    not implement the info command.
    """
    results = self.searchIndex(False)
    self.data = results
    self.printd(" ")
    infos = []
    for entry in results:
        try:
            infos.append(self.getFileInfo(entry[0], entry[1]))
        except NotImplementedError:
            self.printd("Error: the info command is not supported for " + self.name + ".")
            return []
    return infos
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def downloadFiles(self, prompt=True, extract=False):
    """Download files from the repository.

    Python 2 only (raw_input, urllib2). Optionally prompts the user for
    confirmation, downloads each result of the current search into
    ``self.downloadDir``, and optionally extracts zip/tar archives into a
    per-archive directory (deleting the archive afterwards).

    :param prompt: ask the user to confirm before downloading
    :param extract: extract downloaded zip/tar archives
    """
    #First, get the download urls
    data = self.data
    downloadUrls = self.getDownloadUrls()
    #Then, confirm the user wants to do this
    if prompt:
        confirm = raw_input("Download files [Y/N]? ")
        if confirm.lower() != 'y':
            self.printd("Operation aborted by user input")
            return
    #Now, if they still do, do all this stuff:
    counter = -1
    for datum in data:
        counter += 1
        try:
            download = downloadUrls[counter]
        # NOTE(review): bare except + pass — if the index lookup fails,
        # "download" keeps its previous value (or is unbound on the first
        # iteration, raising NameError below). Should probably "continue".
        except:
            pass
        # Download the file; fix our user agent
        self.printd("Downloading " + datum[0] + " from " + download)
        headers = { 'User-Agent' : 'calcpkg/2.0' }
        request = urllib2.Request(download, None, headers)
        fileData = urllib2.urlopen(request).read()
        # Now, process the downloaded file
        dowName = datum[0]
        # Use a helper function to remove /pub, /files
        dowName = util.removeRootFromName(dowName)
        dowName = dowName[1:]
        dowName = dowName.replace('/', '-')
        dowName = self.downloadDir + dowName
        try:
            downloaded = open(dowName, 'wb')
        # NOTE(review): if open() fails, "downloaded" is unbound and the
        # write below raises NameError; removing the path here also looks
        # wrong (the file could not even be opened). Confirm intent.
        except:
            os.remove(dowName)
        downloaded.write(fileData)
        downloaded.close()
        self.printd("Download complete! Wrote file " + dowName + "\n")
        #Extract them if told to do so
        if extract:
            # pick the archive type from the file name
            extractType = ""
            if '.zip' in dowName:
                extractType = "zip"
            elif '.tar' in dowName:
                extractType = "tar"
            # compression suffix for tarfile's mode string
            specType = ""
            if '.bz2' in dowName:
                specType = ":bz2"
            elif ".gz" in dowName:
                specType = ":gz"
            elif ".tgz" in dowName:
                extractType = "tar"
                specType = ":gz"
            if extractType != "":
                self.printd("Extracting file " + dowName + ", creating directory for extracted files")
                # directory name: "<stem>-<everything after the first dot>"
                dirName, a, ending = dowName.partition('.')
                dirName = dirName + '-' + ending
                try:
                    os.mkdir(dirName)
                # directory may already exist; any other OS error is ignored too
                except:
                    pass
                if extractType == "zip":
                    archive = zipfile.ZipFile(dowName, 'r')
                elif extractType == "tar":
                    archive = tarfile.open(dowName, "r" + specType)
                else:
                    self.printd("An unknown error has occured!")
                    return
                archive.extractall(dirName)
                self.printd("All files in archive extracted to " + dirName)
                os.remove(dowName)
                self.printd("The archive file " + dowName + " has been deleted!\n")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def downloadFileFromUrl(self, url):
    """Given a URL (relative to ``self.baseUrl``), download and return the
    file's contents, or None on HTTP/URL errors.

    Python 2 only ("except X, e" syntax, urllib2).
    """
    fullurl = self.baseUrl + url
    try:
        urlobj = urllib2.urlopen(fullurl)
        contents = urlobj.read()
    except urllib2.HTTPError, e:
        # NOTE(review): printd is called with several positional args here,
        # print-style — confirm printd accepts *args.
        self.printd("HTTP error:", e.code, url)
        return None
    except urllib2.URLError, e:
        # NOTE(review): URLError has no .code attribute (only .reason) —
        # this handler itself raises AttributeError when triggered.
        self.printd("URL error:", e.code, url)
        return None
    self.printd("Fetched '%s' (size %d bytes)" % (fullurl, len(contents)))
    return contents
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def openIndex(self, filename, description):
    """Delete any existing index file and recreate it empty.

    :param filename: path of the index file to (re)create
    :param description: human-readable name used in log messages
    :return: a writable text-mode file object, or None on failure
    """
    try:
        os.remove(filename)
        self.printd(" Deleted old " + description)
    # Only OS-level failures mean "nothing to delete"; the previous bare
    # "except:" also swallowed KeyboardInterrupt/SystemExit.
    except OSError:
        self.printd(" No " + description + " found")
    # Now, attempt to open a new index
    try:
        files = open(filename, 'wt')
    except (IOError, OSError):
        self.printd("Error: Unable to create file " + filename + " in current folder. Quitting.")
        return None
    return files
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load_model(*args):
    """Load an HTK model from one or more files.

    :param args: Filenames of the model (e.g. macros hmmdefs)
    :return: The model as an OrderedDict()
    """
    # Concatenate all files; each chunk is followed by a newline so that
    # definitions from consecutive files do not merge into one line.
    chunks = []
    for fnm in args:
        # close each handle instead of leaking it (the previous version
        # used bare open(fnm).read())
        with open(fnm) as handle:
            chunks.append(handle.read())
        chunks.append('\n')
    text = ''.join(chunks)
    parser = htk_model_parser.htk_modelParser()
    model = HtkModelSemantics()
    return parser.parse(text,
                        rule_name='model',
                        ignorecase=True,
                        semantics=model,
                        # raw string: "\(" is an invalid escape in a plain string
                        comments_re=r"\(\*.*?\*\)",
                        trace=False)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def save_model(model, filename):
    """Save the model into a file.

    :param model: HTK model to be saved
    :param filename: File where to save the model
    """
    serialized = serialize_model(model)
    with open(filename, 'w') as handle:
        handle.write(serialized)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def serialize_model(model):
    """Serialize the HTK model (macros then hmms) into its textual form.

    :param model: Model to be serialized
    :return: the serialized model as a string
    """
    # (macro key, HTK tag, serializer), tried in this order for each macro
    tagged_macros = (
        ('transition', '~t', _serialize_transp),
        ('variance', '~v', _serialize_variance),
        ('state', '~s', _serialize_stateinfo),
        ('mean', '~u', _serialize_mean),
        ('duration', '~d', _serialize_duration),
    )
    pieces = []
    # First serialize the macros
    for macro in model['macros']:
        if macro.get('options', None):
            # the global options macro has no name and is serialized inline
            pieces.append('~o ')
            for option in macro['options']['definition']:
                pieces.append(_serialize_option(option))
            continue
        for key, tag, serializer in tagged_macros:
            entry = macro.get(key, None)
            if entry:
                pieces.append('{} "{}"\n'.format(tag, entry['name']))
                pieces.append(serializer(entry['definition']))
                break
        else:
            raise NotImplementedError('Cannot serialize {}'.format(macro))
    for hmm in model['hmms']:
        if hmm.get('name', None) is not None:
            pieces.append('~h "{}"\n'.format(hmm['name']))
        pieces.append(_serialize_hmm(hmm['definition']))
    return ''.join(pieces)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _load_resources(self):
    """Load all the native goldman resources.

    The route (API endpoint) is determined automatically from the type
    of each resource instance in ``self.RESOURCES``.

    INFO: Only our Model based resources are supported when
    auto-generating API endpoints.

    :raises TypeError: if a resource is not one of the supported
        goldman resource classes
    """
    for resource in self.RESOURCES:
        if isinstance(resource, goldman.ModelsResource):
            route = '/%s' % resource.rtype
        elif isinstance(resource, goldman.ModelResource):
            route = '/%s/{rid}' % resource.rtype
        elif isinstance(resource, goldman.RelatedResource):
            route = '/%s/{rid}/{related}' % resource.rtype
        else:
            raise TypeError('unsupported resource type')
        # pass the pair directly; the old `self.add_route(*(route, resource))`
        # built a tuple only to unpack it again
        self.add_route(route, resource)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _error_serializer(req, exc):
    # pylint: disable=unused-argument
    """Serialize a native falcon HTTPError in a JSON API compliant format.

    Most HTTPError attributes map directly onto a JSON API error object;
    the two that do not are mapped as:

        exc.description  -> error['detail']
        exc.link['href'] -> error['links']['about']

    Per the falcon docs this returns a (mimetype, body payload) tuple.
    """
    try:
        about_link = exc.link['href']
    except (TypeError, KeyError):
        about_link = ''

    error = {
        'detail': exc.description,
        'links': {'about': about_link},
        'status': exc.status,
        'title': exc.title,
    }

    payload = json.dumps({'errors': [error]})
    return goldman.config.JSONAPI_MIMETYPE, payload
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def find_peakset(dataset, basecolumn=-1, method='', where=None):
    """Find the peakset of the dataset.

    Parameters
    ----------
    dataset : list
        A list of data; each item is a list of axis arrays.
    basecolumn : int
        Index of the column (axis) used for finding peaks.
    method : str
        Name of a numpy function used to locate the peak index
        (e.g. ``'argmax'``); an unknown or empty name silently falls
        back to ``np.argmax``.
    where : function
        Optional function receiving the averaged data and returning a
        boolean mask used to limit the candidate points.

    Returns
    -------
    list
        A list of peaks of each axis (as a numpy array).
    """
    peakset = []
    where_i = None
    for data in dataset:
        # axis used to locate the peak position
        base = data[basecolumn]
        base = maidenhair.statistics.average(base)
        # limit data points
        if where:
            adata = [maidenhair.statistics.average(x) for x in data]
            where_i = np.where(where(adata))
            base = base[where_i]
        # find peak index
        index = getattr(np, method, np.argmax)(base)
        # create peakset: collect the value of every axis at the peak index
        for a, axis in enumerate(data):
            if len(peakset) <= a:
                peakset.append([])
            # NOTE(review): np.where returns a tuple, so this truthiness
            # test checks "a mask was computed", not an element-wise test
            if where_i:
                axis = axis[where_i]
            peakset[a].append(axis[index])
    peakset = np.array(peakset)
    return peakset
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def normalise_string(string):
    """Strip surrounding whitespace, lowercase the string and replace
    runs of non-word characters (spaces included) with underscores.
    """
    lowered = string.strip().lower()
    return re.sub(r'\W+', '_', lowered)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def json_decode(data):
    """Decode the given JSON document into Python primitives.

    :param data: JSON document as ``str`` or UTF-8 encoded ``bytes``
    :return: the decoded primitives (dict, list, str, int, ...)
    """
    # `bytes` replaces the third-party `six.binary_type` alias (they are
    # the same object on Python 3), dropping a needless dependency.
    if isinstance(data, bytes):
        data = data.decode('utf-8')
    return json.loads(data)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def write(self, *args):
    '''Write convenience function; writes each given string to self.out.

    :param args: strings to write, in order
    '''
    # The old trailing `event = ''.join(*args)` was dead code and a latent
    # bug: star-unpacking passes extra positional arguments to str.join,
    # raising TypeError whenever more than one string was supplied.
    for s in args:
        self.out.write(s)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _save(self, counters):
    """Save the current counters measurements via the configured logger.

    No-op when no logger is configured or there is nothing to save.

    :param counters: current counters measurements to be saved.
    """
    if self._logger is None:  # `is None`, not `== None`
        return
    if not counters:  # idiomatic emptiness test
        return
    # Sort counters by name for stable, readable log output
    counters = sorted(counters, key=LogCounters._get_counter_name)
    for counter in counters:
        self._logger.info("counters", self._counter_to_string(counter))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def humanize(text):
    '''Turn code-style identifiers (snake_case / CamelCase) into a
    human readable string.
    '''
    words = [
        word.lower()
        for part in text.split('_')
        for word in (RE_CAMEL.findall(part) or [part])
    ]
    words[0] = words[0].title()
    return ' '.join(words)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_visit_primitive(obj):
    '''Returns true if properly visiting the object returns only the object itself.

    An object is "visit primitive" when `visit` would yield exactly the
    object and nothing else, i.e. it has no sub-items to recurse into.
    '''
    from .base import visit
    # Non-string, non-bytes primitives never decompose further
    if (isinstance(obj, tuple(PRIMITIVE_TYPES)) and not isinstance(obj, STR)
        and not isinstance(obj, bytes)):
        return True
    # Containers (other than strings/bytes) always decompose
    if (isinstance(obj, CONTAINERS) and not isinstance(obj, STR) and not
        isinstance(obj, bytes)):
        return False
    # Strings/bytes decompose into characters, unless length is exactly 1
    if isinstance(obj, STR) or isinstance(obj, bytes):
        if len(obj) == 1:
            return True
        return False
    # Fallback: actually visit (bounded to 2 items) and compare
    return list(visit(obj, max_enum=2)) == [obj]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def clear(self, correlation_id):
    """Clears component state by dropping every cached entry.

    :param correlation_id: (optional) transaction id to trace execution
        through call chain (unused here; kept for interface symmetry).
    """
    # `with` is the idiomatic equivalent of acquire/try/finally-release
    with self._lock:
        self._cache = {}
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def rest_verbs(http_method_names=None):
    """Decorator that converts a function-based view into an RestView subclass.

    Takes a list of allowed methods for the view as an argument
    (defaults to ['GET']); 'options' is always added.
    """
    http_method_names = ['GET'] if (http_method_names is None) else http_method_names
    def decorator(func):
        # py2 requires a bytes class name, py3 a str one
        WrappedRestView = type(
            six.PY3 and 'WrappedRestView' or b'WrappedRestView',
            (RestView,),
            {'__doc__': func.__doc__}
        )
        # Note, the above allows us to set the docstring.
        # It is the equivalent of:
        #
        # class WrappedRestView(RestView):
        #     pass
        # WrappedRestView.__doc__ = func.doc <--- Not possible to do this
        # api_view applied without (method_names)
        assert not(isinstance(http_method_names, types.FunctionType)), \
            '@api_view missing list of allowed HTTP methods'
        # api_view applied with eg. string instead of list of strings
        assert isinstance(http_method_names, (list, tuple)), \
            '@api_view expected a list of strings, received %s' % type(http_method_names).__name__
        # OPTIONS is always allowed so clients can introspect the endpoint
        allowed_methods = set(http_method_names) | set(('options',))
        WrappedRestView.http_method_names = [method.lower() for method in allowed_methods]
        def handler(self, *args, **kwargs):
            return func(*args, **kwargs)
        # expose the wrapped function under every allowed HTTP verb
        for method in http_method_names:
            setattr(WrappedRestView, method.lower(), handler)
        WrappedRestView.__name__ = func.__name__
        # carry per-view configuration over from the function, falling
        # back to the RestView class defaults
        WrappedRestView.renderer_classes = getattr(func, 'renderer_classes',
                                                   RestView.renderer_classes)
        WrappedRestView.parser_classes = getattr(func, 'parser_classes',
                                                 RestView.parser_classes)
        WrappedRestView.authentication_classes = getattr(func, 'authentication_classes',
                                                         RestView.authentication_classes)
        WrappedRestView.throttle_classes = getattr(func, 'throttle_classes',
                                                   RestView.throttle_classes)
        WrappedRestView.permission_classes = getattr(func, 'permission_classes',
                                                     RestView.permission_classes)
        return WrappedRestView.as_view()
    return decorator
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_stations(self):
    """Return the list of all station IDs, or None on a non-200 reply."""
    response = requests.get("{}/stations".format(self.base_url))
    if response.status_code != 200:
        return None
    return response.json()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_station_temperature_datetime(self, station_id):
    """Return the datetime of the temperature measurement for the given
    station, or None on a non-200 reply.
    """
    url = "{}/station/{}/parameters/temperature/datetime".format(
        self.base_url, station_id)
    response = requests.get(url)
    if response.status_code != 200:
        return None
    return datetime.strptime(response.json(), "%Y-%m-%dT%H:%M:%S")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_station_temperature_value(self, station_id):
    """Return the temperature value for the given station, or None on a
    non-200 reply.
    """
    url = "{}/station/{}/parameters/temperature/value".format(
        self.base_url, station_id)
    response = requests.get(url)
    if response.status_code != 200:
        return None
    return response.json()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_email_status(mx_resolver, recipient_address, sender_address, smtp_timeout=10, helo_hostname=None):
    """Checks if an email might be valid by getting the status from the SMTP server.

    :param mx_resolver: MXResolver used to look up MX records
    :param recipient_address: string, address to validate
    :param sender_address: string, address used in MAIL FROM
    :param smtp_timeout: integer, socket timeout in seconds
    :param helo_hostname: string, hostname used for HELO and MX lookup;
        defaults to the recipient's domain
    :return: dict with 'status', 'extended_status' and 'message'
    """
    domain = recipient_address[recipient_address.find('@') + 1:]
    if helo_hostname is None:
        helo_hostname = domain
    ret = {'status': 101, 'extended_status': None, 'message': "The server is unable to connect."}
    records = []
    try:
        # NOTE(review): looks up MX records for helo_hostname rather than
        # the recipient domain — confirm this is intended.
        records = mx_resolver.get_mx_records(helo_hostname)
    except socket.gaierror:
        ret['status'] = 512
        ret['extended_status'] = "5.1.2 Domain name address resolution failed in MX lookup."
    # RFC 3463 enhanced status code such as "5.1.1"; raw string fixes the
    # invalid-escape-sequence of '(\d...)' and the compile is hoisted out
    # of the loop (loop-invariant).
    pattern = re.compile(r'(\d+\.\d+\.\d+)')
    smtp = smtplib.SMTP(timeout=smtp_timeout)
    for mx in records:
        try:
            connection_status, connection_message = smtp.connect(mx.exchange)
            if connection_status == 220:
                smtp.helo(domain)
                smtp.mail(sender_address)
                status, message = smtp.rcpt(recipient_address)
                ret['status'] = status
                # NOTE(review): smtplib returns the reply message as bytes
                # on Python 3; matching a str pattern against it may need
                # message.decode() — confirm against the running version.
                matches = re.match(pattern, message)
                if matches:
                    ret['extended_status'] = matches.group(1)
                ret['message'] = message
                smtp.quit()
                break
        except smtplib.SMTPConnectError:
            ret['status'] = 111
            ret['message'] = "Connection refused or unable to open an SMTP stream."
        except smtplib.SMTPServerDisconnected:
            ret['status'] = 111
            ret['extended_status'] = "SMTP Server disconnected"
        except socket.gaierror:
            ret['status'] = 512
            ret['extended_status'] = "5.1.2 Domain name address resolution failed."
    return ret
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def start_namespace(self, prefix, uri):
    """Declare a namespace prefix.

    Use prefix=None to set the default namespace.
    """
    self._ns[prefix] = uri
    self._g.startPrefixMapping(prefix, uri)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def end_namespace(self, prefix):
    """Undeclare a namespace prefix."""
    self._ns.pop(prefix)
    self._g.endPrefixMapping(prefix)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_colors(n):
    """Returns n unique and "evenly" spaced colors for the backgrounds
    of the projects.

    Args:
        n (int): The number of unique colors wanted.

    Returns:
        colors (list of str): The colors in hex form.
    """
    import matplotlib.pyplot as plt
    from matplotlib.colors import rgb2hex as r2h
    from numpy import linspace
    cmap = plt.get_cmap('nipy_spectral')
    positions = linspace(0.05, .95, n)
    return [r2h(cmap(pos)) for pos in positions]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def build_prefixes(namespaces=None):
    """Internal function takes a list of prefix, namespace uri tuples and
    generates a SPARQL PREFIX string.

    Args:
        namespaces(list): List of (prefix, uri) tuples; defaults to
            BIBFRAME and Schema.org

    Returns:
        string
    """
    if namespaces is None:
        namespaces = [
            ('bf', str(BIBFRAME)),
            ('schema', str(SCHEMA_ORG))
        ]
    # A single loop replaces the special-cased first element; it also
    # copes with an empty list (returns '') instead of raising IndexError.
    output = ""
    for prefix, uri in namespaces:
        output += "PREFIX {}: <{}>\n".format(prefix, uri)
    return output
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def copy_graph(subject, existing_graph):
    """Copy every predicate/object pair of *existing_graph* onto a new
    graph under *subject*.

    Args:
        subject(rdflib.URIRef): A URIRef subject
        existing_graph(rdflib.Graph): A rdflib.Graph

    Returns:
        rdflib.Graph
    """
    new_graph = rdflib.Graph()
    for predicate, obj in existing_graph.predicate_objects():
        new_graph.add((subject, predicate, obj))
    return new_graph
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def init_app(self, app):
    """Initializes a Flask app object for the extension.

    Args:
        app(Flask): Flask app
    """
    app.config.setdefault('FEDORA_BASE_URL', 'http://localhost:8080')
    # Older Flask versions only provide teardown_request
    if not hasattr(app, 'teardown_appcontext'):
        app.teardown_request(self.teardown)
    else:
        app.teardown_appcontext(self.teardown)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_transaction(self):
    """Create a new transaction resource and store its location in
    ``self.transaction``.
    """
    # Fixed typo: `urlllib` -> `urllib` (the old name raised NameError).
    # NOTE(review): Fedora transaction creation normally requires POST,
    # while urlopen without data issues a GET — confirm intended.
    request = urllib.request.urlopen(
        urllib.parse.urljoin(self.base_url, 'fcr:tx'))
    self.transaction = request.read()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(self, fedora_url, data=None, method='Get'):
    """Connect to a REST endpoint of the Fedora Commons repository.

    Args:
        fedora_url(string): Fedora URL (absolute, or relative to base_url)
        data(dict): Data to send through to the REST endpoint
        method(str): REST method, defaults to 'Get'

    Returns:
        result(string): Response from Fedora

    Raises:
        urllib.error.URLError: re-raised after printing diagnostics
    """
    # Avoid the mutable-default pitfall: data defaults to a fresh dict
    if data is None:
        data = {}
    # Allow relative URLs by resolving against the repository base URL
    if not fedora_url.startswith("http"):
        fedora_url = urllib.parse.urljoin(self.base_url, fedora_url)
    request = urllib.request.Request(fedora_url,
                                     method=method)
    request.add_header('Accept', 'text/turtle')
    request.add_header('Content-Type', 'text/turtle')
    if len(data) > 0:
        request.data = data
    try:
        response = urllib.request.urlopen(request)
    except urllib.error.URLError as err:
        # Print a human-readable diagnostic before re-raising
        if hasattr(err, 'reason'):
            print("failed to reach server at {} with {} method".format(
                fedora_url,
                request.method))
            print("Reason: ", err.reason)
            print("Data: ", data)
        elif hasattr(err, 'code'):
            print("Server error {}".format(err.code))
        raise err
    return response
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def as_json(self, entity_url, context=None):
    """Return the Fedora object at *entity_url* serialized as JSON-LD.

    Args:
        entity_url(str): Fedora Commons URL of Entity
        context: optional JSON-LD context, default None

    Returns:
        str: JSON-LD of the Fedora Object

    Raises:
        ValueError: when the entity URL cannot be opened
    """
    try:
        urllib.request.urlopen(entity_url)
    except urllib.error.HTTPError:
        raise ValueError("Cannot open {}".format(entity_url))
    graph = self.read(entity_url)
    serialized = graph.serialize(format='json-ld', context=context).decode()
    return json.dumps(json.loads(serialized))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create(self, uri=None, graph=None, data=None):
    """Create a Fedora Object from an optional URI and graph.

    First checks if the URI is already present in Fedora; if not, creates
    a Fedora object with the graph as properties. When URI is None, the
    Fedora 4 default PID minter supplies the object's URI.

    Args:
        uri(string): String of URI, default is None
        graph(rdflib.Graph): RDF Graph of subject, default is None
        data(object): Binary datastream intended for fcr:content
            NOTE(review): currently accepted but never used — confirm.

    Returns:
        URI(string): New Fedora URI, or None if uri already exists
    """
    if uri is not None:
        existing_entity = self.__dedup__(rdflib.URIRef(uri), graph)
        if existing_entity is not None:
            return  # subject already in the repository; nothing created
    else:
        # Let Fedora mint a new URI via POST to the rest root
        default_request = urllib.request.Request(
            "/".join([self.base_url, "rest"]),
            method='POST')
        uri = urllib.request.urlopen(default_request).read().decode()
    if graph is not None:
        new_graph = copy_graph(rdflib.URIRef(uri), graph)
        create_response = self.connect(
            uri,
            data=new_graph.serialize(format='turtle'),
            method='PUT')
        # Drain the response body; the value itself was never used
        # (previously bound to an unused `raw_response` local).
        create_response.read()
    return uri
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delete(self, uri):
    """Delete a Fedora object from the repository.

    Args:
        uri(str): URI of Fedora Object

    Returns:
        bool: True on success, False when the server rejects the delete.
    """
    try:
        self.connect(uri, method='DELETE')
    except urllib.error.HTTPError:
        return False
    return True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def exists(self, uri):
    """Return True if the entity exists in the repository, False otherwise.

    Args:
        uri(str): Entity URI

    Returns:
        bool
    """
    ##entity_uri = "/".join([self.base_url, entity_id])
    try:
        urllib.request.urlopen(uri)
    except urllib.error.HTTPError:
        return False
    return True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def flush(self):
    """Flush the repository, deleting every child object under /rest."""
    has_child = rdflib.URIRef(
        'http://fedora.info/definitions/v4/repository#hasChild')
    root_graph = rdflib.Graph().parse('{}/rest'.format(self.base_url))
    for child in root_graph.objects(predicate=has_child):
        self.delete(str(child))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def insert(self, entity_id, property_uri, value):
    """Insert a new entity property into the Fedora4 repository via a
    SPARQL UPDATE sent as an HTTP PATCH.

    Args:
        entity_id(string): Unique ID or URI of the Fedora object
        property_uri(string): URI of the property
        value: Value of the property, can be literal or URI reference

    Returns:
        boolean: True if successfully changed in Fedora, False otherwise
    """
    # Resolve relative IDs against the repository base URL
    if not entity_id.startswith("http"):
        entity_uri = urllib.parse.urljoin(self.base_url, entity_id)
    else:
        entity_uri = entity_id
    if entity_uri.endswith("/"):
        entity_uri = entity_uri[:-1]
    # Properties live on the fcr:metadata sub-resource
    if not entity_id.endswith("fcr:metadata"):
        entity_uri = "/".join([entity_uri, "fcr:metadata"])
    # Create the entity first if it does not exist yet
    if not self.exists(entity_id):
        self.create(entity_id)
    # NOTE(review): the trailing ';' before '}' in the triples block may
    # not be accepted by every SPARQL parser — confirm against the server.
    sparql_template = Template("""$prefix
    INSERT DATA {
        <$entity> $prop_uri $value_str ;
    }""")
    sparql = sparql_template.substitute(
        prefix=build_prefixes(self.namespaces),
        entity=entity_uri,
        prop_uri=property_uri,
        value_str=self.__value_format__(value))
    update_request = urllib.request.Request(
        entity_uri,
        data=sparql.encode(),
        method='PATCH',
        headers={'Content-Type': 'application/sparql-update'})
    try:
        response = urllib.request.urlopen(update_request)
    except urllib.error.HTTPError:
        print("Error trying patch {}, sparql=\n{}".format(entity_uri,
            sparql))
        return False
    # Any reply below 400 counts as success
    if response.code < 400:
        return True
    return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read(self, uri):
    """Fetch *uri* from the Fedora repository and parse it as Turtle.

    Args:
        uri(str): URI of Fedora resource

    Returns:
        rdflib.Graph
    """
    response = self.connect(uri)
    graph = rdflib.Graph()
    graph.parse(data=response.read(), format='turtle')
    return graph
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def search(self, query_term):
    """DEPRECATED Method takes a query term and searches Fedora
    Repository using the SPARQL search endpoint and returns a RDF graph
    of the search results.

    Args:
        query_term(str): String to search repository

    Returns:
        rdflib.Graph()
    """
    fedora_search_url = "/".join([self.base_url, 'rest', 'fcr:search'])
    # Append the query string: ?q=<url-encoded term>
    fedora_search_url = "{}?{}".format(
        fedora_search_url,
        urllib.parse.urlencode({"q": query_term}))
    search_request = urllib.request.Request(
        fedora_search_url,
        method='GET')
    search_request.add_header('Accept', 'text/turtle')
    try:
        search_response = urllib.request.urlopen(search_request)
    except urllib.error.URLError as error:
        raise error
    # Server replies in Turtle; parse into an in-memory graph
    fedora_results = rdflib.Graph().parse(
        data=search_response.read(),
        format='turtle')
    return fedora_results
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _normalize_direction(heading: int) -> int: """ Make sure that 0 < heading < 360 Args: heading: base heading Returns: corrected heading """ |
while heading > 359:
heading = int(heading - 359)
while heading < 0:
heading = int(heading + 359)
return heading |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _gauss(mean: int, sigma: int) -> int: """ Creates a variation from a base value Args: mean: base value sigma: gaussian sigma Returns: random value """ |
return int(random.gauss(mean, sigma)) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _randomize_speed(base_speed: int, sigma: int = None) -> int:
    """Create a random variation of wind speed, clamped to [0, 50].

    Args:
        base_speed: base wind speed
        sigma: sigma value for gaussian variation (defaults to a quarter
            of the base speed)

    Returns:
        random wind speed
    """
    int_sigma = int(base_speed / 4) if sigma is None else sigma
    speed = MissionWeather._gauss(base_speed, int_sigma)
    if speed < 0:
        return 0
    return min(speed, 50)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _randomize_direction(base_heading, sigma) -> int:
    """Create a random variation of a wind direction.

    Args:
        base_heading: base direction
        sigma: sigma value for gaussian variation

    Returns:
        random direction, normalized to the valid heading range
    """
    return MissionWeather._normalize_direction(
        MissionWeather._gauss(base_heading, sigma))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def guess_version_by_running_live_package( pkg_key, default="?" ):
    # type: (str,str) -> Any
    """Guess the version of a package by importing it live when pip does
    not provide it.

    :param str pkg_key: key of the package
    :param str default: default version to return if unable to find
    :returns: version
    :rtype: string
    """
    try:
        module = import_module(pkg_key)
    except ImportError:
        return default
    return getattr(module, "__version__", default)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_api_params(self):
    """Dictionary with available API parameters.

    :raises ValueError: If value of __class__.params is not a dictionary
    :return: dict with available pushalot API methods
    :rtype: dict
    """
    result = self.params
    # isinstance (instead of `type(result) != dict`) also accepts dict
    # subclasses such as OrderedDict.
    if not isinstance(result, dict):
        raise ValueError(
            '{}.params should return dictionary'.format(
                self.__class__.__name__
            )
        )
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_api_required_params(self):
    """List with required API params.

    :raises ValueError: If value of __class__.required_params is not a list
    :return: list of required parameter names
    :rtype: list
    """
    result = self.required_params
    # isinstance (instead of `type(result) != list`) also accepts list
    # subclasses.
    if not isinstance(result, list):
        raise ValueError(
            '{}.required_params should return list'.format(
                self.__class__.__name__
            )
        )
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _build_params_from_kwargs(self, **kwargs):
"""Builds parameters from passed arguments Search passed parameters in available methods, prepend specified API key, and return dictionary which can be sent directly to API server. :param kwargs: :type param: dict :raises ValueError: If type of specified parameter doesn't match the expected type. Also raised if some basic validation of passed parameter fails. :raises PushalotException: If required parameter not set. :return: Dictionary with params which can be sent to API server :rtype: dict """ |
api_methods = self.get_api_params()
required_methods = self.get_api_required_params()
ret_kwargs = {}
for key, val in kwargs.items():
if key not in api_methods:
warnings.warn(
'Passed uknown parameter [{}]'.format(key),
Warning
)
continue
if key not in required_methods and val is None:
continue
if type(val) != api_methods[key]['type']:
raise ValueError(
"Invalid type specified to param: {}".format(key)
)
if 'max_len' in api_methods[key]:
if len(val) > api_methods[key]['max_len']:
raise ValueError(
"Lenght of parameter [{}] more than "
"allowed length".format(key)
)
ret_kwargs[api_methods[key]['param']] = val
for item in required_methods:
if item not in ret_kwargs:
raise pushalot.exc.PushalotException(
"Parameter [{}] required, but not set".format(item)
)
return ret_kwargs |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def write(self, label, index):
    """Save a new label -> index mapping to the cache.

    Raises a RuntimeError when the label is already cached with a
    different index.
    """
    if label not in self.cache:
        self.cache[label] = index
        return
    if self.cache[label] != index:
        raise RuntimeError(
            'cache_conflict on label: {} with index: {}\ncache dump: {}'.format(
                label, index, self.cache))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pack_image(filename, max_size, form_field='image'):
    """Pack an image from file into a multipart-formdata post body.

    :param filename: path of the image file (gif, jpeg or png)
    :param max_size: maximum allowed size in kilobytes
    :param form_field: form field name used for the file part
    :returns: (headers, body) tuple ready to be posted
    :raises IdeaScalyError: when the file is too big, unreadable, of
        undetermined type or not an accepted image type
    """
    try:
        if os.path.getsize(filename) > (max_size * 1024):
            raise IdeaScalyError('File is too big, must be less than %skb.' % max_size)
    except os.error as e:
        raise IdeaScalyError('Unable to access file: %s' % e.strerror)
    # Bug fix: the old code decoded with `isinstance(filename, six.text_type)`
    # and encoded the name *before* str.format, which embedded "b'...'" in
    # the body on Python 3.  Keep the name as text throughout.
    if isinstance(filename, bytes):
        filename = filename.decode('utf-8')
    # image must be gif, jpeg, or png
    # Bug fix: guess_type returns a (type, encoding) tuple which is never
    # None; the unknown-type case is signalled by the type element being None.
    file_type = mimetypes.guess_type(filename)[0]
    if file_type is None:
        raise IdeaScalyError('Could not determine file type')
    if file_type not in ['image/gif', 'image/jpeg', 'image/png']:
        raise IdeaScalyError('Invalid file type for image: %s' % file_type)
    BOUNDARY = b'Id34Sc4ly'
    # `with` guarantees the file handle is closed even on error (the old
    # code leaked it when the type validation raised).
    with open(filename, 'rb') as fp:
        file_data = fp.read()
    # build the multipart-formdata body
    body = list()
    body.append(b'--' + BOUNDARY)
    body.append('content-disposition: form-data; name="{0}";'
                ' filename="{1}"'.format(form_field, filename)
                .encode('utf-8'))
    body.append('content-type: {0}'.format(file_type).encode('utf-8'))
    body.append(b'')
    body.append(file_data)
    body.append(b'--' + BOUNDARY + b'--')
    body.append(b'')
    body = b'\r\n'.join(body)
    body_length = str(len(body))
    # build headers; decode the boundary so the header is pure text
    # (formatting the bytes object produced "boundary=b'...'" on Python 3)
    headers = {
        'content-type': 'multipart/form-data; boundary={0}'.format(
            BOUNDARY.decode('ascii')),
        'content-length': body_length
    }
    return headers, body
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_configuration(name='Base'):
    """Create a configuration base class built with :class:`ConfigurationMeta`.

    Subclassing such a base class will register exposed methods.

    .. class:: Base

        .. attribute:: configuration

            Configuration dict that can be used by a Router or the API

        .. classmethod:: register(element, action, method)

            Register an element in the :attr:`configuration`

            :param element: the element to register
            :type element: tuple of (class, method name) or function
            :param string action: name of the exposed action that will hold the method
            :param string method: name of the exposed method
    """
    @classmethod
    def register(cls, element, action, method):
        # idiom fix: `action not in` instead of `not action in`
        if action not in cls.configuration:
            cls.configuration[action] = {}
        if method in cls.configuration[action]:
            raise ValueError('Method %s already defined for action %s' % (method, action))
        cls.configuration[action][method] = element
    return ConfigurationMeta(name, (object,), {'configuration': {}, 'register': register})
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def expose(f=None, base=None, action=None, method=None, kind=BASIC):
    """Decorator to expose a function.

    .. note::
        A module function can be decorated but ``base`` parameter has to
        be specified

    :param f: function to expose
    :type f: function or None
    :param base: base class that can register the function
    :param string action: name of the exposed action that will hold the method
    :param string method: name of the exposed method
    :param kind: kind of the method
    :type kind: :data:`BASIC` or :data:`LOAD` or :data:`SUBMIT`
    """
    def expose_f(f):
        # Tag the function so the configuration metaclass can pick it up
        f.exposed = True
        f.exposed_action = action
        f.exposed_method = method
        f.exposed_kind = kind
        return f
    def register_f(f):
        # Tag and immediately register on the given base class
        f = expose_f(f)
        base.register(f, action or f.__module__, method or f.__name__)
        return f
    if f is not None:  # @expose case (no parameters)
        return expose_f(f)
    if base is not None:  # module-level function case
        return register_f
    # @expose(...) case: return the tagging decorator
    return expose_f
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def merge_configurations(configurations):
    """Merge configurations together and raise an error if a conflict is detected

    :param configurations: configurations to merge together
    :type configurations: list of :attr:`~pyextdirect.configuration.Base.configuration` dicts
    :return: merged configurations as a single one
    :rtype: dict
    :raises ValueError: if the same action key appears in more than one
        configuration
    """
    merged = {}
    for config in configurations:
        # items() behaves identically on Python 2 and 3, unlike the
        # Python-2-only iteritems() the original used.
        for action, methods in config.items():
            if action in merged:
                raise ValueError('%s already in a previous base configuration' % action)
            merged[action] = methods
    return merged
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def AsCGI(nsdict={}, typesmodule=None, rpc=False, modules=None):
    '''Dispatch within a CGI script.

    Reads a SOAP request from stdin, guided by the CGI environment
    variables, and routes it to the given modules via _Dispatch.
    Pre-dispatch errors are reported to the client as SOAP faults
    instead of being raised.

    NOTE(review): the mutable default ``nsdict={}`` is shared between
    calls; it appears to be only read here, but worth confirming.
    '''
    # SOAP over CGI must use POST; anything else is a client-side fault.
    if os.environ.get('REQUEST_METHOD') != 'POST':
        _CGISendFault(Fault(Fault.Client, 'Must use POST'))
        return
    ct = os.environ['CONTENT_TYPE']
    try:
        if ct.startswith('multipart/'):
            # SOAP with attachments: resolve MIME parts by content id.
            cid = resolvers.MIMEResolver(ct, sys.stdin)
            xml = cid.GetSOAPPart()
            ps = ParsedSoap(xml, resolver=cid.Resolve)
        else:
            # Plain envelope: read exactly CONTENT_LENGTH bytes from stdin.
            length = int(os.environ['CONTENT_LENGTH'])
            ps = ParsedSoap(sys.stdin.read(length))
    except ParseException, e:
        # Malformed SOAP: report as a fault rather than crash the script.
        _CGISendFault(FaultFromZSIException(e))
        return
    _Dispatch(ps, modules, _CGISendXML, _CGISendFault, nsdict=nsdict,
              typesmodule=typesmodule, rpc=rpc)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def AsHandler(request=None, modules=None, **kw):
    '''Dispatch from within ModPython.

    Parses the ModPython request into a SOAP envelope and routes it,
    forwarding the request object to the dispatcher through ``kw``.
    '''
    parsed = ParsedSoap(request)
    kw['request'] = request
    _Dispatch(parsed, modules, _ModPythonSendXML,
              _ModPythonSendFault, **kw)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def send_xml(self, text, code=200):
    '''Send an XML response with the given HTTP status code.

    When ``text`` is empty or ``None``, only the status line and the
    header terminator are emitted: no content headers, no body.
    '''
    self.send_response(code)
    if text:
        self.send_header('Content-type',
                         'text/xml; charset="%s"' % UNICODE_ENCODING)
        self.send_header('Content-Length', str(len(text)))
    self.end_headers()
    if not text:
        return
    self.wfile.write(text)
    self.wfile.flush()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def do_POST(self):
    '''Handle an incoming SOAP POST request.

    Parses the request body (multipart MIME or a plain XML envelope)
    into a ParsedSoap instance and hands it to _Dispatch for routing.
    Parse/processing failures are reported to the client as SOAP
    faults rather than raised.
    '''
    try:
        ct = self.headers['content-type']
        if ct.startswith('multipart/'):
            # SOAP with attachments: resolve MIME parts by content id.
            cid = resolvers.MIMEResolver(ct, self.rfile)
            xml = cid.GetSOAPPart()
            ps = ParsedSoap(xml, resolver=cid.Resolve)
        else:
            # Plain envelope: read exactly content-length bytes.
            length = int(self.headers['content-length'])
            ps = ParsedSoap(self.rfile.read(length))
    except ParseException, e:
        # Malformed SOAP envelope.
        self.send_fault(FaultFromZSIException(e))
        return
    except Exception, e:
        # Faulted while processing; assume it's in the header.
        self.send_fault(FaultFromException(e, 1, sys.exc_info()[2]))
        return
    # Server-level configuration (modules, style, namespaces) drives dispatch.
    _Dispatch(ps, self.server.modules, self.send_xml, self.send_fault,
              docstyle=self.server.docstyle, nsdict=self.server.nsdict,
              typesmodule=self.server.typesmodule, rpc=self.server.rpc)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def punctuate(current_text, new_text, add_punctuation):
    """Append ``new_text`` to ``current_text``, adding punctuation as needed.

    :param current_text: text accumulated so far (may be empty)
    :param new_text: text to append (may be empty)
    :param add_punctuation: when True, terminate ``current_text`` with
        ``'. '`` if it does not already end with punctuation
    :return: the combined text, with a single space inserted between the
        two parts when neither side supplies one
    """
    # Guard: the original indexed new_text[0] and crashed on empty input.
    if not new_text:
        return current_text
    if add_punctuation and current_text and current_text[-1] not in string.punctuation:
        current_text += '. '
    # Nothing accumulated yet: do not prepend a spurious leading space
    # (the original's spacer condition did exactly that).
    if not current_text:
        return new_text
    spacer = '' if current_text[-1].isspace() or new_text[0].isspace() else ' '
    return current_text + spacer + new_text
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ingest(topic, text, **kwargs):
    """ Ingest the given text for the topic """
    if not text:
        raise ValueError('No text given to ingest for topic: ' + topic)
    # Extra keyword arguments may override the default fields; this keeps
    # the dict.update() precedence of the original implementation.
    record = dict({'topic': topic, 'text': text.strip()}, **kwargs)
    db.markovify.insert(record)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generate(topic, add_punctuation, character_count=None):
    """Generate a Markov-chain sentence for a given topic.

    :param topic: topic whose ingested texts form the corpus
    :param add_punctuation: forwarded to :func:`punctuate` when joining texts
    :param character_count: optional maximum sentence length in characters
    :return: the generated sentence
    :raises Exception: if no text exists for the topic, or the corpus is
        too small to generate a sentence
    """
    # Build the corpus from every ingested document for this topic.
    corpus = ''
    for document in db.markovify.find({'topic': topic}):
        corpus = punctuate(corpus, document['text'], add_punctuation)
    # A find() cursor object is always truthy, so the original
    # "if(corpus_cursor)" check could never detect a missing topic and
    # its 'No text found' branch was unreachable; test the corpus instead.
    if not corpus:
        raise Exception('No text found for topic: ' + topic)
    text_model = markovify.Text(corpus)
    sentence = (text_model.make_short_sentence(character_count)
                if character_count else text_model.make_sentence())
    if not sentence:
        raise Exception('There is not enough in the corpus to generate a sentence.')
    return sentence
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def default_setup():
    """The default API setup for lxc4u

    This is the API that you access globally from lxc4u.
    """
    service = LXCService
    # Known LXC container flavours, keyed by type name; '__default__'
    # handles containers not created through this library.
    known_types = {
        'LXC': LXC,
        'LXCWithOverlays': LXCWithOverlays,
        '__default__': UnmanagedLXC,
    }
    loader = LXCLoader(known_types, service)
    return LXCAPI(manager=LXCManager(loader, service), service=service)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unordered_pair_eq(pair1, pair2):
    '''Performs pairwise unordered equality.

    ``pair1`` == ``pair2`` if and only if
    ``frozenset(pair1)`` == ``frozenset(pair2)``.
    '''
    a1, b1 = pair1
    a2, b2 = pair2
    # Same order first, then the swapped order; == is used (not hashing)
    # so unhashable elements are supported.
    if a1 == a2 and b1 == b2:
        return True
    return a1 == b2 and b1 == a2
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def expand_labels(labels, subtopic=False):
    '''Expand a set of labels that define a connected component.

    ``labels`` must define a *positive* connected component: it is all
    of the edges that make up the *single* connected component in the
    :class:`LabelStore`.  Only the *missing* edges are generated, so the
    result is guaranteed to be disjoint with the given ``labels`` (which
    are therefore held in memory).  The annotator_id of the generated
    labels is an arbitrary one taken from ``labels``.

    If ``subtopic`` is ``True``, ``labels`` is assumed to define a
    subtopic connected component and subtopics are carried into the
    expanded labels; otherwise subtopic assignments are ignored.

    :param labels: iterable of :class:`Label` for the connected component.
    :rtype: generator of expanded :class:`Label`s only
    '''
    labels = list(labels)
    assert all(lab.value == CorefValue.Positive for lab in labels)
    if not labels:
        return
    annotator = labels[0].annotator_id
    existing_pairs = set()   # edges already backed by real labels
    nodes = set()            # every ident appearing in the component
    for lab in labels:
        first, second = idents_from_label(lab, subtopic=subtopic)
        existing_pairs.add(normalize_pair(first, second))
        nodes.add(first)
        nodes.add(second)
    # Emit only the missing edges: data-backed labels keep their true
    # annotator_id and subtopic fields and must not be regenerated.
    for first, second in combinations(nodes, 2):
        if normalize_pair(first, second) in existing_pairs:
            continue
        (cid1, sub1), (cid2, sub2) = first, second
        yield Label(cid1, cid2, annotator, CorefValue.Positive,
                    subtopic_id1=sub1, subtopic_id2=sub2)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def normalize_ident(ident):
    '''Splits a generic identifier into ``(content_id, subtopic_id)``.

    A two-element tuple is returned as ``(ident[0], ident[1])``;
    anything else is treated as a bare content id and paired with
    ``None``.
    '''
    if not (isinstance(ident, tuple) and len(ident) == 2):
        return ident, None
    content_id, subtopic_id = ident
    return content_id, subtopic_id
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def idents_from_label(lab, subtopic=False):
    '''Returns the "ident" pair of a label.

    A pair of pairs is returned, one per side of the label.  When
    ``subtopic`` is ``True`` each pair is ``(content_id, subtopic_id)``;
    otherwise the second element of each pair is always ``None``.

    This is a helper function that is useful for dealing with generic
    label identifiers.
    '''
    if subtopic:
        first = (lab.content_id1, lab.subtopic_id1)
        second = (lab.content_id2, lab.subtopic_id2)
    else:
        first = (lab.content_id1, None)
        second = (lab.content_id2, None)
    return first, second
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def put(self, *labels):
    '''Add new labels to the store.

    Each label is written twice (once per key direction) in a single
    batched call to the underlying key-value store.

    :param labels: labels to store
    :type labels: :class:`Label`
    '''
    rows = []
    for lab in labels:
        key_a, key_b = self._keys_from_label(lab)
        value = self._value_from_label(lab)
        # Index the label under both key orderings.
        rows.extend([(key_a, value), (key_b, value)])
    self.kvl.put(self.TABLE, *rows)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def directly_connected(self, ident):
    '''Return a generator of labels connected to ``ident``.

    ``ident`` may be a ``content_id`` or a ``(content_id,
    subtopic_id)``.  If no labels are defined for ``ident``, the
    generator yields nothing.

    Note that this only returns *directly* connected labels; transitive
    relationships are not followed.

    :param ident: content id or (content id and subtopic id)
    :type ident: ``str`` or ``(str, str)``
    :rtype: generator of :class:`Label`
    '''
    cid, sid = normalize_ident(ident)
    return self.everything(include_deleted=False,
                           content_id=cid,
                           subtopic_id=sid)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.