text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unique_slug(queryset, slug_field, slug):
    """
    Ensure ``slug`` is unique within ``queryset``.

    Probes the queryset with candidate slugs, appending an increasing
    integer suffix ("-1", "-2", ...) until the ``slug_field`` lookup finds
    no match, and returns the first free candidate.
    """
    attempt = 0
    candidate = slug
    while True:
        if attempt > 0:
            if attempt > 1:
                # Strip the previous "-<n>" suffix before appending a new one.
                candidate = candidate.rsplit("-", 1)[0]
            candidate = "%s-%s" % (candidate, attempt)
        try:
            queryset.get(**{slug_field: candidate})
        except ObjectDoesNotExist:
            # No row uses this slug - it is free.
            return candidate
        attempt += 1
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def next_url(request):
    """
    Return the redirect target taken from the request's ``next`` parameter.

    GET is consulted first, then POST.  The URL is returned only when it is
    non-empty and passes ``is_safe_url`` for the request's own host;
    otherwise ``None`` is returned so callers can fall back to a default.
    """
    target = request.GET.get("next", request.POST.get("next", ""))
    if target and is_safe_url(target, host=request.get_host()):
        return target
    return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def path_to_slug(path):
    """
    Reduce a URL path to the slug of the ``Page`` it addresses.

    Removes (at most one occurrence each of) the active language code, the
    ``SITE_PREFIX`` setting and ``PAGES_SLUG``, then trims surrounding
    slashes.  Returns "/" when nothing remains.
    """
    from yacms.urls import PAGES_SLUG
    language = translation.get_language_from_path(path)
    for piece in (language, settings.SITE_PREFIX, PAGES_SLUG):
        if not piece:
            continue
        # NOTE(review): replace() removes the first occurrence anywhere in
        # the path, not only a leading prefix - behavior preserved as-is.
        path = path.replace(piece, "", 1)
    return clean_slashes(path) or "/"
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def process_exception(self, request, exception):
    """Django middleware hook: report request exceptions via ExReporter.

    Builds a GitHub-backed store from the ``EXREPORTER_GITHUB_*`` settings
    and files the current exception there.  Returns ``None`` implicitly so
    Django's normal exception handling still runs.
    """
    credentials = GithubCredentials(
        user=settings.EXREPORTER_GITHUB_USER,
        repo=settings.EXREPORTER_GITHUB_REPO,
        auth_token=settings.EXREPORTER_GITHUB_AUTH_TOKEN)
    store = GithubStore(credentials=credentials)
    ExReporter(store=store,
               labels=settings.EXREPORTER_GITHUB_LABELS).report()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(self):
    """ Connects to publisher.

    Creates the Redis client on ``self.client`` from the instance's
    ``host``, ``port`` and ``password`` attributes.  No command is issued
    here.
    """
    self.client = redis.Redis(
        host=self.host, port=self.port, password=self.password)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(self):
    """ Connects to Redis, subscribes to ``self.channel`` and starts the
    background listener.

    Delegates the client connection to the parent class, opens a pub/sub
    handle, subscribes, and spawns a daemon thread running :meth:`listen`
    so the subscription does not block the caller.
    """
    logger.info("Connecting to Redis on {host}:{port}...".format(
        host=self.host, port=self.port))
    super(RedisSubscriber, self).connect()
    logger.info("Successfully connected to Redis")
    # Subscribe to channel
    self.pubsub = self.client.pubsub()
    self.pubsub.subscribe(self.channel)
    logger.info("Subscribed to [{channel}] Redis channel".format(
        channel=self.channel))
    # Start listening in the background.  A daemon thread will not keep the
    # process alive on shutdown; `daemon = True` replaces the deprecated
    # `setDaemon(True)` accessor.
    t = Thread(target=self.listen)
    t.daemon = True
    t.start()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def listen(self):
    """ Consume the pub/sub stream and dispatch each real message.

    Control frames (subscribe/unsubscribe acknowledgements) are skipped;
    message payloads are unpacked into their components and handed to
    ``dispatch_message``.
    """
    for frame in self.pubsub.listen():
        if frame['type'] != 'message':
            continue
        unpacked = self.unpack(frame['data'])
        message_type, client_id, client_storage, args, kwargs = unpacked
        self.dispatch_message(message_type, client_id, client_storage,
                              args, kwargs)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def exit(self):
    """ Closes the connection.

    Unsubscribes the pub/sub handle (presumably ending the listener
    thread's loop - confirm) and releases every pooled connection held by
    the client.
    """
    self.pubsub.unsubscribe()
    self.client.connection_pool.disconnect()
    logger.info("Connection to Redis closed")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_kwargs(self):
    """ Collect the connection settings shared by publisher and subscriber.

    Returns a dict with ``host``, ``port``, ``channel`` and ``password``
    taken from the corresponding instance attributes.
    """
    keys = ('host', 'port', 'channel', 'password')
    return dict((key, getattr(self, key)) for key in keys)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read(fname):
    """ Return the contents of ``fname``, resolved relative to this module.

    The name is joined onto the module's directory (absolute paths pass
    through ``join`` unchanged).  Returns ``None`` when the file does not
    exist.
    """
    path = join(dirname(__file__), fname)
    # Check the same path we open: the previous version checked ``fname``
    # but opened the joined path, which can disagree for relative names.
    if exists(path):
        # Context manager so the file handle is not leaked.
        with open(path) as fp:
            return fp.read()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read(url, **args):
    """Retrieve message content from the POP3 mailbox described by ``url``.

    (The original docstring said "ftp URL", but the implementation speaks
    POP3 throughout.)

    :param url: parsed URL (``urlparse`` result) giving hostname, port and
        username.  A "user;auth=..." userinfo keeps only the user part.
    :param args: ``password`` (required), ``all`` (bool - fetch every
        message instead of just the first), ``timeout`` (socket timeout).
    :returns: list of ``(filename, content)`` tuples when ``all`` is true,
        otherwise the content of the first message alone.
    :raises ValueError: missing password, or empty mailbox.
    :raises BadPOP3Response: the server rejected a command.
    """
    all_ = args.pop('all', False)
    password = args.pop('password', '')
    if not password:
        raise ValueError('password')
    try:
        username, __ = url.username.split(';')
    except ValueError:
        username = url.username
    if not username:
        # Fall back to the environment: USER wins over LOGNAME over USERNAME.
        username = os.environ.get('USERNAME')
        username = os.environ.get('LOGNAME', username)
        username = os.environ.get('USER', username)
    client = poplib.POP3(url.hostname, url.port or poplib.POP3_PORT,
                         args.pop('timeout', socket._GLOBAL_DEFAULT_TIMEOUT))
    response, count, __ = client.apop(username, password)
    if 'OK' not in response:
        raise BadPOP3Response(response)
    if count == 0:
        raise ValueError('count: 0')
    collection = []
    # Fetch all messages, or just the first when ``all`` is not requested.
    for id_ in range(count if all_ is True else 1):
        response, lines, __ = client.retr(id_ + 1)
        if 'OK' not in response:
            raise BadPOP3Response(response)
        client.dele(id_ + 1)
        message = email.message_from_string('\n'.join(lines))
        content_type = message.get_content_type()
        filename = message.get_filename('')
        encoding = message['Content-Encoding']
        content = message.get_payload(decode=True)
        content = content_encodings.get(encoding).decode(content)
        content = content_types.get(content_type).parse(content)
        collection.append((filename, content))
    client.quit()
    # BUG FIX: the previous condition was ``len(collection) > 0`` which is
    # always true here (count == 0 raised above), so single-message mode
    # could never return the bare content.  Select on the ``all`` flag.
    return collection if all_ else collection[0][1]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get():
""" Get all nagios status information from a local nagios instance """ |
livestatus = mk_livestatus()
hosts = livestatus.get_hosts()
services = livestatus.get_services()
result = {}
result['hosts'] = hosts
result['services'] = services
return result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def send(remote_host=None):
    """ Push the local Nagios check results to a remote Nago node.

    :param remote_host: target node; defaults to the configured ``server``
        setting when omitted or falsy.
    :returns: human-readable confirmation string.
    """
    payload = get()
    if not remote_host:
        remote_host = nago.extensions.settings.get('server')
    node = nago.core.get_node(remote_host)
    node.send_command('checkresults', 'post', **payload)
    return "checkresults sent to %s" % remote_host
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _format_checkresult(**kwargs):
    """ Returns a string in a nagios "checkresults" compatible format.

    Builds a dict of default check-result fields, overlays the caller's
    keyword arguments, normalizes livestatus-style field names to their
    status.dat equivalents, and renders the module-level template.
    """
    # Defaults for every field the template may reference.
    o = {}
    o['check_type'] = '1'
    o['check_options'] = '0'
    o['scheduled_check'] = '1'
    o['reschedule_check'] = '1'
    o['latency'] = '0.0'
    o['start_time'] = '%5f' % time.time()
    o['finish_time'] = '%5f' % time.time()
    o['early_timeout'] = '0'
    o['exited_ok'] = '1'
    o['long_plugin_output'] = ''
    o['performance_data'] = ''
    # NOTE: locals() at this point contains 'o' and 'kwargs' themselves,
    # which is why both keys are deleted again just below.  ('template' is
    # not yet bound, so it is not captured.)
    o.update(locals())
    o.update(kwargs)
    del o['kwargs']
    del o['o']
    template = _host_check_result
    # Escape all linebreaks if we have them
    # NOTE(review): 'basestring' is Python 2 only.
    for k, v in o.items():
        if isinstance(v, basestring) and '\n' in v:
            o[k] = v.replace('\n', '\\n')
    # Livestatus returns slightly different output than status.dat
    # Lets normalize everything to status.dat format
    if 'name' in o and not 'host_name' in o:
        o['host_name'] = o['name']
    if 'state' in o and not 'return_code' in o:
        o['return_code'] = o['state']
    if 'description' in o and not 'service_description' in o:
        o['service_description'] = o['description']
    if not o['performance_data'] and 'perf_data' in o:
        o['performance_data'] = o['perf_data']
    # If this is a service (as opposed to host) lets add service_description field in out putput
    if 'service_description' in o:
        template += "service_description={service_description}\n"
    # Ensure performance data is terminated by an escaped newline.
    if not o['performance_data'].endswith('\\n'):
        o['performance_data'] += '\\n'
    # Format the string and return
    return template.format(**o) + '\n'
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def from_data(self, time, value, series_id=None, key=None, tz=None):
    """Build a DataPoint from native Python data.

    Use this instead of JSON parsing when you already hold a Datetime and
    a numeric value.  ``series_id``/``key`` are only needed for multi
    writes, to identify which series the point belongs to; ``tz`` is an
    Olsen database time zone name.

    :param time: the point in time for this reading (ISO8601 string or
        Datetime)
    :param value: the reading itself
    :type value: int or float
    :param string series_id: (optional) series ID for this point
    :param string key: (optional) series key for this point
    :param string tz: (optional) time zone for this point
    :raises ValueError: when ``value`` is not an int or float
    :rtype: :class:`DataPoint`"""
    stamp = check_time_param(time)
    # Strict type() check (not isinstance): bool, a subclass of int, is
    # rejected - matching the original contract.
    if type(value) not in (float, int):
        raise ValueError('Values must be int or float. Got "%s".' %
                         str(value))
    payload = {
        't': stamp,
        'v': value,
        'id': series_id,
        'key': key
    }
    return DataPoint(payload, None, tz=tz)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def truncate(self, size=0):
    """
    Truncate the stream to ``size`` bytes.

    With ``size == 0`` (the default) the buffer is simply replaced by an
    empty one.  Otherwise the first ``size`` bytes are copied into a fresh
    buffer and the caller's stream position is restored afterwards.

    @param size: The new length of the stream, in bytes.
    @type size: C{int}
    """
    if size == 0:
        self._buffer = StringIO()
        self._len_changed = True
        return
    saved_pos = self.tell()
    self.seek(0)
    head = self.read(size)
    # Rebuild the buffer from the retained prefix.
    replacement = StringIO()
    replacement.write(head)
    self._buffer = replacement
    # Restore the caller's position (it may now be past the end).
    self.seek(saved_pos)
    self._len_changed = True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_len(self):
""" Return total number of bytes in buffer. """ |
if hasattr(self._buffer, 'len'):
self._len = self._buffer.len
return
old_pos = self._buffer.tell()
self._buffer.seek(0, 2)
self._len = self._buffer.tell()
self._buffer.seek(old_pos) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _is_big_endian(self):
    """
    Return ``True`` when the stream's effective byte order is big endian.

    ``ENDIAN_NATIVE`` resolves to the machine's byte order; network order
    counts as big endian.
    """
    if self.endian == DataTypeMixIn.ENDIAN_NATIVE:
        return SYSTEM_ENDIAN == DataTypeMixIn.ENDIAN_BIG
    big_orders = (DataTypeMixIn.ENDIAN_BIG, DataTypeMixIn.ENDIAN_NETWORK)
    return self.endian in big_orders
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def write_ushort(self, s):
    """
    Write a 2-byte unsigned integer to the stream.

    @param s: value in [0, 65535]
    @type s: C{int}
    @raise TypeError: C{s} is not an int.
    @raise OverflowError: value out of range.
    """
    if type(s) not in python.int_types:
        raise TypeError('expected an int (got:%r)' % (type(s),))
    if s < 0 or s > 65535:
        raise OverflowError("Not in range, %d" % s)
    fmt = "%sH" % self.endian
    self.write(struct.pack(fmt, s))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def write_ulong(self, l):
    """
    Write a 4-byte unsigned integer to the stream.

    @param l: value in [0, 4294967295]
    @type l: C{int}
    @raise TypeError: C{l} is not an int.
    @raise OverflowError: value out of range.
    """
    if type(l) not in python.int_types:
        raise TypeError('expected an int (got:%r)' % (type(l),))
    if l < 0 or l > 4294967295:
        raise OverflowError("Not in range, %d" % l)
    fmt = "%sL" % self.endian
    self.write(struct.pack(fmt, l))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read_24bit_uint(self):
    """
    Read a 24-bit unsigned integer from the stream.

    Reads three bytes and combines them according to the stream's
    endianness.

    @since: 0.4
    """
    if self._is_big_endian():
        shifts = (16, 8, 0)
    else:
        shifts = (0, 8, 16)
    result = 0
    for shift in shifts:
        result += self.read_uchar() << shift
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def write_24bit_uint(self, n):
    """
    Write a 24-bit unsigned integer to the stream.

    @param n: value in [0, 0xffffff]
    @type n: C{int}
    @raise TypeError: C{n} is not an int.
    @raise OverflowError: value out of range.
    @since: 0.4
    """
    if type(n) not in python.int_types:
        raise TypeError('expected an int (got:%r)' % (type(n),))
    if n < 0 or n > 0xffffff:
        raise OverflowError("n is out of range")
    if self._is_big_endian():
        shifts = (16, 8, 0)
    else:
        shifts = (0, 8, 16)
    for shift in shifts:
        self.write_uchar((n >> shift) & 0xff)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def write_double(self, d):
    """
    Write an 8-byte float to the stream.

    @param d: the value to encode
    @type d: C{float}
    @raise TypeError: C{d} is not a float.
    """
    if type(d) is not float:
        raise TypeError('expected a float (got:%r)' % (type(d),))
    fmt = "%sd" % self.endian
    self.write(struct.pack(fmt, d))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def write_float(self, f):
    """
    Write a 4-byte float to the stream.

    @param f: the value to encode
    @type f: C{float}
    @raise TypeError: C{f} is not a float.
    """
    if type(f) is not float:
        raise TypeError('expected a float (got:%r)' % (type(f),))
    fmt = "%sf" % self.endian
    self.write(struct.pack(fmt, f))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read_utf8_string(self, length):
    """
    Read ``length`` bytes from the stream and decode them as UTF-8.

    @rtype: C{unicode}
    """
    fmt = "%s%ds" % (self.endian, length)
    raw = struct.unpack(fmt, self.read(length))[0]
    return raw.decode('utf-8')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def write_utf8_string(self, u):
    """
    Encode ``u`` as UTF-8 and write it to the stream.

    @param u: unicode object (already-encoded str passes through)
    @raise TypeError: C{u} is not a recognised string type.
    """
    if not isinstance(u, python.str_types):
        raise TypeError('Expected %r, got %r' % (python.str_types, u))
    # Renamed from 'bytes' to avoid shadowing the builtin.
    encoded = u
    if isinstance(encoded, unicode):
        encoded = u.encode("utf8")
    self.write(struct.pack("%s%ds" % (self.endian, len(encoded)), encoded))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read(self, length=-1):
    """
    Read up to ``length`` bytes from the stream (everything when -1).

    @raise IOError: attempt to read past the end of the buffer.
    """
    if length == -1:
        if self.at_eof():
            raise IOError(
                'Attempted to read from the buffer but already at the end')
    elif length > 0 and self.tell() + length > len(self):
        raise IOError('Attempted to read %d bytes from the buffer but '
                      'only %d remain' % (length, len(self) - self.tell()))
    return StringIOProxy.read(self, length)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def append(self, data):
    """
    Append ``data`` to the end of the stream without moving the pointer.

    @param data: the data to append; objects exposing ``getvalue()`` (e.g.
        other buffers) are appended by value.
    @type data: C{str} or C{unicode}
    @raise TypeError: data is not C{str} or C{unicode}
    """
    saved_pos = self.tell()
    self.seek(0, 2)  # jump to the end before writing
    payload = data.getvalue() if hasattr(data, 'getvalue') else data
    self.write_utf8_string(payload)
    # Put the pointer back where the caller had it.
    self.seek(saved_pos)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def calibration(date, satellite):
    """
    Build the calibration-coefficient dictionary for a satellite image.

    :param date: datetime of the image (only the post-launch coefficient
        depends on it).
    :param satellite: name of the satellite.
    :returns: dict with ``counts_shift``, ``space_measurement``,
        ``prelaunch`` and ``postlaunch`` coefficient entries.
    """
    coefficients = {
        'counts_shift': CountsShift().coefficient(satellite),
        'space_measurement': SpaceMeasurement().coefficient(satellite),
        'prelaunch': PreLaunch().coefficient(satellite),
        'postlaunch': PostLaunch().coefficient(date, satellite),
    }
    return coefficients
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def install_dependencies(plugins_directory, ostream=sys.stdout):
    '''
    Run ``on_plugin_install`` script for each plugin directory found in
    specified plugins directory.

    Parameters
    ----------
    plugins_directory : path-like
        Directory containing zero or more plugin subdirectories.  Must
        expose ``realpath()`` and ``dirs()`` (i.e. a ``path.py``-style
        object, not a plain ``str`` - TODO confirm).
    ostream : file-like
        Output stream for status messages (default: ``sys.stdout``).
    '''
    plugin_directories = plugins_directory.realpath().dirs()
    # Banner listing every plugin that will be processed.
    print >> ostream, 50 * '*'
    print >> ostream, 'Processing plugins:'
    print >> ostream, '\n'.join([' - {}'.format(p)
                                 for p in plugin_directories])
    print >> ostream, '\n' + 50 * '-' + '\n'
    for plugin_dir_i in plugin_directories:
        try:
            on_plugin_install(plugin_dir_i, ostream=ostream)
        except RuntimeError, exception:
            # NOTE(review): failure goes to stdout, not ``ostream``, and
            # processing continues with the next plugin - presumably
            # intentional best-effort behavior; confirm.
            print exception
        print >> ostream, '\n' + 50 * '-' + '\n'
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def previous_weekday(date):
    '''
    Return the last weekday (Mon-Fri) strictly before ``date``.

    Args:
        date (datetime or datetime.date)
    Returns:
        (datetime or datetime.date)
    '''
    # Monday -> back 3 days (Friday); Sunday -> back 2; otherwise 1.
    step_back = {0: 3, 6: 2}.get(date.weekday(), 1)
    return date - datetime.timedelta(days=step_back)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def next_weekday(date):
    '''
    Return the first weekday (Mon-Fri) strictly after ``date``.

    Args:
        date (datetime or datetime.date)
    Returns:
        (datetime or datetime.date)
    '''
    weekday = date.weekday()
    # Mon-Thu: tomorrow is a weekday; Fri/Sat/Sun: jump ahead to Monday.
    days_ahead = 1 if weekday < 4 else 7 - weekday
    return date + datetime.timedelta(days=days_ahead)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def last_year(date_):
    '''
    Return the same calendar date one year earlier.

    Feb 29 maps to Feb 28 of the (non-leap) previous year.  Note the
    result is always a ``datetime.date``, even for datetime input.

    Args:
        date_ (datetime or datetime.date)
    Returns:
        (datetime.date)
    '''
    if date_.month == 2 and date_.day == 29:
        day = 28
    else:
        day = date_.day
    return datetime.date(date_.year - 1, date_.month, day)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def timestr2time(time_str):
    '''
    Parse a time string into a ``datetime.time``.

    The format is guessed: "h:mm:ss", "h:mm" (seconds default to 0), or
    the six-digit "hhmmss" form - see VALID_TIME_FORMATS_TEXT.

    Args:
        time_str (str): string representing a time of day.
    Returns:
        datetime.time
    Raises:
        ValueError: the string does not match any supported format.
    '''
    if any(c not in '0123456789:' for c in time_str):
        raise ValueError('Illegal character in time string')
    colons = time_str.count(':')
    if colons == 2:
        h, m, s = time_str.split(':')
    elif colons == 1:
        h, m = time_str.split(':')
        s = '00'
    elif len(time_str) == 6:
        h, m, s = time_str[:2], time_str[2:4], time_str[4:]
    else:
        raise ValueError('Time format not recognised. {}'.format(
            VALID_TIME_FORMATS_TEXT))
    # Minutes and seconds must be exactly two digits each.
    if len(m) != 2 or len(s) != 2:
        raise ValueError('m and s must be 2 digits')
    try:
        return datetime.time(int(h), int(m), int(s))
    except ValueError:
        raise ValueError('Invalid time {}. {}'.format(time_str,
                                                      VALID_TIME_FORMATS_TEXT))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def time2timestr(time, fmt='hhmmss'):
    '''
    Turns a datetime.time object into a string. The string must have one
    of the formats from VALID_TIME_FORMATS_TEXT to make it compatible
    with timestr2time.

    Args:
        time (datetime.time) the time to be translated
        fmt (str) a format string spelled with 'h'/'m'/'s' characters,
            e.g. 'hh:mm:ss', 'h:mm' or 'hhmmss'.
    Returns:
        (str) that represents a time.
    Raises:
        ValueError if the format is not valid.
    '''
    # Split fmt into hour/minute/second parts, checking h < m < s order.
    if fmt.count(':') == 2:
        if not fmt.index('h') < fmt.index('m') < fmt.index('s'):
            raise ValueError('Invalid format string. {}'.format(
                VALID_TIME_FORMATS_TEXT))
        h, m, s = fmt.split(':')
    elif fmt.count(':') == 1:
        if not fmt.index('h') < fmt.index('m'):
            raise ValueError('Invalid format string. {}'.format(
                VALID_TIME_FORMATS_TEXT))
        h, m = fmt.split(':')
        s = None
    elif any(c not in 'hms' for c in fmt) or len(fmt) != 6:
        raise ValueError('Invalid character in format string. {}'.format(
            VALID_TIME_FORMATS_TEXT))
    else:
        # Colon-free form: must be exactly 'hhmmss'-shaped (6 chars).
        if not fmt.index('h') < fmt.index('m') < fmt.index('s'):
            raise ValueError('Invalid format string. {}'.format(
                VALID_TIME_FORMATS_TEXT))
        h, m, s = fmt[:-4], fmt[-4:-2], fmt[-2:]
    # Each part may only repeat its own letter ('hh', never 'hm').
    # NOTE(review): the message arguments look swapped ("h is not hh") -
    # confirm intent before changing.
    for string, char in ((h, 'h'), (m, 'm'), (s, 's')):
        if string is not None and any(c != char for c in string):
            raise ValueError('Invalid date format: {} is not {}'.\
                format(char, string))
    # Translate to strftime directives.  A single 'h' still uses the
    # zero-padded %H; the 'X' marker lets the final replace() strip the
    # leading zero ('X0' -> 'X' -> '').
    if len(h) == 2:
        fmt = fmt.replace('hh', '%H', 1)
    elif len(h) == 1:
        fmt = fmt.replace('h', 'X%H', 1)
    else:
        raise ValueError('Invalid format string, hour must have 1 or 2 digits')
    if len(m) == 2:
        fmt = fmt.replace('mm', '%M', 1)
    else:
        raise ValueError('Invalid format string, minutes must have 2 digits')
    if s is not None and len(s) == 2:
        fmt = fmt. replace('ss', '%S', 1)
    elif s is not None:
        raise ValueError('Invalid format string, seconds must have 2 digits')
    return time.strftime(fmt).replace('X0','X').replace('X','')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_custom_concurrency(default, forced, logger=None):
    """ Validate a crawler-requested concurrency against the default.

    :param int default: default tasks concurrency.
    :param forced: concurrency requested by the crawler.
    :param logger: optional logger; falls back to the module LOGGER.
    :return: the accepted concurrency - ``forced`` when it is an int in
        [1, default], otherwise ``default``.
    :rtype: int
    """
    log = logger or LOGGER
    prefix = 'Invalid "max_concurrent_tasks: '
    if not isinstance(forced, int):
        log.warn(prefix + 'expecting int')
        return default
    if forced > default:
        log.warn(prefix + 'may not be greater than: %s' % default)
        return default
    if forced < 1:
        log.warn(prefix + 'may not be less than 1')
        return default
    return forced
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def reorg_crawl_tasks(tasks, concurrency, logger=None):
    """ Extract the content returned by a crawler's ``iter_crawl_tasks``.

    :param dict tasks: mapping with ``tasks`` (required) and optional
        ``epilogue`` and ``max_concurrent_tasks`` entries.
    :param int concurrency: default tasks concurrency level.
    :param logger: optional logger forwarded to the validation helper.
    :return: tuple of (sub-tasks list, epilogue task or None, concurrency).
    :rtype: tuple
    """
    futures = list(tasks['tasks'])
    epilogue = tasks.get('epilogue')
    custom_concurrency = tasks.get('max_concurrent_tasks', concurrency)
    # BUG FIX: the validated value was previously computed and discarded,
    # so a crawler's 'max_concurrent_tasks' never took effect.
    concurrency = check_custom_concurrency(concurrency, custom_concurrency,
                                           logger)
    return futures, epilogue, concurrency
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def split_crawl_tasks(tasks, concurrency):
    """ Reorganize tasks into a list of task lists bounded by concurrency.

    :param tasks: either a flat list of tasks or an already-nested list of
        task lists (the latter is validated and returned as-is).
    :param int concurrency: maximum number of tasks run in parallel.
    :return: list of lists of tasks.
    """
    already_nested = any(tasks) and isinstance(tasks[0], list)
    if already_nested:
        # Validate the caller-provided nesting.
        for sub in tasks:
            if not isinstance(sub, list):
                raise Exception("Expected a list of tasks")
        return tasks
    if concurrency <= 1:
        return [tasks]
    # Spread the flat list across ``concurrency`` chains of roughly equal
    # size.
    chain_size = int(ceil(float(len(tasks)) / concurrency))
    return list(chunks(iter(tasks), max(1, chain_size)))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def do_static_merge(cls, c_source, c_target):
    """Fold cluster ``c_source`` into ``c_target``.

    Used once per-instance bookkeeping (self.INSTANCES / self.clusters) is
    no longer needed: extends the target with the source's members,
    reparents the source, drops it from the class-level cluster list, and
    repoints every mention of the source at the target.
    """
    c_target.extend(c_source)
    c_source.parent = c_target.parent
    cls.CLUSTERS.remove(c_source)
    for mention in c_source.mentions:
        cls.MENTION_TO_CLUSTER[mention] = c_target
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def random_letters(count):
    """Return ``count`` distinct pseudo-random uppercase letters.

    Letters are drawn one at a time and duplicates rejected, so the result
    never repeats a letter (``count`` must be <= 26 to terminate).
    """
    letters = random.choice(string.ascii_uppercase)
    while len(letters) < count:
        pick = random.choice(string.ascii_uppercase)
        if pick not in letters:
            letters += pick
    return letters
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def random_codebuch(path):
    """Generate a month-long (31 day) Enigma codebook and save it to a file.

    Each line holds: the day number, three distinct rotors, three distinct
    ring settings, ten plugboard pairs (20 distinct letters) and a
    reflector, all space-separated.

    :param path: file path the codebook is written to.
    :returns: the list of generated lines (each ending in os.linesep).
    """
    lines = []
    for day in range(31):
        # Pick three distinct rotors.
        all_rotors = ['I', 'II', 'III', 'IV', 'V']
        rotors = [random.choice(all_rotors)]
        while len(rotors) < 3:
            r = random.choice(all_rotors)
            if r not in rotors:
                rotors.append(r)
        # Pick three distinct rotor settings.
        settings = [str(random.randint(1, 26))]
        while len(settings) < 3:
            s = str(random.randint(1, 26))
            if s not in settings:
                settings.append(s)
        # Pick plugboard settings: 10 pairs over 20 distinct letters.
        plugboard = []
        while len(plugboard) < 20:
            p1 = random_letters(1)
            p2 = random_letters(1)
            if (not p1 == p2 and
                    not p1 in plugboard and not p2 in plugboard):
                plugboard.extend([p1, p2])
        pairs = [plugboard[i] + plugboard[i + 1] for i in range(0, 20, 2)]
        # Pick a reflector.
        reflector = random.choice(['B', 'C'])
        # BUG FIX: the line is assembled from the *accepted* picks only.
        # Previously every candidate (including rejected duplicates) was
        # appended and the first rotor/setting was omitted entirely.
        line = ' '.join([str(day + 1)] + rotors + settings + pairs +
                        [reflector]) + os.linesep
        lines.append(line)
    with open(path, 'w') as f:
        f.writelines(lines)
    return lines
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def encrypt(self, plaintext):
    """Have the operator encrypt a message.

    Chooses a fresh three-letter message key (different from the day's
    grundstellung), encrypts that key under the grundstellung, encrypts
    the body under the message key, and transmits:
    encrypted key + ciphertext + grundstellung.
    """
    # Choose a message key distinct from the ground setting.
    msg_key = random_letters(3)
    while msg_key == self.grundstellung:
        msg_key = random_letters(3)
    # Encrypt the message key under the ground setting.
    self.machine.set_display(self.grundstellung)
    enc_key = self.machine.process_text(msg_key)
    # Encrypt the body under the message key.
    self.machine.set_display(msg_key)
    ciphertext = self.machine.process_text(plaintext)
    # Frame the transmission: key prefix, body, ground-setting suffix.
    return "%s%s%s" % (enc_key, ciphertext, self.grundstellung)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def decrypt(self, ciphertext):
    """Have the operator decrypt a message.

    The first three letters are the encrypted message key and the last
    three the grundstellung it was encrypted under; the middle is the
    body, encrypted under the recovered message key.
    """
    enc_key, body, grundstellung = (ciphertext[:3], ciphertext[3:-3],
                                    ciphertext[-3:])
    # Recover the message key using the ground setting.
    self.machine.set_display(grundstellung)
    msg_key = self.machine.process_text(enc_key)
    # Decrypt the body under the message key.
    self.machine.set_display(msg_key)
    return self.machine.process_text(body)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_version(full=False):
    """
    Return the version number as a dotted string.

    :param full: when ``True``, append any pre-release components from
        ``__version__`` after a hyphen.  (Default: ``False``)
    """
    release = ".".join(str(part) for part in __version__[:3])
    if not full:
        return release
    return "-".join([release] + list(__version__[3:]))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_args_kwargs(self, *args, **kwargs):
    '''Normalize positional and keyword arguments into one positional tuple.

    Positional ``args`` fill the leading parameter slots; the remaining
    slots are filled from ``kwargs``, falling back to the declared
    defaults.  A keyword may not re-assign a slot already filled
    positionally and must name a known parameter.
    '''
    keys, defdict = self.arginfo
    filled = keys[:len(args)]
    remaining = keys[len(args):]
    # Reject duplicate or unknown keyword arguments.
    for name in kwargs:
        assert name not in filled
        assert name in keys
    lookup = dict(defdict, **kwargs)
    return args + tuple(lookup[name] for name in remaining)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_keys_defdict(self):
    '''Get the keys and the default dictionary of the given function's
    arguments.

    Returns:
        (keys, defdict): ``keys`` is the ordered list of argument names of
        ``self.func``; ``defdict`` maps the trailing arguments to their
        default values, or is ``None`` when no defaults are declared.
    '''
    # inspect argspecs
    # NOTE(review): inspect.getargspec is deprecated and removed in
    # Python 3.11; inspect.getfullargspec is the replacement.
    argspec = inspect.getargspec(self.func)
    keys, defvals = argspec.args, argspec.defaults
    # convert to (list_of_argkeys, dict_of_default_keys)
    if defvals is None:
        return keys, None
    else:
        defvals = list(defvals)
        # Defaults align with the *last* len(defvals) keys, so reverse
        # both before zipping, then restore the original key order.
        keys.reverse()
        defvals.reverse()
        defdict = dict(zip(keys, defvals))
        keys.reverse()
        return keys, defdict
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compile_with_value(self, func, args=None, owner=None):
    '''Compile the function by example: cast array-like sample arguments
    to Theano variables, then delegate to ``compile_with_symbol``.
    '''
    sample_args = [] if args is None else args
    # numpy.ndarray -> theano.tensor, each named by its position.
    theano_args = [
        self.cast2theano_var(value, 'extheano.jit.Compiler-arg-%d' % pos)
        for pos, value in enumerate(sample_args)
    ]
    return self.compile_with_symbol(func, theano_args, owner)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compile_with_symbol(self, func, theano_args=None, owner=None):
    '''Compile the function with theano symbols.

    :param func: function mapping the Theano input variables to output
        symbols.
    :param theano_args: list of Theano input variables (default: []).
    :param owner: optional object passed as the first argument of ``func``
        (method-style invocation).
    :returns: the compiled ``theano.function``.
    '''
    if theano_args is None:
        theano_args = []
    # initialize the shared buffers
    # NOTE: UpdateCollector must be created *before* func runs - it
    # gathers the shared-variable updates registered while func executes.
    upc = UpdateCollector()
    # get the output symbols and other Theano options
    theano_ret = func(*theano_args) if owner is None \
        else func(owner, *theano_args)
    # integrate the information of updates, givens and the other options
    out = copy.copy(self.default_options)
    out['outputs'] = theano_ret
    out['updates'] = upc.extract_updates()
    # compile the function
    return theano.function(theano_args, **out)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cast2theano_var(self, array_like, name=None):
    '''Cast an array-like value to a Theano tensor variable whose dtype
    and ndim match ``numpy.asarray(array_like)``.

    Supports ndim 0..4 (scalar, vector, matrix, tensor3, tensor4).

    :raises ValueError: for inputs of five or more dimensions.
    '''
    array = np.asarray(array_like)
    # Tensor factory per number of dimensions.
    factories = {
        0: T.scalar,
        1: T.vector,
        2: T.matrix,
        3: T.tensor3,
        4: T.tensor4,
    }
    try:
        make = factories[array.ndim]
    except KeyError:
        raise ValueError('extheano.jit.Compiler: Unsupported type or shape')
    return make(name, array.dtype)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def _get_fn(fn, mode, load):
'''
Load a contents, checking that the file was not modified during the read.
'''
try:
mtime_before = os.path.getmtime(fn)
except OSError:
mtime_before = None
try:
with open(fn, mode) as fp:
item = load(fp)
except OpenError:
raise
else:
mtime_after = os.path.getmtime(fn)
if mtime_before in {None, mtime_after}:
return item
else:
raise EnvironmentError('File was edited during read: %s' % fn) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def flatten_iterable(iterable):
    """ Flatten a nested iterable into a single layer (generator).

    Strings are treated as atoms and are not descended into, e.g.
    ``['t1', ['t2', ['t3']]]`` yields ``'t1'``, ``'t2'``, ``'t3'``.
    If you only want to flatten a single level, use more_itertools.flatten.
    """
    for element in iterable:
        nested = isinstance(element, Iterable) \
            and not isinstance(element, string_types)
        if not nested:
            yield element
        else:
            # Recurse into nested iterables, yielding their leaves in order.
            for leaf in flatten_iterable(element):
                yield leaf
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def list_events_view(request):
    ''' A list view of upcoming events. '''
    page_name = "Upcoming Events"
    profile = UserProfile.objects.get(user=request.user)
    # Bind the new-event form only when this POST came from the "post event"
    # submit button; otherwise render it unbound.
    event_form = EventForm(
        request.POST if 'post_event' in request.POST else None,
        profile=profile,
    )
    if event_form.is_valid():
        event_form.save()
        # Post/Redirect/Get: avoid duplicate submissions on refresh.
        return HttpResponseRedirect(reverse('events:list'))
    # a pseudo-dictionary, actually a list with items of form (event, ongoing,
    # rsvpd, rsvp_form), where ongoing is a boolean of whether the event is
    # currently ongoing, rsvpd is a boolean of whether the user has rsvp'd to
    # the event
    events_dict = list()
    for event in Event.objects.filter(end_time__gte=now()):
        # Each event gets its own RSVP form; only the one whose submit button
        # (named "rsvp-<pk>") was pressed is bound to the POST data.
        rsvp_form = RsvpForm(
            request.POST if "rsvp-{0}".format(event.pk) in request.POST else None,
            instance=event,
            profile=profile,
        )
        if rsvp_form.is_valid():
            # save() reports whether the RSVP now exists (truthy) or was
            # removed (falsy); pick the matching flash message.
            rsvpd = rsvp_form.save()
            if rsvpd:
                message = MESSAGES['RSVP_ADD'].format(event=event.title)
            else:
                message = MESSAGES['RSVP_REMOVE'].format(event=event.title)
            messages.add_message(request, messages.SUCCESS, message)
            return HttpResponseRedirect(reverse('events:list'))
        ongoing = ((event.start_time <= now()) and (event.end_time >= now()))
        rsvpd = (profile in event.rsvps.all())
        events_dict.append((event, ongoing, rsvpd, rsvp_form))
    # Reaching this point on a POST means no form validated.
    if request.method == "POST":
        messages.add_message(request, messages.ERROR, MESSAGES["EVENT_ERROR"])
    return render_to_response('list_events.html', {
        'page_name': page_name,
        'events_dict': events_dict,
        'now': now(),
        'event_form': event_form,
    }, context_instance=RequestContext(request))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def edit_event_view(request, event_pk):
    ''' The view to edit an event. '''
    page_name = "Edit Event"
    profile = UserProfile.objects.get(user=request.user)
    event = get_object_or_404(Event, pk=event_pk)
    # Only the event's owner or a superuser may edit; everyone else is
    # bounced back to the read-only event page.
    if event.owner != profile and not request.user.is_superuser:
        return HttpResponseRedirect(
            reverse('events:view', kwargs={"event_pk": event_pk}),
        )
    event_form = EventForm(
        request.POST or None,
        profile=profile,
        instance=event,
    )
    if event_form.is_valid():
        event = event_form.save()
        messages.add_message(
            request, messages.SUCCESS,
            MESSAGES['EVENT_UPDATED'].format(event=event.title),
        )
        # Post/Redirect/Get back to the event detail page.
        return HttpResponseRedirect(
            reverse('events:view', kwargs={"event_pk": event_pk}),
        )
    return render_to_response('edit_event.html', {
        'page_name': page_name,
        'event_form': event_form,
    }, context_instance=RequestContext(request))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_url_node(parser, bits):
    """ Parse ``bits`` exactly as the stock ``{% url %}`` tag would.

    Copied from django.template.defaulttags.url with the unnecessary
    pieces removed.
    """
    view_name = parser.compile_filter(bits[1])
    positional = []
    named = {}
    remainder = bits[2:]
    if remainder:
        # Optional "name=" prefix decides keyword vs positional argument.
        pattern = re.compile(r"(?:(\w+)=)?(.+)")
        for token in remainder:
            matched = pattern.match(token)
            if matched is None:
                raise TemplateSyntaxError("Malformed arguments to url tag")
            key, raw_value = matched.groups()
            compiled = parser.compile_filter(raw_value)
            if key:
                named[key] = compiled
            else:
                positional.append(compiled)
    return URLNode(view_name, positional, named, asvar=None)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def trigger(self, identifier, force=True):
    """Trigger an upgrade task.""" |
    # Log the task identifier before firing the request.
    self.debug(identifier)
    url = "{base}/{identifier}".format(
        base=self.local_base_url,
        identifier=identifier
    )
    # Only send the ``force`` query parameter when it is truthy.
    param = {}
    if force:
        param['force'] = force
    # NOTE(review): ``urllib.urlencode`` only exists on Python 2; Python 3
    # would need ``urllib.parse.urlencode`` -- confirm the supported
    # interpreter version.
    encode = urllib.urlencode(param)
    if encode:
        url += "?"
        url += encode
    # Empty body: the trigger endpoint takes its arguments via the URL.
    return self.core.update(url, {})
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _negate_compare_text(atok: asttokens.ASTTokens, node: ast.Compare) -> str: """ Generate the text representing the negation of the comparison node. :param atok: parsing obtained with ``asttokens`` so that we can access the last tokens of a node. The standard ``ast`` module provides only the first token of an AST node. In lack of concrete syntax tree, getting text from first to last token is currently the simplest approach. :param node: AST node representing the comparison in a condition :return: text representation of the node's negation """ |
assert len(node.ops) == 1, "A single comparison expected, but got: {}".format(len(node.ops))
assert len(node.comparators) == 1, "A single comparator expected, but got: {}".format(len(node.comparators))
operator = node.ops[0]
left = node.left
right = node.comparators[0]
left_text = atok.get_text(node=left)
right_text = atok.get_text(node=right)
text = ''
if isinstance(operator, ast.Eq):
text = '{} != {}'.format(left_text, right_text)
elif isinstance(operator, ast.NotEq):
text = '{} == {}'.format(left_text, right_text)
elif isinstance(operator, ast.Lt):
text = '{} >= {}'.format(left_text, right_text)
elif isinstance(operator, ast.LtE):
text = '{} > {}'.format(left_text, right_text)
elif isinstance(operator, ast.Gt):
text = '{} <= {}'.format(left_text, right_text)
elif isinstance(operator, ast.GtE):
text = '{} < {}'.format(left_text, right_text)
elif isinstance(operator, ast.Is):
text = '{} is not {}'.format(left_text, right_text)
elif isinstance(operator, ast.IsNot):
text = '{} is {}'.format(left_text, right_text)
elif isinstance(operator, ast.In):
text = '{} not in {}'.format(left_text, right_text)
elif isinstance(operator, ast.NotIn):
text = '{} in {}'.format(left_text, right_text)
else:
raise NotImplementedError("Unhandled comparison operator: {}".format(operator))
return text |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _error_type_and_message(
        decorator_inspection: icontract._represent.DecoratorInspection) -> Tuple[Optional[str], Optional[str]]:
    """
    Inspect the error argument of a contract and infer the error type and the
    message if the error is given as a lambda.

    If the error argument is not given or if it is not given as a lambda
    function, return immediately.

    The error message is inferred as the single string-literal argument to a
    single call in the lambda body.

    :param decorator_inspection: inspection of a contract decorator
    :return: error type (None if not inferrable), error message (None if not inferrable)
    """ |
    call_node = decorator_inspection.node
    error_arg_node = None  # type: Optional[ast.AST]
    # The error can be given either as the ``error=`` keyword ...
    for keyword in call_node.keywords:
        if keyword.arg == 'error':
            error_arg_node = keyword.value
    # ... or positionally as the fifth argument of the decorator.
    if error_arg_node is None and len(call_node.args) == 5:
        error_arg_node = call_node.args[4]
    if not isinstance(error_arg_node, ast.Lambda):
        return None, None
    body_node = error_arg_node.body
    # The body of the error lambda needs to be a callable, since it needs to return an instance of Exception
    if not isinstance(body_node, ast.Call):
        return None, None
    error_type = decorator_inspection.atok.get_text(node=body_node.func)
    error_message = None  # type: Optional[str]
    # Only infer the message from the single-string-literal call forms:
    # ``Error("msg")`` or ``Error(msg="msg")``.
    if len(body_node.args) == 1 and len(body_node.keywords) == 0:
        if isinstance(body_node.args[0], ast.Str):
            error_message = body_node.args[0].s
    elif len(body_node.args) == 0 and len(body_node.keywords) == 1:
        if isinstance(body_node.keywords[0].value, ast.Str):
            error_message = body_node.keywords[0].value.s
    else:
        # The error message could not be inferred.
        pass
    return error_type, error_message
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _format_contract(contract: icontract._Contract) -> str:
    """Format the contract as reST.""" |
    # pylint: disable=too-many-branches
    decorator_inspection = None  # type: Optional[icontract._represent.DecoratorInspection]
    ##
    # Parse condition
    ##
    if not icontract._represent._is_lambda(a_function=contract.condition):
        # Named condition functions can simply be cross-referenced.
        condition_text = ':py:func:`{}`'.format(contract.condition.__name__)
    else:
        # We need to extract the source code corresponding to the decorator since inspect.getsource() is broken with
        # lambdas.
        # Find the line corresponding to the condition lambda
        lines, condition_lineno = inspect.findsource(contract.condition)
        filename = inspect.getsourcefile(contract.condition)
        decorator_inspection = icontract._represent.inspect_decorator(
            lines=lines, lineno=condition_lineno, filename=filename)
        lambda_inspection = icontract._represent.find_lambda_condition(decorator_inspection=decorator_inspection)
        assert lambda_inspection is not None, \
            "Expected non-None lambda inspection with the condition: {}".format(contract.condition)
        condition_text = _condition_as_text(lambda_inspection=lambda_inspection)
    ##
    # Parse error
    ##
    error_type = None  # type: Optional[str]
    # Error message is set only for an error of a contract given as a lambda that takes no arguments and returns
    # a result of a call on a string literal (*e.g.*, ``error=ValueError("some message")``.
    error_msg = None  # type: Optional[str]
    if contract.error is not None:
        if isinstance(contract.error, type):
            error_type = contract.error.__qualname__
        elif inspect.isfunction(contract.error) and icontract._represent._is_lambda(a_function=contract.error):
            # Re-use the decorator inspection from the condition when available.
            if decorator_inspection is None:
                lines, condition_lineno = inspect.findsource(contract.error)
                filename = inspect.getsourcefile(contract.error)
                decorator_inspection = icontract._represent.inspect_decorator(
                    lines=lines, lineno=condition_lineno, filename=filename)
            error_type, error_msg = _error_type_and_message(decorator_inspection=decorator_inspection)
        else:
            # Error type could not be inferred
            pass
    ##
    # Format
    ##
    description = None  # type: Optional[str]
    # Prefer the explicit contract description; fall back to the message
    # extracted from the error lambda.
    if contract.description:
        description = contract.description
    elif error_msg is not None:
        description = error_msg
    else:
        # Description could not be inferred.
        pass
    doc = None  # type: Optional[str]
    if description and error_type:
        # Join description and error type with punctuation-aware glue.
        if description.strip()[-1] in [".", "!", "?"]:
            doc = "{} Raise :py:class:`{}`".format(description, error_type)
        elif description.strip()[-1] in [",", ";"]:
            doc = "{} raise :py:class:`{}`".format(description, error_type)
        else:
            doc = "{}; raise :py:class:`{}`".format(description, error_type)
    elif not description and error_type:
        doc = "Raise :py:class:`{}`".format(error_type)
    elif description and not error_type:
        doc = description
    else:
        # No extra documentation can be generated since the error type could not be inferred and
        # no contract description was given.
        doc = None
    if doc is not None:
        return "{} ({})".format(condition_text, doc)
    return condition_text
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _format_preconditions(preconditions: List[List[icontract._Contract]], prefix: Optional[str] = None) -> List[str]: """ Format preconditions as reST. :param preconditions: preconditions of a function :param prefix: prefix of the ``:requires:`` and ``:requires else:`` directive :return: list of lines """ |
if not preconditions:
return []
result = [] # type: List[str]
for i, group in enumerate(preconditions):
if i == 0:
if prefix is not None:
result.append(":{} requires:".format(prefix))
else:
result.append(":requires:")
else:
if prefix is not None:
result.append(":{} requires else:".format(prefix))
else:
result.append(":requires else:")
for precondition in group:
result.append(" * {}".format(_format_contract(contract=precondition)))
return result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
"""Convert the capture function into its text representation by parsing the source code of the decorator.""" |
if not icontract._represent._is_lambda(a_function=capture):
signature = inspect.signature(capture)
param_names = list(signature.parameters.keys())
return "{}({})".format(capture.__qualname__, ", ".join(param_names))
lines, lineno = inspect.findsource(capture)
filename = inspect.getsourcefile(capture)
decorator_inspection = icontract._represent.inspect_decorator(lines=lines, lineno=lineno, filename=filename)
call_node = decorator_inspection.node
capture_node = None # type: Optional[ast.Lambda]
if len(call_node.args) > 0:
assert isinstance(call_node.args[0], ast.Lambda), \
("Expected the first argument to the snapshot decorator to be a condition as lambda AST node, "
"but got: {}").format(type(call_node.args[0]))
capture_node = call_node.args[0]
elif len(call_node.keywords) > 0:
for keyword in call_node.keywords:
if keyword.arg == "capture":
assert isinstance(keyword.value, ast.Lambda), \
"Expected lambda node as value of the 'capture' argument to the decorator."
capture_node = keyword.value
break
assert capture_node is not None, "Expected to find a keyword AST node with 'capture' arg, but found none"
else:
raise AssertionError(
"Expected a call AST node of a snapshot decorator to have either args or keywords, but got: {}".format(
ast.dump(call_node)))
capture_text = decorator_inspection.atok.get_text(capture_node.body)
return capture_text |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _format_snapshots(snapshots: List[icontract._Snapshot], prefix: Optional[str] = None) -> List[str]: """ Format snapshots as reST. :param snapshots: snapshots defined to capture the argument values of a function before the invocation :param prefix: prefix to be prepended to ``:OLD:`` directive :return: list of lines describing the snapshots """ |
if not snapshots:
return []
result = [] # type: List[str]
if prefix is not None:
result.append(":{} OLD:".format(prefix))
else:
result.append(":OLD:")
for snapshot in snapshots:
text = _capture_as_text(capture=snapshot.capture)
result.append(" * :code:`.{}` = :code:`{}`".format(snapshot.name, text))
return result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _format_postconditions(postconditions: List[icontract._Contract], prefix: Optional[str] = None) -> List[str]: """ Format postconditions as reST. :param postconditions: postconditions of a function :param prefix: prefix to be prepended to ``:ensures:`` directive :return: list of lines describing the postconditions """ |
if not postconditions:
return []
result = [] # type: List[str]
if prefix is not None:
result.append(":{} ensures:".format(prefix))
else:
result.append(":ensures:")
for postcondition in postconditions:
result.append(" * {}".format(_format_contract(contract=postcondition)))
return result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _format_invariants(invariants: List[icontract._Contract]) -> List[str]: """Format invariants as reST.""" |
if not invariants:
return []
result = [":establishes:"] # type: List[str]
for invariant in invariants:
result.append(" * {}".format(_format_contract(contract=invariant)))
return result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _preconditions_snapshots_postconditions(checker: Callable) -> _PrePostSnaps:
    """Collect the preconditions, snapshots and postconditions from a contract checker of a function."""
    pre_groups = getattr(checker, "__preconditions__", [])  # type: List[List[icontract._Contract]]
    assert all(isinstance(group, list) for group in pre_groups)
    assert all(
        isinstance(contract, icontract._Contract)
        for group in pre_groups for contract in group)
    # Drop empty precondition groups ("require else" blocks).
    pre_groups = [group for group in pre_groups if group]
    snaps = getattr(checker, "__postcondition_snapshots__", [])  # type: List[icontract._Snapshot]
    assert all(isinstance(snap, icontract._Snapshot) for snap in snaps)
    posts = getattr(checker, "__postconditions__", [])  # type: List[icontract._Contract]
    assert all(isinstance(contract, icontract._Contract) for contract in posts)
    return _PrePostSnaps(preconditions=pre_groups, snapshots=snaps, postconditions=posts)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _format_function_contracts(func: Callable, prefix: Optional[str] = None) -> List[str]:
    """
    Format the preconditions and postconditions of a function given its checker decorator.

    :param func: function whose contracts we are describing
    :param prefix: prefix to be prepended to the contract directives such as ``get`` or ``set``
    :return: list of lines
    """
    checker = icontract._checkers.find_checker(func=func)
    if checker is None:
        # No contract checker decorates this function -- nothing to format.
        return []
    pps = _preconditions_snapshots_postconditions(checker=checker)
    return (_format_preconditions(preconditions=pps.preconditions, prefix=prefix)
            + _format_snapshots(snapshots=pps.snapshots, prefix=prefix)
            + _format_postconditions(postconditions=pps.postconditions, prefix=prefix))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _format_contracts(what: str, obj: Any) -> List[str]: """Format the contracts as reST.""" |
if what in ['function', 'method', 'attribute']:
if what == 'attribute':
if not isinstance(obj, property):
return []
return _format_property_contracts(prop=obj)
if what in ['function', 'method']:
return _format_function_contracts(func=obj)
raise NotImplementedError("Unhandled what: {}".format(what))
elif what == 'class':
invariants = getattr(obj, "__invariants__", []) # type: List[icontract._Contract]
assert isinstance(invariants, list)
assert all(isinstance(inv, icontract._Contract) for inv in invariants)
return _format_invariants(invariants=invariants)
# Only properties, functions and classes have contracts.
return [] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def process_docstring(app, what, name, obj, options, lines):
    """React to an autodoc docstring event by appending contract documentation."""
    # pylint: disable=unused-argument
    # pylint: disable=too-many-arguments
    contract_lines = _format_contracts(what=what, obj=obj)
    lines.extend(contract_lines)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def build_wheel(ireq, sources, hashes=None, cache_dir=None):
    """Build a wheel file for the InstallRequirement object.

    An artifact is downloaded (or read from cache). If the artifact is not a
    wheel, build one out of it. The dynamically built wheel is ephemeral; do
    not depend on its existence after the returned wheel goes out of scope.

    If `hashes` is truthy, it is assumed to be a list of hashes (as formatted
    in Pipfile.lock) to be checked against the download.

    Returns a `distlib.wheel.Wheel` instance. Raises a `WheelBuildError`
    (a `RuntimeError` subclass) if the wheel cannot be built.
    """ |
    kwargs = _prepare_wheel_building_kwargs(ireq)
    finder = _get_finder(sources, cache_dir=cache_dir)
    # Not for upgrade, hash not required. Hashes are not required here even
    # when we provide them, because pip skips local wheel cache if we set it
    # to True. Hashes are checked later if we need to download the file.
    ireq.populate_link(finder, False, False)
    # Ensure ireq.source_dir is set.
    # This is intentionally set to build_dir, not src_dir. Comments from pip:
    # [...] if filesystem packages are not marked editable in a req, a non
    # deterministic error occurs when the script attempts to unpack the
    # build directory.
    # Also see comments in `_prepare_wheel_building_kwargs()` -- If the ireq
    # is editable, build_dir is actually src_dir, making the build in-place.
    ireq.ensure_has_source_dir(kwargs["build_dir"])
    # Ensure the source is fetched. For wheels, it is enough to just download
    # because we'll use them directly. For an sdist, we need to unpack so we
    # can build it.
    if not ireq.editable or not pip_shims.is_file_url(ireq.link):
        if ireq.is_wheel:
            # Wheels only need downloading into the wheel cache directory.
            only_download = True
            download_dir = kwargs["wheel_download_dir"]
        else:
            only_download = False
            download_dir = kwargs["download_dir"]
        ireq.options["hashes"] = _convert_hashes(hashes)
        unpack_url(
            ireq.link, ireq.source_dir, download_dir,
            only_download=only_download, session=finder.session,
            hashes=ireq.hashes(False), progress_bar="off",
        )
    if ireq.is_wheel:
        # If this is a wheel, use the downloaded thing.
        output_dir = kwargs["wheel_download_dir"]
        wheel_path = os.path.join(output_dir, ireq.link.filename)
    else:
        # Otherwise we need to build an ephemeral wheel.
        wheel_path = _build_wheel(
            ireq, vistir.path.create_tracked_tempdir(prefix="ephem"),
            finder, _get_wheel_cache(cache_dir=cache_dir), kwargs,
        )
    if wheel_path is None or not os.path.exists(wheel_path):
        raise WheelBuildError
    return distlib.wheel.Wheel(wheel_path)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_euid():
    """ Set settings.DROPLET_USER effective UID for the current process.

    This adds some security, but nothing magic: an attacker can still gain
    root access, but at least we only elevate privileges when needed.

    See root context manager.
    """
    current = os.geteuid()
    logger.debug("Current EUID is %s" % current)
    if settings.DROPLET_USER is None:
        logger.info("Not changing EUID, DROPLET_USER is None")
        return
    # KeyError propagates if DROPLET_USER does not exist on the system,
    # same as before.
    uid = int(pwd.getpwnam(settings.DROPLET_USER).pw_uid)
    if current == uid:
        logger.debug("Didn't set EUID, it was already correct")
        return
    try:
        os.seteuid(uid)
        logger.info("Set EUID to %s (%s)" %
                    (settings.DROPLET_USER, os.geteuid()))
    except OSError:
        # Was a bare ``except:``, which also swallowed SystemExit and
        # KeyboardInterrupt; os.seteuid() raises OSError on failure.
        current_user = pwd.getpwuid(os.getuid()).pw_name
        logger.error("Failed to set '%s' EUID, running as '%s'" %
                     (settings.DROPLET_USER, current_user))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def drop_privileges():
    """ Switch the current process to settings.DROPLET_USER's UID.

    After calling this, root operations become impossible to execute.
    See root context manager.
    """
    target = pwd.getpwnam(settings.DROPLET_USER)
    os.setuid(int(target.pw_uid))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_by_value(cls, value):
    """ Get constant by its value.

    :param value: value of the constant to look for
    :returns: first found constant with given value
    :raises ValueError: if no constant in container has given value
    """
    # Sentinel distinguishes "not found" from any legitimate constant.
    _missing = object()
    found = next(
        (constant for constant in cls.iterconstants() if constant.value == value),
        _missing)
    if found is _missing:
        raise ValueError(
            "Constant with value \"{0}\" is not present in \"{1}\""
            .format(value, cls)
        )
    return found
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def filter_by_value(cls, value):
    """ Get all constants which have given value.

    :param value: value of the constants to look for
    :returns: list of all found constants with given value
    """
    return [constant for constant in cls.iterconstants()
            if constant.value == value]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_machine(self, key):
    """ Return the number of the machine which *key* gets sent to.

    Hashes the key onto the ring and picks the first machine whose hash is
    not below it, wrapping around past the highest hash value.
    """
    h = self.hash(key)
    # edge case where we cycle past hash value of 1 and back to 0.
    if h > self.hash_tuples[-1][2]:
        return self.hash_tuples[0][0]
    # BUG FIX: ``map`` returns a one-shot iterator on Python 3 and
    # ``bisect.bisect_left`` requires a sequence, so materialize a list.
    hash_values = [t[2] for t in self.hash_tuples]
    index = bisect.bisect_left(hash_values, h)
    return self.hash_tuples[index][0]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_user(name, username, email, password, token_manager=None, app_url=defaults.APP_URL):
    """ Create a new user with the specified name, username, email and
    password.

    :returns: the created account as a JSON dict
    :raises JutException: if the service responds with anything but 201
    """ |
    headers = token_manager.get_access_token_headers()
    auth_url = environment.get_auth_url(app_url=app_url)
    url = "%s/api/v1/accounts" % auth_url
    payload = {
        'name': name,
        'username': username,
        'email': email,
        'password': password
    }
    response = requests.post(url,
                             data=json.dumps(payload),
                             headers=headers)
    # 201 Created is the only success status for account creation.
    if response.status_code == 201:
        return response.json()
    else:
        raise JutException('Error %s: %s' % (response.status_code, response.text))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_logged_in_account(token_manager=None, app_url=defaults.APP_URL):
    """ Get the id of the logged-in account of the auth token_manager.

    NOTE(review): this function shares its name with another
    ``get_logged_in_account`` defined just below, which returns the whole
    account dict; at import time the later definition shadows this one. It
    reads as if it was meant to be named ``get_logged_in_account_id`` --
    confirm against callers.
    """ |
    return get_logged_in_account(token_manager=token_manager,
                                 app_url=app_url)['id']
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_logged_in_account(token_manager=None, app_url=defaults.APP_URL):
    """ Get the account details (JSON dict) for the credentials provided.

    :raises JutException: if the service responds with anything but 200
    """ |
    headers = token_manager.get_access_token_headers()
    auth_url = environment.get_auth_url(app_url=app_url)
    url = "%s/api/v1/account" % auth_url
    response = requests.get(url,
                            headers=headers)
    if response.status_code == 200:
        return response.json()
    else:
        raise JutException('Error %s; %s' % (response.status_code, response.text))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def user_exists(username, token_manager=None, app_url=defaults.APP_URL):
    """ Check whether a user exists with the specified username.

    :returns: True if found (200), False if not found (404)
    :raises JutException: on any other response status
    """
    headers = token_manager.get_access_token_headers()
    auth_url = environment.get_auth_url(app_url=app_url)
    response = requests.get(
        "%s/api/v1/accounts?username=%s" % (auth_url, username),
        headers=headers)
    status = response.status_code
    if status == 200:
        return True
    if status == 404:
        return False
    raise JutException('Error %s: %s' % (status, response.text))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def FromType(ftype):
    """ DocField subclasses factory: build the most convenient field kind to
    store data of the given type.

    Attribute precedence:

    * ``|attrs| > 0`` (``multi`` and ``uniq`` are implicit) => VectorField
    * ``uniq`` (``multi`` is implicit)                      => SetField
    * ``multi`` and ``not uniq``                            => ListField
    * ``not multi``                                         => ValueField

    :param ftype: the desired type of field
    :type ftype: subclass of :class:`.GenericType`
    """
    has_attrs = ftype.attrs is not None and len(ftype.attrs) > 0
    if has_attrs:
        return VectorField(ftype)
    if ftype.uniq:
        return SetField(ftype)
    if ftype.multi:
        return ListField(ftype)
    return ValueField(ftype)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def clear_attributes(self):
    """ Reset every attribute declared by the field's type to an empty list. """
    # Rebuild the attribute mapping from scratch: one empty list per
    # declared attribute name.
    self._attrs = {name: [] for name in self._ftype.attrs}
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_attr_value(self, key, attr, value):
    """ Set the value of a given attribute for a given key. """
    # Translate the key into its positional index, then delegate to the
    # stored attribute cell.
    position = self._keys[key]
    self._attrs[attr][position].set(value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_field(self, name, value, parse=False):
    """ Set the value of a field, optionally parsing the raw value first.

    :raises FieldValidationError: when the field rejects the value
    """
    # explicit dict.__getitem__ needed for ValueField
    try:
        field = dict.__getitem__(self, name)
        new_value = field.parse(value) if parse else value
        field.set(new_value)
    except ValidationError as err:
        raise FieldValidationError(name, value, list(err))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def export(self, exclude=()):
    """ Return a dictionary representation of the document.

    :param exclude: iterable of field names to leave out of the export;
        fields whose name starts with ``_`` are always skipped
    """
    # The default used to be a mutable list; an empty tuple avoids the
    # shared-mutable-default pitfall while accepting the same call sites
    # (only membership tests are performed on it).
    fields = ((key, self.get_field(key)) for key in self.schema
              if not key.startswith("_") and key not in exclude)
    return {name: field.export() for name, field in fields}
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def init_dirs(rootdir_or_loader, outputpath, saveto_dir='data', auximages_dir='auximages', prefix='crd'):
    """Initialize the directories.

    Inputs:
        rootdir_or_loader: depends on the type:
            str: the root directory of the SAXSCtrl/CCT software, i.e. where
                the subfolders ``eval2d``, ``param``, ``images``, ``mask``
                etc. reside.
            sastool.classes2.Loader instance: a fully initialized loader,
                which will be used to acquire headers and exposures.
            list: a list of sastool.classes2.Loader instances, which will be
                used to open headers and exposures. When opening something,
                always the first item will be tried first, and if it fails
                with FileNotFoundError, the second, third, etc. will be
                tried until either the file can be opened or the last one
                fails.
        outputpath: the directory where the produced files are written. This
            is usually the working directory of the IPython notebook.
        saveto_dir: the subdirectory where averaged, united, subtracted etc.
            datasets are written.
        auximages_dir: the subdirectory where automatically produced images
            reside.

    Remarks:
        If a single root directory is given, a list of four loaders will be
        constructed in this order: CCT (processed), CCT (raw), SAXSCtrl
        (processed), SAXSCtrl (raw). Raw and processed loaders are handled
        separately.
    """ |
    ip = get_ipython()
    if isinstance(rootdir_or_loader, str):
        print("Initializing loaders for SAXSCtrl and CCT.", flush=True)
        # NOTE(review): the docstring promises cct-proc, cct-raw,
        # saxsctrl-proc, saxsctrl-raw, but the code builds cct-proc,
        # saxsctrl-proc, cct-raw, saxsctrl-raw -- confirm which order
        # callers rely on.
        ip.user_ns['_loaders'] = [
            credo_cct.Loader(rootdir_or_loader, processed=True, exposureclass=prefix),
            credo_saxsctrl.Loader(rootdir_or_loader, processed=True, exposureclass=prefix),
            credo_cct.Loader(rootdir_or_loader, processed=False, exposureclass=prefix),
            credo_saxsctrl.Loader(rootdir_or_loader, processed=False, exposureclass=prefix),
        ]
        print("Loaders initialized.", flush=True)
    elif isinstance(rootdir_or_loader, Loader):
        ip.user_ns['_loaders'] = [rootdir_or_loader]
    elif isinstance(rootdir_or_loader, list) and all([isinstance(l, Loader) for l in rootdir_or_loader]):
        # Defensive copy so later mutation of the caller's list has no effect.
        ip.user_ns['_loaders'] = rootdir_or_loader[:]
    else:
        raise TypeError(rootdir_or_loader)
    if not os.path.isdir(outputpath):
        os.makedirs(outputpath)
    print("Output files will be written to:", outputpath)
    os.chdir(outputpath)
    ip.user_ns['outputpath'] = outputpath
    # Create the data/auximages subdirectories on first use.
    if not os.path.isdir(os.path.join(ip.user_ns['outputpath'], saveto_dir)):
        os.mkdir(os.path.join(ip.user_ns['outputpath'], saveto_dir))
    if not os.path.isdir(os.path.join(ip.user_ns['outputpath'], auximages_dir)):
        os.mkdir(os.path.join(ip.user_ns['outputpath'], auximages_dir))
    ip.user_ns['auximages_dir'] = os.path.join(outputpath, auximages_dir)
    ip.user_ns['saveto_dir'] = os.path.join(outputpath, saveto_dir)
    ip.user_ns['saveto_dir_rel'] = saveto_dir
    ip.user_ns['auximages_dir_rel'] = auximages_dir
    ip.user_ns['crd_prefix']=prefix
    set_length_units('nm')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_pg_core(connection_string, *, cursor_factory=None, edit_connection=None):
    """Build a non-pooled PostgreSQL data-access core. Requires the psycopg2 library.

    :param connection_string: libpq-style connection string used for every open.
    :param cursor_factory: optional cursor factory; defaults to ``NamedTupleCursor``.
    :param edit_connection: optional hook called with each freshly opened
        connection before it is handed out.
    :return: an :class:`InjectedDataAccessCore` wired for PostgreSQL.
    """
    import psycopg2
    from psycopg2.extras import NamedTupleCursor

    def open_connection():
        """Open a single PostgreSQL connection with the scope-captured settings."""
        connection = psycopg2.connect(connection_string)
        connection.cursor_factory = cursor_factory or NamedTupleCursor
        if edit_connection:
            edit_connection(connection)
        return connection

    return InjectedDataAccessCore(
        open_connection,
        default_connection_closer,
        ("%({0})s", "%s", "{0}::{1}"),
        empty_params=None,
        supports_timezones=True,
        supports_returning_syntax=True,
        get_autocommit=get_pg_autocommit,
        set_autocommit=set_pg_autocommit)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_pooled_pg_core(connection_string, pool_size=None, *, cursor_factory=None, edit_connection=None, threaded=True):
    """Build a pooled PostgreSQL data-access core. Requires the psycopg2 library.

    :param connection_string: libpq-style connection string for the pool.
    :param pool_size: 2-tuple ``(min_connections, max_connections)``;
        defaults to ``(5, 10)`` when falsy.
    :param cursor_factory: optional cursor factory for checked-out
        connections; defaults to ``NamedTupleCursor``.
    :param edit_connection: optional hook applied to each connection the
        pool hands out.
    :param threaded: use a thread-safe pool when true (the default).
    :return: an :class:`InjectedDataAccessCore` backed by a connection pool.
    """
    from psycopg2.extras import NamedTupleCursor
    from psycopg2.pool import ThreadedConnectionPool, SimpleConnectionPool

    min_conns, max_conns = pool_size or (5, 10)
    pool_cls = ThreadedConnectionPool if threaded else SimpleConnectionPool
    pool = pool_cls(min_conns, max_conns, connection_string)

    def checkout():
        """Borrow a PostgreSQL connection from the scope-captured pool."""
        connection = pool.getconn()
        connection.cursor_factory = cursor_factory or NamedTupleCursor
        if edit_connection:
            edit_connection(connection)
        return connection

    def checkin(connection):
        """Hand a connection back to the scope-captured pool."""
        pool.putconn(connection)

    return InjectedDataAccessCore(
        checkout,
        checkin,
        ("%({0})s", "%s", "{0}::{1}"),
        empty_params=None,
        supports_timezones=True,
        supports_returning_syntax=True,
        get_autocommit=get_pg_autocommit,
        set_autocommit=set_pg_autocommit)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_sqlite_autocommit(cn, autocommit):
    """SQLite autocommit setter for core.

    A boolean toggles between autocommit mode (``isolation_level = None``)
    and default deferred transactions (``""``); any non-bool value is
    assigned to ``isolation_level`` verbatim (e.g. "IMMEDIATE").
    """
    if not isinstance(autocommit, bool):
        cn.isolation_level = autocommit
    elif autocommit:
        cn.isolation_level = None
    else:
        cn.isolation_level = ""
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_sqlite_core(connection_string, *, cursor_factory=None, edit_connection=None):
    """Build a simple SQLite3 data-access core.

    :param connection_string: database path (or ``":memory:"``) passed to
        ``sqlite3.connect``.
    :param cursor_factory: optional factory assigned to
        ``Connection.row_factory``.
    :param edit_connection: optional hook applied to each new connection.
    :return: an :class:`InjectedDataAccessCore` wired for SQLite.
    """
    import sqlite3

    def open_connection():
        """Open a single SQLite connection with the scope-captured settings."""
        connection = sqlite3.connect(connection_string)
        if cursor_factory:
            connection.row_factory = cursor_factory
        if edit_connection:
            edit_connection(connection)
        return connection

    return InjectedDataAccessCore(
        open_connection,
        default_connection_closer, (":{0}", "?", SQL_CAST),
        empty_params=[],
        supports_timezones=True,
        supports_returning_syntax=False,
        get_autocommit=get_sqlite_autocommit,
        set_autocommit=set_sqlite_autocommit)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_mysql_core(connection_args, *, cursor_factory=None, edit_connection=None):
    """Build a simple MySQL data-access core. Requires the pymysql library.

    :param connection_args: dict of keyword arguments forwarded to
        ``pymysql.connect``.
    :param cursor_factory: optional cursor class assigned to
        ``Connection.cursorclass``.
    :param edit_connection: optional hook applied to each new connection.
    :return: an :class:`InjectedDataAccessCore` wired for MySQL.
    """
    import pymysql

    def open_connection():
        """Open a single MySQL connection with the scope-captured settings."""
        connection = pymysql.connect(**connection_args)
        if cursor_factory:
            connection.cursorclass = cursor_factory
        if edit_connection:
            edit_connection(connection)
        return connection

    return InjectedDataAccessCore(
        open_connection,
        default_connection_closer,
        ("%({0})s", "%s", SQL_CAST),
        empty_params=None,
        supports_timezones=False,
        supports_returning_syntax=False,
        get_autocommit=get_mysql_autocommit,
        set_autocommit=set_mysql_autocommit)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def close(self, connection, *, commit=True):
    """Finish the unit of work and release *connection* via the injected closer.

    :param connection: the connection to release.
    :param commit: commit the pending transaction when true (default),
        otherwise roll it back, before handing the connection to the closer.
    """
    finish = connection.commit if commit else connection.rollback
    finish()
    self.closer(connection)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def download():
    """Download the GeoNames cities1000 dump and extract it into MISC_PATH."""
    url = "http://download.geonames.org/export/dump/cities1000.zip"
    logging.info("Download cities from %s", url)
    if not os.path.exists(MISC_PATH):
        os.makedirs(MISC_PATH)
    archive_path = os.path.join(MISC_PATH, "cities1000.zip")
    urlretrieve(url, archive_path)
    with zipfile.ZipFile(archive_path, "r") as archive:
        archive.extractall(MISC_PATH)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get(self, pos):
    """Return the dataset closest to *pos*.

    Candidates are gathered from the one-degree latitude bin of the
    position plus its immediate neighbours; if that yields nothing, the
    search ring is widened degree by degree (up to 200) until candidates
    are found, then ``find_closest`` picks the winner.

    :param pos: mapping with a ``latitude`` key in degrees
        (presumably also ``longitude``, consumed by ``find_closest`` —
        TODO confirm against callers).
    :return: whatever ``find_closest`` returns for the gathered candidates.
    """
    latitude = int(round(pos['latitude']))
    # BUGFIX: take a copy of the bin. The original aliased
    # self.bins[latitude] and then used "+=", which extends the shared
    # list in place — every call permanently polluted the centre bin with
    # its neighbours' entries.
    search_set = list(self.bins[latitude])
    i = 1
    if latitude - i >= -90:
        search_set += self.bins[latitude - i]
    if latitude + i <= 90:
        search_set += self.bins[latitude + i]
    # Widen the ring until something turns up (only runs while empty, so
    # re-adding the i == 1 neighbours on the first pass is a no-op).
    while len(search_set) == 0 and i <= 200:
        if latitude - i >= -90:
            search_set += self.bins[latitude - i]
        if latitude + i <= 90:
            search_set += self.bins[latitude + i]
        i += 1
    return find_closest(search_set, pos)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def from_urlencode(self, data, options=None):
    """Deserialize a form-urlencoded payload into a dict.

    Multi-valued fields are kept as lists; single-valued fields are
    collapsed to their scalar value.

    :param data: raw ``application/x-www-form-urlencoded`` body.
    :param options: unused, kept for interface symmetry with other parsers.
    :return: dict mapping field names to a value or a list of values.
    """
    # BUGFIX: dict.iteritems() is Python-2-only and raises AttributeError
    # on Python 3; items() behaves the same on both.
    qs = dict((k, v if len(v) > 1 else v[0])
              for k, v in urlparse.parse_qs(data).items())
    return qs
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_electricity_info(self, apart_id, meter_room):
    """Fetch and parse dorm electricity usage.

    :param apart_id: building number (栋数).
    :param meter_room: room number (宿舍号).
    :return: JSON string with a ``response`` key; ``response`` is ``None``
        when the upstream page could not be fetched.
    """
    apart_id = str(apart_id)
    meter_room = str(meter_room)
    try:
        content = LifeService._get_electricity_info_html(apart_id, meter_room)
    except KeyError as e:
        # BUGFIX: BaseException.message was removed in Python 3; str(e)
        # is portable across versions.
        _.d(str(e))
        result = {
            'response': None
        }
        return _.to_json_string(result)
    soup = BeautifulSoup(content)
    # The page marks the interesting fields with span.STYLE7, in a fixed
    # order — presumably stable upstream markup; verify if parsing breaks.
    tags = soup.find_all(name='span', class_='STYLE7')
    result = {
        'response': {
            'apart': _.trim(tags[0].string),
            'apart_id': _.trim(tags[1].string),
            'used': _.trim(tags[2].string),
            'left': _.trim(tags[3].string),
            'update_time': _.trim(tags[4].string)
        }
    }
    return _.to_json_string(result)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_channels(self, channels):
    """Sets the state of multiple channels in one operation.

    :param channels: A dictionary where keys are channels and values the
        value to set for each channel.
    :type channels: ``dict``
    """
    for channel, value in channels.items():
        self.set(channel, value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_pdf_filenames_at(source_directory):
"""Find all PDF files in the specified directory. Args: source_directory (str):
The source directory. Returns: list(str):
Filepaths to all PDF files in the specified directory. Raises: ValueError """ |
if not os.path.isdir(source_directory):
raise ValueError("%s is not a directory!" % source_directory)
return [os.path.join(source_directory, filename)
for filename in os.listdir(source_directory)
if filename.endswith(PDF_EXTENSION)] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compress_multiple_pdfs(source_directory, output_directory, ghostscript_binary):
    """Compress all PDF files found in *source_directory* into *output_directory*.

    Generator: the first yielded value is the number of files to process;
    each subsequent value is the output path of one compressed file.

    Args:
        source_directory (str): Filepath to the source directory.
        output_directory (str): Filepath to the output directory.
        ghostscript_binary (str): Name of the Ghostscript binary.

    Returns:
        list(str): paths to outputs.
    """
    sources = _get_pdf_filenames_at(source_directory)
    yield len(sources)
    for src in sources:
        dst = os.path.join(output_directory, os.path.basename(src))
        compress_pdf(src, dst, ghostscript_binary)
        yield dst
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _post_init(self, name, container=None):
""" Called automatically by container after container's class construction. """ |
self.name = name
self.container = container |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pathresource(self, rscpath=None, logger=None):
    """Return the resource stored at *rscpath*.

    Driver errors are swallowed: they are logged when a logger is
    supplied, and ``None`` is returned.

    :param str rscpath: resource path.
    :param Logger logger: logger to use.
    :return: specific configuration resource, or None on failure.
    """
    try:
        return self._pathresource(rscpath=rscpath)
    except Exception as ex:
        if logger is not None:
            msg = 'Error while getting resource from {0}.'.format(rscpath)
            logger.error('{0} {1}: {2}'.format(msg, ex, format_exc()))
        return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getconf(self, path, conf=None, logger=None):
    """Aggregate the configurations found along *path*.

    Every resource path resolved from *path* is parsed; the first parsed
    configuration seeds the result (unless *conf* already does) and later
    ones are merged into it with ``update``.

    :param str path: conf resource path to parse and get parameters from.
    :param Configuration conf: conf to fill with path values and conf
        param names.
    :param Logger logger: logger to use in order to trace information/error.
    :rtype: Configuration
    """
    result = conf
    for rscpath in self.rscpaths(path=path):
        parsed = self._getconf(rscpath=rscpath, logger=logger, conf=conf)
        if parsed is None:
            continue
        if result is None:
            result = parsed
        else:
            result.update(parsed)
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def setconf(self, conf, rscpath, logger=None):
    """Set input conf in input path.

    The resource at *rscpath* is resolved first (a fresh one is created
    when none exists yet); driver failures are logged when a logger is
    supplied and re-raised as ``self.Error``.

    :param Configuration conf: conf to write to path.
    :param str rscpath: specific resource path to use.
    :param Logger logger: used to log info/errors.
    :raises: ConfDriver.Error in case of error.
    """
    resource = self.pathresource(rscpath=rscpath, logger=logger)
    if resource is None:
        resource = self.resource()
    try:
        self._setconf(conf=conf, resource=resource, rscpath=rscpath)
    except Exception as ex:
        # BUGFIX: msg was previously assigned only when a logger was
        # supplied, so the reraise below hit a NameError when logger
        # was None. Build the message unconditionally.
        msg = 'Error while setting conf to {0}.'.format(rscpath)
        if logger is not None:
            full_msg = '{0} {1}: {2}'.format(msg, ex, format_exc())
            logger.error(full_msg)
        reraise(self.Error, self.Error(msg))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.