text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def choice(opts, default=1, text='Please make a choice.'):
    """Prompt the user to select an option from a numbered menu.

    @param opts: List of options; entries may be (key, value) tuples
        (value is displayed, key is returned) or bare values.
    @type opts: list of tuple
    @param default: 1-based index offered as the default selection.
    @param text: Prompt text
    @type text: str
    @return: The selected option's key (for tuples) or the option itself.
    """
    options = list(opts)
    # Render the numbered menu; tuples display their second element.
    for index, entry in enumerate(options, 1):
        label = entry[1] if isinstance(entry, tuple) else entry
        click.echo('[{k}] {o}'.format(k=index, o=label))
    click.echo('-' * 12)
    selection = click.prompt(text, default, type=click.IntRange(1, len(options)))
    chosen = options[selection - 1]
    return chosen[0] if isinstance(chosen, tuple) else chosen
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def styled_status(enabled, bold=True):
    """Generate a styled Enabled/Disabled status string.

    @param enabled: Enabled / Disabled boolean
    @type enabled: bool
    @param bold: Display status in bold format
    @type bold: bool
    @rtype: str
    """
    if enabled:
        label, color = 'Enabled', 'green'
    else:
        label, color = 'Disabled', 'red'
    return click.style(label, color, bold=bold)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def domain_parse(url):
    """urlparse wrapper for user input.

    Lower-cases the input, assumes ``http://`` when no scheme is given,
    strips a leading ``www.`` label and any extra URL data (path, query).

    @type url: str
    @rtype: urlparse.ParseResult
    @raise ValueError: when no hostname can be extracted
    """
    url = url.lower()
    if not url.startswith('http://') and not url.startswith('https://'):
        url = '{schema}{host}'.format(schema='http://', host=url)
    url = urlparse(url)
    if not url.hostname:
        raise ValueError('Invalid domain provided')
    # Strip the "www." prefix. NOTE: str.lstrip('www.') is wrong here —
    # it strips the CHARACTER SET {'w', '.'}, mangling hosts such as
    # "wow.com" -> "ow.com"; test the prefix explicitly instead.
    host = url.hostname
    if host.startswith('www.'):
        host = host[4:]
    # Rebuild from scheme + host only, dropping any additional URL data.
    url = urlparse('{scheme}://{host}'.format(scheme=url.scheme, host=host))
    return url
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def http_session(cookies=None):
    """Generate a Requests session.

    @param cookies: Cookies to load. None loads the app default CookieJar.
        False disables cookie loading.
    @type cookies: dict, cookielib.LWPCookieJar, None or False
    @rtype: requests.Session
    """
    session = requests.Session()
    session.headers['User-Agent'] = 'ipsv/{v}'.format(v=ips_vagrant.__version__)
    # False explicitly disables cookies; any falsy value (None, empty
    # dict) falls back to the application's saved cookie jar.
    if cookies is not False:
        session.cookies.update(cookies if cookies else cookiejar())
    return session
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cookiejar(name='session'):
    """Ready the CookieJar, loading a saved session if available.

    @param name: Base name of the session file (without extension)
    @rtype: cookielib.LWPCookieJar
    """
    log = logging.getLogger('ipsv.common.cookiejar')
    spath = os.path.join(config().get('Paths', 'Data'), '{n}.txt'.format(n=name))
    cj = cookielib.LWPCookieJar(spath)
    log.debug('Attempting to load session file: %s', spath)
    if os.path.exists(spath):
        try:
            cj.load()
            log.info('Successfully loaded a saved session / cookie file')
        except cookielib.LoadError as e:
            # A corrupt session file is non-fatal: warn and return an
            # empty jar. (Logger.warn is deprecated; use warning.)
            log.warning('Session / cookie file exists, but could not be loaded', exc_info=e)
    return cj
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sendUserInvitationRevoked(self, context={}):
    """Send notification mails when a user's invitation is revoked.

    ``context['invite']`` must carry ``.organization``, ``.invited`` and
    ``.invitator``. Re-initializes the mailer per recipient to switch the
    receiver address and locale.

    NOTE(review): the mutable default ``context={}`` is never mutated
    here, but converting it to ``None`` would be safer — confirm callers.
    """
    organization, invited, invitator = context['invite'].organization, context['invite'].invited, context['invite'].invitator
    # invited user email
    self.__init__(organization, async_mail=self.async_mail, override_receiver=invited.email, locale=invited.locale)
    self.sendEmail('userInvitedRevoked-toUser', 'Your invitation to an organization has been revoked', context)
    if organization.owner == invitator:
        # The owner revoked their own invitation: one combined mail.
        self.__init__(organization, async_mail=self.async_mail, override_receiver=organization.owner.email, locale=organization.owner.locale)
        self.sendEmail('userInvitedRevoked-toOwnerInviter', 'You have revoked an user invitation', context)
    else:
        # Otherwise notify the owner and the inviting member separately.
        self.__init__(organization, async_mail=self.async_mail, override_receiver=organization.owner.email, locale=organization.owner.locale)
        self.sendEmail('userInvitedRevoked-toOwner', 'An invitation to join your organization has been revoked', context)
        self.__init__(organization, async_mail=self.async_mail, override_receiver=invitator.email, locale=invitator.locale)
        self.sendEmail('userInvitedRevoked-toMemberInviter', 'You have revoked an user invitation', context)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def sendUserLeft(self, context={}):
    """Send notification mails when a user leaves an organization.

    ``context`` must carry ``'organization'`` and ``'user'`` entries.

    NOTE(review): mutable default ``context={}`` is never mutated here;
    consider ``None`` — confirm callers.
    """
    # Notify the departing user.
    self.__init__(context['organization'], async_mail=self.async_mail, override_receiver=context['user'].email, locale=context['user'].locale)
    self.sendEmail('userLeft-toUser', 'You have left an organization', context)
    # Notify the organization owner.
    self.__init__(context['organization'], async_mail=self.async_mail, override_receiver=context['organization'].owner.email, locale=context['organization'].owner.locale)
    self.sendEmail('userLeft-toOwner', 'An user has left an organization you own', context)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dynamic_zoom_plot(x, y, N, RegionStartSize=1000):
    """Plot two linked time traces with interactive zoom.

    The top axis shows the N-fold downsampled trace with a movable green
    highlight region; the bottom axis shows the full-resolution trace
    windowed to that region. Two sliders control the region's centre and
    width (both expressed in sample indices), and a Reset button restores
    the initial values.

    :param x: time values
    :param y: signal values
    :param N: downsampling stride for the overview (top) plot
    :param RegionStartSize: divisor of len(x) giving the initial region width
    """
    x_lowres = x[::N]
    y_lowres = y[::N]
    ax1 = _plt.subplot2grid((2, 1), (0, 0), colspan=1)
    ax2 = _plt.subplot2grid((2, 1), (1, 0))
    fig = ax1.get_figure()
    _plt.subplots_adjust(bottom=0.25)  # makes space at bottom for sliders
    # Initial centre/width in sample indices.
    CenterTime0 = len(x)/2
    TimeWidth0 = len(x)/RegionStartSize
    l1, = ax1.plot(x_lowres, y_lowres, lw=2, color='red')
    # The highlight polygon must be removed and re-created on each slider
    # move; it is kept global so the update callback can replace it.
    global r1
    r1 = ax1.fill_between(x_lowres[int((CenterTime0 - TimeWidth0)/N) : int((CenterTime0 + TimeWidth0)/N)], min(y), max(y), facecolor='green', alpha=0.5)
    l2, = ax2.plot(x, y, lw=2, color='red')
    axcolor = 'lightgoldenrodyellow'
    axCenterTime = _plt.axes([0.25, 0.1, 0.65, 0.03], facecolor=axcolor)
    axTimeWidth = _plt.axes([0.25, 0.15, 0.65, 0.03], facecolor=axcolor)
    SliderCentreTime = Slider(axCenterTime, 'Center Time', 0, len(x), valinit=CenterTime0)
    SliderTimeWidth = Slider(axTimeWidth, 'Time Width', 0, len(x), valinit=TimeWidth0)
    def update(val):
        # Recompute the [left, right] sample window, clamped to the data,
        # then redraw the highlight and re-window the bottom trace.
        TimeWidth = SliderTimeWidth.val
        CentreTime = SliderCentreTime.val
        LeftIndex = int(CentreTime-TimeWidth)
        if LeftIndex < 0:
            LeftIndex = 0
        RightIndex = int(CentreTime+TimeWidth)
        if RightIndex > len(x)-1:
            RightIndex = len(x)-1
        global r1
        r1.remove()
        r1 = ax1.fill_between(x[LeftIndex:RightIndex], min(y), max(y), facecolor='green', alpha=0.5)
        l2.set_xdata(x[LeftIndex:RightIndex])
        l2.set_ydata(y[LeftIndex:RightIndex])
        ax2.set_xlim([x[LeftIndex], x[RightIndex]])
        fig.canvas.draw_idle()
    SliderCentreTime.on_changed(update)
    SliderTimeWidth.on_changed(update)
    resetax = _plt.axes([0.8, 0.025, 0.1, 0.04])
    button = Button(resetax, 'Reset', color=axcolor, hovercolor='0.975')
    def reset(event):
        # Restore both sliders to their initial values.
        SliderCentreTime.reset()
        SliderTimeWidth.reset()
    button.on_clicked(reset)
    _plt.show()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def find_config(fname='.rdo.conf', start=None):
    """Walk up the directory tree until an rdo config file is found.

    :param fname: Config file name to look for.
    :param start: Directory to start from; defaults to the current
        working directory.
    :return: Path of the first config file found.
    :raises Exception: when the filesystem root is reached with no match.
    """
    current = start or os.getcwd()
    while True:
        candidate = os.path.join(current, fname)
        if os.path.isfile(candidate):
            return candidate
        parent, _ = os.path.split(current)
        # Reaching the root means os.path.split becomes a fixed point.
        if parent == current:
            raise Exception('Config file not found')
        current = parent
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def departure(stop, destination):
    """Print departure information for a stop as pretty JSON.

    Runs an aiohttp session to completion on the module-level LOOP.

    :param stop: stop identifier passed to pyruter
    :param destination: destination filter passed to pyruter
    """
    from pyruter.api import Departures
    async def get_departures():
        """Fetch the departures and print them as sorted, indented JSON."""
        async with aiohttp.ClientSession() as session:
            data = Departures(LOOP, stop, destination, session)
            await data.get_departures()
            print(json.dumps(data.departures, indent=4, sort_keys=True))
    LOOP.run_until_complete(get_departures())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def destinations(stop):
    """Print final-destination information for a stop as pretty JSON.

    Runs an aiohttp session to completion on the module-level LOOP.

    :param stop: stop identifier passed to pyruter
    """
    from pyruter.api import Departures
    async def get_destinations():
        """Fetch the stop's final destinations and print them as JSON."""
        async with aiohttp.ClientSession() as session:
            data = Departures(LOOP, stop, session=session)
            result = await data.get_final_destination()
            print(json.dumps(result, indent=4, sort_keys=True,
                             ensure_ascii=False))
    LOOP.run_until_complete(get_destinations())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def authenticated_redirect(view_func=None, path=None):
    """Decorator redirecting already-authenticated users away from a view.

    Authenticated users are sent to ``path``, or to the
    ``DEFAULT_AUTHENTICATED_PATH`` setting (falling back to 'dashboard').

    Usage:
        @authenticated_redirect
        @authenticated_redirect()
        @authenticated_redirect(path='home')
    """
    default_path = getattr(settings, 'DEFAULT_AUTHENTICATED_PATH', 'dashboard')
    if view_func is None:
        # Invoked with arguments: return a decorator awaiting the view.
        return functools.partial(authenticated_redirect, path=path)
    @functools.wraps(view_func)
    def _wrapped_view(request, *args, **kwargs):
        # Avoid a redirect loop when already on the target path.
        if path == request.path.replace('/', ''):
            return redirect(default_path)
        if request.user.is_authenticated():
            return redirect(path or default_path)
        return view_func(request, *args, **kwargs)
    return _wrapped_view
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def process(self, job_id):
    """Process a job by the queue (simpy process generator).

    Notifies the observer that service has started, draws a service time
    from the configured generator, waits for it to elapse, then notifies
    the observer of the departure.

    :param job_id: identifier of the job being processed
    :raises GGCQServiceTimeStopIteration: service time generator exhausted
    :raises GGCQServiceTimeTypeError: service time has a non-numeric type
    :raises GGCQNegativeServiceTimeError: negative service time drawn
    """
    self._logger.info(
        '{:.2f}: Process job {}'.format(self._env.now, job_id)
    )
    # log time of commencement of service
    self._observer.notify_service(time=self._env.now, job_id=job_id)
    # draw a new service time
    try:
        service_time = next(self._service_time_generator)
    except StopIteration:
        # ERROR: no more service times
        error_msg = ('Service time generator exhausted')
        self._logger.error(error_msg)
        # raise a different exception, as simpy uses StopIteration to
        # signify end of process (generator)
        raise GGCQServiceTimeStopIteration(error_msg)
    # wait for the service time to pass
    try:
        # best-effort debug log; service_time may not support {:.2f}
        self._logger.debug('Service time: {:.2f}'.format(service_time))
    except:
        pass
    try:
        yield self._env.timeout(service_time)
    except TypeError:
        # error: service time of wrong type
        error_msg = (
            "service time '{}' has wrong type '{}'".format(
                service_time, type(service_time).__name__
            )
        )
        self._logger.error(error_msg)
        # trigger exception
        raise GGCQServiceTimeTypeError(error_msg)
    except ValueError as exc:
        if str(exc).startswith('Negative delay'):
            # error: negative service time
            error_msg = (
                "negative service time {:.2f}".format(
                    service_time
                )
            )
            self._logger.error(error_msg)
            # trigger exception
            raise GGCQNegativeServiceTimeError(error_msg)
        else:
            raise
    # job finished processing -> departing
    self._logger.info(
        '{:.2f}: Finished processing job {}'.format(self._env.now, job_id)
    )
    # log departure epoch
    self._observer.notify_departure(time=self._env.now, job_id=job_id)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generate(self):
    """Generate jobs according to the interarrival time distribution.

    simpy process generator: waits the interarrival time, registers the
    arrival with the observer, spawns the job process in the simulation
    environment, then draws the next interarrival time. Terminates when
    the arrival-time generator is exhausted.

    :raises GGCQArrivalTimeTypeError: arrival time has a non-numeric type
    :raises GGCQNegativeArrivalTimeError: negative arrival time drawn
    """
    # First job arrives immediately.
    inter_arrival_time = 0.0
    while True:
        # wait for next job to arrive
        try:
            yield self._env.timeout(inter_arrival_time)
        except TypeError:
            # error: arrival time of wrong type
            error_msg = (
                "arrival time '{}' has wrong type '{}'".format(
                    inter_arrival_time, type(inter_arrival_time).__name__
                )
            )
            self._logger.error(error_msg)
            # trigger exception
            raise GGCQArrivalTimeTypeError(error_msg)
        except ValueError as exc:
            if str(exc).startswith('Negative delay'):
                # error: negative arrival time
                error_msg = (
                    "negative arrival time {:.2f}".format(
                        inter_arrival_time
                    )
                )
                self._logger.error(error_msg)
                # trigger exception
                raise GGCQNegativeArrivalTimeError(error_msg)
            else:
                raise
        # job has arrived
        job_id = self._job_id
        self._observer.notify_arrival(time=self._env.now, job_id=job_id)
        # get job process
        job = self._job_generator(job_id)
        # submit job to queue
        self._env.process(job)
        # time for the next job to arrive
        try:
            inter_arrival_time = next(self._arrival_time_generator)
            self._job_id += 1
        except StopIteration:
            # no more jobs to arrive -- exit process
            self._env.exit()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse(cls, rule_string):
    """Parse a rule string into a list of rules.

    A single line may yield multiple rules — one per port range found.

    :param rule_string: textual rule to feed to the module-level parser
    :return: list of Rule instances
    """
    result = parser.parseString(rule_string)
    rules = []
    # breakout port ranges into multiple rules
    kwargs = {}
    # Missing pyparsing fields are empty (falsy) results; map them to None.
    kwargs['address'] = result.ip_and_mask or None
    kwargs['group'] = result.security_group or None
    kwargs['group_name'] = result.group_name or None
    for x,y in result.ports:
        r = Rule(result.protocol, x, y, **kwargs)
        rules.append(r)
    return rules
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def bind_to(self, argspec, dispatcher):
    """Register our wrapped function with ``dispatcher`` for ``argspec``.

    Also records the (argspec, dispatcher) pair under the argspec's key
    so the binding can be undone later by :meth:`unbind`.
    """
    self.bound_to[argspec.key].add((argspec, dispatcher))
    dispatcher.bind(self.f, argspec)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unbind(self):
    """Unbind from dispatchers and the target function.

    :return: set of (argspec, dispatcher) tuples that were bound
    """
    args_dispatchers = set()
    # self._wf is a weak reference to the wrapped function; it may be dead.
    f = self._wf()
    if f is not None:
        for ad_list in self.bound_to.values():
            args_dispatchers.update(ad_list)
            for argspec, dispatcher in ad_list:
                dispatcher.unbind(self.f, argspec)
        # Remove the marker attribute stored on the wrapped function.
        # NOTE(review): indentation reconstructed — confirm this runs once
        # after the loop, not per ad_list, against upstream.
        del f.__dict__['__mnd__']
    self.bound_to = {}
    return args_dispatchers
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def insert_taxon_in_new_fasta_file(self, aln):
    """Write a copy of ``aln`` with the taxon name inserted in each record.

    primer4clades infers the codon usage table from taxon names enclosed
    in square brackets anywhere in the FASTA description, so
    ``self.taxon_for_codon_usage`` is prepended (bracketed) to each
    sequence id in a new FASTA file.

    :param aln: path to the input FASTA alignment
    :return: filename of the modified FASTA file
    """
    new_seq_records = []
    for seq_record in SeqIO.parse(aln, 'fasta'):
        new_seq_record_id = "[{0}] {1}".format(self.taxon_for_codon_usage, seq_record.id)
        new_seq_record = SeqRecord(seq_record.seq, id=new_seq_record_id)
        new_seq_records.append(new_seq_record)
    # Build "<base>_modified<ext>" alongside the original file.
    base_filename = os.path.splitext(aln)
    new_filename = '{0}_modified{1}'.format(base_filename[0], base_filename[1])
    SeqIO.write(new_seq_records, new_filename, "fasta")
    return new_filename
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def group_primers(self, my_list):
    """Pair consecutive primer strings and format them as FASTA records.

    Elements are consumed two at a time — a forward primer followed by
    its reverse primer — where each element is a "<sequence> <name>"
    string.

    :param my_list: flat list of primer strings, in forward/reverse order
    :return: list of ">F_<name>\\n<seq>\\n>R_<name>\\n<seq>" strings
    """
    pair_size = 2
    grouped = []
    for start in range(0, len(my_list), pair_size):
        forward = my_list[start].split(" ")
        reverse = my_list[start + 1].split(" ")
        record = ">F_{0}\n{1}".format(forward[1], forward[0])
        record += "\n>R_{0}\n{1}".format(reverse[1], reverse[0])
        grouped.append(record)
    return grouped
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def choose_best_amplicon(self, amplicon_tuples):
    """Return the amplicon with the highest quality and amplicon length.

    Fields 4 and 5 of each tuple hold quality and length (as strings or
    ints); a candidate replaces the running best only when it is at
    least as good on BOTH criteria.

    :param amplicon_tuples: iterable of amplicon tuples
    :return: the best amplicon tuple, or None when the input is empty
    """
    best = None
    best_quality = 0
    best_length = 0
    for candidate in amplicon_tuples:
        quality = int(candidate[4])
        length = int(candidate[5])
        if quality >= best_quality and length >= best_length:
            best_quality = quality
            best_length = length
            best = candidate
    return best
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run():
    """Run a command in the context of the system dependencies.

    Parses command-line options, installs the requested packages via
    dependency_context, runs the command, and exits with the command's
    return code.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--deps-def',
        default=data_lines_from_file("system deps.txt")
        + data_lines_from_file("build deps.txt"),
        help="A file specifying the dependencies (one per line)",
        type=data_lines_from_file, dest="spec_deps")
    parser.add_argument(
        '--dep', action="append", default=[],
        help="A specific dependency (multiple allowed)", dest="deps")
    parser.add_argument(
        'command', type=shlex.split,
        default=shlex.split("python2.7 setup.py test"),
        help="Command to invoke in the context of the dependencies")
    parser.add_argument(
        '--do-not-remove', default=False, action="store_true",
        help="Keep any installed packages")
    parser.add_argument(
        '--aggressively-remove', default=False,
        action="store_true",
        help="When removing packages, also remove those automatically installed"
        " as dependencies")
    parser.add_argument(
        '-l', '--log-level', default=logging.INFO,
        type=log_level, help="Set log level (DEBUG, INFO, WARNING, ERROR)")
    args = parser.parse_args()
    logging.basicConfig(level=args.log_level)
    context = dependency_context(
        args.spec_deps + args.deps,
        aggressively_remove=args.aggressively_remove)
    with context as to_remove:
        if args.do_not_remove:
            # Emptying the list in place keeps the context manager from
            # removing anything on exit.
            del to_remove[:]
        raise SystemExit(subprocess.Popen(args.command).wait())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dependency_context(package_names, aggressively_remove=False):
    """Install the supplied packages and yield; remove them afterwards.

    Currently assumes 'aptitude' is available. A file lock serializes
    concurrent installs on the same host; the lock is held while any
    newly-installed packages exist and released once they are removed.

    :param package_names: packages to install
    :param aggressively_remove: also remove automatically-installed
        dependencies when cleaning up
    """
    installed_packages = []
    log = logging.getLogger(__name__)
    try:
        if not package_names:
            logging.debug('No packages requested')
        if package_names:
            lock = yg.lockfile.FileLock(
                '/tmp/.pkg-context-lock',
                timeout=30 * 60)
            log.info('Acquiring lock to perform install')
            lock.acquire()
            log.info('Installing ' + ', '.join(package_names))
            output = subprocess.check_output(
                ['sudo', 'aptitude', 'install', '-y'] + package_names,
                stderr=subprocess.STDOUT,
            )
            log.debug('Aptitude output:\n%s', output)
            installed_packages = jaraco.apt.parse_new_packages(
                output,
                include_automatic=aggressively_remove)
            if not installed_packages:
                # Nothing new installed -> nothing to remove later, so the
                # lock can be released before yielding.
                lock.release()
            log.info('Installed ' + ', '.join(installed_packages))
        yield installed_packages
    except subprocess.CalledProcessError:
        log.error("Error occurred installing packages")
        raise
    finally:
        if installed_packages:
            log.info('Removing ' + ','.join(installed_packages))
            subprocess.check_call(
                ['sudo', 'aptitude', 'remove', '-y'] + installed_packages,
                stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
            )
            # NOTE(review): ``lock`` only exists when package_names was
            # non-empty, which is guaranteed here because packages were
            # installed — indentation reconstructed, confirm upstream.
            lock.release()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def tarball_context(url, target_dir=None, runner=None, pushd=pushd):
    """Get a tarball, extract it, change to that directory, yield, then
    clean up.

    :param url: tarball URL to download with wget
    :param target_dir: extraction directory; derived from the URL's
        filename when omitted
    :param runner: function used to invoke shell commands (defaults to
        ``subprocess.check_call`` with ``shell=True``)
    :param pushd: context manager for changing the working directory
    """
    if target_dir is None:
        # Derive the directory name from the tarball's filename.
        target_dir = os.path.basename(url).replace('.tar.gz', '').replace(
            '.tgz', '')
    if runner is None:
        runner = functools.partial(subprocess.check_call, shell=True)
    # In the tar command, use --strip-components=1 to strip the first path and
    # then
    # use -C to cause the files to be extracted to {target_dir}. This ensures
    # that we always know where the files were extracted.
    runner('mkdir {target_dir}'.format(**vars()))
    try:
        getter = 'wget {url} -O -'
        extract = 'tar x{compression} --strip-components=1 -C {target_dir}'
        cmd = ' | '.join((getter, extract))
        runner(cmd.format(compression=infer_compression(url), **vars()))
        with pushd(target_dir):
            yield target_dir
    finally:
        # Always remove the extraction directory, even on failure.
        runner('rm -Rf {target_dir}'.format(**vars()))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def infer_compression(url):
    """Given a URL or filename, infer the compression flag for tar.

    Only the final two characters are inspected ('gz' -> 'z',
    'bz' -> 'j', 'xz' -> 'J'); anything else falls back to gzip.
    """
    # Cheat and key off the last two characters of the name; unknown
    # suffixes are assumed to be gzip.
    suffix_map = {'gz': 'z', 'bz': 'j', 'xz': 'J'}
    return suffix_map.get(url[-2:], 'z')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def temp_dir(remover=shutil.rmtree):
    """Create a temporary directory context.

    Yields a fresh temporary directory and removes it on exit. Pass a
    custom ``remover`` to override the removal behavior.
    """
    created = tempfile.mkdtemp()
    try:
        yield created
    finally:
        # Removal runs even when the body raises.
        remover(created)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
    """Check out the repo indicated by url and yield the checkout dir.

    :param url: repository URL; any URL containing 'git' is cloned with
        git, everything else with hg
    :param branch: optional branch to clone
    :param quiet: suppress clone output when True
    :param dest_ctx: context manager yielding the target directory for
        the checkout (defaults to a temporary directory)
    """
    # Crude VCS detection: 'git' anywhere in the URL selects git.
    exe = 'git' if 'git' in url else 'hg'
    with dest_ctx() as repo_dir:
        cmd = [exe, 'clone', url, repo_dir]
        if branch:
            cmd.extend(['--branch', branch])
        # Open devnull in a context manager so the handle is not leaked
        # (the original left it open for the life of the process).
        with open(os.path.devnull, 'w') as devnull:
            subprocess.check_call(cmd, stdout=devnull if quiet else None)
        yield repo_dir
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def device_from_request(request):
    """Determine the device name from the request.

    First looks for an overriding "yacms-device" cookie; if absent,
    matches the User-Agent header against settings.DEVICE_USER_AGENTS.
    Used at the template level for choosing the template to load and at
    the cache level as a cache key prefix. Returns "" when nothing
    matches.
    """
    from yacms.conf import settings
    try:
        # If a device was set via cookie, match available devices.
        for (device, _) in settings.DEVICE_USER_AGENTS:
            if device == request.COOKIES["yacms-device"]:
                return device
    except KeyError:
        # If a device wasn't set via cookie, match user agent.
        try:
            user_agent = request.META["HTTP_USER_AGENT"].lower()
        except KeyError:
            pass
        else:
            try:
                # NOTE(review): on Python 3 str has no .decode, so the
                # AttributeError below skips UA matching entirely —
                # confirm this code targets Python 2 byte strings.
                user_agent = user_agent.decode("utf-8")
                for (device, ua_strings) in settings.DEVICE_USER_AGENTS:
                    for ua_string in ua_strings:
                        if ua_string.lower() in user_agent:
                            return device
            except (AttributeError, UnicodeDecodeError, UnicodeEncodeError):
                pass
    return ""
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def body(self):
    """Request body string read from `wsgi.input`.

    The result is cached after the first read. Raises
    ReadBodyTwiceError when the input stream was already consumed via
    `fieldstorage`, since wsgi.input can only be read once.
    """
    if self._body is None:
        if self._fieldstorage is not None:
            raise ReadBodyTwiceError()
        # Read exactly CONTENT_LENGTH bytes (0 when the header is absent).
        clength = int(self.environ('CONTENT_LENGTH') or 0)
        self._body = self._environ['wsgi.input'].read(clength)
        if isinstance(self._body, bytes):
            self._body = self._body.decode('utf8')
    return self._body
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fieldstorage(self):
    """`cgi.FieldStorage` parsed from `wsgi.input`.

    The result is cached after the first access. Raises
    ReadBodyTwiceError when the input stream was already consumed via
    `body`.
    """
    if self._fieldstorage is None:
        if self._body is not None:
            raise ReadBodyTwiceError()
        self._fieldstorage = cgi.FieldStorage(
            environ=self._environ,
            fp=self._environ['wsgi.input']
        )
    return self._fieldstorage
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def params(self):
    """Parsed query string (first value wins for repeated keys)."""
    if self._params is None:
        self._params = self.arg_container()
        data = compat.parse_qs(self.environ('QUERY_STRING') or '')
        for k, v in data.items():
            # parse_qs returns a list per key; keep only the first value.
            self._params[k] = v[0]
    return self._params
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cookie(self):
    """Cookie values parsed from the Cookie request header.

    NOTE(review): parse_qs splits on '&'/';' — verify it copes with the
    standard '; '-separated cookie format on the target Python version;
    the k.strip() below suggests it does (leading spaces remain on keys).
    """
    if self._cookie is None:
        self._cookie = self.arg_container()
        data = compat.parse_qs(self.http_header('cookie') or '')
        for k, v in data.items():
            self._cookie[k.strip()] = v[0]
    return self._cookie
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def data(self):
    """Values in the request body.

    File uploads become (filename, file object) tuples; ordinary fields
    use their first value. The result is cached.
    """
    if self._data is None:
        self._data = self.arg_container()
        # FieldStorage.value is a list only for multi-field bodies.
        if isinstance(self.fieldstorage.value, list):
            for k in self.fieldstorage.keys():
                fname = self.fieldstorage[k].filename
                if fname:
                    # File upload: expose the name and the open file.
                    self._data[k] = (fname, self.fieldstorage[k].file)
                else:
                    self._data[k] = self.fieldstorage.getfirst(k)
    return self._data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def drain(self, p):
    '''Read the named pipe ``p``, feeding complete lines to consume().

    The pipe is opened O_RDWR so it stays open even with no writer, and
    O_NONBLOCK so reads never stall the greenlet; polling continues
    while loop() returns truthy.
    '''
    self.logging.info('Started.')
    fd = os.open(p, os.O_RDWR | os.O_NONBLOCK)
    gevent_os.make_nonblocking(fd)
    while self.loop():
        try:
            lines = gevent_os.nb_read(fd, 4096).splitlines()
            if len(lines) == 0:
                # Nothing available: back off briefly before re-polling.
                sleep(0.5)
            else:
                self.consume(lines)
        except OSError:
            # A non-blocking read with no data raises; just retry.
            pass
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add(self, handler, name=None, exception_handlers=()):
    """Add a handler to the route.

    :param handler: The "handler" callable to add.
    :param name: Optional. When specified, the return value of this
        handler will be added to the context under ``name``.
    :param exception_handlers: Optional. A list of ``(types, handler)``
        tuples, where ``types`` is an exception type (or tuple of types)
        to handle, and ``handler`` is a callable.

    When an exception occurs in a handler, ``exc_info`` is temporarily
    added to the context and this list is searched for a matching
    handler. If none is found the exception is re-raised to the caller
    of the route; otherwise the matching handler is called (with the
    context injected, so it may accept an ``exc_info`` argument) and its
    return value replaces the original handler's return value.
    """
    self.route.append((name, handler, exception_handlers))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add(self, match, handler):
    """Register a handler with the Router.

    :param match: The first argument passed to the :meth:`match` method
        when checking against this handler.
    :param handler: A callable or :class:`Route` instance that will
        handle matching calls. Bare callables are wrapped in a Route.
    """
    if not isinstance(handler, Route):
        handler = Route(handler)
    self.routes.append((match, handler))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_annotated_data_x_y(timestamps, data, lbls):
    """Extract the rows of ``data`` whose timestamps fall inside a label.

    DOESN'T work with OVERLAPPING labels.

    :param timestamps: sample timestamps, assumed uniformly spaced (the
        step is taken from timestamps[3] - timestamps[2])
    :param data: 2-D array of samples, one row per timestamp
    :param lbls: label objects exposing start_seconds, end_seconds, label
    :return: (X, Y, classes, new_timestamps) — X holds the matching rows
        (None when empty, 1-D for a single match, 2-D otherwise), Y the
        per-row labels, classes the distinct labels in order of first
        appearance, and new_timestamps a re-based uniform grid
    """
    timestamps = np.array(timestamps)
    timestamp_step = timestamps[3]-timestamps[2]
    current_new_timestamp = 0.0
    new_timestamps = []
    rows = []
    Y = []
    classes = []
    for i in range(0, len(timestamps)):
        for lbl in lbls:
            if lbl.start_seconds <= timestamps[i] < lbl.end_seconds:
                # Collect matching rows and stack once at the end instead
                # of calling np.vstack per row (which is quadratic).
                rows.append(data[i, :])
                Y.append(lbl.label)
                new_timestamps.append(current_new_timestamp)
                current_new_timestamp += timestamp_step
                if lbl.label not in classes:
                    classes.append(lbl.label)
    if not rows:
        X = None
    elif len(rows) == 1:
        # Preserve the original shape contract: one match yields 1-D.
        X = rows[0]
    else:
        X = np.vstack(rows)
    return X, Y, classes, new_timestamps
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def scope_logger(cls):
    """Class decorator adding a class-local ``log`` attribute.

    The logger is named "<module>.<class name>" so records can be traced
    back to the decorated class.
    """
    logger_name = '{0}.{1}'.format(cls.__module__, cls.__name__)
    cls.log = logging.getLogger(logger_name)
    return cls
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run(self):
    """Run the thread, logging every line read from the pipe.

    Clears the finished flag on entry and sets it once the pipe reader
    is exhausted and closed.
    """
    self._finished.clear()
    # The '' sentinel stops iteration at EOF (readline returns '' then).
    for line in iter(self.pipeReader.readline, ''):
        logging.log(self.level, line.strip('\n'))
    self.pipeReader.close()
    self._finished.set()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load(self, source, filepath=None):
    """Load source as manifest attributes.

    Arguments:
        source (string or file-object): CSS source to parse and
            serialize to find metas and rules. Either a string or a
            file-like object (with a ``read()`` method returning a
            string).

    Keyword Arguments:
        filepath (string): Optional filepath to memorize if source comes
            from a file. Default is ``None`` as if source comes from a
            string. If ``source`` is a file-like object, filepath is
            taken from its ``name`` attribute instead.

    Returns:
        dict: Dictionnary of serialized rules.
    """
    # Set _path if source is a file-like object
    try:
        self._path = source.name
    except AttributeError:
        self._path = filepath
    # Get source content either it's a string or a file-like object
    try:
        source_content = source.read()
    except AttributeError:
        source_content = source
    # Parse and serialize given source
    parser = TinycssSourceParser()
    self._datas = parser.parse(source_content)
    serializer = ManifestSerializer()
    references = serializer.serialize(self._datas)
    # Copy serialized metas
    self.metas = serializer._metas
    # Set every enabled rule as object attribute
    for k, v in references.items():
        self.set_rule(k, v)
    return self._datas
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_rule(self, name, properties):
    """Set a rule as an object attribute.

    Arguments:
        name (string): Rule name to set as attribute name.
        properties (dict): Dictionnary of properties.
    """
    # Track the name so rules can be enumerated/removed later.
    self._rule_attrs.append(name)
    setattr(self, name, properties)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def remove_rule(self, name):
    """Remove a rule from attributes.

    Arguments:
        name (string): Rule name to remove.
    """
    # Untrack first, then drop the attribute itself.
    self._rule_attrs.remove(name)
    delattr(self, name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def to_json(self, indent=4):
    """Serialize metas and rule attributes to a JSON string.

    Keyword Arguments:
        indent (int): Space indentation, default to ``4``.

    Returns:
        string: JSON payload containing a 'metas' entry plus one entry
        per registered rule.
    """
    payload = {'metas': self.metas}
    for rule_name in self._rule_attrs:
        payload[rule_name] = getattr(self, rule_name)
    return json.dumps(payload, indent=indent)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def send(self, to, cc, subject, body, atts=None, delete=False):
"""Send an email action. :param to: receivers list :param cc: copy user list :param subject: email title :param body: email content body :param atts: email attachments :param delete: whether delete att :return: True or False """ |
# Build a multipart MIME message with encoded headers.
email_cnt = MIMEMultipart()
email_cnt['From'] = Header(self.smtp_user, CHARSET_ENCODING)
email_cnt['To'] = Header(';'.join(to), CHARSET_ENCODING)
email_cnt['Cc'] = Header(';'.join(cc), CHARSET_ENCODING)
email_cnt['Subject'] = Header(subject, CHARSET_ENCODING)
email_cnt['Date'] = formatdate()
# Body is sent as HTML.
email_cnt.attach(MIMEText(body, 'html', CHARSET_ENCODING))
# Attach files; `delete` is forwarded so the helper may remove them afterwards.
self.__add_att__(email_cnt, atts, delete)
try:
self.__login__()
# Envelope recipients are both the To and Cc addresses.
self.smtp_conn.sendmail(self.smtp_user, to+cc, email_cnt.as_string())
with_att_msg = 'Empty'
if atts:
for i, att in enumerate(atts):
# NOTE(review): startswith() returns a bool (0/1), so this slices
# off the first 1 or 2 characters -- presumably meant to strip a
# leading path component for logging; confirm intent.
atts[i] = att[att.startswith('/')+1:]
with_att_msg = ','.join(atts)
'''Flush memory
'''
atts[:] = []
logger.info('Send email[%s] success.', subject)
logger.info('To users: %s.', ','.join(to+cc))
logger.info('With attachments: %s.', with_att_msg)
except Exception as e:
# Any failure (login, send) is wrapped in a project exception.
raise SendEmailException("Send email[%s] failed!!! Case: %s" % (subject, str(e)))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def restrict(self, addr):
    """Exclude *addr* from the set of client addresses this proxy is
    permitted to introduce.

    :param addr: The address to remove.
    """
    # Validate first; invalid input is logged and otherwise ignored.
    if _parse_ip(addr) is None:
        LOG.warn("Cannot restrict address %r from proxy %s: "
                 "invalid address" % (addr, self.address))
        return
    self.excluded.add(addr)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def accept(self, addr):
    """Add *addr* to the set of client addresses this proxy is
    permitted to introduce.

    :param addr: The address to add.
    """
    # Validate first; invalid input is logged and otherwise ignored.
    if _parse_ip(addr) is None:
        LOG.warn("Cannot add address %r to proxy %s: "
                 "invalid address" % (addr, self.address))
        return
    self.accepted.add(addr)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def validate(self, proxy_ip, client_ip):
    """Check whether *proxy_ip* may introduce *client_ip*.

    :param proxy_ip: The IP address of the proxy.
    :param client_ip: The IP address of the supposed client.

    :returns: True if the proxy is permitted to introduce the client;
              False if the proxy doesn't exist or isn't permitted to
              introduce the client.
    """
    # A configured pseudo-proxy stands in for every real proxy.
    if self.pseudo_proxy:
        proxy = self.pseudo_proxy
    else:
        # Unknown proxies may introduce nobody.
        if proxy_ip not in self.proxies:
            return False
        proxy = self.proxies[proxy_ip]
    # Membership semantics are delegated to the proxy object itself.
    return client_ip in proxy
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def requires_auth(func):
"""Handle authentication checks. .. py:decorator:: requires_auth Checks if the token has expired and performs authentication if needed. """ |
# six.wraps (functools.wraps) preserves the wrapped function's metadata.
@six.wraps(func)
def wrapper(self, *args, **kwargs):
# Re-authenticate lazily: only when the cached token has expired.
if self.token_expired:
self.authenticate()
return func(self, *args, **kwargs)
return wrapper
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def headers(self):
"""Provide access to updated headers.""" |
# Always advertise the configured language to the API.
self._headers.update(**{'Accept-Language': self.language})
# Attach the bearer token only once authentication has happened
# (__token is the class-private token attribute).
if self.__token:
self._headers.update(
**{'Authorization': 'Bearer %s' % self.__token})
return self._headers
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def token_expired(self):
    """Return True when no token exists or the current one is stale."""
    # No timer means we never authenticated; otherwise tokens go stale
    # after one hour.
    return (self._token_timer is None or
            timeutil.is_newer_than(self._token_timer, timeutil.ONE_HOUR))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def session(self):
"""Provide access to request session with local cache enabled.""" |
# Lazily build the session on first access and reuse it afterwards.
if self._session is None:
# CacheControl wraps the requests session with an HTTP cache
# persisted in the local '.tvdb_cache' directory.
self._session = cachecontrol.CacheControl(
requests.Session(),
cache=caches.FileCache('.tvdb_cache'))
return self._session
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _exec_request(self, service, method=None, path_args=None, data=None, params=None):
"""Execute request.""" |
if path_args is None:
path_args = []
req = {
# HTTP verb defaults to GET when none is given.
'method': method or 'get',
# Join service URL, service name and path pieces with single '/'.
'url': '/'.join(str(a).strip('/') for a in [
cfg.CONF.tvdb.service_url, service] + path_args),
'data': json.dumps(data) if data else None,
'headers': self.headers,
'params': params,
'verify': cfg.CONF.tvdb.verify_ssl_certs,
}
LOG.debug('executing request (%s %s)', req['method'], req['url'])
resp = self.session.request(**req)
# Raise for 4xx/5xx responses before attempting to decode the body.
resp.raise_for_status()
# Some endpoints return an empty body; pass it through unchanged.
return resp.json() if resp.text else resp.text
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def authenticate(self):
"""Acquire authorization token for using thetvdb apis.""" |
# If we already hold a token, try the cheaper refresh first.
if self.__token:
try:
resp = self._refresh_token()
except exceptions.TVDBRequestException as err:
# if a 401 is the cause try to login
if getattr(err.response, 'status_code', 0) == 401:
resp = self._login()
else:
raise
else:
resp = self._login()
# Cache the token and remember when it was issued for expiry checks.
self.__token = resp.get('token')
self._token_timer = timeutil.utcnow()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def search_series(self, **kwargs):
    """Search for a series (authorization token required).

    Supported search arguments: name, imdbId, zap2itId.

    :param kwargs: keyword arguments to search for series
    :returns: series record or series records
    :rtype: dict
    """
    # Drop any keyword that is not a recognised search parameter.
    params = {key: value for key, value in six.iteritems(kwargs)
              if key in SERIES_BY}
    resp = self._exec_request(
        'search', path_args=['series'], params=params)
    # Optionally collapse the result list to its first entry.
    if cfg.CONF.tvdb.select_first:
        return resp['data'][0]
    return resp['data']
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_episodes(self, series_id, **kwargs):
    """Fetch all episodes for a series, paginated 100 per page
    (authorization token required).

    Supported filters: airedSeason, airedEpisode, imdbId, dvdSeason,
    dvdEpisode, absoluteNumber, page.

    :param str series_id: id of series as found on thetvdb
    :param kwargs: keyword args to search/filter episodes by (optional)
    :returns: series episode records
    :rtype: list
    """
    # Start at page 1 unless the caller requests a specific page.
    params = {'page': 1}
    params.update({key: value for key, value in six.iteritems(kwargs)
                   if key in EPISODES_BY})
    resp = self._exec_request(
        'series',
        path_args=[series_id, 'episodes', 'query'], params=params)
    return resp['data']
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def intersectingPoint(self, p):
""" given a point, get intervals in the tree that are intersected. :param p: intersection point :return: the list of intersected intervals """ |
# perfect match
if p == self.data.mid:
# Every interval stored at this node spans mid, so all intersect p.
return self.data.ends
if p > self.data.mid:
# we know all intervals in self.data begin before p (if they began after
# p, they would have not included mid) we just need to find those that
# end after p
# openEnded trees treat the end coordinate as exclusive.
endAfterP = [r for r in self.data.ends
if (r.end >= p and not self.openEnded) or
(r.end > p and self.openEnded)]
if self.right is not None:
endAfterP.extend(self.right.intersectingPoint(p))
return endAfterP
if p < self.data.mid:
# we know all intervals in self.data end after p (if they ended before p,
# they would have not included mid) we just need to find those that start
# before p
startBeforeP = [r for r in self.data.starts if r.start <= p]
if self.left is not None:
startBeforeP.extend(self.left.intersectingPoint(p))
return startBeforeP
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def intersectingInterval(self, start, end):
""" given an interval, get intervals in the tree that are intersected. :param start: start of the intersecting interval :param end: end of the intersecting interval :return: the list of intersected intervals """ |
# find all intervals in this node that intersect start and end
l = []
for x in self.data.starts:
# An interval intersects the query unless it lies entirely before
# or entirely after it; openEnded trees treat ends as exclusive.
xStartsAfterInterval = (x.start > end and not self.openEnded) or \
(x.start >= end and self.openEnded)
xEndsBeforeInterval = (x.end < start and not self.openEnded) or \
(x.end <= start and self.openEnded)
if ((not xStartsAfterInterval) and (not xEndsBeforeInterval)):
l.append(x)
# process left subtree (if we have one) if the requested interval begins
# before mid
if self.left is not None and start <= self.data.mid:
l += self.left.intersectingInterval(start, end)
# process right subtree (if we have one) if the requested interval ends
# after mid
if self.right is not None and end >= self.data.mid:
l += self.right.intersectingInterval(start, end)
return l
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def intersectingIntervalIterator(self, start, end):
    """Yield the intervals intersecting the given interval, sorted by
    start index.

    :param start: start index of the query interval (inclusive)
    :param end: end index of the query interval (exclusive)
    :return: an iterator that will yield intersected intervals
    """
    hits = self.intersectingInterval(start, end)
    for hit in sorted(hits, key=lambda interval: interval.start):
        yield hit
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def initiate_tasks(self):
""" Loads all tasks using `TaskLoader` from respective configuration option """ |
# Discover task classes from every configured search path.
self.tasks_classes = TaskLoader().load_tasks(
paths=self.configuration[Configuration.ALGORITHM][Configuration.TASKS][Configuration.PATHS])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def instantiate_tasks(self):
    """Instantiate every loaded task class.

    Populates ``self.tasks_instances`` with ``{task_name: instance}``.
    Constructor failures are skipped silently when the "ignore on
    silent fail" (IOSF) option is enabled; otherwise they are re-raised
    as :class:`GOSTaskException`.
    """
    self.tasks_instances = {}
    for task_name, task_class in self.tasks_classes.items():
        try:
            self.tasks_instances[task_name] = task_class()
        except Exception as ex:
            if not self.configuration[Configuration.ALGORITHM][Configuration.IOSF]:
                # Bug fix: the two message fragments previously joined
                # without a separator ("...instantiation.{exception}").
                raise GOSTaskException(
                    "An exception happened during the task instantiation: "
                    "{exception}".format(exception=ex))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _have(self, name=None):
"""Check if a configure flag is set. If called without argument, it returns all HAVE_* items. Example: """ |
if name is None:
return (
(k, v) for k, v in self.env.items()
if k.startswith('HAVE_')
)
return self.env.get('HAVE_' + self.env_key(name)) == True |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _lib(self, name, only_if_have=False):
"""Specify a linker library. Example: LDFLAGS={{ lib("rt") }} {{ lib("pthread", True) }} Will unconditionally add `-lrt` and check the environment if the key `HAVE_LIBPTHREAD` is set to be true, then add `-lpthread`. """ |
emit = True
if only_if_have:
emit = self.env.get('HAVE_LIB' + self.env_key(name))
if emit:
return '-l' + name
return '' |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _with(self, option=None):
"""Check if a build option is enabled. If called without argument, it returns all WITH_* items. Example: """ |
if option is None:
return (
(k, v) for k, v in self.env.items()
if k.startswith('WITH_')
)
return self.env.get('WITH_' + option.upper()) == True |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_codes(self, codes, reject=False):
    """Configure the accepted or rejected response-code list.

    :param codes: A list of the response codes.
    :param reject: If True, the listed codes will be rejected; if
                   False, only the listed codes will be accepted.
    """
    self.reject = reject
    self.codes = set(codes)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def accept(self, code):
    """Determine whether to accept the given code.

    :param code: The response code.
    :returns: True if the code should be accepted, False otherwise.
    """
    # Listed codes follow the configured polarity; unlisted codes take
    # the inverse.
    return (not self.reject) if code in self.codes else self.reject
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _needescape(c):
""" Return True if character needs escaping, else False. """ |
return not ascii.isprint(c) or c == '"' or c == '\\' or ascii.isctrl(c) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def escape(cls, string):
    """Utility method to produce an escaped version of a given string.

    :param string: The string to escape.
    :returns: The escaped version of the string.
    """
    encoded = string.encode('utf8')
    return ''.join(cls._escapes[ch] if cls._needescape(ch) else ch
                   for ch in encoded)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def submit(self):
""" Posts the form's data and returns the resulting Page Returns Page - The resulting page """ |
u = urlparse(self.url)
# An empty action means "post back to the current page".
if not self.action:
self.action = self.url
elif self.action == u.path:
self.action = self.url
else:
# Relative action: resolve it against the current URL's directory.
if not u.netloc in self.action:
path = "/".join(u.path.split("/")[1:-1])
if self.action.startswith("/"):
path = path + self.action
else:
path = path + "/" + self.action
# NOTE(review): scheme is hard-coded to http here, so an https
# page would submit over plain http -- confirm this is intended.
self.action = "http://" + u.netloc + "/" + path
return self.usr.getPage(self.action, self.items, {'Referer': self.url}, self.usePin)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compute_hash(attributes, ignored_attributes=None):
    """Compute a persistence-stable SHA-256 hex digest for *attributes*.

    :param attributes: the dictionary to hash
    :param ignored_attributes: iterable of attribute names to exclude
    """
    ignored = list(ignored_attributes) if ignored_attributes else []
    normalized = _convert(attributes.copy(), ignored)
    return hashlib.sha256(
        str(normalized).encode('utf-8', errors='ignore')).hexdigest()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def SimpleRowColumn(field, *args, **kwargs):
    """Shortcut for a simple Row containing only one full Column.

    Plain strings are first wrapped in a crispy ``Field``.
    """
    if isinstance(field, basestring):
        field = Field(field, *args, **kwargs)
    return Row(Column(field))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def save(self, commit=True):
"""Save and send""" |
# NOTE(review): `commit` is accepted but not forwarded to the parent
# save() -- confirm this is intentional.
# Persist the contact first so the notification reflects stored data.
contact = super(ContactFormBase, self).save()
context = {'contact': contact}
context.update(get_site_metas())
# Subject templates may render across several lines; collapse them to
# the single line required for an email subject.
subject = ''.join(render_to_string(self.mail_subject_template, context).splitlines())
content = render_to_string(self.mail_content_template, context)
# In DEBUG, let mail errors surface; in production, fail silently.
send_mail(subject, content,
settings.DEFAULT_FROM_EMAIL,
settings.CONTACT_FORM_TO,
fail_silently=not settings.DEBUG)
return contact
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def bark_filter(global_conf, **local_conf):
    """Paste filter factory for Bark.

    Returns a function which, when passed the application, returns an
    instance of BarkMiddleware.

    :param global_conf: The global configuration, from the [DEFAULT]
                        section of the PasteDeploy configuration file.
    :param local_conf: The local configuration, from the filter section
                       of the PasteDeploy configuration file.
    """
    # First, parse the configuration
    conf_file = None
    sections = {}
    for key, value in local_conf.items():
        # 'config' key causes a load of a configuration file; settings
        # in the local_conf will override settings in the
        # configuration file, however
        if key == 'config':
            conf_file = value
        elif '.' in key:
            sect, _sep, opt = key.partition('.')
            sect_dict = sections.setdefault(sect, {})
            sect_dict[opt] = value  # note: a string
    # Now that we've loaded local_conf, process conf_file (if any)
    if conf_file:
        cp = ConfigParser.SafeConfigParser()
        cp.read([conf_file])
        for sect in cp.sections():
            # Bug fix: options() returns bare option names; items()
            # returns the (name, value) pairs this loop needs.
            for opt, value in cp.items(sect):
                sect_dict = sections.setdefault(sect, {})
                # By using setdefault(), we allow local_conf to
                # override the configuration file
                sect_dict.setdefault(opt, value)
    # OK, the configuration is all read; next step is to turn the
    # configuration into logging handlers
    handlers = {}
    proxies = None
    for sect, sect_dict in sections.items():
        if sect == 'proxies':
            # Reserved for proxy configuration
            try:
                proxies = bark.proxy.ProxyConfig(sect_dict)
            except KeyError as exc:
                LOG.warn("Cannot configure proxy handling: option %s is "
                         "missing from the proxy configuration" % exc)
            continue  # Pragma: nocover
        # First, determine the logging format
        try:
            format = bark.format.Format.parse(sect_dict.pop('format'))
        except KeyError:
            LOG.warn("No format specified for log %r; skipping." % sect)
            continue
        # Next, determine the handler type
        handle_type = sect_dict.pop('type', 'file')
        # Now, let's construct a handler; this will be a callable
        # taking the formatted message to log
        try:
            handler = bark.handlers.get_handler(handle_type, sect, sect_dict)
        except Exception as exc:
            LOG.warn("Cannot load handler of type %r for log %r: %s" %
                     (handle_type, sect, exc))
            continue
        # We now have a handler and a format; bundle them up
        handlers[sect] = (format, handler)
    # Construct the wrapper which is going to instantiate the
    # middleware
    def wrapper(app):
        return BarkMiddleware(app, handlers, proxies)
    return wrapper
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def blog_post_feed(request, format, **kwargs):
    """Blog posts feeds - map *format* to the matching feed view."""
    feed_classes = {"rss": PostsRSS, "atom": PostsAtom}
    # Unknown formats are a 404, matching the original KeyError path.
    if format not in feed_classes:
        raise Http404()
    return feed_classes[format](**kwargs)(request)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(self):
""" Connects to RabbitMQ """ |
self.connection = Connection(self.broker_url)
# Fan-out exchange: every bound queue receives each message. Declared
# transient (non-durable, delivery_mode=1) since messages are ephemeral.
e = Exchange('mease', type='fanout', durable=False, delivery_mode=1)
self.exchange = e(self.connection.default_channel)
self.exchange.declare()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def publish(self, message_type, client_id, client_storage, *args, **kwargs):
""" Publishes a message Uses `self.pack` instead of 'msgpack' serializer on kombu for backend consistency """ |
# Silently drop the message when the connection is down.
if self.connection.connected:
message = self.exchange.Message(
self.pack(message_type, client_id, client_storage, args, kwargs))
# Fan-out exchanges ignore the routing key.
self.exchange.publish(message, routing_key='')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(self):
""" Connects to RabbitMQ and starts listening """ |
logger.info("Connecting to RabbitMQ on {broker_url}...".format(
broker_url=self.broker_url))
super(RabbitMQSubscriber, self).connect()
# Exclusive, transient queue bound to the fan-out exchange: it exists
# only for this subscriber's lifetime.
q = Queue(exchange=self.exchange, exclusive=True, durable=False)
self.queue = q(self.connection.default_channel)
self.queue.declare()
# Consume in a daemon thread so it never blocks interpreter shutdown.
self.thread = Thread(target=self.listen)
self.thread.setDaemon(True)
self.thread.start()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def listen(self):
""" Listens to messages """ |
with Consumer(self.connection, queues=self.queue, on_message=self.on_message,
auto_declare=False):
# Drive the kombu event loop forever; timeouts are ignored so the
# loop keeps polling even when no messages arrive.
for _ in eventloop(self.connection, timeout=1, ignore_timeouts=True):
pass
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main_loop(self):
"""Runs the main game loop.""" |
# Classic pygame loop: drain pending events, advance one step, then
# sleep briefly to avoid pegging the CPU. Never returns on its own.
while True:
for e in pygame.event.get():
self.handle_event(e)
self.step()
pygame.time.wait(5)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def quit(self, event):
"""Quit the game.""" |
# `event` is presumably accepted to match the event-handler signature;
# it is unused here.
self.logger.info("Quitting.")
# Give subclasses a chance to clean up before the process exits.
self.on_exit()
sys.exit()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def setCredentials(self, user, password):
    """! Set authentication credentials.

    @param user Username.
    @param password Password.
    """
    # Validate before storing anything.
    self._checkUserAndPass(user, password)
    self.user, self.password = user, password
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_setting_with_envfallback(setting, default=None, typecast=None):
    """Get the given setting, falling back to ``os.environ`` and then
    *default* when it is not found in ``django.conf.settings``.

    :param setting: The setting name as a string.
    :param default: The fallback if ``setting`` is found in neither
        ``django.conf.settings`` nor ``os.environ``.
    :param typecast: A function that converts the given value from
        string to another type, e.g. ``typecast=int``.
    """
    try:
        from django.conf import settings
    except ImportError:
        # Bug fix: an unavailable Django previously short-circuited the
        # documented os.environ fallback (and skipped typecast).
        fallback = default
    else:
        fallback = getattr(settings, setting, default)
    value = os.environ.get(setting, fallback)
    if typecast:
        value = typecast(value)
    return value
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_file_locations(self, file_locations=None):
    """Append file-location tuples to this instance's registry.

    Args:
        file_locations: list of file location tuples; defaults to an
            empty list.
    """
    # Fix: avoid the mutable-default-argument pitfall by treating None
    # as "no locations".
    if file_locations is None:
        file_locations = []
    if not hasattr(self, '__file_locations__'):
        # First registration: store a shallow copy so later mutation of
        # the caller's list cannot affect us.
        self.__file_locations__ = copy.copy(file_locations)
    else:
        self.__file_locations__ += copy.copy(file_locations)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def reset(self, **kwargs):
""" Reset the triplestore with all of the data """ |
# Wipe everything, then re-load every previously registered file.
self.drop_all(**kwargs)
file_locations = self.__file_locations__
# Clear the registry first; load() re-populates it as files are added.
self.__file_locations__ = []
self.load(file_locations, **kwargs)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def drop_all(self, **kwargs):
    """Drop every definition from the triplestore and reset the
    loaded-file bookkeeping."""
    connection = self.__get_conn__(**kwargs)
    connection.update_query("DROP ALL")
    # Forget everything we previously recorded as loaded.
    self.loaded = []
    self.loaded_times = {}
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load_file(self, filepath, **kwargs):
""" loads a file into the definition triplestore args: filepath: the path to the file """ |
log.setLevel(kwargs.get("log_level", self.log_level))
filename = os.path.split(filepath)[-1]
if filename in self.loaded:
# Reload only when the file on disk is newer than what we loaded;
# the year-2001 sentinel forces a reload for unknown timestamps.
if self.loaded_times.get(filename,
datetime.datetime(2001,1,1)).timestamp() \
< os.path.getmtime(filepath):
self.drop_file(filename, **kwargs)
else:
return
conn = self.__get_conn__(**kwargs)
# Each file is loaded into its own named graph keyed by filename.
conn.load_data(graph=getattr(__NSM__.kdr, filename).clean_uri,
data=filepath,
# log_level=logging.DEBUG,
is_file=True)
self.__update_time__(filename, **kwargs)
log.warning("\n\tfile: '%s' loaded\n\tconn: '%s'\n\tpath: %s",
filename,
conn,
filepath)
self.loaded.append(filename)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load_directory(self, directory, **kwargs):
""" loads all rdf files in a directory args: directory: full path to the directory """ |
log.setLevel(kwargs.get("log_level", self.log_level))
conn = self.__get_conn__(**kwargs)
# Default to the formats the connection understands.
file_extensions = kwargs.get('file_extensions', conn.rdf_formats)
file_list = list_files(directory,
file_extensions,
kwargs.get('include_subfolders', False),
include_root=True)
for file in file_list:
# list_files yields tuples; index 1 holds the full path.
self.load_file(file[1], **kwargs)
# Restore the logger to its configured level.
log.setLevel(self.log_level)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read(fileobj, version=None):
"""Read tz data from a binary file. @param fileobj: @param version: @return: TZFileData """ |
# Header starts with the 'TZif' magic followed by one version byte.
magic = fileobj.read(5)
if magic[:4] != b"TZif":
raise ValueError("not a zoneinfo file")
if version is None:
# An empty version byte denotes the original (v0) format.
version = int(magic[4:]) if magic[4] else 0
# Skip the 15 reserved bytes after the magic/version.
fileobj.seek(20)
# Read the counts:
# [0] - The number of UT/local indicators stored in the file.
# [1] - The number of standard/wall indicators stored in the file.
# [2] - The number of leap seconds for which data entries are stored
# in the file.
# [3] - The number of transition times for which data entries are
# stored in the file.
# [4] - The number of local time types for which data entries are
# stored in the file (must not be zero).
# [5] - The number of characters of time zone abbreviation strings
# stored in the file.
(ttisgmtcnt, ttisstdcnt, leapcnt,
timecnt, typecnt, charcnt) = _read_counts(fileobj)
if version >= 2:
# Skip to the counts in the second header.
# v1 body size: 4-byte times + 1-byte indices (5*timecnt),
# 6-byte type records, abbreviations, 8-byte leap entries, flags.
data_size = (5 * timecnt +
6 * typecnt +
charcnt +
8 * leapcnt +
ttisstdcnt +
ttisgmtcnt)
fileobj.seek(data_size + 20, os.SEEK_CUR)
# Re-read the counts.
(ttisgmtcnt, ttisstdcnt, leapcnt,
timecnt, typecnt, charcnt) = _read_counts(fileobj)
# v2+ stores 64-bit transition times; v0/v1 store 32-bit.
ttfmt = 'q'
else:
ttfmt = 'i'
times = array(ttfmt)
times.fromfile(fileobj, timecnt)
# File data is big-endian; swap on little-endian hosts.
if sys.byteorder != 'big':
times.byteswap()
type_indices = array('B')
type_indices.fromfile(fileobj, timecnt)
# Read local time types.
type_infos = []
for i in range(typecnt):
# Each record: gmt offset (int32), isdst flag, abbreviation index.
type_infos.append(struct.unpack(">iBB", fileobj.read(6)))
abbrs = fileobj.read(charcnt)
if version > 0:
# Skip to POSIX TZ string
fileobj.seek(12 * leapcnt + ttisstdcnt + ttisgmtcnt, os.SEEK_CUR)
posix_string = fileobj.read().strip().decode('ascii')
else:
posix_string = None
# Convert type_infos
for i, (gmtoff, isdst, abbrind) in enumerate(type_infos):
# Abbreviations are NUL-terminated strings inside the abbrs blob.
abbr = abbrs[abbrind:abbrs.find(0, abbrind)].decode()
type_infos[i] = (gmtoff, isdst, abbr)
return TZFileData(version, type_infos, times, type_indices, posix_string)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def convert(data):
""" convert a standalone unicode string or unicode strings in a mapping or iterable into byte strings. """ |
# NOTE: Python 2 only -- relies on `unicode`, `iteritems` and str
# being a byte string.
if isinstance(data, unicode):
return data.encode('utf-8')
elif isinstance(data, str):
# Already a byte string.
return data
elif isinstance(data, collections.Mapping):
# Recurse over (key, value) pairs.
return dict(map(convert, data.iteritems()))
elif isinstance(data, collections.Iterable):
# Preserve the container type (list, tuple, set, ...).
return type(data)(map(convert, data))
else:
# Non-string scalars pass through untouched.
return data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def make_fetch_func(base_url, async, **kwargs):
""" make a fetch function based on conditions of 1) async 2) ssl """ |
# NOTE: `async` as a parameter name restricts this to Python < 3.5.
# NOTE(review): `base_url` is accepted but unused here -- presumably
# consumed by the fetch helpers; confirm.
if async:
client = AsyncHTTPClient(force_instance=True, defaults=kwargs)
return partial(async_fetch, httpclient=client)
else:
client = HTTPClient(force_instance=True, defaults=kwargs)
return partial(sync_fetch, httpclient=client)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _getAuthHeaders(self):
""" Get authentication headers. If we have valid header data already, they immediately return it. If not, then get new authentication data. If we are currently in the process of getting the header data, put this request into a queue to be handled when the data are received. @returns: A deferred that will eventually be called back with the header data """ |
def _handleAuthBody(body):
# Parse the keystone JSON body and cache tenant id + auth token.
self.msg("_handleAuthBody: %(body)s", body=body)
try:
body_parsed = json.loads(body)
access_token = body_parsed['access']['token']
tenant_id = access_token['tenant']['id'].encode('ascii')
auth_token = access_token['id'].encode('ascii')
self.auth_headers["X-Tenant-Id"] = tenant_id
self.auth_headers["X-Auth-Token"] = auth_token
self._state = self.AUTHENTICATED
self.msg("_handleAuthHeaders: found token %(token)s"
" tenant id %(tenant_id)s",
token=self.auth_headers["X-Auth-Token"],
tenant_id=self.auth_headers["X-Tenant-Id"])
# Callback all queued auth headers requests
while not self._headers_requests.empty():
self._headers_requests.get().callback(self.auth_headers)
except ValueError:
# We received a bad response
return fail(MalformedJSONError("Malformed keystone"
" response received."))
def _handleAuthResponse(response):
# 200 -> stream the body and parse it; anything else -> auth failure.
if response.code == httplib.OK:
self.msg("_handleAuthResponse: %(response)s accepted",
response=response)
body = Deferred()
response.deliverBody(StringIOReceiver(body))
body.addCallback(_handleAuthBody)
return body
else:
self.msg("_handleAuthResponse: %(response)s rejected",
response=response)
return fail(
KeystoneAuthenticationError("Keystone"
" authentication credentials"
" rejected"))
self.msg("_getAuthHeaders: state is %(state)s", state=self._state)
if self._state == self.AUTHENTICATED:
# We are authenticated, immediately succeed with the current
# auth headers
self.msg("_getAuthHeaders: succeed with %(headers)s",
headers=self.auth_headers)
return succeed(self.auth_headers)
elif (self._state == self.NOT_AUTHENTICATED or
self._state == self.AUTHENTICATING):
# We cannot satisfy the auth header request immediately,
# put it in a queue
self.msg("_getAuthHeaders: defer, place in queue")
auth_headers_deferred = Deferred()
self._headers_requests.put(auth_headers_deferred)
if self._state == self.NOT_AUTHENTICATED:
self.msg("_getAuthHeaders: not authenticated, start"
" authentication process")
# We are not authenticated, and not in the process of
# authenticating.
# Set our state to AUTHENTICATING and begin the
# authentication process
self._state = self.AUTHENTICATING
d = self.agent.request('POST',
self.auth_url,
Headers({
"Content-type": ["application/json"]
}),
self._getAuthRequestBodyProducer())
d.addCallback(_handleAuthResponse)
d.addErrback(auth_headers_deferred.errback)
return auth_headers_deferred
else:
# Bad state, fail
return fail(RuntimeError("Invalid state encountered."))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main():
    """Parse command line arguments, run a scan with the TagCubeClient, exit.

    Exit codes:
        1 -- no credentials configured
        2 -- configured credentials were invalid, or a parameter (such as a
             profile name) was rejected
        otherwise -- whatever ``TagCubeCLI.run()`` returns.
    """
    cmd_args = TagCubeCLI.parse_args()

    try:
        tagcube_cli = TagCubeCLI.from_cmd_args(cmd_args)
    except ValueError as ve:
        # BUG FIX: `except ValueError, ve` is Python-2-only syntax; the
        # `as` form works on Python 2.6+ and Python 3.  Same for print().
        # We get here when there are no credentials configured
        print('%s' % ve)
        sys.exit(1)

    try:
        sys.exit(tagcube_cli.run())
    except ValueError as ve:
        # We get here when the configured credentials had some issue (invalid)
        # or there was some error (such as invalid profile name) with the
        # params
        print('%s' % ve)
        sys.exit(2)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def register(filetypes):
    """Class decorator that registers a checker for a set of extensions.

    Arguments:
        filetypes: Iterable of file extensions handled by the checker.

    Returns the decorator, which records the class in the module-level
    ``checkers`` mapping (extension -> list of checker classes) and returns
    the class unchanged.
    """
    def decorator(checker_cls):
        for extension in filetypes:
            if extension not in checkers:
                checkers[extension] = []
            checkers[extension].append(checker_cls)
        return checker_cls
    return decorator
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check(self, paths):
    """Return a list of error dicts for all errors found in *paths*.

    The default implementation expects `tool` and `tool_err_re` to be
    defined on the instance:

    tool: external binary to use for checking.
    tool_err_re: regexp matching the tool's output -- must provide a
        groupdict with at least "filename", "lineno", "colno", and "msg"
        keys.  See example checkers.
    """
    if not paths:
        return ()
    command = [self.tool] + list(self.tool_args)
    return self._check_std(paths, command)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_version(cls):
    """Return the version string reported by the checker tool.

    Runs ``<tool> --version`` and returns the first line of its standard
    output, stripped.  Returns an empty string when the tool writes to
    stderr or -- BUG FIX -- when it produces no output at all (the
    original raised IndexError on ``out.splitlines()[0]`` in that case).
    """
    process = Popen([cls.tool, '--version'], stdout=PIPE, stderr=PIPE)
    out, err = process.communicate()
    if err or not out.strip():
        # Tool complained, or printed nothing usable.
        return ''
    return out.splitlines()[0].strip()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _check_std(self, paths, cmd_pieces):
""" Run `cmd` as a check on `paths`. """ |
cmd_pieces.extend(paths)
process = Popen(cmd_pieces, stdout=PIPE, stderr=PIPE)
out, err = process.communicate()
lines = out.strip().splitlines() + err.strip().splitlines()
result = []
for line in lines:
match = self.tool_err_re.match(line)
if not match:
if self.break_on_tool_re_mismatch:
raise ValueError(
'Unexpected `%s` output: %r' % (
' '.join(cmd_pieces),
paths,
line))
continue
vals = match.groupdict()
# All tools should at least give us line numbers, but only
# some give column numbers.
vals['lineno'] = int(vals['lineno'])
vals['colno'] = \
int(vals['colno']) if vals['colno'] is not None else ''
result.append(vals)
return result |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def replace(dict, line):
    """Find and replace special words in *line* according to *dict*.

    A word may carry a leading ``[`` and/or a trailing ``,``/``;``/``.``/``]``;
    those delimiters are stripped before the dictionary lookup and stitched
    back on afterwards.  A trailing ``]`` is deliberately re-attached with no
    following space so callers can glue a ``(url)`` part straight after it.

    Parameters
    ==========
    dict : Dictionary
        Source-language words as keys, target-language words as values.
    line : String
        The string to be processed.
    """
    pieces = []
    for word in line.split():
        prefix = ''
        core = word
        tail = word[-1]
        if tail in (',', ';', '.'):
            core = word[:-1]
            suffix = tail + ' '
        elif tail == ']':
            core = word[:-1]
            suffix = ']'  # no trailing space -- see docstring
        else:
            suffix = ' '
        if core[:1] == '[':
            prefix = '['
            core = core[1:]
        translated = dict.get(core)
        pieces.append(prefix + str(core if translated is None else translated) + suffix)
    return ''.join(pieces)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def translate(src_filename, dest_filename, dest_lang, src_lang='auto', specialwords_filename=''):
    """Convert a MarkDown source file into a translated destination file.

    Parameters
    ==========
    src_filename : String
        Relative path of the original MarkDown source file.
    dest_filename : String
        Relative path where the translated MarkDown file should go.
    dest_lang : String
        Target language; 2-letter ISO-639-1 code.
    src_lang : String (OPTIONAL)
        Source language; 2-letter ISO-639-1 code, or 'auto' to detect.
    specialwords_filename : String (OPTIONAL)
        YAML file mapping a direction string (e.g. 'en_es') to special
        word translations.
    """
    translator = Translator()  # Initialize translator object

    with open(src_filename) as srcfile, open(dest_filename, 'w') as destfile:
        lines = srcfile.readlines()
        specialwords_dict = {}

        # If a special words file exists, load the mapping for this
        # translation direction into specialwords_dict.
        if specialwords_filename != '':
            # BUG FIX: yaml.load() returns a plain dict, not a context
            # manager, so it cannot be the target of a `with` statement.
            # safe_load also avoids executing arbitrary YAML tags.
            with open(specialwords_filename) as specialfile:
                specialwords_fulllist = yaml.safe_load(specialfile)

            # Detect the source language if it was not passed in.
            if src_lang == 'auto':
                # BUG FIX: the original used `==` (comparison) where an
                # assignment was intended, so detection never took effect.
                src_lang = str(translator.detect(lines[0]))[14:16]

            # Attempt to pick the dictionary for this language pair.
            try:
                # BUG FIX: the original referenced the undefined name
                # `specialwords_dict_full` (NameError at runtime).
                specialwords_dict = specialwords_fulllist[src_lang + '_' + dest_lang]
            except KeyError:
                print('Special words file doesn\'t contain required language translation!')

        # Parse each line for special cases and skip them when translating.
        in_code_block = False
        for line in lines:
            line = line.strip()
            if line.startswith("```"):
                # BUG FIX: track fenced-code state so block *contents* are
                # also left untranslated, as the docs promise -- the
                # original only skipped the fence lines themselves.
                in_code_block = not in_code_block
            elif in_code_block:
                # Inside a fenced code block: copy verbatim.
                pass
            elif line.find("[") != -1 and line.find("]") != -1 and line.find("(") != -1 and line.find(")") != -1:
                # URL / file link: translate around the (...) target only.
                ignore_start = line.find("(")
                ignore_end = line.find(")")
                head = replace(specialwords_dict, line[0:ignore_start])
                tail = replace(specialwords_dict, line[ignore_end + 1:])
                head = translator.translate(head, dest_lang, src_lang).text
                tail = translator.translate(tail, dest_lang, src_lang).text
                line = head + line[ignore_start:ignore_end + 1] + tail
            else:
                # No special cases: translate the whole line.
                line = translator.translate(line, dest_lang, src_lang).text
            # Write to destination file
            destfile.write(line + '\n')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def deprecated(f):
    """Decorate a function object as deprecated.

    Works nicely with the @command and @subshell decorators.  Adds a
    __deprecated__ field to the returned wrapper and sets it to True; the
    wrapper prints a deprecation warning before delegating to *f* and
    forwards *f*'s return value.
    """
    def inner_func(*args, **kwargs):
        print(textwrap.dedent("""\
            This command is deprecated and is subject to complete
            removal at any later version without notice.
            """))
        # BUG FIX: the original discarded f's return value, so decorated
        # commands that returned something (e.g. a subshell launcher)
        # silently returned None instead.
        return f(*args, **kwargs)
    inner_func.__deprecated__ = True
    inner_func.__doc__ = f.__doc__
    inner_func.__name__ = f.__name__
    if iscommand(f):
        inner_func.__command__ = f.__command__
    return inner_func
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def helper(*commands):
    """Decorate a function to be the helper function of commands.

    Arguments:
        commands: Names of commands that should trigger this function
            object.

    The decorated method takes a list of arguments and returns the help
    message as a string.  The function object itself is returned
    unchanged, with a ``__help_targets__`` list attached.
    """
    def mark(func):
        func.__help_targets__ = [name for name in commands]
        return func
    return mark
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def completer(*commands):
    """Decorate a function to be the completer function of commands.

    Arguments:
        commands: Names of commands that should trigger this function
            object.

    The decorated method takes the argument list (command itself excluded)
    and the text being completed, and returns a list of candidates (or
    []/None when there are none).  The function object is returned
    unchanged, with a ``__complete_targets__`` list attached.
    """
    def mark(func):
        func.__complete_targets__ = [name for name in commands]
        return func
    return mark
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def subshell(shell_cls, *commands, **kwargs):
    """Decorate a function to conditionally launch a _ShellBase subshell.

    Arguments:
        shell_cls: The _ShellBase subclass to launch.
        commands: Command names that should trigger the decorated function;
            when given, the result is additionally wrapped by @command.
        kwargs: Keyword arguments forwarded to the command decorator.

    The decorated method is called as ``f(self, cmd, args)`` and its return
    value drives the launch:
        * falsy value -- no subshell is launched;
        * a string -- appended to the prompt of the launched subshell;
        * a (string, dict) 2-tuple -- prompt suffix plus the context
          dictionary passed to the subshell.
    """
    def decorated_func(f):
        def inner_func(self, cmd, args):
            outcome = f(self, cmd, args)
            if not outcome:
                # Falsy return value: the caller opted out of launching.
                return
            if isinstance(outcome, tuple):
                prompt, context = outcome
            else:
                prompt, context = outcome, {}
            return self.launch_subshell(shell_cls, cmd, args,
                                        prompt=prompt, context=context)
        inner_func.__name__ = f.__name__
        inner_func.__doc__ = f.__doc__
        if commands:
            obj = command(*commands, **kwargs)(inner_func)
        else:
            obj = inner_func
        obj.__launch_subshell__ = shell_cls
        return obj
    return decorated_func
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.