text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main(argv=None):
    """Execute the application CLI.

    Arguments are taken from sys.argv by default.
    """
    args = _cmdline(argv)
    config.load(args.config)
    # Rank the results best-first according to sort_function of the score.
    ranked = sorted(
        get_package_list(args.search_term),
        key=lambda item: sort_function(item[1]),
        reverse=True,
    )
    # Collapse consecutive entries that share the same package name,
    # keeping the highest-ranked occurrence.
    deduped = []
    seen_name = None
    for entry in ranked:
        if entry[0] != seen_name:
            deduped.append(entry)
            seen_name = entry[0]
    print('\n'.join("%s - %s" % (entry[0], entry[1]) for entry in deduped))
    return 0
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generate_permissions(urlpatterns, permissions=None):
    """Generate names for permissions.

    Recursively walks Django URL patterns/resolvers and, for each allowed
    view, maps its permission code name (``ACL_CODE_PREFIX`` + name) to its
    display name (``ACL_NAME_PREFIX`` + name).

    :param urlpatterns: Iterable of URL patterns and resolvers to walk.
    :param permissions: Optional dict to accumulate results into; a fresh
        dict is created when omitted.
    :returns: The ``permissions`` dict.
    """
    # BUG FIX: the old mutable default argument ({}) was shared across
    # calls, so permissions from unrelated invocations accumulated.
    if permissions is None:
        permissions = {}
    for pattern in urlpatterns:
        if isinstance(pattern, urlresolvers.RegexURLPattern):
            perm = generate_perm_name(pattern.callback)
            if is_allowed_view(perm) and perm not in permissions:
                permissions[ACL_CODE_PREFIX + perm] = ACL_NAME_PREFIX + perm
        elif isinstance(pattern, urlresolvers.RegexURLResolver):
            # Recurse into included URL confs, sharing the same dict.
            generate_permissions(pattern.url_patterns, permissions)
    return permissions
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_allowed_view(perm):
    """Check if permission is in the ACL allow list (and not excluded)."""
    # Exclusions win over everything else.
    for pattern in ACL_EXCLUDED_VIEWS:
        prefix, star, suffix = pattern.partition('*')
        if pattern and perm.startswith(prefix):
            return False
    # Walk the allow list; '*' acts as a wildcard marker.
    for pattern in ACL_ALLOWED_VIEWS:
        prefix, star, suffix = pattern.partition('*')
        if not star:
            # No wildcard: require an exact match.
            if pattern == perm:
                return True
        elif not prefix and not suffix:
            # A bare '*' allows everything.
            return True
        elif prefix and perm.startswith(prefix):
            # 'module.*' style prefix match.
            return True
        elif suffix and perm.endswith(suffix):
            # '*.view_name' style suffix match.
            return True
    return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_location(query, format, api_key):
    """Get geographic data of a lab in a coherent way for all labs."""
    # Play nice with the API: simple client-side rate limiting.
    sleep(1)
    geolocator = OpenCage(api_key=api_key, timeout=10)
    # Normalised record returned to the caller.
    data = {"city": None,
            "address_1": None,
            "postal_code": None,
            "country": None,
            "county": None,
            "state": None,
            "country_code": None,
            "latitude": None,
            "longitude": None,
            "continent": None}
    road = ""
    number = ""
    # Default None values, keyed like the OpenCage response components.
    location_data = {"city": None,
                     "road": None,
                     "house_number": None,
                     "postcode": None,
                     "country": None,
                     "county": None,
                     "state": None,
                     "ISO_3166-1_alpha-2": None,
                     "country_code": None,
                     "lat": None,
                     "lng": None}
    # Reverse geocoding ... from coordinates to address
    if format == "reverse":
        # Skip empty/too-short queries (coordinates).
        if query is None or len(query) < 3:
            pass
        else:
            location = geolocator.reverse(query)
            if location is not None:
                location_data = location[0].raw[u'components']
                location_data["lat"] = location[0].raw[u'geometry']["lat"]
                location_data["lng"] = location[0].raw[u'geometry']["lng"]
    # Direct geocoding ... from address to coordinates and full address
    if format == "direct":
        # Skip empty/too-short queries (address).
        if query is None or len(query) < 3:
            pass
        else:
            location = geolocator.geocode(query)
            if location is not None:
                location_data = location.raw[u'components']
                location_data["lat"] = location.raw[u'geometry']["lat"]
                location_data["lng"] = location.raw[u'geometry']["lng"]
    # Extract the meaningful data from the geocoder components.
    for component in location_data:
        if component == "town" or component == "city":
            data["city"] = location_data[component]
        if component == "road":
            road = location_data[component]
        if component == "house_number":
            number = location_data[component]
        if component == "postcode":
            data["postal_code"] = location_data[component]
        if component == "country":
            data["country"] = location_data[component]
        if component == "county":
            data["county"] = location_data[component]
        if component == "state":
            data["state"] = location_data[component]
        if component == "ISO_3166-1_alpha-2":
            data["country_code"] = location_data[component]
    # The address needs to be reconstructed from road + house number.
    # NOTE: unicode() is Python 2 only; this module targets Python 2.
    data["address_1"] = unicode(road) + " " + unicode(number)
    data["latitude"] = location_data["lat"]
    data["longitude"] = location_data["lng"]
    # Convert the two-letter country code to the three-letter form.
    # NOTE(review): bare except hides all errors, including typos;
    # presumably only lookup failures on unknown codes are expected here.
    try:
        country_data = transformations.cca2_to_ccn(data["country_code"])
        data["country_code"] = transformations.ccn_to_cca3(country_data)
    except:
        data["country_code"] = None
    # Derive the continent from the (now three-letter) country code.
    try:
        country_data = transformations.cc_to_cn(data["country_code"])
        data["continent"] = transformations.cn_to_ctn(country_data)
    except:
        data["continent"] = None
    # Return the final data
    return data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def interp(self, *args):
    """Build a SQL statement and bind tuple from a list of snippets.

    Returns ``(sql, bind)`` suitable for a DB API ``execute`` call.
    Plain strings are taken as raw SQL; any other value is rendered
    through its escape wrapper and contributes bind values.
    """
    def _join(statement, fragment):
        """Append a fragment, inserting a single space only when needed."""
        if not statement or statement.endswith(' '):
            return statement + fragment
        return statement + ' ' + fragment

    statement = ""
    binds = ()
    for arg in args:
        if type(arg) is str:
            # Raw SQL text goes in verbatim.
            statement = _join(statement, arg)
        else:
            # Esc instances know how to render themselves given the SQL
            # built so far; any other value is wrapped via esc() first.
            esc_obj = arg if isinstance(arg, Esc) else self.esc(arg)
            fragment, values = esc_obj.to_string(statement)
            statement = _join(statement, fragment)
            binds += values
    return (statement, binds)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def esc(self, val):
    """Wrap ``val`` in the appropriate escape class from esc_types.py.

    Usually called implicitly by ``interp``. Call it directly when a
    string passed to ``interp`` must be a SQL bind value rather than raw
    SQL, to avoid a SQL injection vulnerability:

        wrong: ('SELECT * FROM table WHERE first_name = John', ())
        right: ('SELECT * FROM table WHERE first_name = ?', ('John',))
    """
    cls = type(val)
    if cls in self.type_map:
        # A registered wrapper exists for this exact type.
        return self.type_map[cls](val)
    return Esc(val)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def api_request(api_base_url='http://localhost:8080/', path='', method='get',
                data=None, params=None, verify=True, cert=None):
    """Wrapper function for requests.

    :param api_base_url: Base URL for requests
    :param path: Path to request
    :param method: HTTP method ('get' or 'post', case-insensitive)
    :param data: Data for post (ignored for GETs)
    :param params: Dict of key, value query params
    :param verify: True/False/CA_File_Name to perform SSL verification
        of the CA chain
    :param cert: List of cert and key to use for client authentication
    :returns: The ``requests`` response object.
    """
    # Avoid mutable default arguments; normalise to the old defaults.
    if params is None:
        params = {}
    if cert is None:
        cert = []
    method = method.lower()
    headers = {
        'Accept': 'application/json',
        'Content-type': 'application/json',
    }
    methods = {
        'get': requests.get,
        'post': requests.post,
    }
    # BUG FIX: the old `path[0]` raised IndexError for the default
    # empty path; an empty path is now normalised to '/'.
    if not path or path[0] != '/':
        path = '/{0}'.format(path)
    if params:
        # NOTE: Python 2 urllib API (urllib.parse.urlencode on Python 3).
        path += '?{0}'.format(urllib.urlencode(params))
    url = '{0}{1}'.format(api_base_url, path)
    resp = methods[method](url, data=json.dumps(data), headers=headers,
                           verify=verify, cert=cert)
    return resp
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_ssh_creds(config, args):
    """Set ssh credentials into config.

    These values might also be set in ~/.bangrc; command-line arguments
    to ``bang`` take precedence over ~/.bangrc values.
    """
    creds = config.get(A.DEPLOYER_CREDS, {})
    if args.user:
        # Explicit command-line user wins.
        creds[A.creds.SSH_USER] = args.user
    else:
        # Keep any existing value, falling back to the default.
        creds[A.creds.SSH_USER] = creds.get(A.creds.SSH_USER, DEFAULT_SSH_USER)
    if args.ask_pass:
        creds[A.creds.SSH_PASS] = getpass.getpass('SSH Password: ')
    config[A.DEPLOYER_CREDS] = creds
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run_bang(alt_args=None):
    """
    Runs bang with optional list of strings as command line options.

    If ``alt_args`` is not specified, defaults to parsing ``sys.argv``
    for command line options.
    """
    parser = get_parser()
    args = parser.parse_args(alt_args)
    # Config specs come from the command line, else from the environment.
    source = args.config_specs or get_env_configs()
    if not source:
        # Nothing to do without a configuration source.
        return
    config = Config.from_config_specs(source)
    if args.playbooks:
        config[A.PLAYBOOKS] = args.playbooks
    # --dump-config short-circuits: print the config and exit.
    # NOTE: Python 2 print statements; this module targets Python 2.
    if args.dump_config:
        if args.dump_config in ('yaml', 'yml'):
            import yaml
            print yaml.safe_dump(dict(config))
        elif args.dump_config == 'json':
            import json
            print json.dumps(config)
        else:
            print config
        sys.exit()
    set_ssh_creds(config, args)
    annoy(config)
    stack = Stack(config)
    if args.ansible_list:
        # Emit the Ansible inventory (pretty-printed when stdout is a TTY).
        stack.show_inventory(
            os.isatty(sys.stdout.fileno())
        )
        return
    initialize_logging(config)
    # TODO: config.validate()
    if args.deploy:
        stack.deploy()
    if args.configure:
        stack.configure()
    # NOTE(review): presumably persists an auto-incremented config
    # version for the next run — confirm Config.autoinc semantics.
    config.autoinc()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def monitor(self, timeout):
    """Watch the process and stop it when it runs out of time.

    Spawns a daemon thread that sleeps for ``timeout`` seconds and then
    calls ``self.stop()``; returns immediately.

    :param timeout: Seconds to wait before stopping the process.
    """
    # BUG FIX: the old nested function declared (self, timeout)
    # parameters but the thread was started without arguments, so the
    # watcher crashed with a TypeError; close over them instead.
    def check():
        time.sleep(timeout)
        self.stop()
    watcher = threading.Thread(target=check)
    # Daemon thread: don't keep the interpreter alive for the watchdog.
    watcher.setDaemon(True)
    watcher.start()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def hdf5_storable(type_or_storable, *args, **kwargs):
    '''Registers a `Storable` instance in the global service.

    A plain type is first wrapped with `default_storable`.
    '''
    if isinstance(type_or_storable, Storable):
        storable = type_or_storable
    else:
        storable = default_storable(type_or_storable)
    hdf5_service.registerStorable(storable, *args, **kwargs)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def hdf5_not_storable(_type, *args, **kwargs):
    '''Tags a type as not serializable.'''
    storable = not_storable(_type)
    hdf5_service.registerStorable(storable, *args, **kwargs)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compile_and_process(self, in_path):
    """Compile a file; save it to the output file unless embedding.

    Returns the compiled text (also in embed mode, so the caller can
    cache and inline it later).
    """
    out_path = self.path_mapping[in_path]
    if self.embed:
        # Embed mode: the result stays in memory, nothing is written.
        pdebug("[%s::%s] %s -> <cache>" % (
            self.compiler_name,
            self.name,
            os.path.relpath(in_path)),
            groups=["build_task"],
            autobreak=True)
    else:
        pdebug("[%s::%s] %s -> %s" % (
            self.compiler_name,
            self.name,
            os.path.relpath(in_path),
            os.path.relpath(out_path)),
            groups=["build_task"],
            autobreak=True)
    compiled_string = self.compile_file(in_path)
    # Persist only non-empty output, and only in external mode.
    if not self.embed and compiled_string != "":
        with open(out_path, "w") as f:
            f.write(compiled_string)
    return compiled_string
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def collect_output(self):
    """Gather the results of `compile_and_process` on all target files."""
    if not self.embed:
        # External mode: reference each non-empty compiled file by its
        # path relative to the output directory.
        output = []
        for path in self.build_order:
            if self.compiled_scripts[path] == "":
                continue
            rel = os.path.relpath(self.out_path_of(path),
                                  self.output_directory)
            output.append(self.external_template_string %
                          os.path.join(self.relative_directory, rel))
        return output
    if self.concat:
        # Embed + concat: a single template around the joined sources.
        joined = '\n'.join(self.compiled_scripts[path]
                           for path in self.build_order)
        return [self.embed_template_string % joined]
    # Embed without concat: one template instance per source file.
    return [self.embed_template_string % self.compiled_scripts[path]
            for path in self.build_order]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def build(self):
    """build the scripts and return a string"""
    if not self.embed:
        # External output: make sure the destination directory exists.
        mkdir_recursive(self.output_directory)
    # get list of script files in build order
    # NOTE: relies on the Python 2 builtin `reduce`
    # (functools.reduce on Python 3).
    self.build_order = remove_dups(
        reduce(lambda a, b: a + glob.glob(b),
               self.build_targets,
               []))
    self.build_order_output = [self.out_path_of(t)
                               for (t) in self.build_order]
    # Map each input path to its output path for compile_and_process().
    self.path_mapping = dict(zip(
        self.build_order,
        self.build_order_output))
    self.compiled_scripts = {}
    # Compile every target; compile errors are expected to be returned
    # (not raised) as Exception instances, so split them from results.
    exceptions, values = partition(
        lambda x: isinstance(x, Exception),
        [self.compile_and_process(target)
         for target in self.build_order])
    # NOTE(review): dict(values) expects (key, value) pairs, but
    # compile_and_process as shown returns a string — verify against
    # the real implementation.
    self.compiled_scripts.update(dict(values))
    # "Sane" errors are TaskExecutionExceptions we can aggregate;
    # anything else is re-raised immediately.
    saneExceptions, insaneExceptions = partition(
        lambda x: isinstance(x, TaskExecutionException),
        exceptions)
    if len(insaneExceptions) != 0:
        raise insaneExceptions[0]
    if len(exceptions) != 0:
        # Aggregate all compiler errors into a single report.
        raise TaskExecutionException(
            "Precompiler Errors (%s):" % type(self).__name__,
            "\n".join([
                x.header + "\n " +
                x.message.replace("\n", "\n ")
                for x in exceptions]))
    return self.collect_output()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_experiments(experiment_ids=None, user_id=None, client_id=None,
                    bucket_if_necessary=True, user_data=None):
    """Retrieve the experiments the user is a part of.

    @param experiment_ids: Either a single or list of experiment ids to retrieve
    @param user_id: An identifier for the user
    @param client_id: Bernoulli Client ID - will default to the
        BERNOULLI_CLIENT_ID ENV variable
    @param bucket_if_necessary: Choose a variant for the user if one has
        not been chosen
    @param user_data: Dictionary of user information to be used by the
        segment filters
    """
    if not client_id:
        client_id = os.environ.get('BERNOULLI_CLIENT_ID')
    if not client_id:
        raise Exception("client_id is required")
    # BUG FIX: the docstring promises a single id or a list, but the old
    # check tested `is dict` — joining a dict's keys and never joining
    # lists. Join real sequences into the comma-separated form instead.
    if isinstance(experiment_ids, (list, tuple)):
        experiment_ids = ','.join(experiment_ids)
    params = {
        'clientId': client_id,
        'experimentIds': experiment_ids,
        'userId': user_id,
        'bucketIfNecessary': bucket_if_necessary,
    }
    if user_data is None:
        user_data = {}
    try:
        # NOTE: dict.items() + dict.items() concatenates lists on
        # Python 2; on Python 3 this would need dict(params, **user_data).
        response = requests.get(BASE_URL, params=dict(params.items() + user_data.items()))
    except requests.ConnectionError:
        raise Exception("Unable to access service")
    val = response.json()
    if val['status'] != 'ok':
        raise Exception(val['message'])
    return val['value']
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def record_goal_attained(experiment_id, user_id, client_id=None):
    """Record that a variant was successful for a user.

    @param experiment_id: A single experiment id
    @param user_id: An identifier for the user
    @param client_id: Bernoulli Client ID (falls back to the
        BERNOULLI_CLIENT_ID environment variable)
    """
    client_id = client_id or os.environ.get('BERNOULLI_CLIENT_ID')
    if not client_id:
        raise Exception("client_id is required")
    payload = {
        'clientId': client_id,
        'userId': user_id,
        'experimentId': experiment_id,
    }
    try:
        response = requests.post(BASE_URL, data=payload)
    except requests.ConnectionError:
        raise Exception("Unable to access services")
    val = response.json()
    if val['status'] != 'ok':
        raise Exception(val['message'])
    return val['value']
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def yaml_dump_hook(cfg, text: bool=False):
    """Dump all the configuration data as YAML.

    Writes to ``cfg.fd`` by default; with ``text=True`` the YAML is
    returned as a string instead.
    """
    data = cfg.config.dump()
    if text:
        return yaml.dump(data, Dumper=cfg.dumper, default_flow_style=False)
    yaml.dump(data, cfg.fd, Dumper=cfg.dumper, default_flow_style=False)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _create_djset(args, cls):
""" Return a DjSecret object """ |
name = args.get('--name')
settings = args.get('--settings')
if name:
return cls(name=name)
elif settings:
return cls(name=settings)
else:
return cls() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse_args(args, cls):
    """Parse a docopt dictionary of arguments.

    Returns a ``(callable, args, kwargs)`` triple for the requested
    action, or ``(None, None, None)`` when nothing actionable was given.
    """
    secrets = _create_djset(args, cls)
    pair = args.get('<key>=<value>')
    key = args.get('<key>')
    action = None
    fargs = None
    if args.get('add') and pair:
        fargs = tuple(pair.split('='))
        # Only set when a non-empty value follows the '='.
        if fargs[1]:
            action = secrets.set
    elif args.get('remove') and key:
        action = secrets.remove
        fargs = (key,)
    if action is None:
        return None, None, None
    return action, fargs, {'glob': args.get('--global')}
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def prompt_update_all(config: 'Config'):
    """Prompt each field of the configuration to the user."""
    click.echo()
    click.echo('Welcome !')
    click.echo('Press enter to keep the defaults or enter a new value to update the configuration.')
    click.echo('Press Ctrl+C at any time to quit and save')
    click.echo()
    for field in config:
        type_ = config.__type__(field)
        hint = config.__hint__(field) + ' ({})'.format(type_.__name__)
        # Sub-configurations are edited through their own prompt, not here.
        if isinstance(type_, conftypes.SubConfigType):
            continue
        # we prompt the paths through prompt_file and not click
        if type_ is conftypes.path:
            config[field] = prompt_file(hint, default=config[field])
            continue
        if isinstance(type_, conftypes.ConfigType):
            # config[field] is always real data, but we want to show
            # something that is the closest possible to what the user
            # needs to enter; thus, we show what we would store in the json
            default = type_.save(config[field])
        else:
            default = config[field]
        # a too long hint is awful
        if len(str(default)) > 14:
            default = str(default)[:10] + '...'
        # ask until we have the right type
        value = click.prompt(hint, default=default, type=type_)
        # click doesn't convert() the default if nothing is entered, so it
        # won't be valid; however we don't care because keeping the default
        # means that we don't have to update
        if value == default:
            LOGGER.debug('same value and default, skipping set. %r == %r', value, default)
            continue
        config[field] = value
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update_config(configclass: type(Config)):
    """Command line function to update and save a config.

    Builds a dynamic click command around ``configclass`` and runs it.
    """
    # we build the real click command inside the function, because it needs
    # to be done dynamically, depending on the config.
    # we ignore the type errors, keeping the defaults if needed:
    # everything will be updated anyway
    config = configclass()  # type: Config

    def print_list(ctx, param, value):
        # eager-option callback pattern from the click docs
        # (http://click.pocoo.org/6/options/#callbacks-and-eager-options);
        # the only goal is to call __print_list__()
        if not value or ctx.resilient_parsing:
            return param
        config.__print_list__()
        ctx.exit()

    def show_conf(ctx, param, value):
        # see print_list
        if not value or ctx.resilient_parsing:
            return param
        config.__show__()
        ctx.exit()

    def reset(ctx, param, value):
        # see print_list
        if not value or ctx.resilient_parsing:
            return param
        click.confirm('Are you sure you want to reset ALL fields to the defaults ? This action is not reversible.', abort=True)
        # Point the class at a path that doesn't exist...
        configclass.__config_path__, config_path = '', configclass.__config_path__
        # ...so the file won't be opened and only the defaults are loaded.
        config = configclass()
        # Restore the real path so the defaults are saved to the right place.
        configclass.__config_path__ = config_path
        config.__save__()
        ctx.exit()

    def clean(ctx, param, value):
        # see print_list
        if not value or ctx.resilient_parsing:
            return param
        config.__save__()
        click.echo('Cleaned !')
        ctx.exit()

    @click.command(context_settings={'ignore_unknown_options': True})
    @click.option('-c', '--clean', is_eager=True, is_flag=True, expose_value=False, callback=clean,
                  help='Clean the file where the configuration is stored.')
    @click.option('-l', '--list', is_eager=True, is_flag=True, expose_value=False, callback=print_list,
                  help='List the available configuration fields.')
    @click.option('--reset', is_flag=True, is_eager=True, expose_value=False, callback=reset,
                  help='Reset all the fields to their default value.')
    @click.option('-s', '--show', is_eager=True, is_flag=True, expose_value=False, callback=show_conf,
                  help='View the configuration.')
    @click.argument('fields-to-set', nargs=-1, type=click.UNPROCESSED)
    def command(fields_to_set: 'Tuple[str]'):
        """
        I manage your configuration.
        If you call me with no argument, you will be able to set each field
        in an interactive prompt. I can show your configuration with -s,
        list the available field with -l and set them by --name-of-field=whatever.
        """
        # with a context manager, the config is always saved at the end
        with config:
            if len(fields_to_set) == 1 and '=' not in fields_to_set[0]:
                # we want to update a part of the config
                sub = fields_to_set[0]
                if sub in config:
                    if isinstance(config[sub], SubConfig):
                        # the part is a subconfig
                        prompt_update_all(config[sub])
                    else:
                        # TODO: dynamic prompt for one field
                        # BUG FIX: the field name was never interpolated
                        # into these two messages before.
                        raise click.BadParameter('%s is not a SubConfig of the configuration' % sub)
                else:
                    raise click.BadParameter('%s is not a field of the configuration' % sub)
            elif fields_to_set:
                dct = {}
                for field in fields_to_set:
                    field, _, value = field.partition('=')
                    dct[field] = value
                # save directly what was passed, skipping the interactive prompt
                config.__update__(dct)
            else:
                # or update all
                prompt_update_all(config)

    # this is the real function for the CLI
    LOGGER.debug('start command')
    command()
    LOGGER.debug('end command')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def configureLogger(logFolder, logFile):
    ''' Start the logger instance and configure it '''
    # Set debug level
    logLevel = 'DEBUG'
    logger = logging.getLogger()
    logger.setLevel(logLevel)
    # Format
    formatter = logging.Formatter('%(asctime)s - %(levelname)s | %(name)s -> %(message)s', '%Y-%m-%d %H:%M:%S')
    # Remove default handler to keep only clean one
    for hdlr in logger.handlers:
        logger.removeHandler(hdlr)
    # Create missing folder if needed
    # NOTE: 0700 is Python 2 octal syntax (0o700 on Python 3);
    # owner-only access to the log folder.
    if not os.path.exists(logFolder):
        os.makedirs(logFolder, 0700)
    #
    # ----------------------------
    # CREATE CONSOLE HANDLER
    # ----------------------------
    #
    # Create console handler
    consoleh = logging.StreamHandler()
    consoleh.setLevel(logLevel)
    consoleh.setFormatter(formatter)
    # Set our custom handler
    logger.addHandler(consoleh)
    #
    # ----------------------------
    # CREATE FILE HANDLER
    # ----------------------------
    #
    # Append to the log file rather than truncating it.
    fileh = logging.FileHandler(logFile, 'a')
    fileh.setLevel(logLevel)
    fileh.setFormatter(formatter)
    # Set our custom handler
    logger.addHandler(fileh)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def printWelcomeMessage(msg, place=10):
    ''' Print any welcome message, indented by ``place`` spaces and
    framed by two lines of asterisks. '''
    logging.debug('*' * 30)
    logging.debug(' ' * place + msg)
    logging.debug('*' * 30 + '\n')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def on_chat(self, data):
    ''' Transfer a message to everybody in the room. '''
    # XXX: we cannot use on_message, as sockjsroom already uses that
    # 'official' name to build its on_* dispatch; hence on_chat.
    if self.roomId == '-1':
        # Socket never joined a room; nothing to broadcast to.
        return
    payload = {
        'username': self.username,
        'time': datetime.now(),
        'message': str(data['message']),
    }
    self.publishToRoom(self.roomId, 'chat', payload)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def on_leave(self):
    ''' Quit the current chat room and reset socket state. '''
    # Guard: the user may disconnect before self.initialize had time to
    # run, in which case there is no room to leave.
    if self.roomId == '-1':
        return
    logging.debug('chat: leave room (roomId: %s)' % self.roomId)
    # Tell the other members that this user is leaving.
    self.publishToOther(self.roomId, 'leave', {
        'username': self.username
    })
    # Drop the sockjsroom binding to this room, then erase our data.
    self.leave(self.roomId)
    self.initialize()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(self, target, acceptor):
    """Initiate a connection from the tendril manager's endpoint.

    Once the connection is completed, a UDPTendril object is created
    and passed to the given acceptor.

    :param target: The target of the connection attempt.
    :param acceptor: A callable which will initialize the state of the
        new UDPTendril object.
    :returns: The tracked tendril, or None if the acceptor rejected
        the connection.
    """
    # Run the common sanity checks from the base class first.
    super(UDPTendrilManager, self).connect(target, acceptor, None)
    tendril = UDPTendril(self, self.local_addr, target)
    try:
        tendril.application = acceptor(tendril)
    except application.RejectConnection:
        # The acceptor refused the connection; drop the tendril.
        return None
    self._track_tendril(tendril)
    return tendril
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def listener(self, acceptor, wrapper):
    """
    Listens for new connections to the manager's endpoint.

    Once a new connection is received, a UDPTendril object is generated
    for it and it is passed to the acceptor, which must initialize the
    state of the connection. If no acceptor is given, no new
    connections can be initialized.

    :param acceptor: If given, specifies a callable that will be called
        with each newly received UDPTendril; that callable is
        responsible for initial acceptance of the connection and for
        setting up the initial state of the connection. If not given,
        no new connections will be accepted by the UDPTendrilManager.
    :param wrapper: A callable taking, as its first argument, a
        socket.socket object. The callable must return a valid proxy
        for the socket.socket object, which will subsequently be used
        to communicate on the connection.
    """
    # OK, set up the socket
    sock = socket.socket(self.addr_family, socket.SOCK_DGRAM)
    # SocketCloser closes the socket if set-up below fails.
    with utils.SocketCloser(sock):
        # Bind to our endpoint
        sock.bind(self.endpoint)
        # Get the assigned port number
        self.local_addr = sock.getsockname()
        # Call any wrappers
        if wrapper:
            sock = wrapper(sock)
        # Senders need the socket, too...
        self._sock = sock
        self._sock_event.set()
    # OK, now go into the listening loop with an error threshold
    # of 10 (RejectConnection is expected and doesn't count as an error)
    closer = utils.SocketCloser(sock, 10,
                                ignore=[application.RejectConnection])
    while True:
        with closer:
            data, addr = sock.recvfrom(self.recv_bufsize)
            # Look up the tendril or create a new one
            try:
                tend = self[(self.local_addr, addr)]
            except KeyError:
                if not acceptor:
                    # Can't accept new connections
                    continue
                # Construct a Tendril
                tend = UDPTendril(self, self.local_addr, addr)
                # Set up the application
                tend.application = acceptor(tend)
                # OK, let's track the tendril
                self._track_tendril(tend)
            # We now have a tendril; process the received data
            try:
                tend._recv_frameify(data)
            except Exception as exc:
                # Close the Tendril
                tend.close()
                # Notify the application what happened
                tend.closed(exc)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_lb_nodes(self, lb_id, nodes):
    """Adds nodes to an existing LBaaS instance.

    :param string lb_id: Balancer id
    :param list nodes: Nodes to add: {address, port, [condition]}
    :rtype: :class:`list`
    """
    log.info("Adding load balancer nodes %s" % nodes)
    path = '/loadbalancers/%s/nodes' % lb_id
    resp, body = self._request('post', path, data={'nodes': nodes})
    return body
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def match_lb_nodes(self, lb_id, existing_nodes, host_addresses, host_port):
    """
    Add and remove nodes to match the host addresses and port given,
    based on existing_nodes. HPCS doesn't allow a load balancer with no
    backends, so we'll add first, delete after.

    :param string lb_id: Load balancer id
    :param :class:`list` of :class:`dict` existing_nodes: Existing nodes
    :param :class:`list` host_addresses: Node host addresses
    :param string port: Node port
    """
    # A node is stale if its address is no longer wanted or its port changed.
    delete_filter = lambda n: \
        n['address'] not in host_addresses or \
        str(n['port']) != str(host_port)
    # NOTE: Python 2 semantics — filter() returns a list here
    # (it is re-iterated and len()-ed below).
    delete_nodes = filter(delete_filter, existing_nodes)
    delete_node_ids = [n['id'] for n in delete_nodes]
    delete_node_hosts = [n['address'] for n in delete_nodes]
    current_nodes = set([n['address'] for n in existing_nodes])
    current_nodes -= set(delete_node_hosts)
    # NOTE(review): this set subtraction assumes host_addresses is a set,
    # although the docstring says list — confirm at the call sites.
    add_nodes = host_addresses - current_nodes
    if add_nodes:
        nodes_to_add = [
            {'address': n, 'port': str(host_port)}
            for n in add_nodes
        ]
        args = (lb_id, nodes_to_add)
        self.add_lb_nodes(*args)
    # Delete after adding so the balancer never ends up with no backends.
    if delete_node_ids:
        args = (lb_id, delete_node_ids)
        self.remove_lb_nodes(*args)
    log.info("Were %d nodes. Added %d nodes; deleted %d nodes" %
             (len(existing_nodes), len(add_nodes), len(delete_nodes)))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def remove_lb_nodes(self, lb_id, node_ids):
    """Remove one or more nodes from a load balancer.

    :param string lb_id: Balancer id
    :param list node_ids: List of node ids
    """
    log.info("Removing load balancer nodes %s" % node_ids)
    for node_id in node_ids:
        path = '/loadbalancers/%s/nodes/%s' % (lb_id, node_id)
        self._request('delete', path)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def soup(self, *args, **kwargs):
    """Parse the currently loaded website.

    Positional and keyword arguments are forwarded to `SoupStrainer`
    so only the relevant parts of the page are parsed — useful when
    the website is complex or performance is a factor.
    <https://www.crummy.com/software/BeautifulSoup/bs4/doc/#soupstrainer>

    Returns:
        A `BeautifulSoup` object.

    Raises:
        NoWebsiteLoadedError: If no website is currently loaded.
        ParsingError: If the current response isn't supported by `bs4`.
    """
    if self._url is None:
        raise NoWebsiteLoadedError('website parsing requires a loaded website')
    content_type = self._response.headers.get('Content-Type', '')
    # bs4 only understands markup; reject e.g. JSON or images early.
    if 'html' not in content_type and 'xml' not in content_type:
        raise ParsingError('unsupported content type \'{}\''.format(content_type))
    return BeautifulSoup(self._response.content, self.parser,
                         parse_only=SoupStrainer(*args, **kwargs))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get(self, url, **kwargs):
    """Send a GET request to the specified URL.

    Thin wrapper around `Session.get` that also records the resulting
    URL and response on the browser.
    <http://docs.python-requests.org/en/master/api/#requests.get>

    Args:
        url: URL for the new `Request` object.
        **kwargs: Optional arguments that `Request` takes.

    Returns:
        `Response` object of a successful request.
    """
    self._response = self.session.get(url, **kwargs)
    self._url = self._response.url
    return self._response
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def post(self, **kwargs):
    """Send a POST request to the currently loaded website's URL.

    Form inputs found on the page are filled out automatically; values
    supplied through the ``data`` keyword argument take precedence over
    the automatically collected ones.

    Returns:
        `Response` object of a successful request.

    Raises:
        NoWebsiteLoadedError: If no website is currently loaded.
    """
    if self._url is None:
        raise NoWebsiteLoadedError('request submission requires a loaded website')
    form_data = kwargs.get('data', {})
    for field in self.soup('form').select('input[name]'):
        field_name = field.get('name')
        if field_name not in form_data:
            form_data[field_name] = field.get('value', '')
    kwargs['data'] = form_data
    response = self.session.post(self._url, **kwargs)
    self._url = response.url
    self._response = response
    return response
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def build_howto(request=None):
    """Render a user-friendly admin how-to section.

    Searches each installed app's directory for a "how_to.md" file and
    renders the collected documents (converted from Markdown to HTML)
    with the admin how-to template.

    :param request: The current HttpRequest.
    :return: The rendered HttpResponse.
    """
    how_tos = {}
    for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        app_dir = os.path.dirname(mod.__file__)
        how_to_file = os.path.join(app_dir, 'how_to.md')
        if os.path.exists(how_to_file):
            # Use a context manager so the file handle is closed promptly;
            # the original left closing to the garbage collector.
            with open(how_to_file) as fh:
                contents = fh.read()
            how_tos[app] = markdown.markdown(contents)
    return render(request, 'admin/how-to/index.html', {'how_tos': how_tos})
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def formfield_for_foreignkey_helper(inline, *args, **kwargs):
    """The implementation for ``RelatedContentInline.formfield_for_foreignkey``.

    Receives all of the ``args`` and ``kwargs`` given to
    ``formfield_for_foreignkey`` and returns them, possibly updated with an
    ``initial`` value for the ``related_type`` field, ready to be passed on
    to ``super``.  Kept as a free function purely because it is easier to
    test than the full ``GenericTabularInline`` machinery.
    """
    db_field = args[0]
    if db_field.name != "related_type":
        return args, kwargs
    initial_filter = getattr(settings, RELATED_TYPE_INITIAL_FILTER, False)
    if initial_filter and "initial" not in kwargs:
        # TODO: handle gracefully if unable to load and in non-debug
        kwargs["initial"] = RelatedType.objects.get(**initial_filter).pk
    return args, kwargs
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def usage(self):
    """A usage string describing the signature, e.g. ``<a> <b>``."""
    parts = [u'<%s>' % item.usage for item in self.patterns]
    return u' '.join(parts)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse(self, argv):
    """Parse ``argv`` and return a dict mapping argument names to the
    values found in it."""
    parsed = {}
    for item in self.patterns:
        item.apply(parsed, argv)
    return parsed
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_record(self, name, record_id):
    """Retrieve a cached record by type name and record id.

    Args:
        name (string): The name which the record is stored under.
        record_id (int): The id of the record requested.

    Returns:
        :class:`cinder_data.model.CinderModel`: The cached model, or
        ``None`` when no matching entry exists.
    """
    bucket = self._cache.get(name)
    if bucket is not None:
        return bucket.get(record_id)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_records(self, name):
    """Return all cached records stored under ``name``.

    Args:
        name (string): The name which the required models are stored under.

    Returns:
        list: A list of :class:`cinder_data.model.CinderModel` models
        (an empty list when nothing is cached under ``name``).
    """
    bucket = self._cache.get(name)
    return bucket.values() if bucket is not None else []
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_record(self, name, record_id, record):
    """Store ``record`` in the cache.

    Args:
        name (string): The name to save the model under.
        record_id (int): The record id.
        record (:class:`cinder_data.model.CinderModel`): The model.
    """
    self._cache.setdefault(name, {})[record_id] = record
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def hone_cache(maxsize=128, maxage=None, refineby='startswith', store_partials=False):
    """A caching decorator in the style of ``lru_cache``.

    Requests that are sharper than previous ones are answered from the
    cache by honing in on cached results with the `refineby` technique,
    which may be ``'startswith'``, ``'container'`` or a user-supplied
    function producing a subset of results.  If honing raises, the wrapped
    function is called as on a full miss.  `maxage` is an optional
    time-to-live in seconds; stale entries are lazily replaced.
    """
    finder = refineby if callable(refineby) else hone_cache_finders[refineby]

    def decorator(fn):
        wrapped = make_hone_cache_wrapper(fn, maxsize, maxage, finder,
                                          store_partials)
        return functools.update_wrapper(wrapped, fn)

    return decorator
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def make_hone_cache_wrapper(inner_func, maxsize, maxage, finder, store_partials):
    """Build the caching wrapper used by :func:`hone_cache`.

    Keeps a TTL-bounded cache of previous requests and, on a miss, tries
    to derive ("hone") an answer from a cached result for a shorter prefix
    of the request key using `finder`.  A top-level (empty-prefix) lookup
    effectively serves as a global cache.

    :param inner_func: wrapped function; its last positional argument is
        used as the cache key ("radix")
    :param maxsize: maximum number of cache entries
    :param maxage: optional time-to-live in seconds for each entry
    :param finder: callable(radix, partial_radix, partial_result) that
        refines a cached partial result down to the sharper request
    :param store_partials: when True, honed results are cached under the
        sharper key too
    """
    hits = misses = partials = 0
    cache = TTLMapping(maxsize, maxage)

    def wrapper(*args):
        nonlocal hits, misses, partials
        # Only the last positional argument participates in the cache key.
        radix = args[-1]
        # Attempt fast cache hit first.
        try:
            r = cache[radix]
        except KeyError:
            pass
        else:
            hits += 1
            return r
        # Walk progressively shorter prefixes of the key (sharpest first)
        # looking for a cached result that can be refined.
        for i in range(len(radix) - 1, -1, -1):
            partial_radix = radix[:i]
            try:
                partial = cache[partial_radix]
            except KeyError:
                continue
            try:
                r = finder(radix, partial_radix, partial)
            # NOTE(review): bare except also swallows KeyboardInterrupt /
            # SystemExit — consider `except Exception`.
            except:
                break  # Treat any exception as a miss.
            partials += 1
            if store_partials:
                cache[radix] = r
            return r
        misses += 1
        cache[radix] = r = inner_func(*args)
        return r

    def cache_info():
        """ Emulate lru_cache so this is a low touch replacement. """
        return HoneCacheInfo(hits, misses, maxsize, len(cache), maxage,
                             partials, finder)

    def cache_clear():
        """ Clear cache and stats. """
        nonlocal hits, misses, partials
        hits = misses = partials = 0
        cache.clear()

    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return functools.update_wrapper(wrapper, inner_func)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def ttl_cache(maxage, maxsize=128):
    """A time-to-live caching decorator in the style of ``lru_cache``.

    `maxage` is the time-to-live in seconds for each cached result; stale
    entries are lazily replaced.
    """
    def decorator(fn):
        return functools.update_wrapper(
            make_ttl_cache_wrapper(fn, maxage, maxsize), fn)
    return decorator
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def make_ttl_cache_wrapper(inner_func, maxage, maxsize, typed=False):
    """Build the caching wrapper used by :func:`ttl_cache`.

    Uses the call signature as a key into a TTL mapping.  Misses defer to
    the wrapped function and the result is stored for future calls.

    :param inner_func: the function being cached
    :param maxage: time-to-live in seconds for each entry
    :param maxsize: maximum number of cache entries
    :param typed: when True, arguments of different types are cached
        separately (mirrors ``lru_cache``'s ``typed``)
    """
    hits = misses = 0
    cache = TTLMapping(maxsize, maxage)

    def wrapper(*args, **kwargs):
        nonlocal hits, misses
        # NOTE(review): functools._make_key is a private CPython helper —
        # matches lru_cache's keying but carries no stability guarantee.
        key = functools._make_key(args, kwargs, typed)
        try:
            result = cache[key]
        except KeyError:
            misses += 1
            result = cache[key] = inner_func(*args, **kwargs)
        else:
            hits += 1
        return result

    def cache_info():
        """ Emulate lru_cache so this is a low touch replacement. """
        return TTLCacheInfo(hits, misses, maxsize, len(cache), maxage)

    def cache_clear():
        """ Clear cache and stats. """
        nonlocal hits, misses
        hits = misses = 0
        cache.clear()

    wrapper.cache_info = cache_info
    wrapper.cache_clear = cache_clear
    return functools.update_wrapper(wrapper, inner_func)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run(command, options, args):
    """Run the requested command.

    ``args`` is either a list of task descriptions (for ``add``) or a
    list of strings to filter existing tasks by (for the other commands).

    The original used Python 2 print statements, which are syntax errors
    under Python 3; they are converted to print() calls here, and the
    side-effect-only list comprehension is replaced with a plain loop.
    """
    if command == "backend":
        subprocess.call(("sqlite3", db_path))
    if command == "add":
        dp = pdt.Calendar()
        due = mktime(dp.parse(options.due)[0]) if options.due else None
        print("added tasks...")
        for desc in args:
            Task(desc, due).add()
        return
    filters = args if len(args) else None
    rows = Query(filters, options).find()
    tasks = [Task(r["desc"], r["due"]) for r in rows]
    if command == "list":
        for t in tasks:
            print("\t *", t)
    if command == "done":
        print("done with...")
        # done() is called on every task; only those reporting success
        # are listed back to the user.
        finished_tasks = [t for t in tasks if t.done()]
        if not finished_tasks:
            return
        print("")
        print("finished tasks:")
        for t in finished_tasks:
            print("\t X", t)
    if command == "remove":
        print("remove...")
        removed_tasks = [t for t in tasks if t.remove()]
        if not removed_tasks:
            return
        print("")
        print("removed tasks:")
        for t in removed_tasks:
            print("\t RM", t)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def mark_read(self):
    """Mark notifications as read.

    CURRENTLY UNSUPPORTED:
    https://github.com/kippt/api-documentation/blob/master/endpoints/notifications/POST_notifications.md

    Raises:
        NotImplementedError: always, until Kippt's API supports it.
    """
    # Obviously remove the exception when Kippt says they support it.
    raise NotImplementedError(
        "The Kippt API does not yet support marking notifications as read."
    )
    # The unreachable code below is kept deliberately: it is the intended
    # implementation to enable once the endpoint goes live.
    data = json.dumps({"action": "mark_seen"})
    r = requests.post(
        "https://kippt.com/api/notifications",
        headers=self.kippt.header,
        data=data
    )
    return (r.json())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_programs():
    """Yield the names of executable programs found on the PATH.

    The current working directory is searched too; it is appended to
    ``os.environ['PATH']`` (at most once) so later subprocess calls can
    resolve the same programs.  The original appended unconditionally,
    growing PATH on every call, and built an unused ``programs`` list.

    :returns: a generator that yields the available executable programs
    :rtype: generator
    """
    cwd = os.getcwd()
    path_dirs = os.environ['PATH'].split(os.pathsep)
    if cwd not in path_dirs:
        os.environ['PATH'] += os.pathsep + cwd
        path_dirs.append(cwd)
    for p in path_dirs:
        if path.isdir(p):
            for f in os.listdir(p):
                if _is_executable(path.join(p, f)):
                    yield f
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _underscore_run_program(name, *args, **kwargs):
    """Run the ``name`` program even when its name is not a legal Python
    identifier.

    :raises ProgramNotFoundException: when the program is not on the PATH
        and shell execution was not requested.
    """
    runnable = name in get_programs() or kwargs.get("shell", False)
    if not runnable:
        raise ProgramNotFoundException()
    return _run_program(name, *args, **kwargs)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def refresh_listing():
    """Refresh the programs attached to the perform module from the PATH.

    Every program whose name is a valid Python identifier becomes a
    module-level callable; ``_`` handles the rest.
    """
    valid_name = re.compile(r'^[a-zA-Z_][a-zA-Z_0-9]*$')
    module_ns = globals()
    for program in get_programs():
        if valid_name.match(program) is not None:
            module_ns[program] = partial(_run_program, program)
    module_ns["_"] = _underscore_run_program
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load(path=None):
    """Load the locally compiled lemmatizer data.

    :param path: Directory containing ``compiled.pickle``; defaults to the
        ``data`` directory next to this module.
    :return: The unpickled object.

    The original recursed into this same function with an open file
    object (a guaranteed TypeError); unpickling the payload is what was
    intended for a ``compiled.pickle`` file.
    """
    import pickle
    if path is None:
        path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data")
    path = os.path.join(path, "compiled.pickle")
    # NOTE: pickle is only safe on trusted, locally produced data.
    with open(path, "rb") as file:
        return pickle.load(file)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _lemmatise_assims(self, f, *args, **kwargs):
""" Lemmatise un mot f avec son assimilation :param f: Mot à lemmatiser :param pos: Récupère la POS :param get_lemma_object: Retrieve Lemma object instead of string representation of lemma :param results: Current results """ |
forme_assimilee = self.assims(f)
if forme_assimilee != f:
for proposal in self._lemmatise(forme_assimilee, *args, **kwargs):
yield proposal |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _lemmatise_roman_numerals(self, form, pos=False, get_lemma_object=False):
    """Lemmatize ``form`` when it is a roman numeral.

    :param form: Word to lemmatize
    :param pos: Include the POS in the result
    :param get_lemma_object: Yield the Lemma object instead of the string
        representation of the lemma
    """
    if estRomain(form):
        # Build an on-the-fly invariable lemma for the numeral.
        _lemma = Lemme(
            cle=form, graphie_accentuee=form, graphie=form, parent=self, origin=0, pos="a",
            modele=self.modele("inv")
        )
        yield Lemmatiseur.format_result(
            form=form,
            lemma=_lemma,
            with_pos=pos,
            raw_obj=get_lemma_object
        )
    # Retry with the uppercased form — presumably estRomain only accepts
    # uppercase numerals (TODO confirm), so lowercase input is retried.
    if form.upper() != form:
        yield from self._lemmatise_roman_numerals(form.upper(), pos=pos, get_lemma_object=get_lemma_object)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _lemmatise_contractions(self, f, *args, **kwargs):
    """Lemmatize the word ``f`` with its contraction expanded.

    :param f: Word to lemmatize
    :yield: Matches formatted like in ``_lemmatise``
    """
    fd = f
    for contraction, decontraction in self._contractions.items():
        if fd.endswith(contraction):
            fd = f[:-len(contraction)]
            # Only deramise the expansion when the stem contains no
            # ramist letter 'v'/'V'.
            if "v" in fd or "V" in fd:
                fd += decontraction
            else:
                fd += deramise(decontraction)
            # NOTE(review): `fd` carries the expanded form into later loop
            # iterations, so a second contraction is tested against the
            # already-expanded word — confirm this is intended.
            yield from self._lemmatise(fd, *args, **kwargs)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _lemmatise_suffixe(self, f, *args, **kwargs):
""" Lemmatise un mot f si il finit par un suffixe :param f: Mot à lemmatiser :yield: Match formated like in _lemmatise() """ |
for suffixe in self._suffixes:
if f.endswith(suffixe) and suffixe != f:
yield from self._lemmatise(f[:-len(suffixe)], *args, **kwargs) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(host, port, username, password):
    """Open an FTP connection, log in, and return the ftplib.FTP object."""
    ftp = ftplib.FTP()
    # Connect first without credentials, then authenticate.
    ftp.connect(host, port)
    ftp.login(username, password)
    return ftp
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get(self, remote, local=None, keep_dir_structure=False):
    """Download a remote file from the FTP server into a local directory.

    :param remote: File path of the remote source file
    :param local: Local destination directory, or a file name to rename
        the downloaded file to
    :param keep_dir_structure: If True, replicates the remote file's
        folder structure locally
    :return: FTP transfer response string
    """
    if local and os.path.isdir(local):
        os.chdir(local)
    elif keep_dir_structure:
        # Replicate the remote file's folder structure locally, descending
        # into each path segment as it is (possibly) created.
        for directory in remote.split(os.sep)[:-1]:
            if not os.path.isdir(directory):
                os.mkdir(directory)
            os.chdir(directory)
    # Change to the correct remote directory if remote is a path, not
    # just a bare file name.
    if os.sep in remote:
        directory, file_name = remote.rsplit(os.sep, 1)
        self.chdir(directory)
    else:
        file_name = remote
    # Download the file and get response
    response = self._retrieve_binary(file_name)
    # Rename the downloaded file if local is a file-name string
    if local and isinstance(local, str):
        os.rename(file_name, local)
    return response
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def chdir(self, directory_path, make=False):
    """Change remote directories, optionally creating missing segments.

    :param directory_path: Single directory name or an os.sep-joined path.
    :param make: When True, create each missing directory along the way.
    """
    if os.sep not in directory_path:
        self.session.cwd(directory_path)
        return
    for segment in directory_path.split(os.sep):
        if make and not self.directory_exists(segment):
            try:
                self.session.mkd(segment)
            except ftplib.error_perm:
                pass  # Directory already exists
        self.session.cwd(segment)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def listdir(self, directory_path=None, hidden_files=False):
    """Return a list of files and directories in a given directory.

    :param directory_path: Optional str (defaults to current directory)
    :param hidden_files: Include dot-prefixed entries when True
    :return: Directory listing
    """
    if directory_path:
        self.chdir(directory_path)
    listing = self.session.nlst()
    if hidden_files:
        return listing
    return [entry for entry in listing if not entry.startswith('.')]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delete(self, file_path):
    """Remove the named file from the server, changing to its directory
    first when ``file_path`` contains one."""
    if os.sep not in file_path:
        return self.session.delete(file_path)
    directory, file_name = file_path.rsplit(os.sep, 1)
    self.chdir(directory)
    return self.session.delete(file_name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _retrieve_binary(self, file_name):
"""Retrieve a file in binary transfer mode.""" |
with open(file_name, 'wb') as f:
return self.session.retrbinary('RETR ' + file_name, f.write) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _store_binary(self, local_path, remote):
"""Store a file in binary via ftp.""" |
# Destination directory
dst_dir = os.path.dirname(remote)
# Destination file name
dst_file = os.path.basename(remote)
# File upload command
dst_cmd = 'STOR {0}'.format(dst_file)
with open(local_path, 'rb') as local_file:
# Change directory if needed
if dst_dir != dst_file:
self.chdir(dst_dir, make=True)
# Upload file & return response
return self.session.storbinary(dst_cmd, local_file) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def clean_new_password2(self):
    """Validate the new password when either password field is set.

    Raises a mismatch ValidationError when the two fields differ, and
    runs the configured password validators otherwise.
    """
    password1 = self.cleaned_data.get('new_password1')
    password2 = self.cleaned_data.get('new_password2')
    if not (password1 or password2):
        return password2
    if password1 != password2:
        raise forms.ValidationError(
            self.error_messages['password_mismatch'],
            code='password_mismatch',
        )
    password_validation.validate_password(password2, self.instance)
    return password2
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _run_command(self, arguments: List[str], input_data: Any=None, output_encoding: str="utf-8") -> str: """ Run a command as a subprocess. Ignores errors given over stderr if there is output on stdout (this is the case where baton has been run correctly and has expressed the error in it's JSON out, which can be handled more appropriately upstream to this method.) :param arguments: the arguments to run :param input_data: the input data to pass to the subprocess :param output_encoding: optional specification of the output encoding to expect :return: the process' standard out """ |
process = subprocess.Popen(arguments, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
if isinstance(input_data, List):
for to_write in input_data:
to_write_as_json = json.dumps(to_write)
process.stdin.write(str.encode(to_write_as_json))
input_data = None
else:
input_data = str.encode(json.dumps(input_data))
timeout_in_seconds = self.timeout_queries_after.total_seconds() if self.timeout_queries_after is not None \
else None
out, error = process.communicate(input=input_data, timeout=timeout_in_seconds)
if len(out) == 0 and len(error) > 0:
raise RuntimeError(error)
return out.decode(output_encoding).rstrip() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def collection_choices():
    """Return collection choices for a select field, with a '-None-'
    entry first."""
    from invenio_collections.models import Collection
    choices = [(0, _('-None-'))]
    choices.extend((c.id, c.name) for c in Collection.query.all())
    return choices
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _query_api(self, endpoint, **kwargs):
    """Query the remote API and return the decoded JSON payload.

    :param endpoint: API endpoint path to query.
    :param kwargs: Query-string parameters.
    :raises ServiceUnavailable: when the request cannot be made at all.
    :raises NoResults: on a 404 response.
    :raises ServerException: on any other HTTP error status.
    """
    try:
        # NOTE(review): urllib.urlencode is the Python 2 location; on
        # Python 3 this needs urllib.parse.urlencode — confirm the
        # project's target version.
        response = requests.get(
            '{api}/{endpoint}?{args}'.format(
                api=self.url,
                endpoint=endpoint,
                args=urllib.urlencode(kwargs)),
            headers={
                'Authorization': 'Token {token}'.format(token=self.token)},
            timeout=self.timeout)
    except requests.RequestException as err:
        raise ServiceUnavailable(err)
    # 404 is treated as "no results" rather than a server failure.
    if response.status_code == 404:
        raise NoResults()
    try:
        response.raise_for_status()
    except requests.HTTPError as http_err:
        raise ServerException(http_err)
    return response.json()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generate_random_string(template_dict, key='start'):
    """Generate a random string from a simple template dict.

    Based off of drow's generator.js (public domain):
    http://donjon.bin.sh/code/random/generator.js

    Tokens of the form ``{name}`` in the chosen template are recursively
    expanded using the entry stored under ``name``.

    Args:
        template_dict: Dict with template strings.
        key: String with the starting index for the dict. (Default: 'start')

    Returns:
        Generated string.
    """
    # The commented-out isinstance branches in the original were dead
    # code and have been removed.
    result = random.choice(template_dict.get(key))
    for match in token_regex.findall(result):
        word = generate_random_string(template_dict, match) or match
        result = result.replace('{{{0}}}'.format(match), word)
    return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def bofh_excuse(how_many=1):
    """Generate random BOFH-themed technical excuses!

    Args:
        how_many: Number of excuses to generate. (Default: 1)

    Returns:
        A list of BOFH excuses.
    """
    excuse_path = os.path.join(os.path.dirname(__file__), 'bofh_excuses.json')
    with open(excuse_path, 'r') as handle:
        templates = json.load(handle)
    return [generate_random_string(templates) for _ in range(int(how_many))]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_current_waypoints(boatd=None):
    '''
    Return the set of waypoints currently active in boatd.

    :param boatd: Optional Boatd connection; a default one is created
        when omitted.
    :returns: The current waypoints
    :rtype: List of Points
    '''
    client = boatd if boatd is not None else Boatd()
    content = client.get('/waypoints')
    return [Point(*coords) for coords in content.get('waypoints')]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_home_position(boatd=None):
    '''
    Return the configured home position from boatd, or None when no home
    position is set.

    :returns: The configured home position
    :rtype: Point or None
    '''
    client = boatd if boatd is not None else Boatd()
    home = client.get('/waypoints').get('home', None)
    if home is None:
        return None
    lat, lon = home
    return Point(lat, lon)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get(self, endpoint):
    '''Return the decoded JSON body of a GET request to `endpoint` on
    boatd.'''
    raw = urlopen(self.url(endpoint)).read()
    return json.loads(raw.decode('utf-8'))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def post(self, content, endpoint=''):
    '''
    Issue a POST request with `content` (JSON-encoded) as the body to
    `endpoint` and return the decoded response.
    '''
    body = json.dumps(content).encode('utf-8')
    request = Request(self.url(endpoint), body,
                      {'Content-Type': 'application/json'})
    return json.loads(urlopen(request).read().decode('utf-8'))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def wind(self):
    '''
    Return the current wind information.

    :returns: wind object containing direction bearing and speed
    :rtype: Wind
    '''
    data = self._cached_boat.get('wind')
    absolute = Bearing(data.get('absolute'))
    apparent = Bearing(data.get('apparent'))
    return Wind(absolute, data.get('speed'), apparent)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def position(self):
    '''
    Return the current position of the boat.

    :returns: current position
    :rtype: Point
    '''
    lat, lon = self._cached_boat.get('position')
    return Point(lat, lon)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_rudder(self, angle):
    '''
    Set the angle of the rudder to `angle` degrees.

    :param angle: rudder angle
    :type angle: float between -90 and 90
    '''
    payload = {'value': float(angle)}
    response = self.boatd.post(payload, '/rudder')
    return response.get('result')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_sail(self, angle):
    '''
    Set the angle of the sail to `angle` degrees.

    :param angle: sail angle
    :type angle: float between -90 and 90
    '''
    payload = {'value': float(angle)}
    response = self.boatd.post(payload, '/sail')
    return response.get('result')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def start(self, name):
    '''
    End the current behaviour and run a named behaviour.

    :param name: the name of the behaviour to run
    :type name: str
    '''
    response = self.boatd.post({'active': name}, endpoint='/behaviours')
    current = response.get('active')
    if current is None:
        return 'no behaviour running'
    return 'started {}'.format(current)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def register_types(name, *types):
    """Register a short name for one or more content types."""
    bucket = type_names.setdefault(name, set())
    for content_type in types:
        # If the type was registered under a different name, detach it.
        if content_type in media_types:
            type_names[media_types[content_type]].discard(content_type)
        # Record the mapping in both directions.
        media_types[content_type] = name
        bucket.add(content_type)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_types(self):
    """Return the set of all content types recognized by this translator
    object."""
    content_types = set()
    for translator_name in self.translators:
        content_types.update(type_names[translator_name])
    return content_types
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
async def get_search_page(self, term: str):
    """Return the first result link for a search on ``term``.

    :param term: Light Novel to Search For
    :raises aiohttp.ClientResponseError: when the search request does not
        return 200 OK
    """
    # Build the search query against BASEURL for the given term.
    params = {'s': term, 'post_type': 'seriesplan'}
    async with self.session.get(self.BASEURL, params=params) as response:
        if response.status != 200:
            raise aiohttp.ClientResponseError(response.status)
        page = BeautifulSoup(await response.text(), 'lxml')
        return page.find('a', class_='w-blog-entry-link').get('href')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_aliases(parse_info):
"""get aliases from parse info. :param parse_info: Parsed info from html soup. """ |
return [
div.string.strip()
for div in parse_info.find('div', id='editassociated')
if div.string is not None
] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_related_series(parse_info):
    """Extract the "Related Series" entries from parsed page info.

    :param parse_info: Parsed info from html soup.
    :return: List of related-series strings, or ``None`` when the section
        only contains 'N/A'.
    :raises ValueError: when the tags between the section headers don't
        follow the expected tag/text alternation.
    """
    seriesother_tags = [x for x in parse_info.select('h5.seriesother')]
    sibling_tag = [x for x in seriesother_tags if x.text == 'Related Series'][0]
    siblings_tag = list(sibling_tag.next_siblings)
    # filter valid tag
    # valid tag is all tag before following tag
    # <h5 class="seriesother">Recommendations</h5>
    valid_tag = []
    keypoint_found = False
    for x in siblings_tag:
        # Flip the flag once the next section header is reached; nothing
        # after it is collected.
        if x.name == 'h5' and x.attrs['class'] == ['seriesother']:
            keypoint_found = True
        # Strings (which expose .strip) are kept only when non-blank;
        # other siblings are kept unconditionally.
        # NOTE(review): `x.strip is not None` presumably distinguishes
        # NavigableStrings from Tags — confirm Tags reach this safely.
        if not keypoint_found and x.strip is not None:
            if x.strip():
                valid_tag.append(x)
        elif not keypoint_found:
            valid_tag.append(x)
    # only one item found and it is 'N/A'
    if len(valid_tag) == 1:
        if valid_tag[0].strip() == 'N/A':
            return None
    # items alternate between bs4 tags and plain text;
    # merge each pair and return them as a list of strings
    if len(valid_tag) % 2 == 0:
        zipped_list = zip(valid_tag[::2], valid_tag[1::2])
        result = []
        for x in zipped_list:
            result.append('{} {}'.format(x[0].text, x[1]))
        return result
    raise ValueError("Valid tag isn't recognizeable.\n{}".format("\n".join(valid_tag)))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_fm_ext(self, freq, amplitude, peak_freq_dev=None, output_state=True):
    """Configure frequency modulation with an external modulation source.

    freq is the carrier frequency in Hz; peak_freq_dev defaults to freq.
    The assembled SCPI string is logged and then written to the instrument.
    """
    if peak_freq_dev is None:
        peak_freq_dev = freq
    commands = [
        'FUNC SIN',                           # sine carrier
        'FM:STAT ON',                         # enable frequency modulation
        'FREQ {0}'.format(freq),
        'FM:SOUR EXT',                        # external modulation source
        'FM:DEV {0}'.format(peak_freq_dev),
        'VOLT {0}'.format(amplitude),
        'VOLT:OFFS 0',
    ]
    commands.append('OUTP ON' if output_state is True else 'OUTP OFF')
    command_string = '\n'.join(commands)
    logging.info('\n\t' + command_string.replace('\n', '\n\t'))
    self.instr.write(command_string)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_burst(self, freq, amplitude, period, output_state=True):
    """Configure externally-triggered burst mode.

    The number of cycles per burst is derived from period * freq.
    The assembled SCPI string is logged and then written to the instrument.
    """
    cycles = int(period * freq)
    commands = [
        'FUNC SIN',
        'BURS:STAT ON',
        'BURS:MODE TRIG',   # triggered burst mode
        'TRIG:SOUR EXT',    # trigger comes from the external input
        'TRIG:SLOP POS',
        'FREQ {0}'.format(freq),
        'VOLT {0}'.format(amplitude),
        'VOLT:OFFS 0',
        'BURS:NCYC {0}'.format(cycles),
    ]
    commands.append('OUTP ON' if output_state is True else 'OUTP OFF')
    command_string = '\n'.join(commands)
    logging.info('\n\t' + command_string.replace('\n', '\n\t'))
    self.instr.write(command_string)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_arbitrary(self, freq, low_volt, high_volt, output_state=True):
    """Program the generator to replay the stored arbitrary (user) waveform.

    Burst, sweep and FM modes are explicitly disabled first.
    The assembled SCPI string is logged and then written to the instrument.
    """
    commands = [
        'FUNC USER',        # select the user-defined waveform
        'BURS:STAT OFF',
        'SWE:STAT OFF',
        'FM:STAT OFF',
        'FREQ {0}'.format(freq),
        'VOLT:HIGH {0}'.format(high_volt),
        'VOLT:LOW {0}'.format(low_volt),
    ]
    commands.append('OUTP ON' if output_state is True else 'OUTP OFF')
    command_string = '\n'.join(commands)
    logging.info('\n\t' + command_string.replace('\n', '\n\t'))
    self.instr.write(command_string)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_continuous(self, freq, amplitude, offset, output_state=True):
    """Program the Stanford MW generator to output a continuous sine wave.

    External 'triggering' is accomplished using the MW switch.
    NOTE(review): frequencies between 62.5 MHz and 4.05 GHz set only
    'MODL 0' and 'FREQ' -- presumably handled elsewhere; confirm.
    """
    commands = ['MODL 0']  # disable any modulation
    commands.append('FREQ {0}'.format(freq))
    if freq > 4.05e9:
        # Rear RF-doubler output; AC coupled, so a DC offset is impossible.
        commands.append('AMPH {0}'.format(amplitude))
        if offset > 0.0:
            print('HIGH FREQUENCY OUTPUT IS AC ONLY')
        commands.append('ENBH 1' if output_state is True else 'ENBH 0')
    elif freq < 62.5e6:
        # Front-panel BNC output; supports amplitude and DC offset.
        commands.append('AMPL {0}'.format(amplitude))
        commands.append('OFSL {0}'.format(offset))
        commands.append('ENBL 1' if output_state is True else 'ENBL 0')
    command_string = '\n'.join(commands)
    logging.info('\n\t' + command_string.replace('\n', '\n\t'))
    self.instr.write(command_string)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_freqsweep_ext(self, amplitude, sweep_low_end, sweep_high_end, offset=0.0, output_state=True):
    """Configure the Stanford MW generator for an externally-driven sweep.

    The carrier is centered between the sweep end points and the deviation
    is half their span (rounded to 6 decimal places).
    """
    deviation = round(abs(sweep_low_end - sweep_high_end) / 2.0, 6)
    carrier = sweep_low_end + deviation
    commands = [
        'TYPE 3',    # sweep mode
        'SFNC 5',    # external modulation function
        'FREQ {0}'.format(carrier),
        'SDEV {0}'.format(deviation),
        'MODL 1',    # enable modulation
    ]
    if carrier > 4.05e9:
        # Rear RF-doubler output; AC coupled, no DC offset possible.
        commands.append('AMPH {0}'.format(amplitude))
        if offset > 0.0:
            print('HIGH FREQUENCY OUTPUT IS AC ONLY')
        commands.append('ENBH 1' if output_state is True else 'ENBH 0')
    elif carrier < 62.5e6:
        # Front-panel BNC output; supports amplitude and DC offset.
        commands.append('AMPL {0}'.format(amplitude))
        commands.append('OFSL {0}'.format(offset))
        commands.append('ENBL 1' if output_state is True else 'ENBL 0')
    command_string = '\n'.join(commands)
    logging.info('\n\t' + command_string.replace('\n', '\n\t'))
    self.instr.write(command_string)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def disable_all(self, disable):
    """Disable all modulation and outputs of the Stanford MW generator.

    The SCPI string is always logged, but only written to the instrument
    when *disable* is truthy.
    """
    command_string = '\n'.join([
        'ENBH 0',   # disable high-freq rear output
        'ENBL 0',   # disable low-freq front BNC
        'MODL 0',   # disable modulation
    ])
    logging.info('\n\t' + command_string.replace('\n', '\n\t'))
    if disable:
        self.instr.write(command_string)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_continuous(self, freq, amplitude, offset, phase, channel=2):
    """Program channel *channel* to output a continuous sine wave.

    Uses the combined APPLy command: frequency, amplitude, offset, phase.
    """
    command_string = ':SOUR{0}:APPL:SIN {1},{2},{3},{4}'.format(
        channel, freq, amplitude, offset, phase)
    logging.info(command_string)
    self.instr.write(command_string)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_requires(__fname: str) -> List[str]:
    """Parse ``pip``-style requirements files.

    This is a deliberately naïve parser: it handles comments, blank lines,
    ``-r`` includes and simple ``python_version`` environment markers only.

    Args:
        __fname: Base file to parse
    Returns:
        Parsed dependencies
    Raises:
        ValueError: for an environment marker it does not understand
    """
    deps = []
    with open(__fname) as req_file:
        # Drop comments and surrounding whitespace up front.
        entries = [line.split('#')[0].strip() for line in req_file.readlines()]
        for entry in entries:
            if not entry:
                continue
            if entry.startswith('-r '):
                # Nested requirements file; bare names resolve next to us.
                include = entry.split()[1]
                if '/' not in include:
                    include = path.join(path.dirname(__fname), include)
                deps.extend(parse_requires(include))
                continue
            if ';' in entry:
                entry, marker = [part.strip() for part in entry.split(';')]
                # Support for other markers will be added when they’re
                # actually found in the wild.
                match = re.fullmatch(r"""
                    (?:python_version)                       # Supported markers
                    \s*
                    (?:<=?|==|>=?)                           # Supported comparisons
                    \s*
                    (?P<quote>(?:'|"))(?:[\d\.]+)(?P=quote)  # Test
                    """, marker, re.VERBOSE)
                if not match:
                    raise ValueError('Invalid marker {!r}'.format(marker))
                # Evaluate the (pre-validated) marker with an empty builtins
                # namespace and only python_version in scope.
                env = {
                    '__builtins__': {},
                    'python_version': '{}.{}'.format(*version_info[:2]),
                }
                if not eval(marker, env):  # pylint: disable=eval-used
                    continue
            deps.append(entry)
    return deps
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_email(filepaths, collection_name):
    """Create an email message object which implements the
    email.message.Message interface and which has the files to be shared
    attached to it.

    Args:
        filepaths: iterable of paths to attach; non-files are skipped.
        collection_name: currently unused -- kept for interface
            compatibility with callers.
    Returns:
        A MIMEMultipart message: an intro text part followed by one
        attachment part per existing file.
    """
    outer = MIMEMultipart()
    outer.preamble = 'Here are some files for you'

    def add_file_to_outer(path):
        # Skip anything that is not a regular file.
        if not os.path.isfile(path):
            return
        # Guess the content type based on the file's extension. Encoding
        # will be ignored, although we should check for simple things like
        # gzip'd or compressed files.
        ctype, encoding = mimetypes.guess_type(path)
        if ctype is None or encoding is not None:
            # No guess could be made, or the file is encoded (compressed),
            # so use a generic bag-of-bits type.
            ctype = 'application/octet-stream'
        maintype, subtype = ctype.split('/', 1)
        # Fix: use context managers so file handles are closed even when
        # the MIME constructors or read() raise (the original leaked them).
        if maintype == 'image':
            with open(path, 'rb') as fp:
                msg = MIMEImage(fp.read(), _subtype=subtype)
        elif maintype == 'audio':
            with open(path, 'rb') as fp:
                msg = MIMEAudio(fp.read(), _subtype=subtype)
        elif maintype == 'text':
            # Catch cases where text files have an encoding we can't
            # guess correctly and fall back to a base64 binary part.
            try:
                with open(path, 'r') as fp:
                    msg = MIMEText(fp.read(), _subtype=subtype)
            except UnicodeDecodeError:
                with open(path, 'rb') as fp:
                    msg = MIMEBase(maintype, subtype)
                    msg.set_payload(fp.read())
                encoders.encode_base64(msg)
        else:
            with open(path, 'rb') as fp:
                msg = MIMEBase(maintype, subtype)
                msg.set_payload(fp.read())
            # Encode the payload using Base64.
            encoders.encode_base64(msg)
        # Set the filename parameter.
        msg.add_header('Content-Disposition', 'attachment',
                       filename=os.path.basename(path))
        outer.attach(msg)

    outer.attach(MIMEText("Here are some files I've thrown at you."))
    for path in filepaths:
        add_file_to_outer(path)
    return outer
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def enter_config_value(self, key, default=""):
    """Prompt the user for a configuration value.

    Returns the entered text, or *default* when the user enters nothing.
    """
    entered = input('Please enter a value for ' + key + ': ')
    return entered if entered else default
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _run_introspection(self, runtime='', whitelist=[], verbose=False):
""" Figure out which objects are opened by a test binary and are matched by the white list. :param runtime: The binary to run. :type runtime: str :param whitelist: A list of regular expressions describing acceptable library names :type whitelist: [str] """ |
found_objects = set()
try:
# Retrieve list of successfully opened objects
strace = subprocess.Popen(['strace', runtime], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
(_, stderr) = strace.communicate()
opened_objects = set()
for line in stderr.split('\n'):
if 'open' in line and 'ENOENT' not in line:
start = line.index('"')
end = line.index('"', start + 1)
opened_objects.add(line[start + 1:end])
# filter opened objects through white list.
for obj in opened_objects:
for wl in whitelist:
m = re.match('.*' + wl + '[\..*]?', obj)
if m:
found_objects.add(obj)
if verbose:
print('Found whitelisted {} at path {}'.format(wl, obj))
continue
except Exception as e:
print e
return found_objects |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __get_container_path(self, host_path):
    """A simple helper function to determine the path of a host library
    inside the container.

    :param host_path: The path of the library on the host
    :type host_path: str
    """
    # Keep only the file name and re-root it at the container lib dir.
    return os.path.join(_container_lib_location, os.path.basename(host_path))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def inside_softimage():
    """Return a boolean indicating if the code is executed inside Softimage.

    If ``maya`` imports, we are definitely not in Softimage. Otherwise we
    probe for a live 'XSI.Application' COM object; any failure there means
    we are outside Softimage.
    """
    try:
        import maya  # noqa: F401 -- presence check only
        return False
    except ImportError:
        pass
    try:
        from win32com.client import Dispatch as disp
        disp('XSI.Application')
        return True
    # Fix: the original bare `except:` also swallowed SystemExit and
    # KeyboardInterrupt; narrow to Exception while keeping best-effort
    # semantics (any COM/import failure means "not inside Softimage").
    except Exception:
        return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_item(self, path, name, icon=None, url=None, order=None, permission=None, active_regex=None):
    """Add a new menu item to the menu, creating intermediate nodes.

    :param path: Path of menu
    :param name: Display name
    :param icon: CSS icon
    :param url: link to page
    :param order: Sort order
    :param permission: required permission
    :param active_regex: regex marking the item active
    """
    if self.root_item is None:
        self.root_item = MenuItem('ROOT', 'ROOT')
    parent = self.root_item
    current_path = ''
    # Walk every segment except the last, creating missing nodes on demand.
    for segment in path.split('/')[:-1]:
        if not segment:
            continue
        current_path += '/' + segment
        node = parent.child_by_code(segment)
        if not node:
            # Auto-created intermediate entries get a capitalized name.
            node = MenuItem(current_path, name=str(segment).capitalize())
            parent.add_child(node)
        parent = node
    new_item = MenuItem(path, name, icon, url, order, permission, active_regex)
    existing = parent.child_by_code(path.split('/')[-1])
    if existing:
        existing.merge(new_item)
    else:
        parent.add_child(new_item)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def merge(self, item):
    """Merge another menu item's data into this one.

    The name is always taken from *item*; the remaining attributes are
    only overwritten when *item* provides a truthy value.
    """
    self.name = item.name
    for attr in ('icon', 'url', 'order', 'permission'):
        value = getattr(item, attr)
        if value:
            setattr(self, attr, value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_child(self, item):
    """Attach *item* as a child one level deeper, keeping children sorted.

    Children without an order (or order 0) sort after explicit orders,
    using 999 as the fallback key.
    """
    item.depth = self.depth + 1
    self.childs.append(item)
    # Stable in-place sort preserves insertion order for equal keys.
    self.childs.sort(key=lambda child: child.order or 999)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def child_by_code(self, code):
    """Look up a direct child whose last path segment equals *code*.

    :param code: last segment of the child's path
    :return: the matching MenuItem, or None when no child matches
    """
    return next(
        (child for child in self.childs if child.path.split('/')[-1] == code),
        None,
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_active(self, path):
    """Return True when *path* activates this item or any descendant."""
    # The root URL only ever matches itself exactly.
    if self.url == '/':
        return path == '/'
    if self.url and path.startswith(self.url):
        return True
    if self.active_regex and re.match(self.active_regex, path):
        return True
    # Otherwise the item is active when any of its children is.
    return any(child.is_active(path) for child in self.childs)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.