text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pts_scale(pts=[], f=1.0):
    '''Return the given points scaled by factor f from the origin.

    Every point must be a tuple of floats, all of the same dimension.
    '''
    assert isinstance(pts, list) and len(pts) > 0
    expected_dim = None
    for pt in pts:
        assert isinstance(pt, tuple)
        dim = len(pt)
        assert dim > 1
        for coord in pt:
            assert isinstance(coord, float)
        # All points must share the same dimensionality.
        if expected_dim is not None:
            assert dim == expected_dim
        expected_dim = dim
    assert isinstance(f, float)
    return [pt_scale(pt, f) for pt in pts]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def angle_diff(start_a=[0.0], end_a=[0.0], direction=True):
    '''Return the difference in angle from start_a to end_a.

    Direction follows the right-hand rule, so positive is counter-clockwise.
    '''
    assert isinstance(start_a, list)
    assert isinstance(end_a, list)
    n = len(start_a)
    assert n > 0
    assert n == len(end_a)
    for angle_list in (start_a, end_a):
        for a in angle_list:
            assert isinstance(a, float)
            assert abs(a) <= 2*pi
    assert isinstance(direction, bool)
    # True -> +1 (counter-clockwise), False -> -1 (clockwise).
    sign = 1 if direction else -1
    deltas = [e - s for s, e in zip(start_a, end_a)]
    # Normalize negative differences into [0, 2*pi).
    deltas = [(2*pi + d) if d < 0.0 else d for d in deltas]
    return [sign * d for d in deltas]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def gen_polygon_pts(n_pts=3, radius=[1.0]):
    '''Generate points for a polygon with one or more radiuses.

    Points are spaced at regular angles around the origin. A single radius
    yields a simple shape (square, hexagon, ...); multiple radiuses cycle,
    producing complex shapes such as stars, gear wheels and ratchet wheels.
    '''
    assert isinstance(n_pts, int) and n_pts > 0
    assert isinstance(radius, list)
    n_rad = len(radius)
    assert n_rad > 0
    for r in radius:
        assert isinstance(r, float)
    # Cycle through the radius list while stepping around the circle.
    return [pt_rotate((radius[i % n_rad], 0.0), [i*2*pi/n_pts])
            for i in range(n_pts)]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _add_params_docstring(params):
    """Build a docstring section describing each accepted parameter.

    Args:
        params: iterable of dicts with 'name', 'required' and 'description'
            keys.

    Returns:
        str: text listing every parameter, ready to append to a docstring.
    """
    # BUG FIX: the emitted text previously misspelled "parameters" as
    # "paramters".
    p_string = "\nAccepts the following parameters: \n"
    for param in params:
        p_string += "name: %s, required: %s, description: %s \n" % (
            param['name'], param['required'], param['description'])
    return p_string
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _create_api_method(cls, name, api_method):
    """Attach a dynamic method to cls based on a Cloudmonkey precached verb.

    Args:
        cls: class to receive the new method.
        name: attribute name for the generated method.
        api_method: dict with 'name', 'description' and 'params' keys.
    """
    def _api_method(self, **kwargs):
        # The old code branched on `if kwargs` but both branches made the
        # exact same call (kwargs is already {} when nothing is passed), so
        # the branch is collapsed here.
        command = api_method['name']
        return self._make_request(command, kwargs)
    _api_method.__doc__ = api_method['description']
    _api_method.__doc__ += _add_params_docstring(api_method['params'])
    _api_method.__name__ = str(name)
    setattr(cls, _api_method.__name__, _api_method)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def validate(backup):
    """Start a backup validation for the given backup type.

    Expects JSON data of the form {'archive_path': '/path/to/archive'}.
    Aborts with 400 on missing/invalid data, 404 when no extension
    configuration exists for the backup type, then queues the validation
    on the broker.
    """
    data = request.json
    if not data:
        abort(400, 'No data received')
    try:
        archive_path = data['archive_path']
    except KeyError:
        abort(400, "Missing key 'archive_path' in data")
    try:
        extension = config['extension'][backup]
    except KeyError:
        abort(404, 'No extension configuration found for: {}'.format(backup))
    workdir = '/'.join([config['valigator']['tmp_dir'], generate_uuid()])
    backup_data = {
        'archive_path': archive_path,
        'workdir': workdir,
        'image': extension['image'],
        'command': extension['command'],
    }
    # Hand the job to the celery worker.
    validate_backup.delay(config, backup_data)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main(conf):
    """Entry point: load configuration, configure celery, start the server."""
    global config
    config = load_configuration(conf)
    # Push the celery section into the app before serving requests.
    app.conf.update(config['celery'])
    server = config['valigator']
    run(host=server['bind'], port=server['port'])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def record_variant_id(record):
    """Return an identifier for a pyvcf record: its ID, or 'CHROM:POS'."""
    if record.ID:
        return record.ID
    return '{}:{}'.format(record.CHROM, record.POS)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def wasp_snp_directory(vcf, directory, sample_name=None):
    """Convert a VCF file into per-chromosome SNP input files for WASP.

    Only bi-allelic heterozygous sites are used.

    Parameters
    ----------
    vcf : str
        Path to VCF file.
    directory : str
        Output directory that will hold the WASP input files.
    sample_name : str
        If provided, only sites heterozygous in this sample are used.
    """
    if sample_name:
        def keep(record):
            return sample_name in [x.sample for x in record.get_hets()]
    else:
        def keep(record):
            return len(record.get_hets()) > 0
    rows = []
    vcf_reader = pyvcf.Reader(open(vcf, 'r'))
    for record in vcf_reader:
        # Bi-allelic only: exactly one ALT allele.
        if keep(record) and len(record.ALT) == 1:
            rows.append((record.CHROM, record.POS, record.REF,
                         record.ALT[0].sequence))
    df = pd.DataFrame(rows, columns=['chrom', 'position', 'RefAllele',
                                     'AltAllele'])
    if not os.path.exists(directory):
        os.makedirs(directory)
    for c in set(df.chrom):
        tdf = df[df.chrom == c]
        if tdf.shape[0] > 0:
            f = gzip.open(os.path.join(directory, '{}.snps.txt.gz'.format(c)),
                          'wb')
            lines = (tdf.position.astype(str) + '\t' + tdf.RefAllele + '\t' +
                     tdf.AltAllele)
            # NOTE(review): writing str to a 'wb' handle is Python-2 only;
            # confirm runtime before porting to Python 3.
            f.write('\n'.join(lines) + '\n')
            f.close()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def vcf_as_df(fn):
    """Read a VCF file into a pandas DataFrame.

    All header ('##') information is discarded; the '#CHROM...' line becomes
    the column header, with the first column renamed to 'CHROM'.

    Parameters
    ----------
    fn : str
        Path to VCF file.

    Returns
    -------
    pandas.DataFrame
    """
    # Count lines until the first non-'#' line; read_table then skips
    # everything before the last '#' line, which serves as the header row.
    skip = 0
    with open(fn, 'r') as f:
        line = f.readline().strip()
        skip += 1
        while line[0] == '#':
            line = f.readline().strip()
            skip += 1
    skip -= 2
    df = pd.read_table(fn, skiprows=skip, header=0)
    df.columns = ['CHROM'] + list(df.columns[1:])
    return df
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def make_het_matrix(fn):
    """Make a 0/1 matrix of samples by variants.

    A 1 indicates that the sample is heterozygous for that variant.

    Parameters
    ----------
    fn : str
        Path to VCF file.

    Returns
    -------
    pandas.DataFrame
    """
    # TODO: parallelize?
    vcf_df = vcf_as_df(fn)
    variant_ids = vcf_df.apply(lambda x: df_variant_id(x), axis=1)
    vcf_reader = pyvcf.Reader(open(fn, 'r'))
    # BUG FIX: next(reader) instead of reader.next() (Python-2-only method).
    record = next(vcf_reader)
    hets = pd.DataFrame(0, index=variant_ids,
                        columns=[x.sample for x in record.samples])
    vcf_reader = pyvcf.Reader(open(fn, 'r'))
    for record in vcf_reader:
        h = record.get_hets()
        i = record_variant_id(record)
        # BUG FIX: .loc replaces .ix, which was removed from pandas; both
        # perform label-based indexing here.
        hets.loc[i, [x.sample for x in h]] = 1
    return hets
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def current(self):
    """Returns the current user """
    # Outside of a request context, fall back to the manual stack.
    if not has_request_context():
        return self.no_req_ctx_user_stack.top
    stack = getattr(_request_ctx_stack.top, 'user_stack', None)
    top = stack.top if stack else None
    return top if top else _get_user()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generate_user_token(self, user, salt=None):
    """Generate a unique signed token tied to the given user's id."""
    uid = str(user.id)
    return self.token_serializer.dumps(uid, salt=salt)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update_password(self, user, password, skip_validation=False):
    """Update a user's password, hashing it and tracking reuse history."""
    pwcol = self.options["password_column"]
    pwhash = self.bcrypt.generate_password_hash(password)
    if not skip_validation:
        self.validate_password(user, password, pwhash)
    if self.options['prevent_password_reuse']:
        # Remember the outgoing hash so it cannot be reused immediately.
        previous = user.previous_passwords or []
        user.previous_passwords = [getattr(user, pwcol)] + previous
        limit = self.options['max_password_reuse_saved']
        if limit:
            user.previous_passwords = user.previous_passwords[:limit]
    setattr(user, pwcol, pwhash)
    user.last_password_change_at = datetime.datetime.utcnow()
    user.must_reset_password_at_login = False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def login_required(self, fresh=False, redirect_to=None):
    """Ensure that a user is authenticated; exit the context otherwise."""
    needs_fresh_login = fresh and not self.login_manager.login_fresh()
    if self.logged_in() and not needs_fresh_login:
        return
    if redirect_to:
        resp = redirect(redirect_to)
    else:
        resp = self.login_manager.unauthorized()
    current_context.exit(resp, trigger_action_group="missing_user")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _login(self, user, provider=None, remember=False, force=False, **attrs):
    """Update login bookkeeping attributes and log the user in via flask-login."""
    user.last_login_at = datetime.datetime.now()
    user.last_login_provider = (provider or
                                self.options["default_auth_provider_name"])
    user.last_login_from = request.remote_addr
    # Apply any extra attributes, then persist before the actual login.
    populate_obj(user, attrs)
    save_model(user)
    flask_login.login_user(user, remember=remember, force=force)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_password_confirm(self, form, trigger_action_group=None):
    """Check that the password and confirmation fields of a form match.

    Does nothing unless both password fields are present in the form.
    """
    pwcol = self.options['password_column']
    confirm_field = pwcol + "_confirm"
    if pwcol not in form or confirm_field not in form:
        return
    if form[confirm_field].data == form[pwcol].data:
        return
    msg = self.options["password_confirm_failed_message"]
    if msg:
        flash(msg, "error")
    current_context.exit(trigger_action_group=trigger_action_group)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def reset_password(self, token=None, login_user=None):
    """Reset the password of the user identified by a signed token."""
    if not token:
        # Accept the token from the URL rule or from the request values.
        if "token" in request.view_args:
            token = request.view_args["token"]
        elif "token" in request.values:
            token = request.values["token"]
        else:
            raise OptionMissingError(("Missing 'token' option or 'token' view arg "
                                      "or 'token' GET paramater in 'reset_password' action"))
    user = self.find_by_token(token, salt="password-reset",
                              max_age=self.options["reset_password_ttl"])
    if user is None:
        if self.options["reset_password_error_message"]:
            flash(self.options["reset_password_error_message"], "error")
        current_context.exit(trigger_action_group="reset_password_failed")
    self.update_password_from_form(user)
    self.reset_password_signal.send(self, user=user)
    # Explicit login_user argument wins; otherwise fall back to the option.
    if login_user is None:
        should_login = self.options["login_user_on_reset_password"]
    else:
        should_login = login_user
    if should_login:
        flask_login.login_user(user)
    return user
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update_password_from_form(self, user=None, form=None):
    """Update the user's password using a submitted form."""
    user = user or self.current
    if not form:
        # Fall back to the form stored on the current context.
        if "form" in current_context.data and request.method == "POST":
            form = current_context.data.form
        else:
            raise OptionMissingError("Missing a form in 'update_user_password' action")
    self._update_password_from_form(user, form)
    save_model(user)
    self.update_user_password_signal.send(self, user=user)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_user_password(self, user, password=None, form=None):
    """Check a password against the user's stored hash.

    When no password is given, it is read from the provided (or current)
    form. Exits the context on mismatch.
    """
    pwcol = self.options['password_column']
    if password is None:
        if not form and "form" in current_context.data and request.method == "POST":
            form = current_context.data.form
        if not form:
            raise OptionMissingError("Missing 'password' option or a form")
        password = form[pwcol].data
    stored_hash = getattr(user, pwcol)
    matches = bool(stored_hash) and self.bcrypt.check_password_hash(stored_hash, password)
    if not matches:
        current_context.exit(trigger_action_group="password_mismatch")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_unique_attr(self, attrs, user=None, form=None, flash_msg=None):
    """Check that attribute(s) of the current user are unique among all users.

    Values come from ``attrs`` when it is a dict, otherwise from the given
    (or current) form. Exits the context when a duplicate is found.
    """
    user = user or self.current
    ucol = self.options["username_column"]
    # BUG FIX: this was assigned to a variable named 'email' but read below
    # as 'emailcol', raising NameError whenever the email column was checked.
    emailcol = self.options["email_column"]
    if not isinstance(attrs, (list, tuple, dict)):
        attrs = [attrs]
    for name in attrs:
        if isinstance(attrs, dict):
            value = attrs[name]
        else:
            form = form or current_context.data.get("form")
            if not form:
                raise OptionMissingError("Missing 'value' option or form in 'check_user_unique_attr' action")
            value = form[name].data
        if name == ucol and not self.options["username_case_sensitive"]:
            filters = (ucol + '_lcase', value.strip().lower())
        elif name == emailcol:
            filters = (emailcol, value.strip().lower())
        else:
            filters = (name, value.strip())
        # Exclude the user themselves from the uniqueness query.
        if self.query.filter({"$and": [filters, ("id__ne", user.id)]}).count() > 0:
            if flash_msg is None:
                flash_msg = "The %s is already in use" % name
            if flash_msg:
                flash(flash_msg, "error")
            current_context.exit(trigger_action_group="user_attr_not_unique")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def oauth_signup(self, provider, attrs, defaults, redirect_url=None):
    """Kick off the signup process after a login via oauth."""
    # Stash the oauth data in the session for the signup view to pick up.
    session["oauth_user_defaults"] = defaults
    session["oauth_user_attrs"] = dict(provider=provider, **attrs)
    target = redirect_url or request.args.get("next")
    return redirect(url_for('users.oauth_signup', next=target))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_valid_format_order(cls, format_target, format_order=None):
    """Validate that the target format string follows the proper style.

    Returns the (possibly freshly parsed) format order on success.
    """
    format_order = format_order or cls.parse_format_order(format_target)
    cls.validate_no_token_duplicates(format_order)
    # Strip tokens and static text, then validate what remains.
    stripped = cls.remove_tokens(format_target, format_order)
    stripped = cls.remove_static_text(stripped)
    cls.validate_separator_characters(stripped)
    cls.validate_matched_parenthesis(stripped)
    return format_order
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _handle_errors(self, errors_list):
    """Flatten an lxml-style error log into plain tuples.

    Output format:
        [(DOMAIN, LINE, COLUMN, LEVEL, TYPE_NAME, MESSAGE), ...]
    """
    return [(err.domain_name, err.line, err.column,
             err.level_name, err.type_name, err.message)
            for err in errors_list]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_validation_errors(self, xml_input):
    """Return a list of validation errors ([] when the document is valid).

    Raises:
        CannotValidate: when no XSD schema has been set.
    """
    errors = []
    try:
        parsed_xml = etree.parse(self._handle_xml(xml_input))
        self.xmlschema.assertValid(parsed_xml)
    # BUG FIX: 'except (...), e' is Python-2-only syntax (a SyntaxError on
    # Python 3); 'as e' works on Python 2.6+ and 3.x.
    except (etree.DocumentInvalid, etree.XMLSyntaxError) as e:
        errors = self._handle_errors(e.error_log)
    except AttributeError:
        # self.xmlschema is not set.
        raise CannotValidate('Set XSD to validate the XML')
    return errors
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def validate(self, xml_input):
    """Validate the XML against the loaded schema; return a boolean."""
    document = etree.parse(self._handle_xml(xml_input))
    try:
        return self.xmlschema.validate(document)
    except AttributeError:
        # No schema has been loaded yet.
        raise CannotValidate('Set XSD to validate the XML')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def deserialize(self, xml_input, *args, **kwargs):
    """Convert an XML document into a dict via xmltodict."""
    parsed = xmltodict.parse(xml_input, *args, **kwargs)
    return parsed
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _import_all_modules():
    """Dynamically import every module in this package.

    Public names from each module are hoisted into the package globals;
    the list of hoisted names is returned.
    """
    import traceback
    import os
    global results
    globals_, locals_ = globals(), locals()

    def load_module(modulename, package_module):
        # Import one module and hoist its public names into our globals.
        try:
            names = []
            module = __import__(package_module, globals_, locals_, [modulename])
            for name in module.__dict__:
                if not name.startswith('_'):
                    globals_[name] = module.__dict__[name]
                    names.append(name)
        except Exception:
            traceback.print_exc()
            raise
        return module, names

    def load_dir(abs_dirpath, rel_dirpath=''):
        # Recursively import every .py/.pyw file under the package dir.
        hoisted = []
        for filename in os.listdir(abs_dirpath):
            rel_filepath = os.path.join(rel_dirpath, filename)
            abs_filepath = os.path.join(abs_dirpath, filename)
            if (filename[0] != '_' and os.path.isfile(abs_filepath)
                    and filename.split('.')[-1] in ('py', 'pyw')):
                modulename = '.'.join(
                    os.path.normpath(os.path.splitext(rel_filepath)[0]).split(os.sep))
                package_module = '.'.join([__name__, modulename])
                module, names = load_module(modulename, package_module)
                hoisted += names
            elif os.path.isdir(abs_filepath):
                hoisted += load_dir(abs_filepath, rel_filepath)
        return hoisted

    return load_dir(os.path.dirname(__file__))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def compress_folder_dump(path, target):
    '''Compress a dump folder into a timestamped tar.gz file.

    Returns a (base_name, archive_name) tuple.
    '''
    import tarfile
    if not path or not os.path.isdir(path):
        raise SystemExit(_error_codes.get(105))
    stamp = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
    base_name = target + 'dump-' + stamp
    archive_name = base_name + '.tar.gz'
    tar = tarfile.open(archive_name, 'w:gz')
    # Store the folder under the fixed name 'dump' inside the archive.
    tar.add(path, arcname='dump')
    tar.close()
    return (base_name, archive_name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def encrypt_file(path, output, password=None):
    '''Encrypt a file with AES (via openssl) and a password, removing the
    plaintext original afterwards.
    '''
    if not password:
        # Fall back to the module-level password.
        password = PASSWORD_FILE
    command = 'openssl aes-128-cbc -salt -in {0} -out {1} -k {2}'
    with hide('output'):
        local(command.format(path, output, password))
    os.remove(path)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def decrypt_file(path, password=None):
    '''Decrypt an AES-encrypted file (via openssl) with a password.

    Falls back to the global PASSWORD_FILE when no password is given.
    '''
    global PASSWORD_FILE
    if not password:
        password = PASSWORD_FILE
    if path and not os.path.isfile(path):
        raise SystemExit(_error_codes.get(106))
    query = 'openssl aes-128-cbc -d -salt -in {0} -out {1} -k {2}'
    with hide('output'):
        # BUG FIX: the command previously interpolated PASSWORD_FILE
        # directly, silently ignoring an explicitly supplied password.
        local(query.format(path, path + '.tar.gz', password))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def optional_actions(encrypt, path, compress_file, **kwargs):
    '''Optionally encrypt the dump and upload it to cloud storage backends.'''
    yes = ('y', 'Y')
    file_to_upload = normalize_path(path) + compress_file[1]
    if encrypt in yes:
        encrypt_file(compress_file[1], compress_file[0])
        # After encryption the file to upload is the encrypted output.
        file_to_upload = compress_file[0]
    if kwargs.get('s3') in yes:
        factory_uploader('S3', name_backup=file_to_upload,
                         bucket_name=AWS_BUCKET_NAME, action='upload')
    if kwargs.get('glacier') in yes:
        factory_uploader('Glacier', name_backup=file_to_upload,
                         vault_name=AWS_VAULT_NAME,
                         path=os.path.join(os.path.expanduser('~'),
                                           '.zoort.db'),
                         action='upload')
    # Dropbox and Swift take the same arguments.
    for flag, service in (('dropbox', 'Dropbox'), ('swift', 'Swift')):
        if kwargs.get(flag) in yes:
            factory_uploader(service, name_backup=file_to_upload,
                             action='upload')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main():
    '''Main entry point for the mongo_backups CLI.'''
    args = docopt(__doc__, version=__version__)
    if args.get('backup'):
        backup_database(args)
    if args.get('backup_all'):
        backup_all(args)
    if args.get('decrypt'):
        decrypt_file(args.get('<path>'))
    # The configure* commands differ only in the target service.
    for command, service in (('configure', 'all'),
                             ('configure-aws', 'aws'),
                             ('configure-dropbox', 'dropbox'),
                             ('configure-swift', 'swift')):
        if args.get(command):
            configure(service=service)
    if args.get('download_all'):
        download_all()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def backup_database(args):
    '''Back up a single database from CLI arguments.'''
    username = args.get('<user>')
    password = args.get('<password>')
    database = args['<database>']
    host = args.get('<host>') or '127.0.0.1'
    path = args.get('--path') or os.getcwd()
    encrypt = args.get('--encrypt') or 'Y'
    if not database:
        raise SystemExit(_error_codes.get(101))
    if path and not os.path.isdir(path):
        raise SystemExit(_error_codes.get(105))
    # Assemble the mongodump invocation piece by piece.
    query = 'mongodump -d {database} --host {host} '
    if username:
        query += '-u {username} '
    if password:
        query += '-p {password} '
    if path:
        query += '-o {path}/dump'
    local(query.format(username=username,
                       password=password,
                       database=database,
                       host=host,
                       path=path))
    compress_file = compress_folder_dump(
        normalize_path(path) + 'dump', normalize_path(path))
    # Remove the raw dump folder once it is archived.
    shutil.rmtree(normalize_path(path) + 'dump')
    optional_actions(encrypt, path, compress_file,
                     s3=args.get('--upload_s3'),
                     glacier=args.get('--upload_glacier'),
                     dropbox=args.get('--upload_dropbox'),
                     swift=args.get('--upload_swift'))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def backup_all(args):
    '''Back up all databases, using the admin credentials when configured.'''
    username = None
    password = None
    if ADMIN_USER and ADMIN_PASSWORD:
        username = ADMIN_USER
        password = ADMIN_PASSWORD
    path = args.get('--path') or os.getcwd()
    if path and not os.path.isdir(path):
        raise SystemExit(_error_codes.get(105))
    encrypt = args.get('--encrypt') or 'Y'
    if args.get('--auth'):
        query = 'mongodump -u {username} -p {password} '
    else:
        query = 'mongodump '
    if path:
        query += '-o {path}/dump'
    local(query.format(username=username,
                       password=password,
                       path=path))
    compress_file = compress_folder_dump(
        normalize_path(path) + 'dump', normalize_path(path))
    # Remove the raw dump folder once it is archived.
    shutil.rmtree(normalize_path(path) + 'dump')
    optional_actions(encrypt, path, compress_file,
                     s3=args.get('--upload_s3'),
                     glacier=args.get('--upload_glacier'),
                     dropbox=args.get('--upload_dropbox'),
                     swift=args.get('--upload_swift'))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_history(self):
    """Return the cached history, today's DB row, or a fresh instance."""
    if hasattr(self, '_history'):
        return self._history
    try:
        self._history = APICallDayHistory.objects.get(
            user=self.user, creation_date=now().date())
    except APICallDayHistory.DoesNotExist:
        # No row yet for today: start a fresh, unsaved history.
        history = APICallDayHistory(user=self.user)
        history.amount_api_calls = 0
        self._history = history
    return self._history
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_rate_limit_exceeded(self):
    """Return True if today's API-call count has reached the daily limit."""
    history = self.get_history()
    # Direct boolean return instead of the old `if c: return True /
    # return False` pattern.
    return history.amount_api_calls >= settings.UNSHORTEN_DAILY_LIMIT
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def log_api_call(self):
    """Increase the logged API-call count for the user by one and persist it.

    Returns the updated history instance.
    """
    history = self.get_history()
    history.amount_api_calls += 1
    # BUG FIX: Django's Model.save() returns None, so caching its return
    # value made every later get_history() return None. Save first, then
    # cache the instance itself.
    history.save()
    self._history = history
    return history
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def de_duplicate(items):
    """Return a new list with duplicates removed, keeping first-seen order.

    e.g. [1, 2, 1] -> [1, 2]
    """
    unique = []
    for element in items:
        # Membership test uses ==, so unhashable elements work too.
        if element not in unique:
            unique.append(element)
    return unique
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read_configs(__pkg: str, __name: str = 'config', *,
                 local: bool = True) -> ConfigParser:
    """Process the configuration file stack.

    Time-parsing helpers are exposed as custom converters on the parser:
    ``.getdatetime()``, ``.gethumantime()`` and ``.gettimedelta()``.

    Args:
        __pkg: Package name to use as base for config files
        __name: File name to search for within config directories
        local: Whether to include config files from the current directory

    Returns:
        Parsed configuration files
    """
    configs = get_configs(__pkg, __name)
    if local:
        localrc = path.abspath('.{}rc'.format(__pkg))
        if path.exists(localrc):
            configs.append(localrc)
    cfg = ConfigParser(converters={
        'datetime': parse_datetime,
        'humandelta': parse_timedelta,
        'timedelta': parse_delta,
    })
    cfg.read(configs, 'utf-8')
    cfg.configs = configs
    # BUG FIX: default colour to True up front. Previously, when the package
    # section existed but had no colour/color key, cfg.colour was never set
    # at all, leaving the attribute missing.
    cfg.colour = True
    if 'NO_COLOUR' in environ or 'NO_COLOR' in environ:
        cfg.colour = False
    elif __pkg in cfg:
        if 'colour' in cfg[__pkg]:
            cfg.colour = cfg[__pkg].getboolean('colour')
        # 'color' deliberately overrides 'colour' when both are present.
        if 'color' in cfg[__pkg]:
            cfg.colour = cfg[__pkg].getboolean('color')
    return cfg
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _start(self):
    """Start the underlying send and receive greenlets."""
    # Semaphores start at 0 so the loops block until explicitly released.
    self._recv_lock = coros.Semaphore(0)
    self._send_lock = coros.Semaphore(0)
    self._recv_thread = gevent.spawn(self._recv)
    self._send_thread = gevent.spawn(self._send)
    # Get notified if either greenlet exits.
    for thread in (self._recv_thread, self._send_thread):
        thread.link(self._thread_error)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _recv(self):
    """Receive loop: read from the socket, frame the data, pass it on.

    Exits by raising GreenletExit when the peer closes the connection.
    """
    while True:
        # Gate each iteration on the receive lock so wrap() can pause us.
        self._recv_lock.release()
        gevent.sleep()  # yield so another greenlet can grab the lock
        self._recv_lock.acquire()
        data = self._sock.recv(self.recv_bufsize)
        if not data:
            # Empty read: the peer closed the other end. Kill the send
            # thread manually instead of calling close(), because close()
            # would also kill *us*, and running in our own context it
            # would never get around to killing the send thread.
            if self._send_thread:
                self._send_thread.kill()
                self._send_thread = None
            self._sock.close()
            self._sock = None
            # Make sure the manager knows we're closed, then notify the
            # application.
            super(TCPTendril, self).close()
            self.closed()
            # Exit our own greenlet quietly so _thread_error() doesn't
            # report an erroneous exit from the receive thread.
            raise gevent.GreenletExit()
        # Hand the received bytes to the framer/application.
        self._recv_frameify(data)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _thread_error(self, thread):
    """Handle the send or receive greenlet exiting or raising."""
    # Forget the thread first so close() doesn't kill it a second time.
    if thread == self._send_thread:
        self._send_thread = None
    if thread == self._recv_thread:
        self._recv_thread = None
    # Decide what to report based on how the greenlet finished.
    if thread.successful():
        exception = socket.error('thread exited prematurely')
    elif isinstance(thread.exception, gevent.GreenletExit):
        # Deliberately killed: just close, nothing to report.
        self.close()
        return
    else:
        exception = thread.exception
    self.close()
    # Tell the application why the connection went away.
    self.closed(exception)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def wrap(self, wrapper):
    """
    Allows the underlying socket to be wrapped, as by an SSL
    connection.

    :param wrapper: A callable taking, as its first argument, a
                    socket.socket object.  The callable must return a
                    valid proxy for the socket.socket object, which
                    will subsequently be used to communicate on the
                    connection.

    Note: Be extremely careful with calling this method after the TCP
    connection has been initiated.  Both the sending and receiving
    streams are affected simultaneously, and no attempt is made to
    deal with already-buffered data beyond pausing both worker
    threads while the socket is swapped out.
    """
    if self._recv_thread and self._send_thread:
        # Have to suspend the send/recv threads so neither touches
        # the socket while it is being replaced
        self._recv_lock.acquire()
        self._send_lock.acquire()
    # Wrap the socket
    self._sock = wrapper(self._sock)
    # OK, restart the send/recv threads
    if self._recv_thread and self._send_thread:
        # Release our locks; the worker threads re-acquire these at
        # the top of their loops, so this resumes them
        self._send_lock.release()
        self._recv_lock.release()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def close(self):
    """
    Close the connection: stop the send and receive worker threads,
    shut down the underlying socket, and let the manager know this
    tendril is gone.
    """
    # Kill each worker greenlet that is still around, clearing the
    # attribute as we go so nothing re-kills it later
    for attr in ('_recv_thread', '_send_thread'):
        worker = getattr(self, attr)
        if worker:
            worker.kill()
            setattr(self, attr, None)
    # Shut down the socket itself
    if self._sock:
        self._sock.close()
        self._sock = None
    # Make sure to notify the manager we're closed
    super(TCPTendril, self).close()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(self, target, acceptor, wrapper=None):
    """
    Initiate a connection from the tendril manager's endpoint.  Once
    the connection is completed, a TCPTendril object will be created
    and passed to the given acceptor.

    :param target: The target of the connection attempt.
    :param acceptor: A callable which will initialize the state of
                     the new TCPTendril object.
    :param wrapper: A callable taking, as its first argument, a
                    socket.socket object.  The callable must return a
                    valid proxy for the socket.socket object, which
                    will subsequently be used to communicate on the
                    connection.

    For passing extra arguments to the acceptor or the wrapper, see
    the ``TendrilPartial`` class; for chaining together multiple
    wrappers, see the ``WrapperChain`` class.

    :returns: the new TCPTendril, or None when the acceptor rejected
        the connection by raising RejectConnection.
    """
    # Call some common sanity-checks
    super(TCPTendrilManager, self).connect(target, acceptor, wrapper)
    # Set up the socket
    sock = socket.socket(self.addr_family, socket.SOCK_STREAM)
    # SocketCloser closes the socket if anything below raises;
    # RejectConnection is listed in `ignore`, so it is suppressed and
    # control falls through to the explicit close at the end
    with utils.SocketCloser(sock, ignore=[application.RejectConnection]):
        # Bind to our endpoint
        sock.bind(self.endpoint)
        # Connect to our target
        sock.connect(target)
        # Call any wrappers
        if wrapper:
            sock = wrapper(sock)
        # Now, construct a Tendril
        tend = TCPTendril(self, sock)
        # Finally, set up the application
        tend.application = acceptor(tend)
        # OK, let's track the tendril
        self._track_tendril(tend)
        # Start the tendril
        tend._start()
        # Might as well return the tendril, too
        return tend
    # The acceptor raised a RejectConnection exception, apparently
    sock.close()
    return None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def listener(self, acceptor, wrapper):
    """
    Listens for new connections to the manager's endpoint.  Once a
    new connection is received, a TCPTendril object is generated for
    it and it is passed to the acceptor, which must initialize the
    state of the connection.  If no acceptor is given, no new
    connections can be initialized.

    :param acceptor: If given, specifies a callable that will be
                     called with each newly received TCPTendril; that
                     callable is responsible for initial acceptance
                     of the connection and for setting up the initial
                     state of the connection.  If not given, no new
                     connections will be accepted by the
                     TCPTendrilManager.
    :param wrapper: A callable taking, as its first argument, a
                    socket.socket object.  The callable must return a
                    valid proxy for the socket.socket object, which
                    will subsequently be used to communicate on the
                    connection.
    """
    # If we have no acceptor, there's nothing for us to do here
    if not acceptor:
        # Not listening on anything
        self.local_addr = None
        # Just sleep in a loop
        while True:
            gevent.sleep(600)
        return  # Pragma: nocover
    # OK, set up the socket
    sock = socket.socket(self.addr_family, socket.SOCK_STREAM)
    with utils.SocketCloser(sock):
        # Set up SO_REUSEADDR
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # Bind to our endpoint
        sock.bind(self.endpoint)
        # Get the assigned port number
        self.local_addr = sock.getsockname()
        # Call any wrappers
        if wrapper:
            sock = wrapper(sock)
        # Initiate listening
        sock.listen(self.backlog)
        # OK, now go into an accept loop with an error threshold of
        # 10 (presumably SocketCloser tears the socket down after 10
        # consecutive errors -- confirm against utils.SocketCloser);
        # RejectConnection from the acceptor is ignored so the loop
        # keeps serving
        closer = utils.SocketCloser(sock, 10,
                                    ignore=[application.RejectConnection])
        while True:
            with closer:
                cli, addr = sock.accept()
                # OK, the connection has been accepted; construct a
                # Tendril for it
                tend = TCPTendril(self, cli, addr)
                # Set up the application; the inner SocketCloser
                # closes the client socket if the acceptor raises
                with utils.SocketCloser(cli):
                    tend.application = acceptor(tend)
                    # Make sure we track the new tendril, but only if
                    # the acceptor doesn't throw any exceptions
                    self._track_tendril(tend)
                    # Start the tendril
                    tend._start()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getdim(lsp):
    '''
    Obtain the dimensionality of a .lsp file.  This should work for
    all well formatted .lsp files.

    A dimension is considered present when its "<axis>-cells" entry
    exists and is greater than zero.

    Parameters:
    -----------
    lsp : .lsp string

    Returns a list of dimension labels (subset of ['x', 'y', 'z']).
    '''
    dims = []
    for axis in ('x', 'y', 'z'):
        # Search once per axis instead of twice (the original ran the
        # same regex a second time just to read the group)
        m = re.search('{}-cells *([0-9]+)'.format(axis), lsp)
        if m and int(m.group(1)) > 0:
            dims.append(axis)
    return dims
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getpexts(lsp):
    '''
    Get information from pext planes.  This might or might not work,
    use with caution!

    Parameters:
    -----------
    lsp : .lsp string

    Returns a dict mapping plane number -> dict with keys among
    'species' (int), 'direction' (str), and 'position' (numpy array),
    or an empty list when no pext planes are present.
    '''
    lines = lsp.split('\n')
    # unfortunately regex alone doesn't delimit the blocks, so scan
    # line by line for the "extract N" headers
    matches = [(i, int(re.search('^ *extract *([0-9]+)', line).group(1)))
               for i, line in enumerate(lines)
               if re.search('^ *extract *[0-9]+', line)]
    # Guard BEFORE unpacking: the old `zip(*...)` unpack raised
    # ValueError on empty input, making its empty-check unreachable.
    if not matches:
        return []
    lns, planens = zip(*matches)
    # Find the line that terminates the last pext block (the next
    # "[...]" section header); default to end-of-input when absent.
    last = lns[-1]
    stop = len(lines)
    for i, line in enumerate(lines[last + 1:]):
        if re.match(r' *\[', line):
            # absolute index of the terminator (the old `end += i`
            # dropped the final line of the last block)
            stop = last + 1 + i
            break
    # Each block runs from its header to the next header (or `stop`)
    lineranges = zip(lns, lns[1:] + (stop,))
    labels = ['species', 'direction', 'position']
    datarx = ['^ *species *([0-9]+)',
              '^ *direction *([xXyYzZ])',
              '^ *at *(.*)']
    convs = [int,
             lambda s: s,
             # list comprehension: np.array(map(...)) produces a 0-d
             # object array on Python 3
             lambda s: np.array([float(x) for x in s.split(' ')])]
    planes = dict()
    for (start, end), plane in zip(lineranges, planens):
        d = dict()
        for line in lines[start:end]:
            for label, rx, conv in zip(labels, datarx, convs):
                m = re.match(rx, line)
                if m:
                    d[label] = conv(m.group(1))
        planes[plane] = d
    return planes
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def percentile(sorted_list, percent, key=lambda x: x):
    """Find the percentile of a sorted list of values.

    Arguments
    ---------
    sorted_list : list
        A sorted (ascending) list of values.
    percent : float
        A float value from 0.0 to 1.0.
    key : function, optional
        An optional function to compute a comparable value from each
        element of the list.  (Previously accepted but ignored.)

    Returns
    -------
    float
        The desired percentile of the value list, or None for an
        empty list.

    Examples
    --------
    >>> percentile([1, 2, 3, 4], 0.5)
    2.5
    >>> percentile([1, 2, 3], 0.5)
    2.0
    """
    if not sorted_list:
        return None
    if percent == 1:
        return float(key(sorted_list[-1]))
    if percent == 0:
        return float(key(sorted_list[0]))
    n = len(sorted_list)
    i = percent * n
    if ceil(i) == i:
        # exact boundary: average the two straddling elements
        # (divide by 2.0 so Python 2 integer division can't truncate)
        i = int(i)
        return (key(sorted_list[i - 1]) + key(sorted_list[i])) / 2.0
    # int(ceil(i)) because math.ceil returns a float on Python 2,
    # which is not a valid list index
    return float(key(sorted_list[int(ceil(i)) - 1]))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_driver_api_catalog(driver):  # noqa: E501
    """Retrieve the api catalog exposed by the given driver.

    :param driver: The driver to use for the request. ie. github
    :type driver: str
    :rtype: Response
    """
    # Guard clause: bail out with the auth error, if any
    error = errorIfUnauthorized(role='developer')
    if error:
        return error
    result = ApitaxResponse()
    loaded: Driver = LoadedDrivers.getDriver(driver)
    result.body.add(loaded.getApiEndpointCatalog())
    return Response(status=200, body=result.getResponseBody())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_driver_api_status(driver):  # noqa: E501
    """Retrieve the status of the api backing the given driver.

    :param driver: The driver to use for the request. ie. github
    :type driver: str
    :rtype: Response
    """
    # Guard clause: bail out with the auth error, if any
    error = errorIfUnauthorized(role='developer')
    if error:
        return error
    result = ApitaxResponse()
    loaded: Driver = LoadedDrivers.getDriver(driver)
    result.body.add({"format": loaded.getApiFormat()})
    result.body.add({"description": loaded.getApiDescription()})
    result.body.add({"status": loaded.getApiStatus()})
    result.body.add({"auth-type": loaded.getApiAuthType()})
    # Endpoint summary, assembled as a literal
    result.body.add({'endpoints': {
        'base': loaded.getApiBaseEndpoint(),
        'catalog': loaded.getApiCatalogEndpoint(),
        'auth': loaded.getApiAuthEndpoint(),
    }})
    # Capability flags, assembled as a literal
    result.body.add({'options': {
        'authenticatable': loaded.isApiAuthenticated(),
        'authentication-separate': loaded.isApiAuthenticationSeparateRequest(),
        'cataloggable': loaded.isApiCataloggable(),
        'tokenable': loaded.isApiTokenable(),
    }})
    return Response(status=200, body=result.getResponseBody())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse_engine(engine):
    """Split a logg engine URI into its backend name and storage path.

    :raises NotImplementedError: when the backend is not supported.
    """
    normalized = (engine or '').strip()
    backend, path = URI_RE.match(normalized).groups()
    if backend not in SUPPORTED_BACKENDS:
        raise NotImplementedError(
            "Logg supports only {0} for now.".format(SUPPORTED_BACKENDS))
    log.debug('Found engine: {0}'.format(normalized))
    return backend, path
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _init_repo(self):
    """Create and initialize a new Git repo at the engine path.

    :raises RuntimeError: if the path already exists.
    :returns: the newly created ``git.Repo``.
    """
    log.debug("initializing new Git Repo: {0}".format(self._engine_path))
    if os.path.exists(self._engine_path):
        log.error("Path already exists! Aborting!")
        # Include the offending path in the exception so callers see
        # it without consulting the log (previously raised bare)
        raise RuntimeError(
            "Path already exists: {0}".format(self._engine_path))
    # create the repo if it doesn't already exist
    _logg_repo = git.Repo.init(path=self._engine_path, mkdir=True)
    record = "idid Logg repo initialized on {0}".format(today())
    c = _logg_repo.index.commit(record)
    assert c.type == 'commit'
    log.info('Created git repo [{0}]'.format(self._engine_path))
    return _logg_repo
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _load_repo(self):
    """Return the GitPython repo for the engine path, loading it (or
    creating it on first use) as needed."""
    if self._logg_repo:
        return self._logg_repo
    try:
        repo = git.Repo(self._engine_path)
    except Exception:
        # FIXME: should this be automatic?
        # log.error("Git repo doesn't exist! run ``idid init``")
        repo = self._init_repo()
    else:
        log.debug('Loaded git repo [{0}]'.format(self._engine_path))
    return repo
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_arguments(self, parser):
    '''Register the generic top-level command-line arguments on an
    argparse parser, so that the result of ``parser.parse_args()``
    can be handed to :meth:`main`.
    '''
    # Every do_<name> method is an available action
    actions = [name[3:] for name in dir(self) if name.startswith('do_')]
    parser.add_argument('action', help='action to run', nargs='?',
                        choices=list(set(actions)))
    parser.add_argument('arguments', help='arguments specific to ACTION',
                        nargs=argparse.REMAINDER)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main(self, args):
    '''Entry point: run the single requested command, or drop into
    the interactive shell loop when no action was given.

    `args` is the :class:`argparse.Namespace` produced by a parser
    set up via :meth:`add_arguments`.
    '''
    if not args.action:
        self.cmdloop()
        return
    self.runcmd(args.action, args.arguments)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def runcmd(self, cmd, args):
    '''Run a single command from pre-parsed arguments.

    This is intended to be run from :meth:`main` or somewhere else
    "at the top level" of the program.  It may raise
    :exc:`exceptions.SystemExit` if an argument such as ``--help``
    that normally causes execution to stop is encountered.

    :param cmd: the command name (dispatched to ``do_<cmd>``)
    :param args: list of the command's own argument strings
    '''
    dof = getattr(self, 'do_' + cmd, None)
    if dof is None:
        # Unknown command: fall back to cmd.Cmd's default handling
        return self.default(' '.join([cmd] + args))
    argf = getattr(self, 'args_' + cmd, None)
    if argf is not None:
        # The command declares an argument parser: hand do_<cmd> a
        # parsed Namespace
        parser = argparse.ArgumentParser(
            prog=cmd,
            description=getattr(dof, '__doc__', None))
        argf(parser)
        argl = parser.parse_args(args)
    else:
        # No parser declared: hand do_<cmd> the raw argument string,
        # matching the interactive cmd.Cmd convention
        argl = ' '.join(args)
    return dof(argl)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def do_help(self, args):
    '''print help on a command'''
    if args.command:
        # Prefer an explicit help_<command> method if one exists
        f = getattr(self, 'help_' + args.command, None)
        if f:
            f()
            return
        f = getattr(self, 'do_' + args.command, None)
        if not f:
            # No such command at all
            msg = self.nohelp % (args.command,)
            self.stdout.write('{0}\n'.format(msg))
            return
        docstr = getattr(f, '__doc__', None)
        # If the command declares an argument parser, let argparse
        # render the full usage text (with the docstring as the
        # description)
        f = getattr(self, 'args_' + args.command, None)
        if f:
            parser = argparse.ArgumentParser(
                prog=args.command,
                description=docstr)
            f(parser)
            parser.print_help(file=self.stdout)
        else:
            # Otherwise fall back to the command's docstring
            if not docstr:
                docstr = self.nohelp % (args.command,)
            self.stdout.write('{0}\n'.format(docstr))
    else:
        # No command given: use cmd.Cmd's generic help listing
        Cmd.do_help(self, '')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load_kb_mappings_file(kbname, kbfile, separator):
    """Add KB values from file to given KB, returning rows added.

    Each non-blank line of *kbfile* is split around *separator*;
    lines that do not yield exactly two parts are logged and skipped.

    NOTE(review): the value keeps its trailing newline because the
    line is not stripped before splitting -- confirm whether
    add_kb_mapping (or the KB layer) normalizes it.
    """
    num_added = 0
    with open(kbfile) as kb_fd:
        for line in kb_fd:
            # Skip blank / whitespace-only lines
            if not line.strip():
                continue
            try:
                key, value = line.split(separator)
            except ValueError:
                # bad split (zero or multiple separators), skip line
                current_app.logger.error("Error splitting: {0}".format(line))
                continue
            add_kb_mapping(kbname, key, value)
            num_added += 1
    return num_added
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def by_id(cls, semantictag_id, autoflush=True):
    '''Return the semantic tag with the given id, or None.

    :param semantictag_id: the id of the semantic tag to return
    :type semantictag_id: string
    :returns: the semantic tag with the given id, or None if there is
        no tag with that id
    :rtype: ckan.model.semantictag.SemanticTag
    '''
    query = (meta.Session.query(SemanticTag)
             .filter(SemanticTag.id == semantictag_id)
             .autoflush(autoflush))
    return query.first()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def by_URI(cls, URI, label=None, autoflush=True):
    '''Return the semantic tag with the given URI (or label), or None.

    When *label* is given, the lookup is done by label instead of by
    URI.

    :param URI: the URI of the semantic tag to return
    :type URI: string (URI format)
    :param label: URI's label (optional, default: None)
    :type label: string
    :returns: the matching semantic tag, or None
    :rtype: ckan.model.semantictag.SemanticTag
    '''
    if label:
        criterion = SemanticTag.label == label
    else:
        criterion = SemanticTag.URI == URI
    query = meta.Session.query(SemanticTag).filter(criterion)
    return query.autoflush(autoflush).first()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get(cls, tag_id_or_URI, label=None):
    '''Return the semantic tag with the given id or URI, or None.

    The value is first tried as an id; when that finds nothing it is
    retried as a URI (or, when *label* is given, by label).

    :param tag_id_or_URI: the id or URI of the semantic tag to return
    :type tag_id_or_URI: string
    :param label: optional label forwarded to the URI lookup
    :type label: string
    :returns: the semantic tag object, or None if there is no tag
        with that id or URI
    :rtype: ckan.model.semantictag.SemanticTag
    '''
    # First try to get the tag by ID.
    semantictag = SemanticTag.by_id(tag_id_or_URI)
    if semantictag:
        return semantictag
    # Fall back to a URI lookup; forward the label (previously this
    # parameter was accepted but silently ignored).
    return SemanticTag.by_URI(tag_id_or_URI, label)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def search_by_URI(cls, search_term):
    '''Return all semantic tags whose URI contains the given string.

    :param search_term: the string to search for in the URI
    :type search_term: string
    :returns: a query over the matching semantic tags
    :rtype: list of ckan.model.semantictag.SemanticTag objects
    '''
    # TODO include label search
    term = search_term.strip().lower()
    return (meta.Session.query(SemanticTag)
            .filter(SemanticTag.URI.contains(term))
            .distinct()
            .join(SemanticTag.tag_semantictags))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def all(cls):
    '''Return all semantic tags that are currently applied to any tag.

    :returns: a query over every SemanticTag that has at least one
        TagSemanticTag association
    :rtype: list of ckan.model.semantictag.SemanticTag objects
    '''
    # Vocabulary filtering kept (disabled) from the Tag.all()
    # implementation this was adapted from:
    # if vocab_id_or_name:
    #     vocab = vocabulary.Vocabulary.get(vocab_id_or_name)
    #     if vocab is None:
    #         # The user specified an invalid vocab.
    #         raise ckan.logic.NotFound("could not find vocabulary '%s'"
    #                                   % vocab_id_or_name)
    #     query = meta.Session.query(Tag).filter(Tag.vocabulary_id==vocab.id)
    # else:
    query = meta.Session.query(SemanticTag)
    # join restricts to semantic tags actually associated with a tag
    query = query.distinct().join(TagSemanticTag)
    # query = query.filter_by(state='active')
    return query
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def tags(self):
    '''Return a list of all tags that have this semantic tag, sorted
    by name.

    :rtype: list of ckan.model.tag.Tag objects
    '''
    q = meta.Session.query(_tag.Tag)
    q = q.join(TagSemanticTag)
    # NOTE(review): this filters TagSemanticTag.tag_id by the
    # *semantic tag's* id -- verify against the association schema;
    # semantictag_id == self.id may have been intended.
    q = q.filter_by(tag_id=self.id)
    # q = q.filter_by(state='active')
    q = q.order_by(_tag.Tag.name)
    tags = q.all()
    return tags
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def by_id(cls, predicate_id, autoflush=True):
    '''Return the predicate with the given id, or None.

    :param predicate_id: the id of the predicate to return
    :type predicate_id: string
    :returns: the predicate with the given id, or None if there is no
        predicate with that id
    :rtype: ckan.model.semantictag.Predicate
    '''
    return (meta.Session.query(Predicate)
            .filter(Predicate.id == predicate_id)
            .autoflush(autoflush)
            .first())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def list_unique(cls):
    '''Return all predicates, de-duplicated by namespace.

    :returns: a list of predicates, one per distinct namespace
    :rtype: list of ckan.model.semantictag.Predicate objects
    '''
    return meta.Session.query(Predicate).distinct(Predicate.namespace).all()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def by_name(self, tag_name, semantictag_URI,
            autoflush=True):
    '''Return the TagSemanticTag for the given tag name and semantic
    tag URI, or None.

    :param tag_name: the name of the tag to look for
    :type tag_name: string
    :param semantictag_URI: the URI of the semantic tag to look for
    :type semantictag_URI: string
    :returns: the matching TagSemanticTag, or None if there is no
        TagSemanticTag for that semantic tag URI and tag name
    :rtype: ckan.model.tag_semanictag.TagSemanticTag
    '''
    query = (meta.Session.query(TagSemanticTag)
             .filter(_tag.Tag.name == tag_name)
             .filter(SemanticTag.URI == semantictag_URI))
    query = query.autoflush(autoflush)
    # .first() returns the entity or None, matching the documented
    # contract; the previous ``query.one()[0]`` raised NoResultFound
    # instead of returning None and indexed a single model object.
    return query.first()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def temp_directory(*args, **kwargs):
    """
    Context manager yielding a fresh directory created by
    ``tempfile.mkdtemp``; the whole tree is removed on exit.
    """
    tmpdir = tempfile.mkdtemp(*args, **kwargs)
    try:
        yield tmpdir
    finally:
        shutil.rmtree(tmpdir)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse(self):
    """Parse command line arguments and options.

    Delegates to the underlying optparse-style parser, stores the
    results on this object, and flattens them into a single dict.

    Returns:
        Dictionary containing all given command line arguments and
        options.
    """
    # optparse-style API: returns (options, positional args)
    (options, args) = self.parser.parse_args()
    self._set_attributes(args, options)
    return self._create_dictionary()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def prt_detail(self):
    """Nicely print the size and file/folder counts for this tree."""
    lines = list()
    lines.append("Detail info of %s: " % self.abspath)
    lines.append("total size = %s" % string_SizeInBytes(self.size_total))
    lines.append("number of sub folders = %s" % self.num_folder_total)
    lines.append("number of total files = %s" % self.num_file_total)
    lines.append("lvl 1 file size = %s" % string_SizeInBytes(self.size_current))
    lines.append("lvl 1 folder number = %s" % self.num_folder_current)
    lines.append("lvl 1 file number = %s" % self.num_file_current)
    print("\n".join(lines))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add(self, abspath_or_winfile, enable_verbose=True):
    """Add an absolute path or a WinFile to this FileCollection.

    Entries already present are left untouched (optionally reported).
    """
    # Normalize the argument to a dict key (and, when we already
    # have one, the WinFile value)
    if isinstance(abspath_or_winfile, str):
        key, winfile = abspath_or_winfile, None
    elif isinstance(abspath_or_winfile, WinFile):
        key, winfile = abspath_or_winfile.abspath, abspath_or_winfile
    else:
        raise TypeError
    if key in self.files:
        if enable_verbose:
            print("'%s' already in this collections" %
                  abspath_or_winfile)
    else:
        # Build the WinFile lazily, only when it must be stored
        if winfile is None:
            winfile = WinFile(key)
        self.files.setdefault(key, winfile)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def remove(self, abspath_or_winfile, enable_verbose=True):
    """Remove an absolute path or a WinFile from this FileCollection.

    Missing entries are tolerated (optionally reported).
    """
    # Normalize the argument to a dict key
    if isinstance(abspath_or_winfile, str):
        key = abspath_or_winfile
    elif isinstance(abspath_or_winfile, WinFile):
        key = abspath_or_winfile.abspath
    else:
        raise TypeError
    try:
        del self.files[key]
    except KeyError:
        if enable_verbose:
            print("'%s' are not in this file collections" %
                  abspath_or_winfile)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def iterfiles(self):
    """Yield all WinFile objects: in sorted order when a sort has
    been applied (``self.order`` exists), otherwise in dict order.
    """
    # Test for the attribute explicitly: the previous bare ``except``
    # also caught GeneratorExit/KeyboardInterrupt and could re-yield
    # files after a partially completed ordered pass.
    order = getattr(self, "order", None)
    if order is not None:
        for path in order:
            yield self.files[path]
    else:
        for winfile in self.files.values():
            yield winfile
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def iterpaths(self):
    """Yield each WinFile's absolute path: in sorted order when a
    sort has been applied (``self.order`` exists), otherwise in dict
    order.
    """
    # Test for the attribute explicitly: the previous bare ``except``
    # also caught GeneratorExit/KeyboardInterrupt and could re-yield
    # paths after a partially completed ordered pass.
    order = getattr(self, "order", None)
    if order is not None:
        for path in order:
            yield path
    else:
        for path in self.files:
            yield path
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def show_patterned_file(dir_path, pattern=list(), filename_only=True):
    """Print all files whose name (or full path) contains any of the
    given patterns, and write the same report to
    ``__show_patterned_file__.log``.

    :param dir_path: directory to scan
    :param pattern: list of case-insensitive substrings to look for
    :param filename_only: match against the file name only (True) or
        the whole absolute path (False)
    """
    pattern = [p.lower() for p in pattern]
    # named ``criterion`` (not ``filter``) to avoid shadowing the
    # builtin
    if filename_only:
        def criterion(winfile):
            for p in pattern:
                if p in winfile.fname.lower():
                    return True
            return False
    else:
        def criterion(winfile):
            for p in pattern:
                if p in winfile.abspath.lower():
                    return True
            return False
    fc = FileCollection.from_path_by_criterion(
        dir_path, criterion, keepboth=False)
    if filename_only:
        fc.sort_by("fname")
    else:
        fc.sort_by("abspath")
    # (the old unused ``table`` dict of "<pattern>" strings has been
    # removed -- it was dead code)
    lines = list()
    lines.append("Results:")
    for winfile in fc.iterfiles():
        lines.append(" %s" % winfile)
    if filename_only:
        lines.append("Above are all files that file name contains %s" % pattern)
    else:
        lines.append("Above are all files that abspath contains %s" % pattern)
    text = "\n".join(lines)
    print(text)
    with open("__show_patterned_file__.log", "wb") as f:
        f.write(text.encode("utf-8"))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
async def _get(self, url: str) -> str:
    """Perform a GET request and return the raw response body.

    Parameters
    ----------
    url : str
        The URL to get.

    Returns
    -------
    str
        The raw html of the requested page.

    Raises
    ------
    RuneConnectionError
        If the GET response status is not 200.
    """
    async with self.session.get(url, headers=self.HEADERS) as resp:
        # Fail fast on anything but a plain success
        if resp.status != 200:
            raise RuneConnectionError(resp.status)
        return await resp.text()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update(self):
    """Refresh this instance's cached running state from the API."""
    # Queried by id, so exactly one VM record is expected back
    vm = self._cs_api.list_virtualmachines(id=self.id)[0]
    self.is_running = self._is_running(vm.state)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unlock_keychain(username):
    """
    When running over SSH on OS X, prompt the user to unlock their
    local Keychain (at most once per username per process).
    """
    # Only relevant for SSH sessions, and only for users we have not
    # already handled
    if 'SSH_TTY' not in os.environ or username in _unlocked:
        return
    _unlocked.add(username)
    if sys.platform == 'darwin':
        sys.stderr.write("You are running under SSH. Please unlock your local OS X KeyChain:\n")
        subprocess.call(['security', 'unlock-keychain'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def save_password(entry, password, username=None):
    """
    Save the given password in the user's keychain.

    :param entry: The entry in the keychain.  This is a caller specific key.
    :param password: The password to save in the keychain.
    :param username: The username to save the password for.  Default is the
        current user.
    """
    username = get_username() if username is None else username
    # Without a working keychain there is nothing to do
    if not initialize_keychain():
        return
    try:
        keyring.set_password(entry, username, password)
    except Exception as e:
        log.warn("Unable to set password in keyring. Continuing..")
        log.debug(e)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def remove_password(entry, username=None):
    """
    Remove the password for the specific user from the user's keychain.

    :param entry: The entry in the keychain.  This is a caller specific key.
    :param username: The username whose password is to be removed.  Default
        is the current user.
    """
    if username is None:
        username = get_username()
    has_keychain = initialize_keychain()
    if has_keychain:
        try:
            keyring.delete_password(entry, username)
        except Exception as e:
            # (stray ``print e`` debug leftover removed; the failure
            # is already logged below, matching save_password)
            log.warn("Unable to delete password in keyring. Continuing..")
            log.debug(e)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_password(entry=None, username=None, prompt=None, always_ask=False):
    """
    Fetch a password, preferring the user's keychain over prompting on
    stdin.

    :param entry: The entry in the keychain.  This is a caller specific key.
    :param username: The username to get the password for.  Default is the
        current user.
    :param prompt: Prompt text shown when falling back to stdin.
    :param always_ask: Force the user to enter the password every time.
    """
    password = None
    if username is None:
        username = get_username()
    has_keychain = initialize_keychain()
    # Unlock the user's keychain otherwise, if running under SSH,
    # 'security(1)' will throw an error.
    unlock_keychain(username)
    if prompt is None:
        prompt = "Enter %s's password: " % username
    if has_keychain and entry is not None and always_ask is False:
        password = get_password_from_keyring(entry, username)
    # Fall back to an interactive prompt when the keychain had
    # nothing (or was skipped)
    if password is None:
        password = getpass.getpass(prompt=prompt)
    return password
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def validate_password(entry, username, check_function, password=None, retries=1, save_on_success=True, prompt=None, **check_args):
    """
    Validate a password with a check function & retry if the password is
    incorrect.  Useful for after a user has changed their password in LDAP,
    but their local keychain entry is then out of sync.

    :param str entry: The keychain entry to fetch a password from.
    :param str username: The username to authenticate
    :param func check_function: Check function to use.  Should take
        (username, password, **check_args)
    :param str password: The password to validate.  If `None`, the user
        will be prompted.
    :param int retries: Number of retries to prompt the user for.
    :param bool save_on_success: Save the password if the validation was
        successful.
    :param str prompt: Alternate prompt to use when asking for the user's
        password.
    :returns: `True` on successful authentication.  `False` otherwise.
    :rtype: bool
    """
    if password is None:
        password = get_password(entry, username, prompt)
    for _ in xrange(retries + 1):
        if check_function(username, password, **check_args):
            if save_on_success:
                save_password(entry, password, username)
            return True
        # NOTE(review): after the final failed attempt this still
        # prompts once more even though that last password is
        # discarded -- confirm whether the last prompt should be
        # skipped.
        log.error("Couldn't successfully authenticate your username & password..")
        password = get_password(entry, username, prompt, always_ask=True)
    return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def format_exception(exc, indent=0, pad=' '):
    """
    Take an exception object and return a generator with vtml
    formatted exception traceback lines.

    Chained exceptions (``__cause__`` / ``__context__``) are rendered
    first, each one indent level shallower, mirroring the layout of
    :func:`traceback.format_exception`.

    :param exc: the exception instance (must carry ``__traceback__``)
    :param indent: starting indent level, in units of ``pad``
    :param pad: string repeated to build each indent level
    :returns: (as the generator's return value) the indent level one
        past this exception's, consumed by the recursive calls
    """
    from_msg = None
    if exc.__cause__ is not None:
        # explicit chaining ('raise ... from err'): emit the cause
        # first
        indent += yield from format_exception(exc.__cause__, indent)
        # NOTE: traceback._cause_message is a private CPython detail
        from_msg = traceback._cause_message.strip()
    elif exc.__context__ is not None and not exc.__suppress_context__:
        # implicit chaining: raised while handling another exception
        indent += yield from format_exception(exc.__context__, indent)
        from_msg = traceback._context_message.strip()
    padding = pad * indent
    if from_msg:
        yield '\n%s%s\n' % (padding, from_msg)
    yield '%s<b><u>Traceback (most recent call last)</u></b>' % padding
    tblist = traceback.extract_tb(exc.__traceback__)
    tbdepth = len(tblist)
    for x in tblist:
        # frames are numbered N. (outermost) down to 1. (most recent)
        depth = '%d.' % tbdepth
        yield '%s<dim>%-3s</dim> <cyan>File</cyan> "<blue>%s</blue>", ' \
            'line <u>%d</u>, in <b>%s</b>' % (padding, depth, x.filename,
                                              x.lineno, x.name)
        yield '%s %s' % (padding, x.line)
        tbdepth -= 1
    yield '%s<b><red>%s</red>: %s</b>' % (padding, type(exc).__name__, exc)
    return indent + 1
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def print_exception(*args, file=None, **kwargs):
    """Render an exception via format_exception() and print each line with vtml."""
    rendered = format_exception(*args, **kwargs)
    for text in rendered:
        vtml.vtmlprint(text, file=file)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create(cls, config_file=None):
    """Return the default configuration singleton.

    On first call the instance is constructed and its INI file loaded
    (possibly overriding built-in defaults).  A later call with a
    *different* config_file is an error.
    """
    if cls.instance is None:
        # Publish the instance before load_ini() runs so that the
        # attribute is visible even while the INI file is being parsed.
        cls.instance = cls(config_file)
        cls.instance.load_ini()
    singleton = cls.instance
    # Guard against re-initialization with a conflicting file.
    if config_file and config_file != singleton.config_file:
        raise RuntimeError("Configuration initialized a second time with a different file!")
    return singleton
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load_ini(self):
    """Load the configured .INI file, then validate the resulting settings.

    Does nothing when no config_file is set.  Raises
    ConfigParser.ParsingError when the file cannot be read.
    """
    # Nothing configured -- keep the built-in defaults.
    if not self.config_file:
        return
    # Parse the INI file.  read() returns the list of files it managed to
    # read, so an empty result means the file was missing or unreadable.
    ini_file = ConfigParser.SafeConfigParser()
    if not ini_file.read(self.config_file):
        raise ConfigParser.ParsingError("Global configuration file %r not found!" % (
            self.config_file,
        ))
    # NOTE: a large block of dead code (section/key validation kept as a
    # no-op triple-quoted string) was removed here; it never executed.
    self._validate()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def multiple_chunks(self, chunk_size=None):
    """Return True when the file is larger than one chunk.

    Falls back to self.DEFAULT_CHUNK_SIZE when no chunk_size is given.
    In-memory representations should always report False, since reading
    from memory in chunks buys nothing.
    """
    threshold = chunk_size or self.DEFAULT_CHUNK_SIZE
    return self.size > threshold
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def content(self, value):
    """Set the content to a byte string, encoding str input if necessary.

    Also updates self.size to match the stored content.
    """
    if isinstance(value, bytes):
        self._content = value
    else:
        # Encode text using the module-wide ENCODING.
        self._content = value.encode(ENCODING)
    # Bug fix: size must be the stored *byte* length.  The original used
    # len(value), which counts characters for str input and so under-reports
    # the size for any non-ASCII text.
    self.size = len(self._content)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def md5hash(self):
    """Return the base64-encoded MD5 digest of the file content."""
    checksum = hashlib.md5(self.content).digest()
    return b64_string(checksum)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read(self, chunk_size=None):
    """Return up to chunk_size bytes of self.content, starting at self.pos.

    Without chunk_size, returns everything from self.pos to the end.
    Advances self.pos by the number of bytes returned, matching normal
    file-object semantics.
    """
    if chunk_size:
        data = self.content[self.pos:self.pos + chunk_size]
    else:
        # Bug fix: the original returned the *entire* content here,
        # ignoring self.pos (contradicting the documented contract) and
        # never advancing the position.
        data = self.content[self.pos:]
    self.pos += len(data)
    return data
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def import_egg(string):
    """Load a controller class from an egg.

    The spec has the form "<distribution>#<entry point name>"; the entry
    point is looked up in the "appathy.controller" group.
    """
    distribution, _, entry_name = string.partition('#')
    return pkg_resources.load_entry_point(distribution, 'appathy.controller', entry_name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def first(sequence, message=None):
    """Return the first item of *sequence*.

    Raises ValueError (with *message* when given) if the sequence yields
    nothing.
    """
    for item in sequence:
        return item
    raise ValueError(message or ('Sequence is empty: %s' % sequence))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def last(sequence, message=None):
    """Return the last item of *sequence* without modifying it.

    Raises ValueError (with *message* when given) if the sequence is empty.

    Bug fix: the original called sequence.pop(), which destructively
    removed the last element from any list passed in.
    """
    try:
        # Fast path for anything indexable (list, tuple, str, ...).
        return sequence[-1]
    except IndexError:
        raise ValueError(message or f'Sequence is empty: {sequence}')
    except TypeError:
        # Not indexable (e.g. a generator): materialize it once.
        items = list(sequence)
        if items:
            return items[-1]
        raise ValueError(message or f'Sequence is empty: {sequence}')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def first_that(predicate, sequence, message=None):
    """Return the first item of *sequence* matching *predicate*.

    Raises KeyError (with *message* when given) if nothing matches.

    Bug fix: the original used the Python-2-only `ifilter`; this file
    targets Python 3 (it uses `yield from`, keyword-only arguments and
    f-strings elsewhere), where the built-in `filter` is the lazy
    equivalent.
    """
    try:
        return next(filter(predicate, sequence))
    except StopIteration:
        raise KeyError(message or 'Not Found')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def expand_window(center, window_size, array_size):
    """Return the indices of a window of half-width *window_size* around *center*.

    The window is clipped to the valid range [0, array_size), so its
    maximum length is ``2 * window_size + 1``.

    Example::

        [47, 48, 49, 50, 51, 52, 53]
        [0, 1, 2, 3, 4, 5]
        [95, 96, 97, 98, 99]
    """
    lower = max(center - window_size, 0)
    upper = min(center + window_size + 1, array_size)
    return np.array(range(lower, upper))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def initial_populate(self, data):
    """Populate a freshly created config object with *data*.

    Returns True when the data was loaded, False when the config had
    already been populated.  Callers are advised to .dump() and
    .reload() afterwards.
    """
    if not self.config.parsed:
        self.config.load_from_dict(data)
        return True
    # Already populated -- leave the existing configuration untouched.
    return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def apply_defaults(self, other_config):
    """Merge default values from *other_config* into this ConfigObject.

    *other_config* may be another object of the same class (its .config
    is used) or a plain ConfigKey/dict-like.  Keys already present here
    win over the defaults (overwrite=False).
    """
    same_class = isinstance(other_config, self.__class__)
    defaults = other_config.config if same_class else other_config
    self.config.load_from_dict(defaults, overwrite=False)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def reload(self):
    """Re-open the backing config file and load it again.

    Alias for close-reopen followed by self.load().
    """
    path = self.fd.name
    if not self.fd.closed:
        self.fd.close()
    self.fd = open(path, 'r')
    self.load()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.