text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def stop(self): """Stops a playing animation. A subsequent call to play will start from the beginning."""
def stop(self):
    """Stop a playing (or paused) animation; a later play() restarts at frame 0."""
    if self.state == PygAnimation.PLAYING:
        self.index = 0          # rewind to the first image
        self.elapsed = 0
        self.nIterationsLeft = 0
    elif self.state == PygAnimation.STOPPED:
        pass                    # already stopped
    elif self.state == PygAnimation.PAUSED:
        self.index = 0          # rewind to the first image
        self.elapsed = 0
    self.state = PygAnimation.STOPPED
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pause(self): """Pauses a playing animation. A subsequent call to play will continue where it left off."""
def pause(self):
    """Pause a playing animation; a later play() resumes where it left off."""
    if self.state == PygAnimation.PLAYING:
        self.elapsedAtPause = self.elapsed
        # Only a playing animation transitions to PAUSED.
        self.state = PygAnimation.PAUSED
    # STOPPED and PAUSED states: nothing to do.
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update(self): """Updates the currently running animation. This method should be called in every frame where you want an animation to run. Its job is to figure out if it is time to move onto the next image in the animation. """
def update(self):
    """Advance the running animation; call once per frame.

    Figures out whether it is time to move to the next image.  Returns
    True exactly on the frame where the animation finishes, else False.
    """
    if self.state != PygAnimation.PLAYING:
        return False

    finished = False
    self.elapsed = time.time() - self.playingStartTime
    if self.elapsed > self.elapsedStopTime:  # current pass has ended
        if self.loop:
            # Endless looping: just restart the clock.
            self.playingStartTime = time.time()
            self.nextElapsedThreshold = self.endTimesList[0]
        else:
            self.nIterationsLeft = self.nIterationsLeft - 1
            if self.nIterationsLeft == 0:  # all iterations done
                self.state = PygAnimation.STOPPED
                if self.callBack is not None:
                    self.callBack(self.nickname)
                finished = True
            else:
                # Start the next iteration from the first image.
                self.playingStartTime = time.time()
                self.nextElapsedThreshold = self.endTimesList[0]
                self.index = 0
    elif self.elapsed > self.nextElapsedThreshold:
        # Time to advance to the next image.
        self.index = self.index + 1
        self.nextElapsedThreshold = self.endTimesList[self.index]
    return finished
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def draw(self): """Draws the current frame of the animation Should be called in every frame. """
def draw(self):
    """Blit the current frame (index chosen earlier, typically by update())."""
    frame = self.imagesList[self.index]
    if frame is None:  # nothing to show for this frame
        return
    if self.visible:
        dx, dy = self.offsetsList[self.index]
        self.window.blit(frame, (self.loc[0] + dx, self.loc[1] + dy))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def cache(cls, key_attrs, expires=None): """Decorates a method to provide cached-memoization using a combination of the positional arguments, keyword argments, and whitelisted instance attributes. """
def cache(cls, key_attrs, expires=None):
    """Decorate a method with cached memoization keyed on the positional
    arguments, keyword arguments and the whitelisted instance attributes
    named in `key_attrs` (a sequence or whitespace-separated string)."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            klass = type(self)
            if isinstance(key_attrs, basestring):
                attrs = key_attrs.split()
            else:
                attrs = key_attrs
            data = {}
            for attr in attrs:
                value = getattr(self, attr)
                # Normalize unordered containers so the cache key is stable.
                if isinstance(value, dict):
                    value = ('dict', sorted(value.items()))
                elif isinstance(value, set):
                    value = ('set', sorted(value))
                else:
                    value = (type(value).__name__, value)
                data[attr] = value
            result_cache, new = cls.get_or_create(
                module=klass.__module__,
                classname=klass.__name__,
                method_name=func.__name__,
                data=sorted(data.items()),
                args=args,
                kwargs=sorted(kwargs.items()),
            )
            if new:
                result_cache.result = func(self, *args, **kwargs)
                result_cache.save(expires)
            return result_cache.result
        return wrapper
    return decorator
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def assertCallerError(self, expected_arg0, callable_obj, *args, **kwargs): """ Assert that a callable raises an AssertionError with a particular argument. :param expected_arg0: The expected value for the AssertionError instance's first argument (i.e., instance.args[0]). """
try: callable_obj(*args, **kwargs) self.fail('Expected AssertionError, but no exception raised') except AssertionError, exc: self.assertEqual(exc.args[0], expected_arg0) except Exception, exc: self.fail('Expected AssertionError, but got %s' % repr(exc))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_region_nt_counts(region, bam, stranded=False): """ Get counts of each nucleotide from a bam file for a given region. If R1 and R2 reads both overlap a position, only one count will be added. If the R1 and R2 reads disagree at a position they both overlap, that read pair is not used for that position. Can optionally output strand-specific counts. Parameters region : str or list Region of type chrom:start-end, chrom:start-end:strand, or [chrom, start, end]. The strand is ignored for chrom:start-end:strand. For chrom:start-end, the coordinates are one-based inclusive. For example, the query chr1:10-11 will give you the counts for the 10th and 11th bases of chr1. For [chrom, start, end], the coordinates are zero-based and end exclusive (like a bed file). The query [chr1, 9, 11] will give you the coverage of the 10th and 11th bases of chr1. The region value is passed directly to pysam's pileup function. bam : pysam.calignmentfile.AlignmentFile or str Bam file opened with pysam or path to bam file (must be sorted and indexed). stranded : boolean Boolean indicating whether read data is stranded and stranded nucleotide counts should be returned. Assumes R1 read on reverse strand implies + strand coverage etc. Returns ------- counts : pandas.DataFrame Data frame with the counts for each base in the region. The index of this data frame is one-based for compatibility with VCF files. """
def get_region_nt_counts(region, bam, stranded=False):
    """Count each nucleotide covered in `region` of a bam file.

    Overlapping R1/R2 mates contribute a single count per fragment;
    mates that disagree at a position are skipped for that position.

    Parameters
    ----------
    region : str or list/tuple
        'chrom:start-end' (one-based inclusive; an optional ':strand'
        suffix is ignored) or [chrom, start, end] (zero-based,
        end-exclusive, like a bed file).
    bam : pysam.AlignmentFile or str
        Open pysam alignment file or path to a sorted, indexed bam.
    stranded : bool
        If True, report per-strand counts (columns like 'A+', 'A-').

    Returns
    -------
    pandas.DataFrame
        Counts per base; index labels are one-based 'chrom:pos'.
    """
    # TODO: I should figure out what the different possible values are that
    # pysam could give me back (so far I only have ATCGN). Can I get
    # deletions and insertions?
    # TODO: This could probably be parallelized.
    if type(bam) == str:
        bam = pysam.AlignmentFile(bam, 'rb')

    if type(region) is str:
        r = parse_region(region)
        if len(r) == 3:
            chrom, start, end = r
        elif len(r) == 4:
            chrom, start, end, strand = r
        start = int(start)
        end = int(end)
        ind = ['{}:{}'.format(chrom, x) for x in range(start, end + 1)]
        pp = bam.pileup(region=region, truncate=True)
    elif isinstance(region, (list, tuple)):
        # BUG FIX: was `type(region) is (list or tuple)`, which evaluates
        # to `type(region) is list` and silently rejected tuples.
        chrom, start, end = region
        ind = ['{}:{}'.format(chrom, x)
               for x in range(int(start) + 1, int(end) + 1)]
        pp = bam.pileup(chrom, start, end, truncate=True)

    cols = ['A', 'T', 'C', 'G', 'N']
    if stranded:
        cols = (['{}+'.format(x) for x in cols] +
                ['{}-'.format(x) for x in cols])
    counts = pd.DataFrame(0, index=ind, columns=cols)

    for pc in pp:
        # Deduplicate overlapping R1/R2 reads from the same fragment.
        pos = pc.reference_pos + 1
        r1_qnames = []
        r1_nts = []
        r2_qnames = []
        r2_nts = []
        for pr in pc.pileups:
            qnames = [r1_qnames, r2_qnames][pr.alignment.is_read2]
            nts = [r1_nts, r2_nts][pr.alignment.is_read2]
            nt = _pos_nt(pr, pc.reference_pos, stranded)
            if nt:
                qnames.append(pr.alignment.qname)
                nts.append(nt)
        r1 = pd.Series(r1_nts, index=r1_qnames)
        r2 = pd.Series(r2_nts, index=r2_qnames)
        df = pd.DataFrame([r1, r2], index=['R1', 'R2']).T
        singles = df[df.isnull().sum(axis=1) == 1]
        doubles = df.dropna()
        vcs = []
        vcs.append(singles['R1'].value_counts())
        vcs.append(singles['R2'].value_counts())
        # Only count both-mate positions where the mates agree.
        doubles = doubles[doubles.R1 == doubles.R2]
        vcs.append(doubles.R1.value_counts())
        for vc in vcs:
            counts.ix['{}:{}'.format(chrom, pos), vc.index] += vc
    return counts
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _pos_nt(pr, pos, stranded=False): """ Given a pileup read and a position, return the base that is covered by the read at the given position if the position is covered. Parameters pr : pysam.calignmentfile.PileupRead Region of type chrom:start-end, chrom:start-end:strand, or [chrom, pos : int Zero-based position of the nucleotide of interest in genomic coordinates. stranded : boolean Boolean indicating whether data is stranded and stranded nucleotide should be returned. Assumes R1 read on reverse strand implies + strand coverage etc. Returns ------- nt : str or None If None, then the read did not cover the position. If not None, returns the nucleotide at that position (with + or - appended to indicate strand if desired). """
def _pos_nt(pr, pos, stranded=False):
    """Return the base a pileup read covers at zero-based `pos`, or None
    if the read does not cover it.  With stranded=True the base is
    suffixed with '+'/'-' (reverse-complemented for minus-strand
    coverage, assuming a reverse R1 implies '+' strand data).
    """
    # NOTE(review): zips aligned reference positions against the full
    # read sequence; presumably assumes the two line up (no indels or
    # soft clips shifting the pairing) -- confirm against pysam docs.
    aln = pr.alignment
    bases = dict(zip(aln.get_reference_positions(), list(aln.seq.upper())))
    nt = bases.get(pos)
    if nt and stranded:
        strand = None
        r1, r2, rev = aln.is_read1, aln.is_read2, aln.is_reverse
        if r1 and rev:
            strand = '+'
        if r2 and not rev:
            strand = '+'
        if r1 and not rev:
            nt = str(Seq(nt).reverse_complement())
            strand = '-'
        if r2 and rev:
            nt = str(Seq(nt).reverse_complement())
            strand = '-'
        nt = '{}{}'.format(nt, strand)
    return nt
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def nt_counts(bam, positions, stranded=False, vcf=False, bed=False): """ Find the number of nucleotides covered at all positions in a bed or vcf file. Parameters bam : str or pysam.calignmentfile.AlignmentFile Bam file opened with pysam or path to bam file (must be sorted and indexed). positions : str or pybedtools.BedTool Path to bed or vcf file or pybedtools.BedTool object. The extension is used to determine whether the file is a bed or vcf (.bed vs .vcf). stranded : boolean Boolean indicating whether read data is stranded and stranded nucleotide counts should be returned. Assumes R1 read on reverse strand implies + strand coverage etc. vcf : boolean Set to True if you are providing a vcf file that doesn't have a .vcf suffix. bed : boolean Set to True if you are providing a bed file that doesn't have a .bed suffix. Returns ------- counts : pandas.DataFrame Data frame with the counts for each base in the region. The index of this data frame is one-based for compatibility with VCF files. """
def nt_counts(bam, positions, stranded=False, vcf=False, bed=False):
    """Count nucleotides covered at every position in a bed/vcf file.

    Parameters
    ----------
    bam : str or pysam.AlignmentFile
        Path to (or open) sorted, indexed bam file.
    positions : str or pybedtools.BedTool
        Path to a .bed/.vcf file or a BedTool object.
    stranded : bool
        Report strand-specific counts.
    vcf, bed : bool
        Force interpretation when the file suffix is non-standard.

    Returns
    -------
    counts : pandas.DataFrame
        Per-base counts; one-based index for VCF compatibility.

    Raises
    ------
    ValueError
        If `positions` is neither a BedTool nor a recognizable bed/vcf.
        BUG FIX: previously only an error message was written to stderr
        and execution fell through to a NameError on an undefined `df`.
    """
    if not bed and not vcf:
        if type(positions) == pbt.bedtool.BedTool:
            df = positions.to_dataframe()
        elif positions[-4:] == '.bed':
            bed = True
        elif positions[-4:] == '.vcf':
            vcf = True
        else:
            sys.stderr.write('Positions must be BedTool, bed file, or vcf '
                             'file.\n')
            raise ValueError('Positions must be BedTool, bed file, or vcf '
                             'file.')
    if bed:
        df = pbt.BedTool(positions).to_dataframe()
    elif vcf:
        from variants import vcf_as_df
        tdf = vcf_as_df(positions)
        df = pd.DataFrame(index=tdf.index)
        df['chrom'] = tdf.CHROM
        df['start'] = tdf.POS - 1
        df['end'] = tdf.POS

    res = []
    for i in df.index:
        region = [df.ix[i, 'chrom'], df.ix[i, 'start'], df.ix[i, 'end']]
        res.append(get_region_nt_counts(region, bam, stranded))
    return pd.concat(res)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def save(self, name, file): """ Saves new content to the file specified by name. The content should be a proper File object or any python file-like object, ready to be read from the beginning. """
def save(self, name, file):
    """Save `file` (a File or any file-like positioned at the start)
    under `name`; return the stored name with forward slashes."""
    if name is None:
        name = file.name
    if not hasattr(file, 'chunks'):
        # Wrap plain file-likes so downstream code can rely on File.
        file = File(file, name=name)
    name = self._save(self.get_available_name(name), file)
    # Normalize separators so Windows saves match URL form.
    return name.replace('\\', '/')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def request_signature(self, stringtosign): """ Construct a signature by making an RFC2104 HMAC-SHA1 of the following and converting it to Base64 UTF-8 encoded string. """
def request_signature(self, stringtosign):
    """Return the RFC 2104 HMAC-SHA1 of `stringtosign`, base64/UTF-8 encoded."""
    mac = hmac.new(self.secret_key.encode(ENCODING),
                   stringtosign.encode(ENCODING),
                   hashlib.sha1)
    return b64_string(mac.digest())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_content_type(self, file): """ Return content type of file. If file does not have a content type, make a guess. """
def _get_content_type(self, file):
    """Return the file's mimetype, guessing from its extension if unset."""
    if file.mimetype:
        return file.mimetype
    extension = os.path.splitext(file.name)[1].strip('.')
    # Fall back to a generic binary type for unknown extensions.
    return media_types.get(extension, 'binary/octet-stream')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _put_file(self, file): """Send PUT request to S3 with file contents"""
def _put_file(self, file):
    """Upload `file` to S3 with a PUT request; raise S3IOError on non-200."""
    post_params = {
        'file_size': file.size,
        'file_hash': file.md5hash(),
        'content_type': self._get_content_type(file),
    }
    headers = self._request_headers('PUT', file.prefixed_name,
                                    post_params=post_params)
    with closing(HTTPConnection(self.netloc)) as conn:
        conn.request('PUT', file.prefixed_name, file.read(), headers=headers)
        response = conn.getresponse()
        if response.status not in (200,):
            raise S3IOError(
                'py3s3 PUT error. '
                'Response status: {}. '
                'Reason: {}. '
                'Response Text: \n'
                '{}'.format(response.status, response.reason,
                            response.read()))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_file(self, prefixed_name): """ Fetch the named file's contents from S3 with a GET request """
def _get_file(self, prefixed_name):
    """Fetch the named object from S3 with a GET request.

    Returns an S3ContentFile; raises S3FileDoesNotExistError for a
    missing key and S3IOError for any other non-200 response.
    """
    headers = self._request_headers('GET', prefixed_name)
    file = S3ContentFile('')
    with closing(HTTPConnection(self.netloc)) as conn:
        conn.request('GET', prefixed_name, headers=headers)
        response = conn.getresponse()
        if response.status not in (200,):
            if response.length is None:
                # A GET of a non-existent key comes back with no length.
                raise S3FileDoesNotExistError(prefixed_name)
            # Catch all other cases.
            raise S3IOError(
                'py3s3 GET error. '
                'Response status: {}. '
                'Reason: {}. '
                'Response Text: \n'
                '{}'.format(response.status, response.reason,
                            response.read()))
        file = S3ContentFile(response.read())
    return file
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def url(self, name): """Return URL of resource"""
def url(self, name):
    """Return the http URL of the named resource."""
    parts = ('http', self.netloc, self._prepend_name_prefix(name), '', '')
    return urllib.parse.urlunsplit(parts)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_all_tasks(conf): """Returns a list with every task registred on Hamster. """
db = HamsterDB(conf) fact_list = db.all_facts_id security_days = int(conf.get_option('tasks.security_days')) today = datetime.today() tasks = {} for fact_id in fact_list: ht = HamsterTask(fact_id, conf, db) if ht.end_time: end_time = ht.get_object_dates()[1] if today - timedelta(security_days) <= end_time: rt = ht.get_remote_task() tasks[rt.task_id] = rt db.close_connection() print 'Obtained %d tasks' % len(tasks) return tasks
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def SetEncoding(sval): """Sets the encoding variable according to the text passed :param sval: text specification for the desired model """
def SetEncoding(sval):
    """Set the module-level `encoding` from a model name string.

    :param sval: text specification for the desired model
    :raises InvalidSelection: for unrecognized names
    """
    global encoding
    choices = {
        "additive": Encoding.Additive,
        "dominant": Encoding.Dominant,
        "recessive": Encoding.Recessive,
        "genotype": Encoding.Genotype,
        "raw": Encoding.Raw,
    }
    try:
        encoding = choices[sval.lower()]
    except KeyError:
        raise InvalidSelection("Invalid encoding, %s, selected" % (sval))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_family_details(self, pheno_covar): """Load family data updating the pheno_covar with family ids found. :param pheno_covar: Phenotype/covariate object :return: None """
def load_family_details(self, pheno_covar):
    """Load family data, registering valid subjects with `pheno_covar`
    and building the two-column individual mask (1 == masked out).

    :param pheno_covar: Phenotype/covariate object
    :return: None

    FIXES: the details file handle was never closed (now a `with`
    block) and the locals shadowed the builtins `file` and `format`.
    """
    with open(self.fam_details) as fam:
        fam.readline()  # header line, unused
        fam.readline()  # format line, unused
        self.file_index = 0
        mask_components = []  # 1 marks an individual to be masked out
        for line in fam:
            words = line.strip().split()
            indid = ":".join(words[0:2])
            if DataParser.valid_indid(indid):
                mask_components.append(0)
                sex = int(words[5])
                pheno = float(words[6])
                pheno_covar.add_subject(indid, sex, pheno)
            else:
                mask_components.append(1)
    mask_components = numpy.array(mask_components)
    # Two mask columns (one per allele) sharing the per-subject flag.
    self.ind_mask = numpy.zeros(len(mask_components) * 2,
                                dtype=numpy.int8).reshape(-1, 2)
    self.ind_mask[0:, 0] = mask_components
    self.ind_mask[0:, 1] = mask_components
    self.ind_count = self.ind_mask.shape[0]
    pheno_covar.freeze_subjects()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load_genotypes(self): """Prepares the files for genotype parsing. :return: None """
def load_genotypes(self):
    """Open the next genotype archive (and its info file) for parsing.

    :raises StopIteration: once every archive has been processed.
    """
    if self.file_index >= len(self.archives):
        raise StopIteration
    self.current_file = self.archives[self.file_index]
    info_filename = self.current_file.replace(Parser.gen_ext,
                                              Parser.info_ext)
    if len(self.info_files) > 0:
        info_filename = self.info_files[self.file_index]
    self.info_file = open(info_filename)
    self.info_file.readline()  # discard the header
    if DataParser.compressed_pedigree:
        self.freq_file = gzip.open("%s" % (self.current_file), 'rb')
    else:
        self.freq_file = open(self.current_file)
    self.current_chrom = self.chroms[self.file_index]
    self.file_index += 1
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_next_line(self): """If we reach the end of the file, we simply open the next, until we \ run out of archives to process"""
def get_next_line(self):
    """Return (genotype_fields, info, exp_freq) for the next variant,
    rolling over to the next archive at end of file."""
    line = self.freq_file.readline().strip().split()
    if len(line) < 1:
        # This archive is exhausted; open the next one.
        self.load_genotypes()
        line = self.freq_file.readline().strip().split()
    info_fields = self.info_file.readline().strip().split()
    return line, float(info_fields[4]), float(info_fields[3])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_docstring(entity): """ Return sanitized docstring from an entity. The first line of the docstring is the title, and remaining lines are the details, aka git style. """
def parse_docstring(entity):
    """Return (title, details) from an entity's docstring, git-style:
    first line is the title, the remainder (after a blank) the details."""
    doc = inspect.getdoc(entity)
    if not doc:
        return None, None
    lines = doc.splitlines(keepends=True)
    if not lines[0].strip():  # drop a leading blank line
        lines.pop(0)
    title = (lines.pop(0).strip() if lines else '') or None
    if lines and not lines[0].strip():  # blank separator before details
        lines.pop(0)
    details = ''.join(lines).rstrip() or None
    return title, details
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_args(self, argv=None): """ Return an argparse.Namespace of the argv string or sys.argv if argv is None. """
def parse_args(self, argv=None):
    """Parse `argv` (a string) into a Namespace; None means sys.argv."""
    tokens = shlex.split(argv) if argv is not None else None
    self.get_or_create_session()
    return self.argparser.parse_args(tokens)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_pager_spec(self): """ Find the best pager settings for this command. If the user has specified overrides in the INI config file we prefer those. """
def get_pager_spec(self):
    """Return pager settings, preferring this command's own INI section
    and falling back to the [core] section."""
    own = self.get_config()
    pagercmd = own.get('pager')
    istty = own.getboolean('pager_istty')
    core = self.get_config('core')
    if pagercmd is None:
        pagercmd = core.get('pager')
    if istty is None:
        istty = core.get('pager_istty')
    return {"pagercmd": pagercmd, "istty": istty}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run_wrap(self, args): """ Wrap some standard protocol around a command's run method. This wrapper should generally never capture exceptions. It can look at them and do things but prerun and postrun should always be symmetric. Any exception suppression should happen in the `session.execute`. """
def run_wrap(self, args):
    """Run the command inside the standard prerun/postrun protocol.

    Exceptions are observed (postrun and the postrun event fire with
    `exc`) but never suppressed here; suppression belongs to
    `session.execute`.  prerun/postrun stay symmetric.
    """
    self.fire_event('prerun', args)
    self.prerun(args)
    try:
        if self.session.allow_pager and self.use_pager:
            desc = 'Command\: %s' % '-'.join(self.prog.split())
            with paging.pager_redirect(desc, **self.get_pager_spec()):
                result = self.run(args)
        else:
            result = self.run(args)
    except (SystemExit, Exception) as error:
        self.postrun(args, exc=error)
        self.fire_event('postrun', args, exc=error)
        raise error
    else:
        self.postrun(args, result=result)
        self.fire_event('postrun', args, result=result)
        return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_config(self, section=None): """ Return the merged end-user configuration for this command or a specific section if set in `section`. """
def get_config(self, section=None):
    """Return the user config section for this command (or `section`),
    creating the section on first access."""
    config = self.session.config
    target = self.config_section() if section is None else section
    if target not in config:
        config.add_section(target)
    return config[target]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parent(self, parent): """ Copy context from the parent into this instance as well as adjusting or depth value to indicate where we exist in a command tree. """
def parent(self, parent):
    """Attach to `parent`: copy its context into this instance and set
    our depth in the command tree; children are re-bumped."""
    self._parent = parent
    if not parent:
        self.depth = 0
        return
    self.inject_context(dict((x, getattr(parent, x))
                             for x in parent.context_keys))
    self.depth = parent.depth + 1
    for command in self.subcommands.values():
        command.parent = self  # bump children down a level
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_root(self): """ Traverse parent refs to top. """
def find_root(self):
    """Walk parent links to the topmost command and return it."""
    node = self
    while node.parent:
        node = node.parent
    return node
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def inject_context(self, __context_dict__=None, **context): """ Map context dict to this instance as attributes and keep note of the keys being set so we can pass this along to any subcommands. """
def inject_context(self, __context_dict__=None, **context):
    """Set context entries as attributes, remember their keys, and pass
    the same context along to every subcommand."""
    context = context or __context_dict__
    self.context_keys |= set(context)
    for key, value in context.items():
        setattr(self, key, value)
    for command in self.subcommands.values():
        command.inject_context(context)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_argument(self, *args, parser=None, autoenv=False, env=None, complete=None, **kwargs): """ Allow cleaner action supplementation. Autoenv will generate an environment variable to be usable as a defaults setter based on the command name and the dest property of the action. """
def add_argument(self, *args, parser=None, autoenv=False, env=None,
                 complete=None, **kwargs):
    """Add an argparse action with optional extras.

    `autoenv` generates an environment variable name from the command
    and the action's dest; `env` names one explicitly (the two are
    mutually exclusive).  `complete` attaches a completion function.
    """
    if parser is None:
        parser = self.argparser
    action = parser.add_argument(*args, **kwargs)
    if autoenv:
        if env is not None:
            raise TypeError('Arguments `env` and `autoenv` are mutually '
                            'exclusive')
        env = self._make_autoenv(action)
    if env:
        self.argparser.bind_env(action, env)
    if autoenv:
        self._autoenv_actions.add(action)
    if complete:
        action.complete = complete
    return action
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _make_autoenv(self, action): """ Generate a suitable env variable for this action. This is dependant on our subcommand hierarchy. Review the prog setter for details. """
env = ('%s_%s' % (self.prog, action.dest)).upper() env = re.sub(self.env_scrub_re, '', env.strip()) env = re.sub(self.env_flatten_re, '_', env) if re.match('^[0-9]', env): # Handle leading numbers. env = '_%s' % env return env
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_file_argument(self, *args, mode='r', buffering=1, filetype_options=None, **kwargs): """ Add a tab-completion safe FileType argument. This argument differs from a normal argparse.FileType based argument in that the value is a factory function that returns a file handle instead of providing an already open file handle. There are various reasons why this is a better approach but it is also required to avoid erroneous creation of files with shellish tab completion. """
def add_file_argument(self, *args, mode='r', buffering=1,
                      filetype_options=None, **kwargs):
    """Add a tab-completion-safe file argument.

    Unlike argparse.FileType, the parsed value is a factory returning a
    file handle -- this avoids creating files during tab completion.
    """
    opener = supplement.SafeFileType(mode=mode, bufsize=buffering,
                                     **filetype_options or {})
    return self.add_argument(*args, type=opener, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_argparser(self): """ Factory for arg parser. Can be overridden as long as it returns an ArgParser compatible instance. """
def create_argparser(self):
    """Build this command's argument parser; overridable as long as an
    ArgParser-compatible instance is returned."""
    parts = [p for p in (self.title, self.desc) if p]
    fulldesc = '\n\n'.join(parts) if parts else self.title
    return self.ArgumentParser(command=self, prog=self.name,
                               description=fulldesc)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def attach_session(self): """ Create a session and inject it as context for this command and any subcommands. """
def attach_session(self):
    """Create a Session rooted at the top command and inject it as
    context for the whole command tree."""
    assert self.session is None
    top = self.find_root()
    new_session = self.Session(top)
    top.inject_context(session=new_session)
    return new_session
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _complete(self, text, line, begin, end): """ Do naive argument parsing so the completer has better ability to understand expansion rules. """
def _complete(self, text, line, begin, end):
    """Naively parse `line` so the completer understands expansion rules."""
    line = line[:end]  # ignore characters following the cursor
    fullargs = self.split_line(line)[1:]
    args = fullargs[:]
    options = self.deep_scan_parser(self.argparser)

    # Walk into the options tree while subcommands are detected.
    last_subcommand = None
    while True:
        for key, completers in options.items():
            if key in args and hasattr(completers[0], 'items'):
                args.remove(key)
                last_subcommand = key
                options = completers[0]
                break
        else:
            break

    if text == last_subcommand:
        # Special case: the last arg is the key that selected our
        # subparser and the cursor is not preceded by a space, which
        # stalls the completion routines.  Short-circuit with a single
        # item; subsequent tabs work normally.
        return {text}

    # Look for incomplete actions.
    choices = set(x for x in options
                  if x is not None and x.startswith(text))
    arg_buf = []
    pos_args = []
    trailing_action = None
    # The slice below skips the last arg if it is 'active'.
    for x in reversed(args[:-1 if text else None]):
        if x in options:
            action = options[x][0]
            action.consume(arg_buf)
            pos_args.extend(arg_buf)
            del arg_buf[:]
            if action.full:
                choices -= {action.key}
            if not trailing_action:
                trailing_action = action
            if not action.full:
                if action.reached_min:
                    choices |= action(self, text, fullargs)
                    choices -= {action.key}
                else:
                    choices = action(self, text, fullargs)
                    break
        else:
            arg_buf.insert(0, x)
    pos_args.extend(arg_buf)

    # Feed remaining buffered args to positionals so long as no trailing
    # action can still consume them.
    if None in options and (not trailing_action or trailing_action.full):
        for x_action in options[None]:
            x_action.consume(pos_args)
            if not x_action.reached_min:
                choices = x_action(self, text, fullargs)
                break
            elif not x_action.full:
                choices |= x_action(self, text, fullargs)
    return choices
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def split_line(self, line): """ Try to do pure shlex.split unless it can't parse the line. In that case we trim the input line until shlex can split the args and tack the unparsable portion on as the last argument. """
def split_line(self, line):
    """shlex-split `line`; when shlex cannot parse it, trim characters
    from the end until it splits, then append the unparsable tail as
    the final argument."""
    tail = []
    while True:
        try:
            parsed = shlex.split(line)
        except ValueError:
            tail.append(line[-1])  # peel one char off the end
            line = line[:-1]
        else:
            if tail:
                parsed.append(''.join(reversed(tail)))
            return parsed
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_commands_from(self, args): """ We have to code the key names for each depth. This method scans for each level and returns a list of the command arguments. """
def get_commands_from(self, args):
    """Collect subcommand values from `args` attributes named by
    arg_label_fmt at each depth, stopping at the first missing level."""
    found = []
    i = 0
    while True:
        try:
            found.append(getattr(args, self.arg_label_fmt % i))
        except AttributeError:
            return found
        i += 1
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _take_bits(buf, count): """Return the booleans that were packed into bytes."""
def _take_bits(buf, count):
    """Return `count` booleans unpacked MSB-first from ceil(count/8) bytes.

    BUG FIX: the bit budget for the final byte was `count % 8`, which is
    0 when count is a multiple of 8, silently dropping that byte's bits;
    `or 8` restores the full byte in that case.  Also resolves the old
    TODO by converting each bit to a real True/False.
    """
    bytes_count = (count + 7) // 8
    last_bits = count % 8 or 8  # bits used in the final byte
    data = _unpack_from(buf, 'B', bytes_count)
    values = []
    for i, byte in enumerate(data):
        for _ in range(8 if i != bytes_count - 1 else last_bits):
            values.append(bool(byte & 0b10000000))
            byte <<= 1
    return values
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _fix_keys(key_mapping, inibin, string_table=None): """ Return a human-readable dictionary from the inibin. Arguments: key_mapping -- Dictionary used for conversion. Supports nesting. Every other value should be a numeric inibin key, or a tuple of the key and a function to apply to the result. inibin -- The dictionary returned from reading an inibin. string_table -- Used to translate strings. Any string with a key in string_table will be replaced. Typically loaded from a fontconfig_*.txt. """
if string_table is None: string_table = {} def walk(node, out_node): # Walk the nodes of the key mapping for key, value in node.items(): if isinstance(value, dict): if key not in out_node: out_node[key] = {} walk(value, out_node[key]) else: # Can either be just the index, or the index plus a function to apply func = None if isinstance(value, tuple): func = value[-1] index = value[0] else: index = value if index is None or index not in inibin: out_node[key] = None continue val = inibin[index] # Try numeric conversion # Inibins often store numbers in strings if isinstance(val, bytes): try: val = int(val) except ValueError: try: val = float(val) except ValueError: val = val.decode('utf8') # Check if value is a reference to a fontconfig key if val in string_table: val = string_table[val] # Apply the function if callable(func): val = func(val) out_node[key] = val out = {} walk(key_mapping, out) return out
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _unpack_from(buf, format_s, count=None, little_endian=True): """Read a binary format from the buffer."""
if count is not None: assert count > 0 format_s = '%i%s' % (count, format_s) if little_endian is True: format_s = '<' + format_s else: format_s = '>' + format_s size = struct.calcsize(format_s) res = struct.unpack_from(format_s, buf.read(size)) if count is not None: return res else: return res[0]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_permissions_mode_from_octal(file_path, code): """ Set permissions for a file or directory. :param file_path: Path to a file or directory :param code: Permission code in absolute notation (octal) :return: """
def set_permissions_mode_from_octal(file_path, code):
    """chmod `file_path` using a three-digit absolute (octal) code.

    :param file_path: Path to a file or directory
    :param code: Permission code in absolute notation
    """
    # NOTE(review): when str(code) is longer than 3, `code[-3:]` assumes
    # `code` itself is sliceable (a string); an int would raise -- confirm.
    digits = str(code[-3:]) if len(str(code)) > 3 else str(code)
    user, group, other = (int(d) for d in digits)
    mode = (get_permissions_mode(user, 'user')
            & get_permissions_mode(group, 'group')
            & get_permissions_mode(other, 'other'))
    os.chmod(file_path, mode)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_permissions_mode(permission_octal, name): """Retrieve a user name group permissions bitwise code."""
def get_permissions_mode(permission_octal, name):
    """Return the bitwise mode mask for one octal digit (0-7) of the
    'user', 'group' or 'other' permission class."""
    read = PERMISSIONS[name]['read']
    write = PERMISSIONS[name]['write']
    execute = PERMISSIONS[name]['execute']
    table = {
        4: read & ~write & ~execute,   # read
        2: ~read & write & ~execute,   # write
        1: ~read & ~write & execute,   # execute
        6: read & write & ~execute,    # read & write
        5: read & ~write & execute,    # read & execute
        3: ~read & write & execute,    # write & execute
        7: read & write & execute,     # read, write & execute
    }
    # Anything else (including 0) clears all three bits.
    return table.get(permission_octal, ~read & ~write & ~execute)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def revoke_access(self): """Revoke all access to this path."""
def revoke_access(self):
    """Revoke all access to this path."""
    # NOTE(review): despite the name, this sets the mode to the sum of
    # the user/group/other *execute* bits rather than 0 -- confirm intent.
    mode = (PERMISSIONS['user']['execute']
            + PERMISSIONS['group']['execute']
            + PERMISSIONS['other']['execute'])
    os.chmod(self.file_path, mode)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def module_functions(modulestr): """ Return ordered dictionary of all functions declared in module """
def module_functions(modulestr):
    """Return an OrderedDict (sorted by name) of all functions declared
    in the named module."""
    members = inspect.getmembers(import_module(modulestr),
                                 inspect.isfunction)
    return OrderedDict(sorted(dict(members).items(), key=lambda kv: kv[0]))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def flash_errors(form, category='warning'): """ Flash all form error messages """
def flash_errors(form, category='warning'):
    """Flash every validation error on `form`, prefixed by its field label."""
    for field, errors in form.errors.items():
        for error in errors:
            flash('{0} - {1}'.format(getattr(form, field).label.text, error),
                  category)
def render_css(self, fn=None, text=None, margin='', indent='\t'):
    """Output CSS using the Sass processor.

    :param fn: output filename; defaults to ``self.fn`` with a ``.css``
        extension.
    :param text: SCSS source text; defaults to ``self.render_styles()``.
    :param margin: unused here -- kept for interface compatibility.
    :param indent: unused here -- kept for interface compatibility.
    :returns: a ``CSS`` object wrapping the compiled stylesheet.
    """
    fn = fn or os.path.splitext(self.fn)[0]+'.css'
    if not os.path.exists(os.path.dirname(fn)):
        os.makedirs(os.path.dirname(fn))
    curdir = os.path.abspath(os.curdir)
    os.chdir(os.path.dirname(fn))   # needed in order for scss to relative @import
    text = text or self.render_styles()
    if text != '':
        text = sass.compile(string=text)
    os.chdir(curdir)                # restore the working directory changed above
    return CSS(fn=fn, text=text)
def get(self, field):
    """ Returns the value of a user field.

    :param str field: The name of the user field.
    :returns: str -- the value
    """
    top_level_fields = ('username', 'uuid', 'app_data')
    if field in top_level_fields:
        return self.data[field]
    # Anything else lives inside the nested app_data mapping.
    return self.data.get('app_data', {})[field]
def auto_update(cls, function):
    """ This class method could be used as decorator on subclasses, it
    ensures update method is called after function execution. """
    def wrapper(self, *args, **kwargs):
        # Run the wrapped function first, then refresh state via update().
        result = function(self, *args, **kwargs)
        self.update()
        return result
    return wrapper
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _validate_key(self, key): """Returns a boolean indicating if the attribute name is valid or not"""
return not any([key.startswith(i) for i in self.EXCEPTIONS])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _extract_html_hex(string): """Get the first 3 or 6 hex digits in the string"""
try: hex_string = string and _hex_regexp().search(string).group(0) or '' except AttributeError: return None if len(hex_string) == 3: hex_string = hex_string[0] * 2 + hex_string[1] * 2 + hex_string[2] * 2 return hex_string
def name_to_int(name):
    """Get a number for that colour name

    if not a name, then not a number
    """
    if not name:
        return float('nan')
    lower = name.lower()
    cga_names = {s: i for i, s in enumerate(colour_names.cga())}
    # BUG FIX: the original used ``cga_names.get(lower) or ...`` which
    # treats index 0 (the first CGA colour) as a miss because 0 is falsy.
    if lower in cga_names:
        return cga_names[lower]
    return html_to_small_ansi(lower)
def add_rule(self, rule_class, target_class=_Nothing):
    """Adds an authorization rule.

    :param rule_class: a class of authorization rule.
    :param target_class: (optional) a class or an iterable with classes to
                         associate the rule with.
    """
    # Normalize the single-class case to a one-element list.
    targets = target_class if isinstance(target_class, Iterable) else [target_class]
    for cls in targets:
        self._rules[cls] = rule_class
def check(self, user, permission, obj=_nothing):
    """Raises AuthorizationError when a user has no permission.

    :param user: a user.
    :param permission: permission to check.
    :param obj: (optional) an object to check permission for.
    """
    allowed = self.allows(user, permission, obj)
    if allowed:
        return
    raise AuthorizationError(
        'Can\'t {} object of class {}'.format(
            permission, type(obj)))
def allows(self, user, permission, obj=_nothing):
    """Checks that a user has permission. Returns True or False.

    :param user: a user.
    :param permission: permission to check.
    :param obj: (optional) an object to check permission for.
    """
    rule = self._get_rule(obj)
    if isinstance(permission, basestring):
        # Single permission name.
        return self._use_rule(rule, user, permission, obj)
    # A collection of permissions: every one must be granted.
    return all(
        self._use_rule(rule, user, perm, obj)
        for perm in permission
    )
def get_permissions(self, user, obj=_nothing):
    """Returns permissions of a user.

    :param user: a user.
    :param obj: (optional) an object to get permissions for.
    """
    rule = self._get_rule(obj)
    granted = set()
    # Rule classes expose candidate permissions as ``can_<name>`` members.
    for attr in dir(rule):
        if not attr.startswith('can_'):
            continue
        permission = attr[len('can_'):]
        if self.allows(user, permission, obj):
            granted.add(permission)
    return granted
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def log_debug(func, *args, **kwargs): ''' Wrap call of provided function with debug log statements. ''' logging.debug('Starting "%s" in thread %s...', func.__name__, current_thread()) results = func(*args, **kwargs) logging.debug('Successfully finished "%s" in thread %s.', func.__name__, current_thread()) return results
def std_datetimestr(self, datetimestr):
    """Reformat a datetime string to standard format. """
    parsed = self.str2datetime(datetimestr)
    return parsed.strftime(self.std_datetimeformat)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _freq_parser(self, freq): """ day, hour, min, sec, """
freq = freq.lower().strip() try: if "day" in freq: freq = freq.replace("day", "") return timedelta(days=int(freq)) elif "hour" in freq: freq = freq.replace("hour", "") return timedelta(hours=int(freq)) elif "min" in freq: freq = freq.replace("min", "") return timedelta(minutes=int(freq)) elif "sec" in freq: freq = freq.replace("sec", "") return timedelta(seconds=int(freq)) else: raise Exception("%s is invalid format. use day, hour, min, sec." % freq) except: raise Exception("%s is invalid format. use day, hour, min, sec." % freq)
def unjoin(self):
    """ Leave the local audience

    :rtype: None
    :raises SensorJoinException: Failed to leave
    """
    self.debug("()")
    # Only send the unjoin packet if we actually joined earlier.
    if self._joined.is_set():
        packet = APPUnjoinMessage(device_id=Id.NOT_SET)
        self._send_packet(self._server_ip, self._server_port, packet)
        self._joined.clear()
        self.info("Left the audience")
def person_update(self, people):
    """ Update the status of people

    :param people: All people of this sensor
    :type people: list[paps.person.Person]
    :rtype: None
    :raises SensorUpdateException: Failed to update
    """
    packet = APPUpdateMessage(device_id=Id.NOT_SET, people=people)
    # Fire-and-forget: updates are frequent, so no acknowledgement is
    # requested from the server.
    self._send_packet(
        self._server_ip, self._server_port,
        packet, acknowledge_packet=False
    )
def _do_config_packet(self, packet, ip, port):
    """ Apply config to this instance

    :param packet: Packet with config
    :type packet: paps.si.app.message.APPMessage
    :param ip: Ip of server
    :type ip: str
    :param port: Port of server
    :type port: int
    :rtype: None
    """
    self.debug("()")
    if packet.header.device_id != Id.SERVER:
        # Only allow config packets from server
        self.warning("Config packets only allowed from server")
        return
    try:
        config = packet.payload
        self.debug(u"{}".format(config))
        if not isinstance(config, dict):
            self.error("Wrong payload type")
            raise RuntimeError("Wrong type")
        # Fall back to the packet's origin when the config does not name
        # the server explicitly.
        config.setdefault("server_ip", ip)
        config.setdefault("server_port", port)
        self.config(config)
        # A successful configuration counts as having joined the audience.
        self._joined.set()
    except:
        # NOTE(review): bare except -- any failure (including in
        # self.config()) is logged and swallowed; appears to be deliberate
        # best-effort handling of faulty packets.
        self.exception("Failed to configure")
        self.error(u"Faulty packet {}".format(format_data(packet.payload)))
        return
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def goodnode(self, nodelist): ''' Goes through the provided list and returns the first server node that does not return an error. ''' l = len(nodelist) for n in range(self.current_node(l), l): self.msg.message("Trying node " + str(n) + ": " + nodelist[n]) try: req = urllib.request.Request(url=nodelist[n]) urllib.request.urlopen(req) except HTTPError as e: self.msg.error_message(e) self.currentnode = int(self.currentnode) + 1 else: self.msg.message("Using " + nodelist[n]) return nodelist[n]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def permlink(self, identifier): ''' Deconstructs an identifier into an account name and permlink ''' temp = identifier.split("@") temp2 = temp[1].split("/") return [temp2[0], temp2[1]]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def scale_vote(self, value): ''' Scales a vote value between 1 and 100 to 150 to 10000 as required by Steem-Python for certain method calls ''' value = int(value) * 100 if value < 100: value = 100 if value > 10000: value = 10000 return value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def calc_regenerated(self, lastvotetime): ''' Uses math formula to calculate the amount of steem power that would have been regenerated given a certain datetime object ''' delta = datetime.utcnow() - datetime.strptime(lastvotetime,'%Y-%m-%dT%H:%M:%S') td = delta.days ts = delta.seconds tt = (td * 86400) + ts return tt * 10000 / 86400 / 5
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def retry(self, msg, e, retry_num, waittime): ''' Creates the retry message and waits the given default time when a method call fails or a server does not respond appropriately. ''' self.msg.error_message(msg) self.msg.error_message(e) self.msg.error_message("Attempt number " + str(retry_num) + ". Retrying in " + str(waittime) + " seconds.") time.sleep(waittime)
def send_message(self, msg_id, msg_data):
    """msg_data can be None

    Serializes the message and appends it to the outgoing buffer as a
    length-prefixed frame.

    :param msg_id: identifier understood by the class message packer.
    :param msg_data: payload for the message; may be None.
    :returns: True when the message was buffered; False when the
        connection is in an error state or scheduled to close.
    :raises MessageSizeOverflowError: when the framed message exceeds
        the class-level ``max_message_size``.
    """
    if self._error.has_error() or self._force_close_time > 0:
        return False
    byte_msg = self.message_packer.pack(msg_id, msg_data)
    # Total frame length = fixed-size length prefix + packed payload.
    length = struct.calcsize(self.__class__.message_packer.size_fmt) + len(byte_msg)
    if length > self.__class__.max_message_size:
        raise MessageSizeOverflowError(msg_id, length, self.__class__.max_message_size)
    # Write the length prefix first, then the payload bytes.
    self._out_buffer += struct.pack(self.__class__.message_packer.size_fmt, length)
    self._out_buffer += byte_msg
    return True
def abspath(relpath, root=None):
    """Returns an absolute path based on the given root and relative path."""
    base = root if root else cwd()
    # When a file is given as the root, anchor at its directory instead.
    if op.isfile(base):
        base = op.dirname(base)
    return op.abspath(op.join(base, relpath))
def makedirs(path, ignore_extsep=False):
    """Makes all directories required for given path; returns true if
    successful and false otherwise.

    If the final path segment contains an extension separator (e.g.
    ``"bar/baz.txt"``), it is treated as a filename and only the leading
    directories are created, unless ``ignore_extsep`` is true.

    **Examples**:
    ::
        auxly.filesys.makedirs("bar/baz")
    """
    if not ignore_extsep and op.basename(path).find(os.extsep) > -1:
        # Looks like a filename; create only the parent directories.
        path = op.dirname(path)
    try:
        os.makedirs(path)
    except OSError:
        # Narrowed from a bare ``except``: the directory may already exist
        # or be uncreatable -- either way report failure as before.
        return False
    return True
def delete(path, regex=None, recurse=False, test=False):
    """Deletes the file or directory at `path`. If `path` is a directory and
    `regex` is provided, matching files will be deleted; `recurse` controls
    whether subdirectories are recursed. A list of deleted items is returned.
    If `test` is true, nothing will be deleted and a list of items that would
    have been deleted is returned.
    """
    deleted = []
    if op.isfile(path):
        if not test:
            os.remove(path)
        else:
            return [path]
        # Report the file only if it is really gone.
        return [] if op.exists(path) else [path]
    elif op.isdir(path):
        if regex:
            # Selective delete: walk and remove only matching files.
            for r,ds,fs in os.walk(path):
                for i in fs:
                    if _is_match(regex, i):
                        deleted += delete(op.join(r,i), test=test)
                if not recurse:
                    break  # top-level directory only
        else:
            # No regex: remove the whole tree.
            if not test:
                shutil.rmtree(path)
            else:
                return [path]
            return [] if op.exists(path) else [path]
    return deleted
def walkfiles(startdir, regex=None, recurse=True):
    """Yields the absolute paths of files found within the given start
    directory. Can optionally filter paths using a regex pattern."""
    for dirpath, _, filenames in os.walk(startdir):
        # Without recursion, stop as soon as we leave the start directory.
        if not recurse and dirpath != startdir:
            return
        for name in filenames:
            fullpath = op.abspath(op.join(dirpath, name))
            if regex and not _is_match(regex, fullpath):
                continue
            if op.isfile(fullpath):
                yield fullpath
def countfiles(path, recurse=False):
    """Returns the number of files under the given directory path."""
    if not op.isdir(path):
        return 0
    total = 0
    for _, _, filenames in os.walk(path):
        total += len(filenames)
        if not recurse:
            break  # count only the top level
    return total
def isempty(path):
    """Returns True if the given file or directory path is empty.

    **Examples**:
    ::
    """
    if op.isfile(path):
        return os.stat(path).st_size == 0
    if op.isdir(path):
        return not os.listdir(path)
    # Neither a file nor a directory: implicitly None.
def getsize(path, recurse=False):
    """Returns the size of the file or directory in bytes."""
    if not op.isdir(path):
        return op.getsize(path)
    total = 0
    for dirpath, _, filenames in os.walk(path):
        total += sum(getsize(op.join(dirpath, f)) for f in filenames)
        if not recurse:
            break  # top-level files only
    return total
def copy(srcpath, dstpath, overwrite=True):
    """Copies the file or directory at `srcpath` to `dstpath`. Returns True if
    successful, False otherwise."""
    # Handle bail conditions.
    if not op.exists(srcpath):
        return False
    if not overwrite:
        if op.isfile(dstpath):
            return False
        if op.isdir(dstpath):
            # A same-named entry inside the destination also blocks the copy.
            chkpath = op.join(dstpath, op.basename(srcpath))
            if op.isdir(chkpath) or op.isfile(chkpath):
                return False
    srcpath = op.abspath(srcpath)
    dstpath = op.abspath(dstpath)
    # Handle copying.
    if op.isdir(srcpath):
        dstdir = dstpath
        if op.isfile(dstpath):
            dstdir = op.dirname(dstpath)
        elif op.isdir(dstpath):
            # Make sure srcdir is copied INTO dstdir.
            dstdir = op.join(dstpath, op.basename(srcpath))
        makedirs(dstdir)
        # Recreate the directory tree, copying file-by-file so a failure
        # anywhere aborts with False.
        for r,ds,fs in os.walk(srcpath):
            basedir = r.replace(srcpath, "").rstrip(os.sep).strip(os.sep)
            curdir = op.join(dstdir, basedir)
            makedirs(curdir)
            for f in fs:
                if not copy(op.join(r,f), op.join(curdir, f), overwrite=overwrite):
                    return False
    elif op.isfile(srcpath):
        makedirs(dstpath)
        shutil.copy2(srcpath, dstpath)
    return op.exists(dstpath)
def move(srcpath, dstpath, overwrite=True):
    """Moves the file or directory at `srcpath` to `dstpath`. Returns True if
    successful, False otherwise."""
    # TODO: (JRR@201612230924) Consider adding smarter checks to prevent files ending up with directory names; e.g. if dstpath directory does not exist.
    if not op.exists(srcpath):
        return False
    if srcpath == dstpath:
        return True
    # Work out where the moved entry will end up (verpath) and how to
    # verify it afterwards (verfunc).
    if op.isfile(srcpath) and op.isdir(dstpath):
        verfunc = op.isfile
        verpath = op.join(dstpath, op.basename(srcpath))
    elif op.isfile(srcpath):
        verfunc = op.isfile
        verpath = dstpath
        makedirs(dstpath)
    elif op.isdir(srcpath) and op.isdir(dstpath):
        verfunc = op.isdir
        verpath = op.join(dstpath, op.basename(srcpath))
    elif op.isdir(srcpath):
        verfunc = op.isdir
        verpath = dstpath
    else:
        return False
    if op.isfile(verpath):
        if not overwrite:
            return False
        else:
            # On Windows, filename case is ignored so the following check will
            # prevent unintentionally deleting the srcpath before moving.
            if "nt" == os.name and srcpath.lower() == dstpath.lower():
                pass
            elif not delete(verpath):
                return False
    try:
        shutil.move(srcpath, dstpath)
    except:
        # NOTE(review): broad except -- any shutil failure reports False.
        return False
    return verfunc(verpath)
def dirpath(self):
    """Returns a Path object for the directory associated with this
    object."""
    # Files map to their containing directory; directories map to themselves.
    target = op.dirname(self._fspath) if self.isfile() else self
    return Path(target)
def read(self, encoding=None):
    """Reads from the file and returns result as a string."""
    enc = encoding if encoding else ENCODING
    try:
        with codecs.open(self.path, encoding=enc) as handle:
            return handle.read()
    except:
        # Best-effort contract: any read failure yields None.
        return None
def readlines(self, encoding=None):
    """Reads from the file and returns result as a list of lines."""
    try:
        encoding = encoding or ENCODING
        # BUG FIX: the computed encoding was previously ignored --
        # ``encoding=None`` was passed to codecs.open().
        with codecs.open(self.path, encoding=encoding) as fi:
            return fi.readlines()
    except:
        # Best-effort contract preserved: any failure yields [].
        return []
def _write(self, content, mode, encoding=None, linesep=False):
    """Handles file writes.

    :param content: data to write; coerced to ``str`` for text modes.
    :param mode: file mode (e.g. ``'w'``, ``'a'``, ``'ab'``).
    :param encoding: text encoding; defaults to the module-level ENCODING.
    :param linesep: when true, append ``os.linesep`` after the content.
    :returns: True on success, False on any failure.
    """
    # Ensure the parent directories exist before opening the file.
    makedirs(self.path)
    try:
        encoding = encoding or ENCODING
        if "b" not in mode:
            # Text mode: best-effort coercion to str.
            try:
                content = str(content)
            except:
                pass
        if linesep:
            content += os.linesep
        with codecs.open(self.path, mode, encoding=encoding) as fo:
            fo.write(content)
        return True
    except:
        # NOTE(review): bare except -- all write failures collapse to
        # False by design (callers check the boolean).
        return False
def append(self, content, binary=False, encoding=None):
    """Appends the given content to the file. Existing content is preserved.
    Returns true if successful, false otherwise."""
    if binary:
        write_mode = "ab"
    else:
        write_mode = "a"
    return self._write(content, write_mode, encoding=encoding, linesep=False)
def handle_raw_output(ctx, data):
    """If a raw output format is set, dump data and exit"""
    # Each branch prints the serialized data and terminates the process;
    # any other format falls through silently.
    if ctx.obj['format'] == 'json':
        print(json_dump(data))
        exit(0)
    if ctx.obj['format'] == 'yaml':
        # end='' avoids a double newline -- presumably yaml_dump already
        # ends with one; confirm.
        print(yaml_dump(data), end='')
        exit(0)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_root_files(self, directory): """Retrieve files within the root directory"""
if len(self.filepaths) is 0: if self.filters: root_files = [(directory, f) for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f)) and self.filters.validate(f) and self.filters.get_level(f) == self.filters.max_level] else: root_files = [(directory, f) for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))] self.add_path(root_files)
def sprinter(self):
    """ Called when parallelize is True.  This function will generate the file names in a directory tree
    by adding directories to a Queue and continuously exploring directories in the Queue until Queue is
    emptied.  Significantly faster than crawler method for larger directory trees. """
    self._printer('Multiprocess Walk')
    # Loop through directories in case there is more than one (1)
    for directory in self.directory:
        # Add file within root directory if filepaths is empty
        self._get_root_files(directory)
        # acquire the list of paths
        first_level_dirs = next(os.walk(directory))[1]
        # Seed the work queue with each first-level subdirectory.
        for path in first_level_dirs:
            self.unsearched.put((directory, path))
    self._printer('Pool Processing STARTED')
    pool = Pool(self.pool_size)
    pool.map_async(self.parallel_worker, range(self.pool_size))
    pool.close()
    # Block until the workers have drained the queue.
    self.unsearched.join()
    self._printer('Pool Processing ENDED')
    return self.filepaths
def empty_bucket(outputs_file):
    """Empty the bucket associated to the test deployment.

    :param outputs_file: YAML file containing a ``storage.BucketName`` entry.
    """
    with open(outputs_file, "r") as f:
        # FIX: yaml.load() without an explicit Loader is deprecated and
        # unsafe on untrusted input; safe_load suffices for plain outputs.
        outputs = yaml.safe_load(f)
    bucket = outputs["storage"]["BucketName"]
    print("Emptying bucket {} ...".format(bucket))
    # NOTE(review): the bucket name is interpolated into a shell command;
    # acceptable for trusted deployment outputs, but subprocess.run with
    # an argument list would be more robust.
    os.system("aws s3 rm s3://{} --recursive".format(bucket))
    print("Bucket {} has been emptied".format(bucket))
def fmt_title(text):
    """Article title formatter.

    Except functional words, first letter uppercase. Example:
    "Google Killing Annoying Browsing Feature"

    **中文文档**

    文章标题的格式, 除了虚词, 每个英文单词的第一个字母大写。
    """
    text = text.strip()
    if not text:
        return text
    # Lowercase everything, drop redundant spaces, then capitalize
    # every word that is not a function word.
    words = [w for w in text.lower().split(" ") if w]
    styled = [
        w if w in _function_words else w[0].upper() + w[1:]
        for w in words
    ]
    # The leading word is always capitalized, even a function word.
    styled[0] = styled[0][0].upper() + styled[0][1:]
    return " ".join(styled)
def fmt_sentence(text):
    """English sentence formatter. First letter is always upper case.
    Example: "Do you want to build a snow man?"

    **中文文档**

    句子格式。每句话的第一个单词第一个字母大写。
    """
    stripped = text.strip()
    if not stripped:
        return stripped
    # Lowercase, collapse repeated spaces, capitalize the first word.
    words = [w for w in stripped.lower().split(" ") if w]
    words[0] = words[0][0].upper() + words[0][1:]
    return " ".join(words)
def fmt_filename(text):
    """File name formatter.

    Remove all file system forbidden char from text.

    **中文文档**

    移除文件系统中不允许的字符。
    """
    # One translate() pass deletes every forbidden character at once.
    return text.translate({ord(c): None for c in '\\/:*?|<>"'})
def _init_request_hooks(self):
    """ initialize pre request hooks

    For each HTTP method in ``_METHODS``, subscribe the matching
    ``pre_<method>`` and ``post_<method>`` handlers from :mod:`hooks`
    to the application's ``on_pre_<method>`` / ``on_post_<method>`` events.
    """
    for method_type in ('pre', 'post'):
        for method in _METHODS:
            event = getattr(self.app, 'on_' + method_type + '_' + method)
            event_hook = getattr(hooks, method_type + '_' + method)
            # NOTE(review): relies on the event object's __iadd__ mutating
            # it in place (the rebound local is discarded) -- confirm the
            # event framework's subscription semantics.
            event += event_hook
def from_json(cls, json_obj):
    """Build a MetricResponse from JSON.

    :param json_obj: JSON data representing a Cube Metric.
    :type json_obj: `String` or `json`
    :throws: `InvalidMetricError` when any of {type,time,data}
        fields are not present in json_obj.
    """
    # Accept either an already-parsed mapping or a JSON string.
    if isinstance(json_obj, str):
        json_obj = json.loads(json_obj)
    time = None
    value = None
    if cls.TIME_FIELD_NAME in json_obj:
        time = json_obj[cls.TIME_FIELD_NAME]
    else:
        raise InvalidMetricError("{field} must be present!".format(
            field=cls.TIME_FIELD_NAME))
    # NOTE(review): the docstring promises an error for missing fields,
    # but a missing value field silently defaults to None -- confirm intent.
    if cls.VALUE_FIELD_NAME in json_obj:
        value = json_obj[cls.VALUE_FIELD_NAME]
    return cls(time, value)
def make_parser():
    """
    Creates an argument parser configured with options to run a bot from the
    command line.

    :return: configured argument parser
    :rtype: :class:`argparse.ArgumentParser`
    """
    parser = ArgumentParser(
        description='Start an IRC bot instance from the command line.',
        formatter_class=ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        '-v', '--version', action='version',
        version='{0} v{1}'.format(NAME, VERSION)
    )
    # Connection options.
    parser.add_argument(
        '-s', '--server', metavar='HOST', required=True,
        help='the host to connect to'
    )
    parser.add_argument(
        '-p', '--port', metavar='PORT', type=int, default=6667,
        help='the port the server is listening on'
    )
    # Identity options.
    parser.add_argument(
        '-n', '--nick', metavar='NAME', required=True,
        help="the bot's nickname"
    )
    parser.add_argument(
        '-N', '--name', metavar='NAME', default=NAME,
        help="the bot's real name"
    )
    parser.add_argument(
        '-c', '--channels', metavar='CHAN', nargs='*',
        help='join this channel upon connection'
    )
    parser.add_argument(
        '-l', '--log', metavar='LEVEL', default='INFO',
        help='minimal level for displayed logging messages'
    )
    parser.add_argument(
        '-S', '--ssl', action='store_true',
        help='connect to the server using SSL'
    )
    return parser
def make_bot():
    """
    Creates a new bot instance ready to be launched.
    """
    args = make_parser().parse_args()
    # Translate parsed CLI arguments into the bot's settings mapping.
    settings = dict(
        server=args.server,
        port=args.port,
        ssl=args.ssl,
        nick=args.nick,
        realname=args.name,
        channels=args.channels or [],
        loglevel=args.log,
    )
    return IRC(settings)
def main(bot):
    """
    Entry point for the command line launcher.

    :param bot: the IRC bot to run
    :type bot: :class:`fatbotslim.irc.bot.IRC`
    """
    # Run the bot in a greenlet so Ctrl-C can interrupt the join below.
    greenlet = spawn(bot.run)
    try:
        greenlet.join()
    except KeyboardInterrupt:
        print ''  # cosmetics matters
        log.info("Killed by user, disconnecting...")
        bot.disconnect()
    finally:
        # Always tear down the greenlet, even after a clean join.
        greenlet.kill()
def find_first(self, attr_name, resources, extra_prefix=''):
    """
    Returns the boto object for the first resource in ``resources`` that
    belongs to this stack.  Uses the attribute specified by ``attr_name``
    to match the stack name.

    E.g. An RDS instance for a stack named ``foo`` might be named
    ``foo-mydb-fis8932ifs``.  This call::

        find_first('id', conn.get_all_dbinstances())

    would return the boto.rds.dbinstance.DBInstance object whose ``id`` is
    ``foo-mydb-fis8932ifs``.

    Returns None if a matching resource is not found.

    If specified, ``extra_prefix`` is appended to the stack name prefix
    before matching.
    """
    prefix = self.name + '-'
    if extra_prefix:
        prefix += extra_prefix + '-'
    for resource in resources:
        if getattr(resource, attr_name).startswith(prefix):
            return resource
def add_lb_secgroup(self, lb_name, hosts, port):
    """
    Used by the load balancer deployer to register a hostname for a load
    balancer, in order that security group rules can be applied later.

    This is multiprocess-safe, but since keys are accessed only be a
    single load balancer deployer there should be no conflicts.

    :param str lb_name:  The load balancer name (as per the config file)
    :param :class:`list` hosts:  The load balancer host[s], once known
    :param port: The backend port that the LB will connect on
    """
    entry = {'hosts': hosts, 'port': port}
    self.lb_sec_groups.merge(lb_name, entry)
def add_host(self, host, group_names=None, host_vars=None):
    """
    Used by deployers to add hosts to the inventory.

    :param str host:  The host identifier (e.g. hostname, IP address) to
        use in the inventory.
    :param list group_names:  A list of group names to which the host
        belongs.  **Note:  This list will be sorted in-place.**
    :param dict host_vars:  A mapping object of host *variables*.  This can
        be a nested structure, and is used as the source of all the
        variables provided to the ansible playbooks.  **Note:  Additional
        key-value pairs (e.g. dynamic ansible values like
        ``inventory_hostname``) will be inserted into this mapping
        object.**
    """
    gnames = group_names if group_names else []
    hvars = host_vars if host_vars else {}
    # Add in ansible's magic variables.  Assign them here because this is
    # just about the earliest point we can calculate them before anything
    # ansible-related (e.g. Stack.configure(), ``bang --host``) executes.
    gnames.sort()
    hvars[A.server.GROUP_NAMES] = gnames
    hvars[A.server.INV_NAME] = host
    hvars[A.server.INV_NAME_SHORT] = host.split('.')[0]
    self.groups_and_vars.merge(host, hvars)
    # BUG FIX: iterate the normalized ``gnames`` -- the original iterated
    # ``group_names`` directly, raising TypeError when it was None.
    for gname in gnames:
        self.groups_and_vars.append(gname, host)
def describe(self):
    """Iterates through the deployers but doesn't run anything"""
    for stage, corunners in self.get_deployers():
        print self.name, "STAGE ", stage
        for d in corunners:
            # Show each deployer class and the names of its phase methods.
            print d.__class__.__name__, ",".join(
                    [p[1].__name__ for p in d.phases]
                    )
def configure(self):
    """
    Executes the ansible playbooks that configure the servers in the
    stack.

    Assumes that the root playbook directory is ``./playbooks/`` relative
    to the stack configuration file.  Also sets the ansible *module_path*
    to be ``./common_modules/`` relative to the stack configuration file.

    E.g. If the stack configuration file is::

        $HOME/bang-stacks/my_web_service.yml

    then the root playbook directory is::

        $HOME/bang-stacks/playbooks/

    and the ansible module path is::

        $HOME/bang-stacks/common_modules/
    """
    cfg = self.config
    # Anchor playbook and module paths at the stack config file's directory.
    bang_config_dir = os.path.abspath(
            os.path.dirname(cfg.filepath)
            )
    playbook_dir = os.path.join(bang_config_dir, 'playbooks')
    creds = cfg.get(A.DEPLOYER_CREDS, {})
    pb_kwargs = {
            # this allows connection reuse using "ControlPersist":
            'transport': 'ssh',
            'module_path': os.path.join(bang_config_dir, 'common_modules'),
            'remote_pass': creds.get(A.creds.SSH_PASS),
            # TODO: determine forks
            # 'forks': options.forks,
            }
    # only add the 'remote_user' kwarg if it's in the config, otherwise use
    # ansible's default behaviour.
    ssh_user = creds.get(A.creds.SSH_USER)
    if ssh_user:
        pb_kwargs['remote_user'] = ssh_user
    ansible_cfg = cfg.get(A.ANSIBLE, {})
    ansible_verbosity = ansible_cfg.get(A.ansible.VERBOSITY, 1)
    ansible.utils.VERBOSITY = ansible_verbosity
    # Run each configured playbook in order, failing fast on errors.
    for playbook in cfg.get(A.PLAYBOOKS, []):
        playbook_path = os.path.join(playbook_dir, playbook)
        # gratuitously stolen from main() in ``ansible-playbook``
        stats = callbacks.AggregateStats()
        playbook_cb = callbacks.PlaybookCallbacks(
                verbose=ansible_verbosity
                )
        runner_cb = callbacks.PlaybookRunnerCallbacks(
                stats,
                verbose=ansible_verbosity
                )
        vault_password = ansible_cfg.get(A.ansible.VAULT_PASS)
        extra_kwargs = {
                'playbook': playbook_path,
                # TODO: do we really need new instances of the following
                # for each playbook?
                'callbacks': playbook_cb,
                'runner_callbacks': runner_cb,
                'stats': stats,
                # ``host_list`` is used to generate the inventory, but
                # don't worry, we override the inventory later
                'host_list': [],
                'vault_password': vault_password,
                }
        pb_kwargs.update(extra_kwargs)
        pb = PlayBook(**pb_kwargs)
        # Build the inventory from deep copies so playbook runs cannot
        # mutate the stack's shared group/var state.
        inventory = BangsibleInventory(
                copy.deepcopy(self.groups_and_vars.lists),
                copy.deepcopy(self.groups_and_vars.dicts),
                vault_password=vault_password
                )
        inventory.set_playbook_basedir(playbook_dir)
        pb.inventory = inventory
        pb.run()
        hosts = sorted(pb.stats.processed.keys())
        playbook_cb.on_stats(pb.stats)
        # Summarize per-host results; any failure or unreachable host
        # aborts the whole configuration.
        failed = False
        for h in hosts:
            hsum = pb.stats.summarize(h)
            if hsum['failures'] or hsum['unreachable']:
                failed = True
            print "%-30s : %s" % (h, hsum)
            # TODO: sort this out
            # print "%-30s : %s %s %s %s " % (
            #         hostcolor(h, hsum),
            #         colorize('ok', hsum['ok'], 'green'),
            #         colorize('changed', hsum['changed'], 'yellow'),
            #         colorize('unreachable', hsum['unreachable'], 'red'),
            #         colorize('failed', hsum['failures'], 'red'))
        if failed:
            raise BangError("Server configuration failed!")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def interpolate_color(c1, c2, factor: float) -> list: """ Linear interpolate two 3-channel colors, using channel based interpolation. """
assert(len(c1) == len(c2)) new_color = [] for i in range(len(c1)): new_color.append(int(interpolate(c1[i], c2[i], factor))) return new_color
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def human_timestamp(__timestamp: datetime.datetime) -> str: """Format a relative time. Args: __timestamp: Event to generate relative timestamp against Returns: Human readable date and time offset """
numstr = '. a two three four five six seven eight nine ten'.split() matches = [ 60 * 60 * 24 * 365, 60 * 60 * 24 * 28, 60 * 60 * 24 * 7, 60 * 60 * 24, 60 * 60, 60, 1, ] match_names = ['year', 'month', 'week', 'day', 'hour', 'minute', 'second'] if __timestamp.tzinfo is None: __timestamp = __timestamp.replace(tzinfo=datetime.timezone.utc) now = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc) delta = int((now - __timestamp).total_seconds()) for scale in matches: i = delta // scale if i: name = match_names[matches.index(scale)] break else: i = 0 # Too small if i == 0: result = 'right now' elif i == 1 and name in ('year', 'month', 'week'): result = 'last {}'.format(name) elif i == 1 and name == 'day': result = 'yesterday' elif i == 1 and name == 'hour': result = 'about an hour ago' else: result = 'about {} {}{} ago'.format(i if i > 10 else numstr[i], name, 's' if i > 1 else '') return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def configurate_app(config_file=''): """ Configures Flask app :param config_file: Absolute path to Py config file, optional :returns: App object, host and port """
# Load config app.config.from_pyfile('defaults.py') app.config.from_pyfile(config_file, silent=True) if app.config.get('MINIFY_HTML', False): app.jinja_env.add_extension('flask_utils.jinja2htmlcompress.HTMLCompress') # Setup web assets assets = Environment(app) js = Bundle('common.js', filters='jsmin', output='gen/main.%(version)s.js') css = Bundle('common.css', filters='cssmin', output='gen/main.%(version)s.css') assets.register('js_all', js) assets.register('css_all', css) # Set host and port port = app.config.get('PORT', 5000) host = app.config.get('HOST', '127.0.0.1') return app, host, port