text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def decode_response(content: bytes) -> set: """ adb response text -> device set """
def decode_response(content: bytes, encoding=None) -> set:
    """Parse an adb ``devices`` response into the set of connected device ids.

    Args:
        content: raw adb response bytes; the first 4 bytes are the hex
            length header adb prepends and are discarded.
        encoding: text encoding for the payload. Defaults to
            ``config.ENCODING`` so existing callers are unaffected
            (backward-compatible generalization).

    Returns:
        Set of device ids whose status column is exactly ``device``
        (connected and authorized); other statuses (offline,
        unauthorized, ...) are skipped.
    """
    if encoding is None:
        encoding = config.ENCODING
    text = content[4:].decode(encoding)
    # No tab and no newline means there are no device lines at all.
    if '\t' not in text and '\n' not in text:
        return set()
    connected_devices = set()
    for line in (part for part in text.split('\n') if part):
        device_id, device_status = line.split('\t')
        if device_status == 'device':
            connected_devices.add(device_id)
    return connected_devices
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(requirements_file, skip_requirements_file, pipfile, skip_pipfile): # type: (str, bool, str, bool) -> None """Update the requirements.txt file and reformat the Pipfile."""
# Resolve the Pipfile location and parse it once; the same parsed object
# feeds both the requirements.txt regeneration and the Pipfile rewrite.
pipfile_path = path.Path(pipfile)
pf = load_pipfile(pipfile_path)
if not skip_requirements_file:
    requirements_file_path = path.Path(requirements_file)
    update_requirements(requirements_file_path, pf)
if not skip_pipfile:
    # dump_pipfile rewrites the Pipfile in canonical (reformatted) form
    dump_pipfile(pipfile_path, pf)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def clean_file(self): """Analyse the uploaded file, and return the parsed lines. Returns: tuple of tuples of cells content (as text). """
def clean_file(self):
    """Analyse the uploaded file, and return the parsed lines.

    Tries every available parser in order and returns the first
    successful parse.

    Returns:
        tuple of tuples of cells content (as text).

    Raises:
        forms.ValidationError: when no parser accepts the file.
    """
    data = self.cleaned_data['file']
    available_parsers = self.get_parsers()
    for parser in available_parsers:
        try:
            return parser.parse_file(data)
        except parsers.ParserError:
            # This parser cannot read the file; try the next one.
            pass
    # BUG FIX: the original built the parser list with `", " % (...)`,
    # applying `%` to a generator instead of joining the names.
    raise forms.ValidationError(
        "No parser could read the file. Tried with parsers %s."
        % ", ".join(force_text(p) for p in available_parsers))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def clean(self): """Global cleanup."""
# Run the parent formset-level validation first.
super(LineFormSet, self).clean()
if any(self.errors):
    # Individual forms already have errors; skip cross-form uniqueness
    # checks rather than piling additional errors on top.
    return
self.clean_unique_fields()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def clean_unique_fields(self): """Ensure 'unique fields' are unique among entered data."""
def clean_unique_fields(self):
    """Ensure 'unique fields' are unique among entered data.

    Builds the tuple of unique-field values for each form and raises a
    single ValidationError listing every duplicated combination.

    Raises:
        forms.ValidationError: if two or more forms share the same
            values for all of ``self.unique_fields``.
    """
    if not self.unique_fields:
        return
    seen = set()
    duplicates = []
    for form in self.forms:
        # .get() avoids a spurious KeyError for forms whose own
        # validation failed and left cleaned_data incomplete.
        key = tuple(form.cleaned_data.get(field) for field in self.unique_fields)
        if key in seen:
            # str() so non-string values (ints, dates) join cleanly.
            duplicates.append(",".join(str(part) for part in key))
        else:
            seen.add(key)
    if duplicates:
        # FIX: join the duplicates instead of interpolating the raw list
        # repr into the message.
        raise forms.ValidationError(
            "Fields %s should be unique; found duplicates for %s" % (
                ','.join(self.unique_fields), '; '.join(duplicates)))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run(itf): """ Run optimize functions. """
def run(itf):
    """Run optimize functions.

    Reads the molecule CSV named by the user options, evaluates every
    ensemble size from 1 up to the configured ensemble_size, and writes
    the collected AUC / enrichment-factor values out as histogram CSVs.

    Args:
        itf: parsed user-interface/options object; falsy means nothing
            to do.

    Returns:
        1 when no input interface was supplied, otherwise None
        (exits with status 1 when the input file cannot be parsed).
    """
    if not itf:
        return 1
    # access user input
    options = SplitInput(itf)
    # read input
    inputpath = os.path.abspath(options.inputpath)
    print(" Reading input file ...")
    molecules = csv_interface.read_csv(inputpath, options)
    if not molecules:
        print("\n '{flag}' was unable to be parsed\n".format(flag=os.path.basename(options.inputpath)))
        sys.exit(1)
    # determine the sort order & ensemble_size
    #sort_order = classification.get_sort_order(molecules)
    sort_order = 'asc'
    ensemble_size = options.ensemble_size
    # Evaluate every ensemble size, accumulating per-ensemble AUC and EF
    # values for later histogram construction.
    auc_list = []
    ef_list = []
    for size in range(1, ensemble_size + 1):
        auc, ef = optimizor(molecules, sort_order, size, options)
        auc_list += auc
        ef_list += ef
    # FIX: context managers guarantee the histogram files are closed even
    # if a write fails (the original used open()/close() pairs).
    with open('auc_histogram.csv', 'w') as f:
        for value in auc_list:
            f.write('%f\n' % value)
    with open('ef_histogram.csv', 'w') as f:
        for value in ef_list:
            f.write('%f\n' % value)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def optimizor(molecules, sort_order, ensemble_size, options): """ Evaluate the performance of all ensembles of fixed size. """
# set variables ncpu = options.ncpu score_field = options.score_field # generate an exhaustive list of all possible ensembles ensemble_list = make_ensemble_list(molecules, score_field, ensemble_size) # set number of processors. if not ncpu: ncpu = multiprocessing.cpu_count() if ncpu > 1: print("Determining the performance of {d} ensembles using {n} processors".format(d=len(ensemble_list), n=ncpu)) if ncpu > len(ensemble_list): ncpu = len(ensemble_list) jobs = [] output_queue = multiprocessing.Queue() for ensemble_chunk in chunker(ensemble_list, ncpu): p = multiprocessing.Process(target=evaluate, args=(molecules, ensemble_chunk, sort_order, options, output_queue)) jobs.append(p) p.start() # collect results into a dictionary results = {} for i in range(len(jobs)): results.update(output_queue.get()) # stop jobs for j in jobs: j.join() else: print("Determining the performance of {d} ensembles using {n} processor".format(d=len(ensemble_list), n=ncpu)) results = evaluate(molecules, ensemble_list, sort_order, options) # peel away the best performing ensemble ensemble = screener.find_best_ensemble(results, options) # write out the best performing ensemble output.write_ensemble(list(ensemble), options) # temp 2/9/15 generate and return a list of auc values and ef at fpf = 0.001 to build up a histogram nd = max([results[x].ef.keys() for x in results.keys()][0]) n = int(round(0.001 * nd)) ef_list = [results[x].get_prop(n, 'ef') for x in results.keys()] auc_list = [results[x].get_prop('auc') for x in results.keys()] # auc_list = [[results[x].get_prop('auc'), results[x].get_prop('ensemble')] for x in results.keys()] return auc_list, ef_list
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def evaluate(molecules, ensemble_chunk, sort_order, options, output_queue=None): """ Evaluate VS performance of each ensemble in ensemble_chunk """
def evaluate(molecules, ensemble_chunk, sort_order, options, output_queue=None):
    """Evaluate VS performance of each ensemble in ensemble_chunk.

    When ``output_queue`` is supplied (multiprocessing worker mode) the
    result mapping is put on the queue; otherwise it is returned.
    """
    # {('receptor_1', ..., 'receptor_n'): ensemble storage object}
    results = {
        ensemble: calculate_performance(molecules, ensemble, sort_order, options)
        for ensemble in ensemble_chunk
    }
    if output_queue is None:
        return results
    output_queue.put(results)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def make_ensemble_list(molecules, score_field, ensemble_size): """ Construct ensemble list """
def make_ensemble_list(molecules, score_field, ensemble_size):
    """Construct the exhaustive list of candidate ensembles.

    Args:
        molecules: sequence of molecule objects; only the first one is
            inspected for the available query names (its ``scores`` keys).
        score_field: unused here; kept for interface compatibility with
            existing callers.
        ensemble_size: number of queries per ensemble (the k of
            n-choose-k).

    Returns:
        List of tuples, each holding ``ensemble_size`` query names:
        [(query1, query2), ..., (queryN-1, queryN)]
    """
    query_list = molecules[0].scores.keys()
    # n-choose-k over the query names; materialize the iterator directly
    # instead of appending in a manual loop.
    return list(itertools.combinations(query_list, ensemble_size))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def chunker(ensemble_list, ncpu): """ Generate successive chunks of ensemble_list. """
def chunker(ensemble_list, ncpu):
    """Generate successive chunks of ensemble_list, at most ``ncpu`` of them.

    Args:
        ensemble_list: list to split.
        ncpu: desired number of chunks (one per worker process).

    Yields:
        Consecutive slices of ``ensemble_list``.
    """
    if not ensemble_list:
        return
    # FIX: ceiling division. The original int(len/ncpu) was 0 whenever
    # ncpu > len(ensemble_list) (range() step of 0 -> ValueError) and its
    # truncated quotient could also emit more than ncpu chunks.
    length = -(-len(ensemble_list) // ncpu)
    for i in range(0, len(ensemble_list), length):
        yield ensemble_list[i:i + length]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fasta_verifier(entries, ambiguous=False): """Raises error if invalid FASTA format detected Args: entries (list): A list of FastaEntry instances ambiguous (bool): Permit ambiguous bases, i.e. permit non-ACGTU bases Raises: FormatError: Error when FASTA format incorrect with descriptive message Example: """
# Build the per-entry validation regex: a '>' header line followed by a
# sequence line, joined by the platform line separator.
if ambiguous:
    # IUPAC ambiguity codes (plus X) are permitted in the sequence
    regex = r'^>.+{0}[ACGTURYKMSWBDHVNX]+{0}$'.format(os.linesep)
else:
    regex = r'^>.+{0}[ACGTU]+{0}$'.format(os.linesep)
delimiter = r'{0}'.format(os.linesep)
for entry in entries:
    try:
        # entry_verifier matches each delimiter-separated part of the
        # entry against the corresponding part of the regex
        entry_verifier([entry.write()], regex, delimiter)
    except FormatError as error:
        # error.part identifies the failing segment:
        # 0 = header line, 1 = sequence line
        if error.part == 0:
            msg = 'Unknown Header Error with {0}'.format(entry.id)
            raise FormatError(message=msg)
        elif error.part == 1 and ambiguous:
            msg = '{0} contains a base not in ' \
                  '[ACGTURYKMSWBDHVNX]'.format(entry.id)
            raise FormatError(message=msg)
        elif error.part == 1 and not ambiguous:
            msg = '{0} contains a base not in ' \
                  '[ACGTU]'.format(entry.id)
            raise FormatError(message=msg)
        else:
            # any other part index is unexpected
            msg = '{0}: Unknown Error: Likely a Bug'.format(entry.id)
            raise FormatError(message=msg)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def guard(params, guardian, error_class=GuardError, message=''):
    '''A guard function - check parameters with guardian function on
    decorated function.

    :param tuple or string params: guarded function parameter/s
    :param function guardian: verifying the conditions for the selected
        parameter
    :param Exception error_class: raised class when guardian returns false
    :param string message: error message
    '''
    # normalize a single parameter name to a one-element list
    params = [params] if isinstance(params, string_types) else params
    def guard_decorate(f):
        @wraps(f)
        def _guard_decorate(*args, **kwargs):
            # _params maps the decorated function's call arguments to the
            # guarded parameter names; the guardian receives them as
            # keyword arguments
            if guardian(**_params(f, args, kwargs, params)):
                return f(*args, **kwargs)
            else:
                raise error_class(message)
        return _guard_decorate
    return guard_decorate
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def main(dimension, iterations): """ Main function for PSO optimizer example. Instantiate PSOOptimizer to optimize 30-dimensional spherical function. """
# Minimize the sphere benchmark over the box [-5.12, 5.12]^dimension,
# stopping after the given iteration budget.
optimizer = PSOOptimizer()
solution = optimizer.minimize(sphere, -5.12, 5.12, dimension,
                              max_iterations(iterations))
return solution, optimizer
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_last_commit_message(self): """ Gets the last commit message on the active branch Returns None if not in a git repo """
def get_last_commit_message(self):
    """Gets the last commit message on the active branch.

    Returns:
        The message of the tip commit of the active branch, or None when
        the current directory is not a git repository.
    """
    # Check if we are currently in a repo
    try:
        branch = self.active_branch
        return self.commit(branch).message
    except InvalidGitRepositoryError:
        # FIX: print() call instead of the Python-2-only print statement
        # (same output on both interpreter lines).
        print("Not in a git repo")
        return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_last_modified_timestamp(self): """ Looks at the files in a git root directory and grabs the last modified timestamp """
cmd = "find . -print0 | xargs -0 stat -f '%T@ %p' | sort -n | tail -1 | cut -f2- -d' '" ps = subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.STDOUT) output = ps.communicate()[0] print output
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_config_root(path=sys.argv[0]): """ Finds config root relative to the given file path """
def find_config_root(path=sys.argv[0]):
    """Finds config root relative to the given file path.

    Walks up the directory tree starting at the file's own directory and
    returns the first existing ``config`` sub-directory; returns None
    when the filesystem root is reached without finding one.
    """
    current = os.path.dirname(path)
    previous = None
    # dirname() of the root is the root itself, so the walk terminates
    # when two successive iterations see the same directory.
    while current != previous:
        candidate = os.path.join(current, 'config')
        if os.path.exists(candidate):
            return candidate
        previous, current = current, os.path.dirname(current)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def restore_default(self, index): """Set the value of the given index row to its default :param index: :type index: :returns: :rtype: :raises: """
# Look up the spec string for this row; sections have no single default,
# and a missing spec means there is nothing to restore.
spec = self.get_configspec_str(index)
if spec is None or isinstance(spec, Section):
    return
try:
    # the validator extracts the declared default from the spec string
    default = self._vld.get_default_value(spec)
    defaultstr = self._val_to_str(default)
    self.setData(index, defaultstr)
except KeyError:
    # get_default_value raises KeyError when the spec declares no default
    raise ConfigError("Missing Default Value in spec: \"%s\"" % spec)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_value(self, index): """ Return the value of the given index The index stores the section as internal pointer. The row of the index determines the key. The key is used on the section to return the value :param index: The QModelIndex :type index: QModelIndex :returns: The value for the given index """
# The internal pointer is the configobj Section; the index row selects
# which key within that section this index represents.
p = index.internalPointer()
k = self.get_key(p, index.row())
return p[k]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_configspec_str(self, index): """ Return the config spec string of the given index The index stores the section as internal pointer. The row of the index determines the key. The section stores the spec in its configspec attribute The key is used on the configspec attribute to return the spec :param index: The QModelIndex :type index: QModelIndex :returns: The spec for the given index or None """
def get_configspec_str(self, index):
    """Return the config spec string of the given index.

    The index stores the section as its internal pointer; the index row
    selects the key, which is then looked up in the section's configspec.

    :param index: The QModelIndex
    :type index: QModelIndex
    :returns: The spec for the given index or None
    """
    section = index.internalPointer()
    # guard clauses: no section, or section without a configspec
    if section is None or section.configspec is None:
        return None
    key = self.get_key(section, index.row())
    try:
        return section.configspec[key]
    except KeyError:
        # key has no entry in the spec
        return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _val_to_str(self, value): """Converts the value to a string that will be handled correctly by the confobj :param value: the value to parse :type value: something configobj supports :returns: str :rtype: str :raises: None When the value is a list, it will be converted to a string that can be parsed to the same list again. """
def _val_to_str(self, value):
    """Convert the value to a string that configobj will parse back
    correctly.

    Lists are rendered as a comma-separated run of single-quoted items
    (e.g. ``'a', 'b'``) so that editing and re-parsing the text yields
    the same list again. Quoting integers is harmless because configobj
    parses them back correctly.

    :param value: the value to parse
    :type value: something configobj supports
    :returns: str
    :rtype: str
    :raises: None
    """
    if not isinstance(value, list):
        return str(value)
    quoted = ["'%s'" % str(item) for item in value]
    return ', '.join(quoted)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_index_edited(self, index, edited): """Set whether the conf was edited or not. Edited files will be displayed with a \'*\' :param index: the index that was edited :type index: QModelIndex :param edited: if the file was edited, set edited to True, else False :type edited: bool :returns: None :rtype: None :raises: None """
# Record the edited flag for the conf at this row and notify attached
# views so the '*' marker is repainted.
self.__edited[index.row()] = edited
self.dataChanged.emit(index, index)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_edited(self, ): """Return all indices that were modified :returns: list of indices for modified confs :rtype: list of QModelIndex :raises: None """
def get_edited(self):
    """Return the configs whose edited flag is set.

    :returns: the configs (parallel to the edited flags) that were
        marked modified via set_index_edited
    :rtype: list
    :raises: None
    """
    # the flag list and config list are parallel; zip them instead of
    # indexing both by hand
    return [conf for conf, edited in zip(self.__configs, self.__edited) if edited]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def validate(self, index): """Validate the conf for the given index :param index: the index of the model to validate :type index: QModelIndex :returns: True if passed and a False/True dict representing fail/pass. The structure follows the configobj. If the configobj does not have a configspec True is returned. :rtype: True|Dict :raises: None """
c = self.__configs[index.row()]
if c.configspec is None:
    # no spec to validate against -> trivially valid
    return True
else:
    # NOTE(review): sibling code in this file uses `self._vld` for the
    # validator; confirm this class really exposes it as `self.vld`.
    return c.validate(self.vld)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def metapolicy(request, permitted, domains=None): """ Serves a cross-domain policy which can allow other policies to exist on the same domain. Note that this view, if used, must be the master policy for the domain, and so must be served from the URL ``/crossdomain.xml`` on the domain: setting metapolicy information in other policy files is forbidden by the cross-domain policy specification. **Required arguments:** ``permitted`` A string indicating the extent to which other policies are permitted. A set of constants is available in ``flashpolicies.policies``, defining acceptable values for this argument. **Optional arguments:** ``domains`` A list of domains from which to allow access. Each value may be either a domain name (e.g., ``example.com``) or a wildcard (e.g., ``*.example.com``). Due to serious potential security issues, it is strongly recommended that you not use wildcard domain values. """
def metapolicy(request, permitted, domains=None):
    """Serve the master cross-domain policy for this domain.

    Builds a policy allowing the given ``domains`` (defaulting to none),
    records the metapolicy permission level, and serves it. Must be
    served from ``/crossdomain.xml``: the spec forbids metapolicy
    information in non-master policy files.

    :param permitted: extent to which other policies are permitted; see
        the constants in ``flashpolicies.policies``.
    :param domains: optional list of domain names / wildcards to allow.
    """
    allowed = domains if domains is not None else []
    policy = policies.Policy(*allowed)
    policy.metapolicy(permitted)
    return serve(request, policy)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _run_popen(command, print_output=False): """ subprocess has the most terrible interface ever. Envoy is an option but too heavyweight for this. This is a convenience wrapper around subprocess.Popen. Also, this merges STDOUT and STDERR together, since there isn't a good way of interleaving them without threads. """
def _run_popen(command, print_output=False):
    """Convenience wrapper around subprocess.Popen.

    Runs ``command`` with STDOUT and STDERR merged (there is no good way
    of interleaving them without threads) and polls the pipe in
    non-blocking mode so output can be echoed as it arrives.

    Args:
        command: the command to execute (string or argv list).
        print_output: echo each chunk as it is read.

    Returns:
        Result(output, returncode) with the merged output.
        NOTE(review): output is accumulated into a '' str; on Python 3
        the pipe yields bytes -- confirm this module targets Python 2 or
        switch the pipe to text mode.
    """
    output = ''
    po = subprocess.Popen(
        command,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    fd = po.stdout.fileno()
    fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
    while po.poll() is None:
        readx = select.select([fd], [], [])[0]
        if readx:
            chunk = po.stdout.read()
            # FIX: a non-blocking read may yield None/'' between chunks;
            # the original crashed on `output += None`.
            if chunk:
                output += chunk
                if print_output:
                    print(chunk)
    # FIX: drain anything written between the final read and process
    # exit; the original dropped this tail output.
    tail = po.stdout.read()
    if tail:
        output += tail
        if print_output:
            print(tail)
    return Result(output, po.returncode)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def perimeter(patch, world_size=(60, 60), neighbor_func=get_rook_neighbors_toroidal): """ Count cell faces in patch that do not connect to part of patch. This preserves various square geometry features that would not be preserved by merely counting the number of cells that touch an edge. """
def perimeter(patch, world_size=(60, 60), neighbor_func=get_rook_neighbors_toroidal):
    """Count cell faces in patch that do not connect to part of patch.

    This preserves various square geometry features (holes, concavities)
    that would not be preserved by merely counting the number of cells
    that touch an edge.
    """
    cells = {tuple(c) for c in patch}
    # one exposed face per neighbor that lies outside the patch
    return sum(
        1
        for cell in cells
        for neighbor in neighbor_func(cell, world_size)
        if neighbor not in cells
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def traverse_core(core_area, world_size=(60, 60), neighbor_func=get_moore_neighbors_toroidal): """ Treat cells in core_area like a graph and traverse it to see how many connected components there are. """
# Flood-fill traversal: expand from the current cell to unvisited
# neighbors; whenever the frontier empties, any remaining unvisited cell
# seeds a new connected component.
if not core_area:
    return []
core_area = [tuple(i) for i in core_area]
curr = core_area[0]
core_area = set(core_area[1:])   # cells not yet assigned to a component
to_explore = []                  # frontier of the current component
cores = [[curr]]                 # components found so far, newest last
while core_area:
    neighbors = neighbor_func(curr, world_size)
    for n in neighbors:
        if n in core_area:
            # claim the neighbor for the current component
            core_area.remove(n)
            to_explore.append(n)
            cores[-1].append(n)
    if to_explore:
        curr = to_explore.pop()
    else:
        # current component exhausted: start a new one from an
        # arbitrary remaining cell
        curr = core_area.pop()
        cores.append([curr])
return cores
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_command_line_args(): """parse command line args"""
def parse_command_line_args():
    """Parse command line args.

    Returns:
        argparse.Namespace with ``dir`` and the required positional
        ``project_name`` (``--version`` exits via argparse).
    """
    parser = argparse.ArgumentParser(description='PipApp. {}'.format(DESCRIPTION))
    parser.add_argument(
        '-d', '--dir',
        metavar='DIR',
        help='Root directory where to create new project files and dirs. Default is current directory.'
    )
    # FIX: the short option was registered as '-v,' (trailing comma
    # typo), which made plain `-v` unrecognised.
    parser.add_argument(
        '-v', '--version',
        action='version',
        version='{} v{}'.format(PROGRAMNAME, VERSION)
    )
    parser.add_argument(
        "project_name",
        metavar='PROJECTNAME',
        help="Name of the generated Project. Has to be a valid Python identifier."
    )
    return parser.parse_args()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def requirements(collector): """Just print out the requirements"""
def requirements(collector):
    """Just print out the requirements.

    Writes one requirement per line for every imported module: to stdout
    by default, to the file named by ``dashmat.artifact`` when it is a
    string, or directly to the artifact when it is already file-like.
    """
    artifact = collector.configuration['dashmat'].artifact
    out = sys.stdout
    opened = None
    if artifact not in (None, "", NotSpecified):
        if isinstance(artifact, six.string_types):
            # FIX: keep the handle so it can be closed; the original
            # leaked the opened file object.
            opened = open(artifact, 'w')
            out = opened
        else:
            out = artifact
    try:
        for active in collector.configuration['__imported__'].values():
            for requirement in active.requirements():
                out.write("{0}\n".format(requirement))
    finally:
        if opened is not None:
            opened.close()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run_checks(collector): """Just run the checks for our modules"""
# The artifact option doubles as "which module to check"; empty/unset
# means run the checks for every active module.
artifact = collector.configuration["dashmat"].artifact
chosen = artifact
if chosen in (None, "", NotSpecified):
    chosen = None
dashmat = collector.configuration["dashmat"]
modules = collector.configuration["__active_modules__"]
config_root = collector.configuration["config_root"]
module_options = collector.configuration["modules"]
# prefer redis when configured, otherwise a local json-file datastore
datastore = JsonDataStore(os.path.join(config_root, "data.json"))
if dashmat.redis_host:
    datastore = RedisDataStore(redis.Redis(dashmat.redis_host))
scheduler = Scheduler(datastore)
for name, module in modules.items():
    if chosen is None or name == chosen:
        server = module.make_server(module_options[name].server_options)
        scheduler.register(module, server, name)
# force an immediate run of every registered check
scheduler.twitch(force=True)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def list_npm_modules(collector, no_print=False): """List the npm modules that get installed in a docker image for the react server"""
def list_npm_modules(collector, no_print=False):
    """List the npm modules that get installed in a docker image for the
    react server.

    Starts from the react server's default dependencies and merges in
    each active module's own npm deps (in sorted module order, so later
    names win on conflicts deterministically).

    :returns: dict of npm package name -> version
    """
    merged = ReactServer().default_npm_deps()
    active = collector.configuration["__active_modules__"]
    for name in sorted(active):
        merged.update(active[name].npm_deps())
    if not no_print:
        print(json.dumps(merged, indent=4, sort_keys=True))
    return merged
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def collect_dashboard_js(collector): """Generate dashboard javascript for each dashboard"""
dashmat = collector.configuration["dashmat"]
modules = collector.configuration["__active_modules__"]
compiled_static_prep = dashmat.compiled_static_prep
compiled_static_folder = dashmat.compiled_static_folder
# make sure the react server has all npm dependencies in place first
npm_deps = list_npm_modules(collector, no_print=True)
react_server = ReactServer()
react_server.prepare(npm_deps, compiled_static_folder)
for dashboard in collector.configuration["dashboards"].values():
    log.info("Generating compiled javascript for dashboard:{0}".format(dashboard.path))
    # flatten the dashboard path into a single file name; '_' is escaped
    # first so the '/' -> '_' mapping cannot collide with literal
    # underscores in the path
    filename = dashboard.path.replace("_", "__").replace("/", "_")
    location = os.path.join(compiled_static_folder, "dashboards", "{0}.js".format(filename))
    # remove any stale compiled artifact before regenerating
    if os.path.exists(location):
        os.remove(location)
    generate_dashboard_js(dashboard, react_server, compiled_static_folder, compiled_static_prep, modules)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_file(file, filename='<input>'): """This is a generator that yields all top-level S-expression nodes from a given file object."""
# Feed the input line by line; feed_line yields whatever top-level nodes
# become complete on that line.
reader = Reader(filename)
for line in file:
    yield from reader.feed_line(line)
# Finalize at EOF (e.g. report unterminated expressions).
# NOTE(review): any value finish() produces is discarded here -- confirm
# it only raises on error and never yields trailing nodes.
reader.finish()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _feed_node(self, value, loc): """A helper method called when an S-expression has been recognized. Like feed_line, this is a generator that yields newly recognized top-level expressions. If the reader is currently at the top level, simply yields the passed expression. Otherwise, it appends it to whatever is currently being parsed and yields nothing. """
node = GenericNode(value, loc)
if not self.stack:
    # at the top level: hand the completed node to the caller
    yield node
else:
    top = self.stack[-1]
    if isinstance(top, StackEntryList):
        # inside an open list: the node becomes its next element
        top.items.append(node)
    elif isinstance(top, StackEntryComment):
        # a datum comment consumes exactly one following node; the node
        # is intentionally discarded along with the comment entry
        self.stack.pop()
    else:
        assert 0
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run_until_complete(self): """Run loop until all futures are done. Schedule futures for execution and wait until all are done. Return value from future, or list of values if multiple futures had been passed to constructor or gather method. All results will be in the same order as order of futures passed to constructor. :Example: .. code-block:: python ['ultra slow', None] :return: Value from future or list of values. :rtype: None, list, Any """
try:
    # self.futures is the awaitable aggregating all scheduled futures;
    # its results come back as a list in scheduling order
    result = self.loop.run_until_complete(self.futures)
except asyncio.futures.CancelledError:
    # treat cancellation of the aggregate as "no result"
    return None
else:
    if self.ft_count == 1:
        # a single future was scheduled: unwrap the one-element list
        return result[0]
    return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_irregular_vertex(bgedge): """ This method is called only in irregular edges in current implementation, thus at least one edge will be irregular """
def get_irregular_vertex(bgedge):
    """Return the irregular endpoint of an irregular edge.

    This method is called only on irregular edges in the current
    implementation, so at least one endpoint is guaranteed irregular.

    Args:
        bgedge: edge object exposing ``is_irregular_edge`` and the
            ``vertex1``/``vertex2`` endpoints.

    Returns:
        Whichever endpoint reports ``is_irregular_vertex``.

    Raises:
        ValueError: if the edge itself is not irregular. (FIX: a
            specific exception instead of bare ``Exception``; still a
            subclass of Exception, so existing catchers keep working.)
    """
    if not bgedge.is_irregular_edge:
        raise ValueError("trying to retrieve an irregular vertex from regular edge")
    if bgedge.vertex1.is_irregular_vertex:
        return bgedge.vertex1
    return bgedge.vertex2
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def from_coordinates(cls, ra=None, dec=None, distance=None, pm_ra_cosdec=None, pm_dec=None, radial_velocity=None, obstime=2000.0*u.year, id=None, mag=None, **kwargs): ''' Iniitalize a constellation object. Parameters ---------- ra, dec, distance, pm_ra_cosdec, pm_dec, radial_velocity These must be able to initialize a SkyCoord. id : list, array Identifications for the entries. mag : list, array Magnitudes for the entries. **kwargs All arguments and keyword arguments are passed along to SkyCoord. They can be coordinates in the first place, or, for example, ra and dec with units, or any other inputs that can initialize a SkyCoord. ''' # make sure we can initialzie some coordinates # coordinates = coord.SkyCoord(ra=ra, dec=dec, distance=distance, pm_ra_cosdec=pm_ra_cosdec, pm_dec=pm_dec, radial_velocity=radial_velocity) N = len(np.atleast_1d(ra)) if id is None: id = ['{}'.format(i) for i in range(N)] if mag is None: mag = np.zeros(N) standardized = Table(data=[id, mag], names=['object-id', 'filter-mag']) for k in cls.coordinate_keys: if locals()[k] is not None: standardized[k] = locals()[k] return cls(standardized)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def to_text(self, filename=None, overwrite=True):
    '''
    Write this catalog out to a text file.

    Parameters
    ----------
    filename : str, optional
        Output path; defaults to '<catalog name>.txt'.
    overwrite : bool
        Passed through to Table.write.
    '''
    table = self.standardized
    # FIX: identity comparison with None (`is None`, not `== None`)
    if filename is None:
        filename = '{}.txt'.format(self.name)
    self.speak('saving to {}'.format(filename))
    table.write(filename, format='ascii.ecsv', overwrite=overwrite)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def plot(self, sizescale=10, color=None, alpha=0.5, label=None, edgecolor='none', **kw): ''' Plot the ra and dec of the coordinates, at a given epoch, scaled by their magnitude. (This does *not* create a new empty figure.) Parameters ---------- sizescale : (optional) float The marker size for scatter for a star at the magnitudelimit. color : (optional) any valid color The color to plot (but there is a default for this catalog.) **kw : dict Additional keywords will be passed on to plt.scatter. Returns ------- plotted : outputs from the plots ''' # calculate the sizes of the stars (logarithmic with brightness?) size = np.maximum(sizescale*(1 + self.magnitudelimit - self.magnitude), 1) # make a scatter plot of the RA + Dec scatter = plt.scatter(self.ra, self.dec, s=size, color=color or self.color, label=label or '{} ({:.1f})'.format(self.name, self.epoch), alpha=alpha, edgecolor=edgecolor, **kw) return scatter
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def animate(self, filename='constellation.mp4', epochs=[1900,2100], dt=5, dpi=300, fps=10, **kw):
    '''
    Render this constellation's finder chart as a movie, stepping the
    star positions through epochs and grabbing one frame per step.
    '''
    scatter = self.finder(**kw)
    plt.tight_layout()
    fig = plt.gcf()

    # Pick a movie writer backend based on the requested file extension.
    if '.gif' in filename:
        try:
            writer = ani.writers['pillow'](fps=fps)
        except (RuntimeError, KeyError):
            writer = ani.writers['imagemagick'](fps=fps)
        except:
            raise RuntimeError('This python seems unable to make an animated gif.')
    else:
        try:
            writer = ani.writers['ffmpeg'](fps=fps)
        except (RuntimeError, KeyError):
            raise RuntimeError('This computer seems unable to ffmpeg.')

    frames = np.arange(epochs[0], epochs[1] + dt, dt)
    with writer.saving(fig, filename, dpi or fig.get_dpi()):
        for epoch in tqdm(frames):
            # Recompute positions at this epoch and move the markers.
            coords = self.atEpoch(epoch)
            scatter.set_offsets(list(zip(coords.ra.value, coords.dec.value)))
            plt.title('{} in {:.1f}'.format(self.name, epoch))
            writer.grab_frame()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_gitdir(self): """Determine the git repository for this request"""
if self.gitlookup is None: raise tornado.web.HTTPError(500, 'no git lookup configured') gitdir = self.gitlookup(self.request) if gitdir is None: raise tornado.web.HTTPError(404, 'unable to find repository') logger.debug("Accessing git at: %s", gitdir) return gitdir
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_last_data(self, uuid, period=0, average_by=0): """ Get the data from one device for period till now. :param uuid: Id of the device :type uuid: str :param period: Number of seconds between start time of search and now :type period: integer :param average_by: amount of seconds to average data over. 0 or 300 for no average. Use 3600 (average hourly) or a multiple for long range requests (e.g. more than 1 day) :type average_by: integer :returns: list of datapoints :raises: ClientError, AuthFailure, BadFormat, ForbiddenAccess, TooManyRequests, InternalError .. note:: Use period = 0 and averageBy = 0 to get the very last data point. If you only need one average for a period, the average_by needs to be bigger than the period (eg, for a 10 minutes average: period = 600, average_by = 601) .. seealso:: :func:`parse_data` for return data syntax """
return self.parse_data((yield from self._get( LAST_DATA_URL.format(uuid= uuid, period= trunc(period), average_by= trunc(average_by)))))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_historical_data(self, uuid, start, end, average_by=0): """ Get the data from one device for a specified time range. .. note:: Can fetch a maximum of 42 days of data. To speed up query processing, you can use a combination of average factor multiple of 1H in seconds (e.g. 3600), and o'clock start and end times :param uuid: Id of the device :type uuid: str :param start: start of the range :type start: datetime :param end: end of the range :type end: datetime :param average_by: amount of seconds to average data over. 0 or 300 for no average. Use 3600 (average hourly) or a multiple for long range requests (e.g. more than 1 day) :type average_by: integer :returns: list of datapoints :raises: ClientError, AuthFailure, BadFormat, ForbiddenAccess, TooManyRequests, InternalError .. seealso:: :func:`parse_data` for return data syntax """
return self.parse_data((yield from self._get( HISTORICAL_DATA_URL.format(uuid= uuid, start = trunc(start.replace(tzinfo=timezone.utc).timestamp()), end = trunc(end.replace(tzinfo=timezone.utc).timestamp()), average_by= trunc(average_by)))))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_data(self, response): """ Convert the weird list format used for datapoints to a more usable dictionnary one :param response: dictionnary from API json response :type response: dict :returns: list of datapoints .. note:: Datapoint content: * time: UTC timestamp, unit: seconds * pm: Particulate Matter, unit: ugm3 * tmp: temperature, unit: C * hum: humidity, unit: % * co2: Carbon Dioxide, unit: ppm * voc: Volatile Organic Compounds, unit: ppb * allpollu: `foobot index <https://help.foobot.io/hc/en-us/articles/204814371-What-does-central-number-mean->`_, unit: % """
parsed = [] try: items = response['sensors'] for datapoint in response['datapoints']: line = {} for index, data in enumerate(datapoint): line[items[index]] = data parsed.append(line) return parsed except (KeyError, IndexError, TypeError): raise FoobotClient.InvalidData()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def metaclass(self): """Get a metaclass configured to use this registry."""
if '_metaclass' not in self.__dict__: self._metaclass = type('PermissionsMeta', (PermissionsMeta,), {'registry': self}) return self._metaclass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def register(self, perm_func=None, model=None, allow_staff=None, allow_superuser=None, allow_anonymous=None, unauthenticated_handler=None, request_types=None, name=None, replace=False, _return_entry=False): """Register permission function & return the original function. This is typically used as a decorator:: permissions = PermissionsRegistry() @permissions.register def can_do_something(user): For internal use only: you can pass ``_return_entry=True`` to have the registry :class:`.Entry` returned instead of ``perm_func``. """
allow_staff = _default(allow_staff, self._allow_staff) allow_superuser = _default(allow_superuser, self._allow_superuser) allow_anonymous = _default(allow_anonymous, self._allow_anonymous) unauthenticated_handler = _default(unauthenticated_handler, self._unauthenticated_handler) request_types = _default(request_types, self._request_types) if perm_func is None: return ( lambda perm_func_: self.register( perm_func_, model, allow_staff, allow_superuser, allow_anonymous, unauthenticated_handler, request_types, name, replace, _return_entry) ) name = _default(name, perm_func.__name__) if name == 'register': raise PermissionsError('register cannot be used as a permission name') elif name in self._registry and not replace: raise DuplicatePermissionError(name) view_decorator = self._make_view_decorator( name, perm_func, model, allow_staff, allow_superuser, allow_anonymous, unauthenticated_handler, request_types) entry = Entry( name, perm_func, view_decorator, model, allow_staff, allow_superuser, allow_anonymous, unauthenticated_handler, request_types, set()) self._registry[name] = entry @wraps(perm_func) def wrapped_func(user, instance=NO_VALUE): if user is None: return False if not allow_anonymous and user.is_anonymous(): return False test = lambda: perm_func(user) if instance is NO_VALUE else perm_func(user, instance) return ( allow_staff and user.is_staff or allow_superuser and user.is_superuser or test() ) register.filter(name, wrapped_func) log.debug('Registered permission: {0}'.format(name)) return entry if _return_entry else wrapped_func
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def require(self, perm_name, **kwargs): """Use as a decorator on a view to require a permission. Optional args: - ``field`` The name of the model field to use for lookup (this is only relevant when requiring a permission that was registered with ``model=SomeModelClass``) Examples:: @registry.require('can_do_stuff') def view(request): @registry.require('can_do_stuff_with_model', field='alt_id') def view_model(request, model_id): """
view_decorator = self._get_entry(perm_name).view_decorator return view_decorator(**kwargs) if kwargs else view_decorator
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def entry_for_view(self, view, perm_name): """Get registry entry for permission if ``view`` requires it. In other words, if ``view`` requires the permission specified by ``perm_name``, return the :class:`Entry` associated with the permission. If ``view`` doesn't require the permission, return ``None`` instead. """
view_name = self._get_view_name(view) entry = self._get_entry(perm_name) if view_name in entry.views: return entry return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_importer(self): """ Resolve importer from TRACK_IMPORTER_CLASS setting. """
# Resolve the dotted-path setting that names the importer class.
try:
    importer_path = settings.TRACK_IMPORTER_CLASS
except AttributeError:
    raise ImproperlyConfigured('No TRACK_IMPORTER_CLASS setting found.')

# Split "package.module.ClassName" at its last dot.
try:
    dot = importer_path.rindex('.')
except ValueError:
    raise ImproperlyConfigured('%s isn\'t a Track Importer module.' % importer_path)
module, classname = importer_path[:dot], importer_path[dot+1:]

# Import the containing module (Python 2 'except X, e' syntax -- this
# module targets Python 2).
try:
    mod = import_module(module)
except ImportError, e:
    raise ImproperlyConfigured('Could not import Track Importer %s: "%s".' % (module, e))

try:
    importer_class = getattr(mod, classname)
except AttributeError:
    raise ImproperlyConfigured('Track Importer module "%s" does not define a "%s" class.' % (module, classname))

importer_instance = importer_class()
# Importer contract: run() must return a list of Track objects.
if not hasattr(importer_instance, 'run'):
    raise ImproperlyConfigured('Track Importer class "%s" does not define a run method. Implement the method to return a list of Track objects.' % classname)

return importer_instance
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def lookup_track(self, track): """ Looks up Django Track object for provided raw importing track object. """
tracks = Track.objects.filter(title__iexact=track.title) for track_obj in tracks: for contributor in track_obj.get_primary_contributors(permitted=False): if contributor.title == track.artist: return track_obj return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run(self): """ Run import. """
# Most recently played track (if any) -- used to skip plays that were
# already imported on a previous run.
latest_track = Track.objects.all().order_by('-last_played')
latest_track = latest_track[0] if latest_track else None

importer = self.get_importer()
tracks = importer.run()

# Create/update Django Track objects for importer tracks.
for track in tracks:
    # Only create/update if tracks with start times greater than what already exists are imported.
    if not latest_track or not latest_track.last_played \
            or track.start_time > latest_track.last_played:
        obj = self.lookup_track(track)
        # Don't update importing track that is regarded as the latest. This prevents start times from constantly incrementing.
        if latest_track and obj == latest_track:
            print "[%s-%s]: Start time not updated as it is the latest track." % (track.title, track.artist)
            continue
        # If no existing track object could be resolved, create it.
        if not obj:
            print "[%s-%s]: Created." % (track.title, track.artist)
            obj = Track.objects.create(title=track.title)
            obj.length = track.length
            # Credit the artist using the highest-priority credit role,
            # falling back to role 1 when none are configured.
            roles = MusicCreditOption.objects.all().order_by('role_priority')
            role = roles[0].role_priority if roles else 1
            obj.create_credit(track.artist, role)
        else:
            print "[%s-%s]: Not created as it already exists." % (track.title, track.artist)
        # Update last played time to start time.
        obj.last_played = track.start_time
        obj.save()
        print "[%s-%s]: Start time updated to %s." % (track.title, track.artist, track.start_time)
    else:
        print "[%s-%s]: Not created as it has a past start time of %s (latest %s). " % (track.title, track.artist, track.start_time, latest_track.last_played)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def match_value_by_name(expected_type, actual_value): """ Matches expected type to a type of a value. :param expected_type: an expected type name to match. :param actual_value: a value to match its type to the expected one. :return: true if types are matching and false if they don't. """
if expected_type == None: return True if actual_value == None: raise Exception("Actual value cannot be null") return TypeMatcher.match_type_by_name(expected_type, type(actual_value))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def match_type_by_name(expected_type, actual_type): """ Matches expected type to an actual type. :param expected_type: an expected type name to match. :param actual_type: an actual type to match defined by type code. :return: true if types are matching and false if they don't. """
if expected_type == None: return True if actual_type == None: raise Exception("Actual type cannot be null") expected_type = expected_type.lower() if actual_type.__name__.lower() == expected_type: return True elif expected_type == "object": return True elif expected_type == "int" or expected_type == "integer": return issubclass(actual_type, int) #or issubclass(actual_type, long) elif expected_type == "long": return issubclass(actual_type, int) elif expected_type == "float" or expected_type == "double": return issubclass(actual_type, float) elif expected_type == "string": return issubclass(actual_type, str) #or issubclass(actual_type, unicode) elif expected_type == "bool" or expected_type == "boolean": return issubclass(actual_type, bool) elif expected_type == "date" or expected_type == "datetime": return issubclass(actual_type, datetime.datetime) or issubclass(actual_type. datetime.date) elif expected_type == "timespan" or expected_type == "duration": return issubclass(actual_type, int) or issubclass(actual_type, float) elif expected_type == "enum": return issubclass(actual_type, str) or issubclass(actual_type, int) elif expected_type == "map" or expected_type == "dict" or expected_type == "dictionary": return issubclass(actual_type, dict) elif expected_type == "array" or expected_type == "list": return issubclass(actual_type, list) or issubclass(actual_type, tuple) or issubclass(actual_type, set) elif expected_type.endswith("[]"): # Todo: Check subtype return issubclass(actual_type, list) or issubclass(actual_type, tuple) or issubclass(actual_type, set) else: return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def blob(self, request, pk=None): """ fetch large object from pg and gives it back to user via HTTP 1.1 request :param request: django request instance :param pk: requested resource primary key :rtype: django.http.HttpResponse :rtype: HttpResponse :return: file with its filename stored in database """
obj = self.get_object_or_none() if obj: blob = obj.get_blob_data() content_type = 'octet/stream' response = HttpResponse(blob, content_type=content_type, status=status.HTTP_200_OK) response['Content-Disposition'] = ( 'attachment; filename="%s"' % obj.name ) return response return HttpResponse('404', status=status.HTTP_404_NOT_FOUND, content_type='application/json')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_github(self): """Creates an instance of github.Github to interact with the repos via the API interface in pygithub. """
from github import Github

vms("Querying github with user '{}'.".format(self.username))
g = Github(self.username, self.apikey)
self._user = g.get_user()
if self._user is None:
    raise ValueError("Can't authenticate to github with '{}'.".format(self.username))

#The github user authenticating always has to be specified; however the user
#may not be able to see the repo, even if it has access to it. We may need
#to check the organization repos.
if self.organization is not None:
    self._org = g.get_organization(self.organization)
    vms("Found github organization '{}'.".format(self._org.name), 2)
    #Next we need to find this repository in the lists available to both
    #the user *and* the organization. If they specified an organization, then we
    #should check that first/exclusively.
    # NOTE(review): matching is case-insensitive on the repo's full name;
    # assumes self.name holds "owner/repo" -- confirm against callers.
    for repo in self._org.get_repos():
        if repo.full_name.lower() == self.name.lower():
            self._repo = repo
            vms("Found organization repository '{}'.".format(self._repo.full_name), 2)
            break
else:
    for repo in self._user.get_repos():
        if repo.full_name.lower() == self.name.lower():
            self._repo = repo
            vms("Found user repository '{}'.".format(self._repo.full_name), 2)
            break
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _parse_xml(self): """Extracts the XML settings into class instances that can operate on the settings to perform the testing functions. """
import xml.etree.ElementTree as ET
from os import path

#This dict has the keys of XML tags that are required in order for the
#CI server to run the repo. When each one is parsed, we change its value
#to True and then check that they are all true at the end.
required = {"testing": False, "wiki": False}

#Make sure the file exists and then import it as XML and read the values out.
if path.isfile(self.filepath):
    tree = ET.parse(self.filepath)
    vms("Parsing XML tree from {}.".format(self.filepath), 2)
    root = tree.getroot()
    if root.tag != "cirepo":
        raise ValueError("The root tag in a continuous integration settings XML "
                         "file should be a <cirepo> tag.")
    self._parse_repo(root)

    # Dispatch each child tag to its dedicated settings object.
    for child in root:
        if child.tag == "cron":
            if self.server is not None:
                # Cron settings are registered on the server, keyed by repo name.
                self.server.cron.settings[self.name] = CronSettings(child)
        if child.tag == "testing":
            self.testing = TestingSettings(child)
        if child.tag == "static":
            self.static = StaticSettings(child)
        if child.tag == "wiki":
            self.wiki["user"] = get_attrib(child, "user", "wiki")
            self.wiki["password"] = get_attrib(child, "password", "wiki")
            self.wiki["basepage"] = get_attrib(child, "basepage", "wiki")
        if child.tag in required:
            required[child.tag] = True

    # Fail loudly if any mandatory tag was missing from the settings file.
    if not all(required.values()):
        tags = ', '.join(["<{}>".format(t) for t in required])
        raise ValueError("{} are required tags in the repo's XML settings file.".format(tags))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _parse_xml(self, xml): """Extracts the attributes from the XMLElement instance."""
from re import split

# Pull the polling frequency and the comma-separated recipient lists off
# the <cron> tag's attributes.
vms("Parsing <cron> XML child tag.", 2)
self.frequency = get_attrib(xml, "frequency", default=5, cast=int)
# FIX: raw strings so '\s' is a regex escape, not an (invalid-in-py3)
# string escape sequence.
self.emails = split(r",\s*", get_attrib(xml, "emails", default=""))
self.notify = split(r",\s*", get_attrib(xml, "notify", default=""))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _parse_xml(self, xml): """Extracts objects representing and interacting with the settings in the xml tag. """
# Each <file>/<folder> child with both 'path' and 'target' attributes
# becomes a source->target mapping for the static copy step.
vms("Parsing <static> XML child tag.", 2)
for entry in xml:
    attrs = entry.attrib
    if "path" not in attrs or "target" not in attrs:
        continue
    mapping = {"source": attrs["path"], "target": attrs["target"]}
    if entry.tag == "file":
        self.files.append(mapping)
    elif entry.tag == "folder":
        self.folders.append(mapping)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def copy(self, repodir): """Copies the static files and folders specified in these settings into the locally-cloned repository directory. :arg repodir: the full path to the directory with the locally-cloned version of the pull request being unit tested. """
#Instead of using the built-in shell copy, we make shell calls to rsync. #This allows us to copy only changes across between runs of pull-requests. from os import system, path vms("Running static file copy locally.", 2) for file in self.files: fullpath = path.expanduser(file["source"]) if path.isfile(fullpath): vms("Running 'rsync' for {}.".format(fullpath), 3) system("rsync -t -u {} {}".format(fullpath, get_repo_relpath(repodir, file["target"]))) for folder in self.folders: fullpath = path.expanduser(folder["source"]) if path.isdir(fullpath): vms("Running 'rsync' for {}.".format(fullpath), 3) system("rsync -t -u -r {} {}".format(path.join(fullpath, ""), path.join(get_repo_relpath(repodir, folder["target"]), "")))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def serial(self): """Returns true if the CI server should run in serial mode. """
serial = self.property_get("SERIAL", False) if isinstance(serial, str): return serial.lower() == "true" else: return serial
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def var_replace(self, text): """Replaces all instances of @VAR with their values in the specified text. """
result = text for var in self._vardict: result = result.replace("@{}".format(var), self._vardict[var]) return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_or_create_iobject(identifier_uid, identifier_namespace_uri, iobject_type_name, iobject_type_namespace_uri, iobject_type_revision_name, iobject_family_name, iobject_family_revision_name="", identifier_namespace_name="", timestamp=None, create_timestamp=None, overwrite=False, dingos_class_map=dingos_class_map): """ Get or create an information object. """
# A timestamp is mandatory: it disambiguates revisions of one identifier.
if not timestamp:
    raise StandardError("You must supply a timestamp.")

# create or retrieve identifier namespace (name is only filled in on
# first creation, when a human-readable name was supplied)
id_namespace, created = dingos_class_map['IdentifierNameSpace'].objects.get_or_create(uri=identifier_namespace_uri)
if created and identifier_namespace_name:
    id_namespace.name = identifier_namespace_name
    id_namespace.save()

identifier, created = dingos_class_map['Identifier'].objects.get_or_create(uid=identifier_uid,
                                                                           namespace=id_namespace,
                                                                           defaults={'latest': None})

iobject_type_namespace, created = dingos_class_map['DataTypeNameSpace'].objects.get_or_create(uri=iobject_type_namespace_uri)
iobject_family, created = dingos_class_map['InfoObjectFamily'].objects.get_or_create(name=iobject_family_name)
iobject_family_revision, created = dingos_class_map['Revision'].objects.get_or_create(name=iobject_family_revision_name)

# create or retrieve the iobject type and revision
iobject_type, created = dingos_class_map['InfoObjectType'].objects.get_or_create(name=iobject_type_name,
                                                                                 iobject_family=iobject_family,
                                                                                 namespace=iobject_type_namespace)
iobject_type_revision, created = dingos_class_map['Revision'].objects.get_or_create(name=iobject_type_revision_name)

if not create_timestamp:
    create_timestamp = timezone.now()

#if not timestamp:
#    timestamp = create_timestamp
#    iobject = overwrite
#    created = False

# The (identifier, timestamp) pair is the natural key of a revision.
iobject, created = dingos_class_map["InfoObject"].objects.get_or_create(identifier=identifier,
                                                                        timestamp=timestamp,
                                                                        defaults={'iobject_family': iobject_family,
                                                                                  'iobject_family_revision': iobject_family_revision,
                                                                                  'iobject_type': iobject_type,
                                                                                  'iobject_type_revision': iobject_type_revision,
                                                                                  'create_timestamp': create_timestamp})
if created:
    # New revision: derive its display name and mark it as the latest
    # revision of its identifier.
    iobject.set_name()
    iobject.save()
    identifier.latest = iobject
    identifier.save()
elif overwrite:
    # Existing revision and overwrite requested: refresh all metadata
    # fields in place. NOTE(review): identifier.latest is *not* updated
    # on this path -- confirm that is intended.
    iobject.timestamp = timestamp
    iobject.create_timestamp = create_timestamp
    iobject.iobject_family = iobject_family
    iobject.iobject_family_revision = iobject_family_revision
    iobject.iobject_type = iobject_type
    iobject.iobject_type_revision = iobject_type_revision
    iobject.set_name()
    iobject.save()

logger.debug(
    "Created iobject id with %s , ts %s (created was %s) and overwrite as %s" % (iobject.identifier, timestamp, created, overwrite))

return iobject, created
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_or_create_fact(fact_term, fact_dt_name='String', fact_dt_namespace_uri=DINGOS_NAMESPACE_URI, values=None, value_iobject_id=None, value_iobject_ts=None, ): """ Get or create a fact object. """
if not values:
    values = []

vocab_namespace, created = dingos_class_map['DataTypeNameSpace'].objects.get_or_create(uri=fact_dt_namespace_uri)
fact_data_type, created = dingos_class_map['FactDataType'].objects.get_or_create(name=fact_dt_name,
                                                                                 namespace=vocab_namespace)

# Maybe we already have a fact with exactly the same fact term and the same fact values?
# We start by looking at the number of values
value_objects = []
for value in values:
    storage_location=dingos.DINGOS_VALUES_TABLE
    # collect (create or get) the required value objects
    if value == None:
        value = ''
    if isinstance(value,tuple):
        # If a value is wrapped in a tuple, the second component of the tuple
        # specifies the storage location of the value.
        value, storage_location = value
    if storage_location == dingos.DINGOS_VALUES_TABLE:
        # If the value is larger than a given size, the value is written to disk, instead.
        # We use this to keep too large values out of the database. Depending on how the
        # database is set up, this may be necessary to allow indexing, which in turn is
        # required to check uniqueness on values.
        if len(value) > dingos.DINGOS_MAX_VALUE_SIZE_WRITTEN_TO_VALUE_TABLE:
            (value_hash,storage_location) = write_large_value(value)
            value = value_hash
    fact_value, created = dingos_class_map['FactValue'].objects.get_or_create(value=value,
                                                                              fact_data_type=fact_data_type,
                                                                              storage_location=storage_location)
    value_objects.append(fact_value)

# Do we already have a fact with given fact term and given values?
#
# For understanding the query below better, see https://groups.google.com/forum/#!topic/django-users/X9TCSrBn57Y.
# The double query is necessary, because the first count counts the number of selected
# fact_value objects, not the number of total objects for each fact.
possibly_matching_facts = Fact.objects.filter(fact_values__in=value_objects,
                                              value_iobject_id=value_iobject_id,
                                              value_iobject_ts=value_iobject_ts,
                                              fact_term=fact_term
                                              ).values_list('pk',flat=True)

matching_facts = Fact.objects.filter(pk__in=list(possibly_matching_facts)). \
    annotate(num_values=Count('fact_values')). \
    filter(num_values=len(value_objects)). \
    exclude(id__in= \
                Fact.objects.filter(pk__in=possibly_matching_facts).annotate(total_values=Count('fact_values')). \
                filter(total_values__gt=len(value_objects)))

# Below, for educational purposes, the original query until Dingos 0.2.0, which got *really*
# slow with lot's of objects in the system. The reason for this are the last three lines:
# the exclude-statement required the database to count the the number of values for each
# Fact in the system... but we are really only interested into facts with the same
# fact_term, value_iobject_id and value_iobject_ts...

#matching_facts = Fact.objects.filter(fact_values__in=value_objects). \
#    annotate(num_values=Count('fact_values')). \
#    filter(num_values=len(value_objects)). \
#    filter(value_iobject_id=value_iobject_id). \
#    filter(value_iobject_ts=value_iobject_ts). \
#    filter(fact_term=fact_term). \
#    exclude(id__in= \
#                Fact.objects.annotate(total_values=Count('fact_values')). \
#                filter(total_values__gt=len(value_objects)))

# Reuse the first exact match; otherwise create a fresh Fact and attach
# its value objects. (The bare except treats any lookup failure --
# typically IndexError on an empty queryset -- as "no match".)
created = True
try:
    fact_obj = matching_facts[0]
    created = False
    logger.debug("FOUND MATCHING OBJECT with pk %s" % fact_obj.pk)
except:
    fact_obj = dingos_class_map['Fact'].objects.create(fact_term=fact_term,
                                                       value_iobject_id=value_iobject_id,
                                                       value_iobject_ts=value_iobject_ts,
                                                       )
    fact_obj.fact_values.add(*value_objects)
    fact_obj.save()

return fact_obj, created
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_or_create_fact_term(iobject_family_name, fact_term_name, fact_term_attribute, iobject_type_name, iobject_type_namespace_uri, fact_dt_name=DINGOS_DEFAULT_FACT_DATATYPE, fact_dt_namespace_name=None, fact_dt_kind=FactDataType.UNKNOWN_KIND, fact_dt_namespace_uri=DINGOS_NAMESPACE_URI, dingos_class_map=dingos_class_map ): """ Get or create a fact term. """
if not fact_term_attribute: fact_term_attribute = '' # create or retrieve the enrichment type and revision iobject_family, created = dingos_class_map['InfoObjectFamily'].objects.get_or_create(name=iobject_family_name) # create or retrieve namespace of data type fact_dt_namespace, created = dingos_class_map['DataTypeNameSpace'].objects.get_or_create(uri=fact_dt_namespace_uri) # create or retrieve namespace of the infoobject type iobject_type_namespace, created = dingos_class_map['DataTypeNameSpace'].objects.get_or_create(uri=iobject_type_namespace_uri) if created and fact_dt_namespace_name: fact_dt_namespace.name = fact_dt_namespace_name fact_dt_namespace.save() # create or retrieve the fact-value data type object fact_dt, created = dingos_class_map['FactDataType'].objects.get_or_create(name=fact_dt_name, namespace=fact_dt_namespace) if created: fact_dt.kind = fact_dt_kind fact_dt.save() # create or retreive the iobject type iobject_type, created = dingos_class_map['InfoObjectType'].objects.get_or_create(name=iobject_type_name, iobject_family=iobject_family, namespace=iobject_type_namespace) fact_term, created = dingos_class_map['FactTerm'].objects.get_or_create(term=fact_term_name, attribute=fact_term_attribute) fact_term_2_type, dummy = dingos_class_map['FactTerm2Type'].objects.get_or_create(fact_term=fact_term, iobject_type=iobject_type, ) fact_term_2_type.fact_data_types.add(fact_dt) fact_term_2_type.save() return fact_term, created
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def marking_thru(self): """ Return the back-pointer to markings that may have been attached via Django's content type mechanism. """
self_django_type = ContentType.objects.get_for_model(self) return Marking2X.objects.filter(content_type__pk=self_django_type.id, object_id=self.id)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def embedded_in(self): """ Used in the view for the InfoObject (in order to be able to use the standard class-based object view. Should be removed from here and put into a proper custom view for the object. This query only returns embedding objects of the latest revision: to change this, the filter 'iobject__timestamp=F('iobject__identifier__latest__timestamp' must be removed. """
# Facts whose value points at this object's identifier, restricted to
# the *latest* revision of each embedding object (its timestamp equals
# the identifier's latest timestamp), ordered newest first.
return self._DCM['InfoObject2Fact']. \
    objects. \
    filter(fact__value_iobject_id=self.identifier). \
    filter(iobject__timestamp=F('iobject__identifier__latest__timestamp')). \
    order_by('-iobject__timestamp') \
    .values_list(
    'iobject',
    'iobject__identifier__namespace__uri',
    'iobject__identifier__uid',
    'iobject__timestamp',
    'iobject__name',
    'fact__value_iobject_ts',
    'fact__fact_term__term',
    'node_id__name').distinct()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_name(self,name=None): """ Set the name of the object. If no name is given, the name is extracted via the extract_name method. """
if name: self.name = name[:254] else: self.name = self.extract_name()[:254] self.save() return self.name
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_relation(self, target_id=None, relation_types=None, fact_dt_namespace_name=None, fact_dt_namespace_uri=DINGOS_NAMESPACE_URI, fact_dt_kind=FactDataType.UNKNOWN_KIND, fact_dt_name='String', metadata_dict=None, markings=None ): """ Add a relationship between this object and another object. """
# Build (or fetch) the fact term and fact that encode the relation types,
# then get_or_create the Relation row itself, attach markings, and
# optionally write relation metadata as a fresh InfoObject revision.
if not markings:
    markings = []
# NOTE(review): '== None' would be clearer as 'is None' (PEP 8).
if relation_types == None:
    relation_types = []
# Create fact-term for relation types
relation_type_ft, created = get_or_create_fact_term(iobject_family_name=self.iobject_family.name,
                                                    fact_term_name=DINGOS_RELATION_TYPE_FACTTERM_NAME,
                                                    iobject_type_name=self.iobject_type.name,
                                                    iobject_type_namespace_uri=self.iobject_type.namespace.uri,
                                                    fact_dt_name=fact_dt_name,
                                                    fact_dt_namespace_name=fact_dt_namespace_name,
                                                    fact_dt_kind=fact_dt_kind,
                                                    fact_dt_namespace_uri=fact_dt_namespace_uri)
# Create fact containing relation types
relation_type_fact, created = get_or_create_fact(fact_term=relation_type_ft,
                                                 fact_dt_name=fact_dt_name,
                                                 fact_dt_namespace_uri=fact_dt_namespace_uri,
                                                 values=relation_types,
                                                 value_iobject_id=None,
                                                 value_iobject_ts=None,
                                                 )
# This object is always the relation's source.
rel_target_id = target_id
rel_source_id = self.identifier
# Create relation object
relation, created = self._DCM['Relation'].objects.get_or_create(
    source_id=rel_source_id,
    target_id=rel_target_id,
    relation_type=relation_type_fact)
# Attach each marking to the relation via the generic Marking2X table.
for marking in markings:
    Marking2X.objects.create(marked=relation, marking=marking)
if metadata_dict:
    # If the relation already existed and had associated metadata,
    # we retrieve the identifier of that metadata object and
    # write the current metadata as new revision. Otherwise,
    # we create a new identifier.
    if relation.metadata_id:
        rel_identifier_uid = relation.metadata_id.uid
        rel_identifier_namespace_uri = relation.metadata_id.namespace.uri
    else:
        rel_identifier_uid = None
        rel_identifier_namespace_uri = DINGOS_ID_NAMESPACE_URI
    metadata_iobject, created = get_or_create_iobject(identifier_uid=rel_identifier_uid,
                                                      identifier_namespace_uri=rel_identifier_namespace_uri,
                                                      iobject_type_name=DINGOS_RELATION_METADATA_OBJECT_TYPE_NAME,
                                                      iobject_type_namespace_uri=DINGOS_NAMESPACE_URI,
                                                      iobject_type_revision_name=DINGOS_REVISION_NAME,
                                                      iobject_family_name=DINGOS_IOBJECT_FAMILY_NAME,
                                                      iobject_family_revision_name=DINGOS_REVISION_NAME,
                                                      timestamp=None,
                                                      overwrite=False)
    # Populate the metadata object from the caller-supplied dictionary.
    metadata_iobject.from_dict(metadata_dict)
return relation
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _load_data(self): """ Load data from raw_data or file_path """
# Native-Python payloads need no loading; otherwise, when raw_data was not
# supplied directly, read it from file_path after validating that the path
# exists and is readable.
if self.raw_data is None and self.data_format is not FormatType.PYTHON:
    if self.file_path is None:
        raise ArgumentInvalid('One of "raw_data" or "file_path" should be set!')
    if not os.path.isfile(self.file_path) or not os.access(self.file_path, os.R_OK):
        raise ArgumentInvalid('"file_path" should be a valid path to an exist file with read permission!')
    with open(self.file_path) as f:
        self.raw_data = f.read()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _validate(self): """ Validate the input data. """
# Dispatch on the declared format: Python data is taken as-is, JSON and
# YAML are parsed (and thereby validated) by the dedicated helpers, which
# are expected to populate self.data.
if self.data_format is FormatType.PYTHON:
    self.data = self.raw_data
elif self.data_format is FormatType.JSON:
    self._validate_json()
elif self.data_format is FormatType.YAML:
    self._validate_yaml()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def match(pattern, text, no_escape=False, path_name=True, wild_star=True, period=False, case_fold=False): u""" Matches text against the supplied wildmatch pattern. To get git's behavior, use the `wild_star` flag. Note that the EXTMATCH (ksh extended glob patterns) option is not available :type pattern: text_type :param pattern: A wildmatch pattern :type text: text_type :param text: The text to match :type no_escape: bool :param no_escape: Disable backslash escaping :type path_name: bool :param path_name: Separator (slash) in text cannot be matched by an asterisk, question-mark nor bracket expression in pattern (only a literal). :type wild_star: bool :param wild_star: A True value forces the `path_name` flag to True. This allows the double-asterisk `**` to match any (0 to many) number of directories :type period: bool :param period: A leading period in text cannot be matched by an asterisk, question-mark nor bracket expression in pattern (only a literal). A period is "leading" if: - it is the first character of `text` OR - path_name (or wild_star) is True and the previous character is a slash :type case_fold: bool :param case_fold: Perform a case insensitive match (GNU Extension) :rtype: bool :return: Result of the match """
# Compile the wildmatch pattern into an anchored ("closed") regex with the
# requested flag semantics, then report whether `text` matches.
regex = translate(pattern, no_escape=no_escape, path_name=path_name, wild_star=wild_star,
                  period=period, case_fold=case_fold, closed_regex=True)
return regex.match(text) is not None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def filter(self, texts): u""" Returns a generator yielding the elements of `texts` matching this pattern. :type texts: typing.Iterable[text_type] :param texts: An iterable collection of texts to match :rtype: typing.Iterable[text_type] :return: A generator of filtered elements. """
# Lazily yield only the candidates the pre-compiled pattern matches.
return (text for text in texts if self.regex.match(text) is not None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def imagesc(data, title=None, fig='current', ax=None):
    '''Display *data* as an image, Matlab ``imagesc``-style.

    Renders onto the resolved 2-D axis with nearest-neighbour interpolation
    and an automatic aspect ratio, optionally titling the plot.  Returns the
    (uncalled) ``plt.show`` so the caller decides when to display.
    '''
    target = _get_axis(fig, ax, False)
    target.imshow(data, interpolation='nearest', aspect='auto')
    if title:
        target.set_title(title)
    return plt.show
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def vector_field(points, directions, title=None, fig='current', ax=None,
                 edge_style='k-', vertex_style='o'):
    '''Plots vectors that start at 'points', and move along 'directions'.

    points/directions are (n, 2) or (n, 3) arrays of identical shape.
    vertex_style=None suppresses the start-point markers.
    NOTE(review): edge_style is currently unused — confirm whether it
    should be forwarded to the quiver calls.
    Returns plt.show (uncalled) so the caller controls display.
    '''
    assert points.shape[1] in (2,3) and directions.shape == points.shape
    # Resolve a 3-D axis only for 3-D input.
    ax = _get_axis(fig, ax, points.shape[1] == 3)
    # Plot.
    if points.shape[1] == 2:
        x,y = points.T
        dx,dy = directions.T
        if hasattr(ax, 'zaxis'):
            # Must be on a 3d plot axis, so supply zeros.
            _quiver3d(ax, x, y, 0, dx, dy, 0, arrow_length_ratio=0.1)
        else:
            args = (x, y, dx, dy)
            ax.quiver(*args, angles='xy', scale_units='xy', scale=1, headwidth=5)
        if vertex_style is not None:
            ax.scatter(x, y, marker=vertex_style, zorder=2, edgecolor='none')
    else:
        x,y,z = points.T
        dx,dy,dz = directions.T
        _quiver3d(ax, x, y, z, dx, dy, dz, arrow_length_ratio=0.1)
        if vertex_style is not None:
            ax.scatter(x, y, z, marker=vertex_style, zorder=2, edgecolor='none')
    if title:
        ax.set_title(title)
    return plt.show
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def read_passwd_file(pass_file): """Read password from external file and return it as a string. The file should contain just a single line. Prevents hard-coding the password anywhere in this script. IMPORTANT! Password is stored as plain text! Do NOT use with your personal account! Args: pass_file (str): /path/to/pass_file """
with open(pass_file) as fin: passwd = fin.read().strip() return passwd
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def send_mail(to_addr, subj_msg, body_msg, attach_path, serv_addr, serv_port, from_addr, passwd): """Send an e-mail message using smtplib and email standard python libraries. IMPORTANT! Password is stored as plain text! Do NOT use with your personal account! Args: to_addr (str): Recipient address. subj_msg (str): Message subject. body_msg (str): Message body. serv_addr (str): Server's address. Default: <smtp.gmail.com>. serv_port (int): Server's port. Default: <587>. from_addr (str): Account address. Default: <headnode.notifiy@gmail.com>. passwd (str): Account password. """
# Assemble a MIME message (with an optional base64-encoded attachment),
# then deliver it over a STARTTLS-secured SMTP session.
msg = MIMEMultipart()
if attach_path is not None:
    with open(attach_path, "rb") as fin:
        part = MIMEBase("application", "octet-stream")
        part.set_payload(fin.read())
    encoders.encode_base64(part)
    # NOTE(review): the full path (not just the basename) is advertised as
    # the attachment filename — confirm whether that is intended.
    part.add_header("Content-Disposition",
                    "attachment; filename={0}".format(attach_path))
    msg.attach(part)
msg["From"] = from_addr
msg["To"] = to_addr
msg["Subject"] = subj_msg
msg.attach(MIMEText(body_msg, "plain"))
server = smtplib.SMTP(serv_addr, serv_port)
server.starttls()
server.login(from_addr, passwd)
text_msg = msg.as_string()
server.sendmail(from_addr, to_addr, text_msg)
# Bug fix: 'server.quit' was previously referenced without calling it, so
# the SMTP session was never terminated with QUIT.
server.quit()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def roc_calculator(screened_molecules, status_field, active_label, decoy_label): """ Calculates ROC curve """
P = 0 # Total no. of actives N = 0 # Total no. of decoys tpf = []; tpf.append(0) # true positive fraction list fpf = []; fpf.append(0) # false positive fraction list fpindex = [] # indeces where decoys are found are labeled '1' # Tally the # of positives & negatives at each threshold & in total for index in range(len(screened_molecules)): if screened_molecules[index].GetProp(status_field) == active_label and index == 0: tpf[index] = float(1) P = P + 1 fpindex.append(0) elif screened_molecules[index].GetProp(status_field) == active_label and index > 0: tpf.append(float(tpf[index - 1] + 1)) fpf.append(float(fpf[index - 1])) P = P + 1 fpindex.append(0) elif screened_molecules[index].GetProp(status_field) == decoy_label and index == 0: fpf[index] = float(1) N = N + 1 fpindex.append(1) elif screened_molecules[index].GetProp(status_field) == decoy_label and index > 0: fpf.append(float(fpf[index - 1] + 1)) tpf.append(float(tpf[index - 1])) N = N + 1 fpindex.append(1) # calculate TPF & FPF for index in range(len(tpf)): tpf[index] = tpf[index] / P fpf[index] = fpf[index] / N return tpf, fpf, P, N
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def listen(identifier): """ Launch a listener and return the compactor context. """
# Spawn a WebProcess under a fresh compactor Context and hand both back so
# the caller can drive/stop them.
context = Context()
process = WebProcess(identifier)
context.spawn(process)
log.info("Launching PID %s", process.pid)
return process, context
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def history(self): """Get the latest 10 files uploaded to the account. Return a list of Puush File objects. """
# 'hist' answers with a status row followed by one record per file; a
# leading '-1' signals failure.  Each record is unpacked positionally into
# the _File wrapper (note: the local name 'id' shadows the builtin).
res = self._api_request('hist')
if res[0][0] == '-1':
    raise PuushError("History retrieval failed.")
files = []
for line in res[1:]:
    id, upload_time, url, filename, views, _ = line
    files.append(self._File(id, url, filename, upload_time, views))
return files
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def prepare_value(self, value): """ To avoid evaluating the lazysorted callable more than necessary to establish a potential initial value for the field, we do it here. If there's - only one template choice, and - the field is required, and - there's no prior initial set (either by being bound or by being set higher up the stack then forcibly select the only "good" value as the default. """
if value is None and self.required: choices =list(self.choices) if len(choices) == 1: value = choices[0][0] return super(TemplateChoiceField, self).prepare_value(value)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def to_float_with_default(value, default_value): """ Converts value into float or returns default when conversion is not possible. :param value: the value to convert. :param default_value: the default value. :return: float value or default value when conversion is not supported. """
result = FloatConverter.to_nullable_float(value) return result if result != None else default_value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run(self): """This method is run by a separated thread"""
# Mark the worker busy, perform nine increment-then-pause steps, finish
# with a tenth increment (no trailing pause), and clear the busy flag.
self.busy = True
step = 0
while step < 9:
    self.counter += 1
    time.sleep(0.5)
    step += 1
self.counter += 1
self.busy = False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_info_field(prop): """ Return the info attribute of the given property """
# Column properties carry their info on the first mapped column; relations
# carry it on the property itself.
# NOTE(review): if prop is neither a ColumnProperty nor a
# RelationshipProperty, 'column' is unbound and this raises NameError —
# confirm whether a clearer error is wanted.
if isinstance(prop, ColumnProperty):
    column = prop.columns[0]
elif isinstance(prop, RelationshipProperty):
    column = prop
return column.info
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_item(self, sqla_col_type, item, key_specific=None): """ Add an item to the registry """
# Store into the top-level registry by default; when a key is given, store
# into (creating on demand) that key's dedicated sub-registry instead.
if key_specific is None:
    registry = self
else:
    registry = self.setdefault(key_specific, {})
registry[sqla_col_type] = item
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_formatter(self, sqla_col_type, formatter, key_specific=None): """ Add a formatter to the registry if key_specific is provided, this formatter will only be used for some specific exports """
# Thin alias: a formatter is just a registry item.
self.add_item(sqla_col_type, formatter, key_specific)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def setup_layout(self, orientation=None): """Setup the layout for the tooltip in the given orientation :param layout: the orentation of the layout :type layout: QtCore.Qt.Orientation | None :returns: None :rtype: None :raises: None """
# Horizontal is the default when no orientation is given; anything other
# than the two Qt orientations is rejected.  Margins and spacing are
# zeroed so buttons sit flush inside the tooltip.
if orientation == QtCore.Qt.Horizontal or orientation is None:
    layout = QtGui.QHBoxLayout()
elif orientation == QtCore.Qt.Vertical:
    layout = QtGui.QVBoxLayout()
else:
    raise TypeError('Orientation is of wrong type! Allowed is QtCore.Qt.Horizontal and QtCore.Qt.Vertical. Given: %s' % orientation)
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
self.setLayout(layout)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def setup_size(self, width, height): """Set the width and height for one cell in the tooltip This is inderectly acomplished by setting the iconsizes for the buttons. :param width: the width of one cell, min. is 7 -> icon width = 0 :type width: int :param height: the height of one cell, min. is 6 -> icon height = 0 :type height: int :returns: None :rtype: None :raises: None """
# Cell size maps to icon size minus fixed button chrome (7 px wide,
# 6 px tall), clamped at zero; then every button icon is refreshed.
self._iconw = width - 7 if width > 7 else 0
self._iconh = height - 6 if height > 6 else 0
self.update_all_buttons()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def setup_cyatimer(self, interval): """Setup the timer that will close the widget after the mouse left the widget for the time of interval :param interval: the time that the tooltip waits before it dissapears in milliseconds :type interval: int :returns: None :rtype: None :raises: None """
# Single-shot timer: fires once after the mouse leaves, hiding the
# tooltip; the interval is stored so Leave events can restart it.
self.cyatimer = QtCore.QTimer(self)
self.cyatimer.setSingleShot(True)
self.cyatimer.timeout.connect(self.hide)
self._interval = interval
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def event(self, event): """Reimplementation of QWidget.event The widget is closed, when the window is deactivated. The widget is closed after the set interval if the mouse leaves the widget. The timer is stops when the mouse enters the widget before the interval ends. On show, the added widgets are rendered for the tooltip into buttons. The buttons are used to set the widget in focus. """
# Window deactivation closes immediately; Leave arms the auto-hide timer;
# Enter disarms it; Show also disarms it.
# NOTE(review): the Show branch's "render the widgets" comment does not
# match the code — rendering happens in show()/update_all_buttons, this
# branch only stops the timer.  Confirm the comment or the behavior.
if event.type() == QtCore.QEvent.WindowDeactivate:
    # hide the tooltip
    self.cyatimer.stop()
    self.hide()
    return True
if event.type() == QtCore.QEvent.Leave:
    # start timer
    self.cyatimer.start(self._interval)
    return True
if event.type() == QtCore.QEvent.Enter:
    # reset/stop timer
    self.cyatimer.stop()
    return True
if event.type() == QtCore.QEvent.Show:
    # render the widgets
    self.cyatimer.stop()
    return True
return super(WidgetToolTip, self).event(event)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_button(self, widget): """Create a button that has the given widget rendered as an icon :param widget: the widget to render as icon :type widget: QtGui.QWidget :returns: the created button :rtype: QtGui.QAbstractButton :raises: None """
# Build a tool button sized to the current icon dimensions and paint the
# widget's rendering onto it via update_button.
btn = QtGui.QToolButton(self)
btn.setIconSize(QtCore.QSize(self._iconw, self._iconh))
self.update_button(btn, widget)
return btn
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_button(self, button, widget): """Update the icon of the button with the given widget if the widget does not is invalid, it is deleted from the tooltip automatically. :param button: the button to update :type button: QtGui.QAbstractButton :param widget: the widget to render as icon :type widget: QtGui.QWidget :returns: None :rtype: None :raises: None """
# Widgets whose C++ side was already destroyed are evicted instead of
# rendered; otherwise the widget is painted into a pixmap that becomes
# the button's icon.
if not shiboken.isValid(widget):
    self.remove_widget(widget)
    return
button.setIconSize(QtCore.QSize(self._iconw, self._iconh))
pix = QtGui.QPixmap(widget.size())
widget.render(pix)
icon = QtGui.QIcon(pix)
button.setIcon(icon)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_all_buttons(self, ): """Update all buttons :returns: None :rtype: None :raises: None """
# Refresh every button's icon, then shrink-wrap the tooltip.
# Bug fix: update_button() may call remove_widget(), which pops entries
# from self._buttons — iterating the live dict while it mutates raises
# RuntimeError, so iterate over a snapshot of the items instead.
for widget, button in list(self._buttons.items()):
    self.update_button(button, widget)
self.adjustSize()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def focus_widget(self, checked=None, w=None): """Focus the given widget. Checked is ignored and only used as a slot for QAbstractButton.clicked. :param checked: The checked state of the button that was clicked :type checked: bool :param w: the widget to focus :type w: QtGui.QWidget :returns: None :raises: None """
# Slot for button clicks ('checked' is ignored).  Restore a minimized
# window, otherwise just show it, then bring it to the front and focus it.
if w is None:
    return
if w.isMinimized():
    w.showNormal()
else:
    w.show()
w.activateWindow()
w.setFocus()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def add_widget(self, widget): """Add the given widget to the tooltip :param widget: the widget to add :type widget: QtGui.QWidget :returns: None :rtype: None :raises: None """
# Idempotent: widgets already represented by a button are skipped.  A new
# button gets a click handler bound (via partial) to focus this widget,
# is added to the layout, and is tracked in the widget->button map.
if self._buttons.get(widget):
    return
btn = self.create_button(widget)
cb = partial(self.focus_widget, w=widget)
btn.clicked.connect(cb)
self.layout().addWidget(btn)
self._buttons[widget] = btn
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_widget(self, widget): """Remove the given widget from the tooltip :param widget: the widget to remove :type widget: QtGui.QWidget :returns: None :rtype: None :raises: KeyError """
# Drop the tracking entry (pop raises KeyError for unknown widgets, as
# documented), detach the button from the layout, and schedule its Qt-side
# deletion.
button = self._buttons.pop(widget)
self.layout().removeWidget(button)
button.deleteLater()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def eventFilter(self, watched, event): """Filter ToolTip events and display this tooltip widget, if watched requests a tooltip. :param watched: The watched object :type watched: QtCore.QObject :param event: The event sent by watched :type event: QtCore.QEvent :returns: True if the event was processed. False if the event should be passed on. :rtype: bool :raises: None """
if event.type() == self._triggerevent: self.show() return True else: return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_position(self, ): """Return a recommended position for this widget to appear This implemenation returns a position so that the widget is vertically centerd on the mouse and 10 pixels left of the mouse :returns: the position :rtype: QPoint :raises: None """
# Start at the cursor, then shift the tooltip per the configured alignment
# flags: Left/Top anchor near the cursor (minus offset), Right/Bottom place
# the far edge offset past it, Center flags center that axis on the cursor.
# The '& flag == flag' tests handle combined alignment bitmasks.
pos = QtGui.QCursor.pos()
if self._alignment & QtCore.Qt.AlignLeft == QtCore.Qt.AlignLeft:
    pos.setX(pos.x() - self._offset)
elif self._alignment & QtCore.Qt.AlignRight == QtCore.Qt.AlignRight:
    pos.setX(pos.x() - self.frameGeometry().width() + self._offset)
elif self._alignment & QtCore.Qt.AlignHCenter == QtCore.Qt.AlignHCenter:
    pos.setX(pos.x() - self.frameGeometry().width()/2)
if self._alignment & QtCore.Qt.AlignTop == QtCore.Qt.AlignTop:
    pos.setY(pos.y() - self._offset)
elif self._alignment & QtCore.Qt.AlignBottom == QtCore.Qt.AlignBottom:
    pos.setY(pos.y() - self.frameGeometry().height() + self._offset)
elif self._alignment & QtCore.Qt.AlignVCenter == QtCore.Qt.AlignVCenter:
    pos.setY(pos.y() - self.frameGeometry().height()/2)
return pos
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def show(self, ): """Reimplementation that moves the tooltip and updates the buttons :returns: None :rtype: None :raises: None """
# Refresh icons first (so the size is current), compute the aligned
# position, move there, and only then actually show.
self.update_all_buttons()
pos = self.get_position()
self.move(pos)
super(WidgetToolTip, self).show()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def show(self, ): """Reimplementation of show to update all currently available JB_MainWindows :returns: None :rtype: None :raises: None """
# Before showing, register any JB_MainWindow instances not yet represented
# in the tooltip (set difference keeps existing entries untouched).
wins = set(JB_MainWindow.instances())
widgets = set(self.get_widgets())
for w in wins - widgets:
    self.add_widget(w)
super(JB_WindowToolTip, self).show()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_issue(self, branch): """ Gets the JIRA issue associated with the branch name. Returns None if no issue with this branch name. """
# Look up the JIRA issue whose key equals the branch name; 404 means no
# such issue, other JIRA errors are reported verbatim.  Falls through to
# an implicit None on any failure or empty branch.
# NOTE: the 'print' statements are Python 2 syntax — this block is py2-only.
if branch:
    try:
        issue = self.issue(branch, expand='changelog')
        return issue
    except jira.exceptions.JIRAError as ex:
        if ex.status_code == 404:
            print "No JIRA issue found for branch %s" % branch
        else:
            print str(ex)