text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
def dxdy(line):
    """Return the normalised ascent (unit direction) vector of *line*.

    :param line: 4-tuple (x0, y0, x1, y1)
    :returns: (dx, dy) with unit length
    :raises ZeroDivisionError: if the line has zero length
    """
    x0, y0, x1, y1 = line
    dx = float(x1 - x0)
    dy = float(y1 - y0)
    f = hypot(dx, dy)
    return dx / f, dy / f
def fromAttr(mid, ang, dist):
    """Create a line from its middle point, angle and length.

    :param mid: (mx, my) middle point
    :param ang: angle in radians
    :param dist: total length of the line
    :returns: (x0, y0, x1, y1)
    """
    mx, my = mid
    # half the length in each direction from the middle point
    dx = cos(ang) * dist * 0.5
    dy = sin(ang) * dist * 0.5
    return mx - dx, my - dy, mx + dx, my + dy
def fromAttr2(start, ang, dist):
    """Create a line from its start point, angle and length.

    :param start: (sx, sy) start point
    :param ang: angle in radians
    :param dist: length of the line
    :returns: (x0, y0, x1, y1)
    """
    sx, sy = start
    dx = cos(ang) * dist
    dy = sin(ang) * dist
    return sx, sy, sx + dx, sy + dy
def merge(l1, l2):
    """Merge two lines into one spanning line.

    Picks the longest of the four end-point combinations as extent, its
    middle as new middle, and the mean of both line angles as direction.

    NOTE(review): averaging angles naively is wrong near the +/-pi
    wrap-around -- confirm callers only merge near-parallel lines.
    """
    x1, y1, x2, y2 = l1
    xx1, yy1, xx2, yy2 = l2
    # all 4 combinations of one end point from each line
    comb = ((x1, y1, xx1, yy1),
            (x1, y1, xx2, yy2),
            (x2, y2, xx1, yy1),
            (x2, y2, xx2, yy2))
    d = [length(c) for c in comb]
    i = argmax(d)
    dist = d[i]
    mid = middle(comb[i])
    a = (angle(l1) + angle(l2)) * 0.5
    return fromAttr(mid, a, dist)
def distance(line, point):
    """Distance from the infinite line through *line* to *point*.

    If *point* is itself given as a line (4 values), the middle point of
    that line is used instead.
    """
    x0, y0, x1, y1 = line
    try:
        p1, p2 = point
    except ValueError:  # a line was given instead of a point
        p1, p2 = middle(point)
    # line in normal form n1*x + n2*y + n0 = 0
    n1 = ascent(line)
    n2 = -1
    n0 = y0 - n1 * x0
    return abs(n1 * p1 + n2 * p2 + n0) / (n1 ** 2 + n2 ** 2) ** 0.5
def intersection(line1, line2):
    """Return the coordinates of a point of intersection given two lines.

    Return None if the lines are parallel but non-collinear.
    Return an arbitrary point of intersection if the lines are collinear.

    Parameters:
        line1 and line2: lines given by 4 points (x0,y0,x1,y1).
    """
    x1, y1, x2, y2 = line1
    u1, v1, u2, v2 = line2
    (a, b), (c, d) = (x2 - x1, u1 - u2), (y2 - y1, v1 - v2)
    e, f = u1 - x1, v1 - y1
    # Solve ((a,b), (c,d)) * (t,s) = (e,f)
    denom = float(a * d - b * c)
    if _near(denom, 0):  # parallel
        # If collinear, the equation is solvable with t = 0.
        # When t=0, s would have to equal e/b and f/d
        if b == 0 or d == 0:
            return None
        if _near(e / b, f / d):  # collinear
            px = x1
            py = y1
        else:
            return None
    else:
        t = (e * d - b * f) / denom
        # s = (a*f - e*c)/denom
        px = x1 + t * (x2 - x1)
        py = y1 + t * (y2 - y1)
    return px, py
def translate(line, ascent, offs=0):
    """offs -> shifts parallel to line; ascent -> rotate line"""
    # TODO: why do I have this factor here?
    ascent *= -2
    offs *= -2
    l0 = length(line)
    # change relative to line:
    t0 = offs  # -h+offs
    t1 = l0 * ascent + offs
    return translate2P(line, t0, t1)
def splitN(line, n):
    """Split a line into *n* equal sub-lines.

    :param line: (x0, y0, x1, y1)
    :param n: number of sub-lines
    :returns: (n, 4) array of sub-lines; dtype follows the first coordinate
        (NOTE: integer coordinates will truncate the fractional steps)
    """
    x0, y0, x1, y1 = line
    out = empty((n, 4), dtype=type(line[0]))
    px, py = x0, y0
    dx = (x1 - x0) / n
    dy = (y1 - y0) / n
    for i in range(n):
        o = out[i]
        o[0] = px
        o[1] = py
        px += dx
        py += dy
        o[2] = px
        o[3] = py
    return out
def _remove_files(files):
    """
    Remove all given files.

    Args:
        files (list): List of filenames, which will be removed.
    """
    logger.debug("Request for file removal (_remove_files()).")

    for fn in files:
        if os.path.exists(fn):  # silently skip already-missing files
            logger.debug("Removing '%s'." % fn)
            os.remove(fn)
def _safe_read_meta_file(fn, error_protocol):
    """
    Try to read MetadataFile. If an exception is raised, log the error to
    `error_protocol` and return None.
    """
    try:
        return MetadataFile(fn)
    except Exception as e:  # fix: py2-only `except Exception, e` syntax
        # fix: `e.message` does not exist on py3; %s of the exception is
        # equivalent for message-carrying exceptions
        error_protocol.append(
            "Can't read MetadataFile '%s':\n\t%s\n" % (fn, e)
        )
def _process_pair(first_fn, second_fn, error_protocol):
    """
    Look at given filenames, decide which is what and try to pair them.

    Returns a list with either one DataPair, one salvaged EbookFile, or two
    standalone items (when both files are of the same kind).
    """
    ebook = None
    metadata = None

    if _is_meta(first_fn) and not _is_meta(second_fn):  # 1st meta, 2nd data
        logger.debug(
            "Parsed: '%s' as meta, '%s' as data." % (first_fn, second_fn)
        )
        metadata, ebook = first_fn, second_fn
    elif not _is_meta(first_fn) and _is_meta(second_fn):  # 1st data, 2nd meta
        logger.debug(
            "Parsed: '%s' as meta, '%s' as data." % (second_fn, first_fn)
        )
        metadata, ebook = second_fn, first_fn
    elif _is_meta(first_fn) and _is_meta(second_fn):  # both metadata
        logger.debug(
            "Parsed: both '%s' and '%s' as meta." % (first_fn, second_fn)
        )
        return [
            _safe_read_meta_file(first_fn, error_protocol),
            _safe_read_meta_file(second_fn, error_protocol)
        ]
    else:  # both data
        logger.debug(
            "Parsed: both '%s' and '%s' as data." % (first_fn, second_fn)
        )
        return [
            EbookFile(first_fn),
            EbookFile(second_fn)
        ]

    # process pairs, which were created in first two branches of the if
    # statement above
    pair = DataPair(
        metadata_file=_safe_read_meta_file(metadata, error_protocol),
        ebook_file=EbookFile(ebook)
    )

    if not pair.metadata_file:
        logger.error(
            "Can't parse MetadataFile '%s'. Continuing with data file '%s'." % (
                metadata,
                ebook
            )
        )
        return [pair.ebook_file]

    return [pair]
def _process_directory(files, user_conf, error_protocol):
    """
    Look at items in given directory, try to match them for same names and
    pair them. If the items can't be paired, add their representation.

    Note:
        All successfully processed files are removed.

    Returns:
        list: of items. Example: [MetadataFile, DataPair, DataPair, EbookFile]
    """
    items = []
    banned = [settings.USER_IMPORT_LOG, settings.USER_ERROR_LOG]
    # fix: a py3 filter() object has no len() and can't be indexed below;
    # use a list comprehension instead
    files = [x for x in files if os.path.basename(x) not in banned]

    if len(files) == 2 and conf_merger(user_conf, "SAME_DIR_PAIRING"):
        logger.debug("There are only two files.")
        items.extend(_process_pair(files[0], files[1], error_protocol))
        files = []

    while files:
        same_names = []
        fn = files.pop()
        logger.debug("Processing '%s'." % fn)

        # get files with same names (ignore paths and suffixes)
        if conf_merger(user_conf, "SAME_NAME_DIR_PAIRING"):
            same_names = _same_named(fn, files)  # returns (index, name)
            # fix: tuple-parameter lambdas (`lambda (i, fn): ...`) are
            # invalid syntax on py3 -- unpack in comprehensions instead
            indexes = [i for i, _ in same_names]            # get indexes
            same_names = [name for _, name in same_names]   # get names

            # remove `same_names` from `files` (they are processed in this
            # pass)
            for i in sorted(indexes, reverse=True):
                del files[i]

        # has exactly one file pair
        SDP = conf_merger(user_conf, "SAME_NAME_DIR_PAIRING")
        if len(same_names) == 1 and SDP:
            logger.debug(
                "'%s' can be probably paired with '%s'." % (fn, same_names[0])
            )
            items.extend(_process_pair(fn, same_names[0], error_protocol))
        elif not same_names:  # there are no similar files
            logger.debug("'%s' can't be paired. Adding standalone file." % fn)
            if _is_meta(fn):
                items.append(_safe_read_meta_file(fn, error_protocol))
            else:
                items.append(EbookFile(fn))
        else:  # error - there are too many similar files
            logger.error(
                "Too many files with same name: %s" % ", ".join(same_names)
            )
            error_protocol.append(
                "Too many files with same name:" +
                "\n\t".join(same_names) +
                "\n\n---\n"
            )

    # drop None entries produced by failed metadata reads
    return [x for x in items if x]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _index(array, item, key=None): """ Array search function. Written, because ``.index()`` method for array doesn't have `key` parameter and raises `ValueError`, if the item is not found. Args: array (list): List of items, which will be searched. item (whatever): Item, which will be matched to elements in `array`. key (function, default None): Function, which will be used for lookup into each element in `array`. Return: Index of `item` in `array`, if the `item` is in `array`, else `-1`. """
for i, el in enumerate(array): resolved_el = key(el) if key else el if resolved_el == item: return i return -1
def _isbn_pairing(items):
    """
    Pair `items` with same ISBN into `DataPair` objects.

    Args:
        items (list): list of items, which will be searched.

    Returns:
        list: list with paired items. Paired items are removed, `DataPair` is
            added instead.
    """
    NameWrapper = namedtuple("NameWrapper", ["name", "obj"])
    # comprehensions instead of py2 map()/filter() chains
    metas = [
        NameWrapper(_just_name(x.filename), x)
        for x in items if isinstance(x, MetadataFile)
    ]
    ebooks = [
        NameWrapper(_just_name(x.filename), x)
        for x in items if isinstance(x, EbookFile)
    ]

    # simple pairing mechanism, which shouldn't be O^2 complex, but something
    # slightly better
    metas = sorted(metas, key=lambda x: x.name)
    ebooks = sorted(ebooks, key=lambda x: x.name, reverse=True)
    while metas:
        meta = metas.pop()

        # only filenames that are themselves valid ISBNs take part in pairing
        if not isbn_validator.is_valid_isbn(meta.name):
            continue

        if not ebooks:
            break

        ebook_index = _index(ebooks, meta.name, key=lambda x: x.name)
        if ebook_index >= 0:
            logger.debug(
                "Pairing '%s' and '%s'." % (
                    meta.obj.filename,
                    ebooks[ebook_index].obj.filename
                )
            )
            items.append(
                DataPair(
                    metadata_file=meta.obj,
                    ebook_file=ebooks[ebook_index].obj
                )
            )
            items.remove(meta.obj)
            items.remove(ebooks[ebook_index].obj)
            # ebooks are reverse-sorted, so everything before the match can't
            # pair with the remaining (smaller) meta names
            ebooks = ebooks[ebook_index + 1:]

    return items
def _create_import_log(items):
    """
    Used to create log with successfully imported data.

    Returns:
        list: human-readable log lines, one (or three for pairs) per item.
    """
    log = []

    for item in items:
        if isinstance(item, MetadataFile):
            log.append(
                "Metadata file '%s' successfully imported." % item.filename
            )
        elif isinstance(item, EbookFile):
            log.append(
                "Ebook file '%s' successfully imported." % item.filename
            )
        elif isinstance(item, DataPair):
            meta = item.metadata_file.filename
            data = item.ebook_file.filename
            log.extend([
                "Metadata and data files paired to epub. import request:",
                "\tMetadata file '%s' successfully imported." % meta,
                "\tEbook file '%s' successfully imported." % data
            ])

    return log
def _process_items(items, user_conf, error_protocol):
    """
    Parse metadata. Remove processed and successfully parsed items.

    Returns successfully processed items.
    """
    def process_meta(item, error_protocol):
        # Parse one metadata-bearing item; on failure log the error and, for
        # a DataPair, salvage its ebook half.
        try:
            return item._parse()
        except Exception as e:  # fix: py2-only `except Exception, e` syntax
            # fix: `e.message` does not exist on py3
            error_protocol.append(
                "Can't parse %s: %s" % (item._get_filenames()[0], e)
            )
            if isinstance(item, DataPair):
                return item.ebook_file

    # process all items and put them to output queue
    out = []
    for item in items:
        if isinstance(item, EbookFile):
            out.append(item)
        else:
            out.append(process_meta(item, error_protocol))
    # remove None items (process_meta() fails)
    out = [x for x in out if x]

    # remove processed files
    fn_pool = []
    soon_removed = out if conf_merger(user_conf, "LEAVE_BAD_FILES") else items
    for item in soon_removed:
        fn_pool.extend(item._get_filenames())
    _remove_files(fn_pool)

    return out
def start(self, daemon=True):
    """
    Start driving the chain asynchronously, return immediately

    :param daemon: ungracefully kill the driver when the program terminates
    :type daemon: bool
    """
    # non-blocking acquire: if `run` already holds the lock, do nothing
    if self._run_lock.acquire(False):
        try:
            # there is a short race window in which `start` releases the lock,
            # but `run` has not picked it up yet, but the thread exists anyway
            if self._run_thread is None:
                self._run_thread = threading.Thread(target=self._run_in_thread)
                self._run_thread.daemon = daemon
                self._run_thread.start()
        finally:
            self._run_lock.release()
def run(self):
    """
    Start driving the chain, block until done
    """
    with self._run_lock:
        while self.mounts:
            # fix: iterate over a snapshot -- removing from the list while
            # iterating it skips the element after each removal (the outer
            # while masked this, but it caused redundant sweeps)
            for mount in list(self.mounts):
                try:
                    next(mount)
                except StopIteration:
                    self.mounts.remove(mount)
def fill(self, color):
    """
    Fill the whole screen with the given color.

    :param color: Color to use for filling
    :type color: tuple
    """
    # matrix is indexed [x][y]: width columns of height cells each
    self.matrix = [[color for _ in range(self.height)]
                   for _ in range(self.width)]
def draw_dot(self, pos, color):
    """
    Draw one single dot with the given color on the screen.
    Out-of-bounds positions are silently ignored.

    :param pos: Position of the dot
    :param color: Color for the dot
    :type pos: tuple
    :type color: tuple
    """
    if 0 <= pos[0] < self.width and 0 <= pos[1] < self.height:
        self.matrix[pos[0]][pos[1]] = color
def draw_line(self, start, end, color):
    """
    Draw a line with the given color on the screen.

    :param start: Start point of the line
    :param end: End point of the line
    :param color: Color of the line
    :type start: tuple
    :type end: tuple
    :type color: tuple
    """
    def dist(p, a, b):
        # perpendicular distance from point p to the infinite line a-b
        return (abs((b[0] - a[0]) * (a[1] - p[1]) -
                    (a[0] - p[0]) * (b[1] - a[1])) /
                math.sqrt((b[0] - a[0]) ** 2 + (b[1] - a[1]) ** 2))

    # collect every grid point of the bounding box closer than half a pixel
    # to the ideal line
    points = []
    for x in range(min(start[0], end[0]), max(start[0], end[0]) + 1):
        for y in range(min(start[1], end[1]), max(start[1], end[1]) + 1):
            if dist((x, y), start, end) < 0.5:
                points.append((x, y))

    for point in points:
        self.draw_dot(point, color)
def draw_rect(self, pos, size, color, fillcolor=None):
    """
    Draw a rectangle with the given color on the screen and optionally
    fill it with fillcolor.

    :param pos: Top left corner of the rectangle
    :param size: Size of the rectangle
    :param color: Color for borders
    :param fillcolor: Color for infill
    :type pos: tuple
    :type size: tuple
    :type color: tuple
    :type fillcolor: tuple
    """
    # draw top and bottom line
    for x in range(size[0]):
        self.draw_dot((pos[0] + x, pos[1]), color)
        self.draw_dot((pos[0] + x, pos[1] + size[1] - 1), color)

    # draw left and right side
    for y in range(size[1]):
        self.draw_dot((pos[0], pos[1] + y), color)
        self.draw_dot((pos[0] + size[0] - 1, pos[1] + y), color)

    # draw filled rect (interior only exists for sizes >= 3x3)
    if fillcolor and size[0] >= 3 and size[1] >= 3:
        for x in range(size[0] - 2):
            for y in range(size[1] - 2):
                self.draw_dot((pos[0] + 1 + x, pos[1] + 1 + y), fillcolor)
def draw_circle(self, pos, radius, color, fillcolor=None):
    """
    Draw a circle with the given color on the screen and optionally
    fill it with fillcolor.

    :param pos: Center of the circle
    :param radius: Radius
    :param color: Color for border
    :param fillcolor: Color for infill
    :type pos: tuple
    :type radius: int
    :type color: tuple
    :type fillcolor: tuple
    """
    # TODO: This still produces rubbish but it's on a good way to success
    def dist(d, p, r):
        # absolute deviation of point d from the ideal circle (center p,
        # radius r)
        return abs(math.sqrt((p[0] - d[0]) ** 2 + (p[1] - d[1]) ** 2) - r)

    points = []
    for x in range(pos[0] - radius, pos[0] + radius):
        for y in range(pos[1] - radius, pos[1] + radius):
            if 0 < x < self.width and 0 < y < self.height:
                if dist((x, y), pos, radius) < 1.3:
                    points.append((x, y))

    # draw fill color
    # NOTE(review): infill is not implemented yet -- the loop body is a no-op
    if fillcolor:
        for point in points:
            pass

    # draw outline
    for point in points:
        self.draw_dot(point, color)
def blit(self, surface, pos=(0, 0)):
    """
    Blits a surface on this surface at pos

    :param surface: Surface to blit
    :param pos: Top left point to start blitting
    :type surface: Surface
    :type pos: tuple
    """
    for x in range(surface.width):
        for y in range(surface.height):
            px = x + pos[0]
            py = y + pos[1]
            # fix: was `0 < px` / `0 < py`, which wrongly skipped row and
            # column 0; use inclusive lower bound like draw_dot() does
            if 0 <= px < self.width and 0 <= py < self.height:
                self.matrix[px][py] = surface.matrix[x][y]
def replace_color(self, before, after):
    """
    Replaces a color on a surface with another one.

    :param before: Change all pixels with this color
    :param after: To that color
    :type before: tuple
    :type after: tuple
    """
    for x in range(self.width):
        for y in range(self.height):
            if self.matrix[x][y] == before:
                self.matrix[x][y] = after
def detect_secrets():
    """Call detect-secrets tool and fail (exit -1) on any finding."""
    # use
    #   blah = "foo"  # pragma: whitelist secret
    # to ignore a false positive
    errors_file = "detect-secrets-results.txt"
    print(execute_get_text("pwd"))
    command = "{0} detect-secrets --scan --base64-limit 4 --exclude .idea|.js|.min.js|.html|.xsd|" \
              "lock.json|synced_folders|.scss|Pipfile.lock|" \
              "lint.txt|{1}".format(PIPENV, errors_file).strip()
    print(command)
    bash_process = subprocess.Popen(command.split(" "),
                                    # shell=True,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE
                                    )
    bash_process.wait()  # return code was never used
    out, err = bash_process.communicate()  # wait

    with open(errors_file, "w+") as file_handle:
        if not out:
            print("Warning- no output from detect secrets. Happens with git hook, but not from ordinary command line.")
            return
        file_handle.write(out.decode())

    with open(errors_file) as f:
        try:
            data = json.load(f)
        except Exception:
            print("Can't read json")
            exit(-1)
            return

    if data["results"]:
        for result in data["results"]:
            print(result)
        print("detect-secrets has discovered high entropy strings, possibly passwords?")
        exit(-1)
def mypy():
    """Are types ok? Run mypy, filter out test files, cap the error count."""
    if sys.version_info < (3, 4):
        print("Mypy doesn't work on python < 3.4")
        return
    if IS_TRAVIS:
        command = "{0} -m mypy {1} --ignore-missing-imports --strict".format(PYTHON, PROJECT_NAME).strip()
    else:
        command = "{0} mypy {1} --ignore-missing-imports --strict".format(PIPENV, PROJECT_NAME).strip()
    bash_process = subprocess.Popen(command.split(" "),
                                    # shell=True,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE
                                    )
    out, err = bash_process.communicate()  # wait
    mypy_file = "mypy_errors.txt"
    with open(mypy_file, "w+") as lint_file:
        lines = out.decode().split("\n")
        for line in lines:
            # skip findings in build helpers and test modules
            if "build_utils.py" in line:
                continue
            if "test.py" in line:
                continue
            if "tests.py" in line:
                continue
            if "/test_" in line:
                continue
            if "/tests_" in line:
                continue
            else:
                lint_file.writelines([line + "\n"])

    # fix: the original `open(mypy_file)` inside the genexp leaked the
    # file handle; use a context manager
    with open(mypy_file) as fh:
        num_lines = sum(1 for line in fh if line and line.strip(" \n"))
    max_lines = 25
    if num_lines > max_lines:
        raise TypeError("Too many lines of mypy : {0}, max {1}".format(num_lines, max_lines))
def gemfury():
    """Push to gem fury, a repo with private options"""
    # fury login
    # fury push dist/*.gz --as=YOUR_ACCT
    # fury push dist/*.whl --as=YOUR_ACCT
    cp = subprocess.run(("fury login --as={0}".format(GEM_FURY).split(" ")),
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                        shell=False, check=True)
    print(cp.stdout)

    # read the package version without importing the package
    about = {}
    with open(os.path.join(SRC, PROJECT_NAME, "__version__.py")) as f:
        exec(f.read(), about)
    version = Version(about["__version__"])
    print("Have version : " + str(version))
    print("Preparing to upload")

    if version not in get_versions():
        for kind in ["gz", "whl"]:
            try:
                files = glob.glob("{0}dist/*.{1}".format(SRC.replace(".", ""), kind))
                for file_name in files:
                    cp = subprocess.run(("fury push {0} --as={1}".format(file_name, GEM_FURY).split(" ")),
                                        stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                        shell=False, check=True)
                    print("result of fury push")
                    for stream in [cp.stdout, cp.stderr]:
                        if stream:
                            for line in stream.decode().split("\n"):
                                print(line)
            except subprocess.CalledProcessError as cpe:
                print("result of fury push- got error")
                # NOTE(review): `cp` here is the *previous* successful call;
                # the failing command's output lives on cpe.stdout/cpe.stderr
                for stream in [cp.stdout, cp.stderr]:
                    if stream:
                        for line in stream.decode().split("\n"):
                            print(line)
                print(cpe)
                raise
def needs_label(model_field, field_name):
    """
    Returns `True` if the label based on the model's verbose name
    is not equal to the default label it would have based on its field name.
    """
    default_label = field_name.replace('_', ' ').capitalize()
    return capfirst(model_field.verbose_name) != default_label
def get_relation_kwargs(field_name, relation_info):
    """
    Creates a default instance of a flat relational field.

    Builds the keyword arguments for a serializer relation from the
    (model_field, related_model, to_many, to_field, has_through_model) tuple.
    """
    model_field, related_model, to_many, to_field, has_through_model = relation_info
    kwargs = {
        'queryset': related_model._default_manager,
        'view_name': get_detail_view_name(related_model)
    }

    if to_many:
        kwargs['many'] = True

    if to_field:
        kwargs['to_field'] = to_field

    if has_through_model:
        # through-model relations are never directly writable
        kwargs['read_only'] = True
        kwargs.pop('queryset', None)

    if model_field:
        if model_field.verbose_name and needs_label(model_field, field_name):
            kwargs['label'] = capfirst(model_field.verbose_name)
        help_text = model_field.help_text
        if help_text:
            kwargs['help_text'] = help_text
        if not model_field.editable:
            kwargs['read_only'] = True
            kwargs.pop('queryset', None)
        if kwargs.get('read_only', False):
            # If this field is read-only, then return early.
            # No further keyword arguments are valid.
            return kwargs
        if model_field.has_default() or model_field.blank or model_field.null:
            kwargs['required'] = False
        if model_field.null:
            kwargs['allow_null'] = True
        if model_field.validators:
            kwargs['validators'] = model_field.validators
        if getattr(model_field, 'unique', False):
            validator = UniqueValidator(queryset=model_field.model._default_manager)
            kwargs['validators'] = kwargs.get('validators', []) + [validator]
        if to_many and not model_field.blank:
            kwargs['allow_empty'] = False

    return kwargs
def create_api_dict(bases, url, **kwargs):
    """Create an API dict

    :param bases: configuration bases
    :type bases: :class:`~pyextdirect.configuration.Base` or list of
        :class:`~pyextdirect.configuration.Base`
    :param string url: URL where the router can be reached
    :param \*\*kwargs: extra keyword arguments to populate the API dict.
        Most common keyword arguments are *id*, *maxRetries*, *namespace*,
        *priority* and *timeout*

    .. note::
        Keyword arguments *type*, *url*, *actions* and *enableUrlEncode*
        will be overridden
    """
    api = kwargs or {}
    api.update({'type': 'remoting', 'url': url, 'actions': defaultdict(list),
                'enableUrlEncode': 'data'})
    if not isinstance(bases, list):
        bases = [bases]
    configuration = merge_configurations([b.configuration for b in bases])
    # fix: dict.iteritems() does not exist on py3; items() works on both
    for action, methods in configuration.items():
        for method, element in methods.items():
            if isinstance(element, tuple):
                func = getattr(element[0], element[1])
                # exclude `self` from the argument count
                # NOTE(review): inspect.getargspec is removed in py3.11 --
                # consider getfullargspec when dropping py2 support
                attrs = len(inspect.getargspec(func)[0]) - 1
            else:
                func = element
                attrs = len(inspect.getargspec(func)[0])
            spec = {'name': method, 'len': attrs}
            if func.exposed_kind == SUBMIT:
                spec['formHandler'] = True
            api['actions'][action].append(spec)
    return api
def bundle_sequences(element):
    """
    Convert sequence types to bundles

    This converter automatically constructs a :py:class:`~.Bundle` from any
    :py:class:`tuple`, :py:class:`list` or :py:class:`set` encountered during
    linking. The following two lines produce the same chain:

    .. code:: python

        a >> [b, c, d] >> e
        a >> Bundle((b, c, d)) >> e
    """
    if isinstance(element, (tuple, list, set)):
        return Bundle(element)
    # NotImplemented lets the linker try the next registered converter
    return NotImplemented
def css(app, env):
    """
    Add bolditalic CSS.

    :param app: Sphinx application context.
    :param env: Sphinx environment context.
    """
    srcdir = os.path.abspath(os.path.dirname(__file__))
    cssfile = 'bolditalic.css'
    csspath = os.path.join(srcdir, cssfile)
    buildpath = os.path.join(app.outdir, '_static')
    try:
        os.makedirs(buildpath)
    except OSError:
        # directory may already exist (race-safe makedirs); re-raise anything
        # else
        if not os.path.isdir(buildpath):
            raise
    copy(csspath, buildpath)
    # NOTE(review): add_stylesheet is deprecated in newer Sphinx in favour of
    # add_css_file -- confirm the supported Sphinx versions
    app.add_stylesheet(cssfile)
    return
def bolditalic(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """
    Add bolditalic role.

    Returns 2 part tuple containing list of nodes to insert into the
    document and a list of system messages. Both are allowed to be empty.

    :param name: The role name used in the document.
    :param rawtext: The entire markup snippet, with role.
    :param text: The text marked with the role.
    :param lineno: The line number where rawtext appears in the input.
    :param inliner: The inliner instance that called this function.
    :param options: Directive options for customization.
    :param content: The directive content for customization.
    """
    # mutable defaults are the docutils role convention; they are never
    # mutated here
    node = nodes.inline(rawtext, text)
    node.set_class('bolditalic')
    return [node], []
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _safe_getmodule(o): """Attempts to return the module in which `o` is defined. """
from inspect import getmodule try: return getmodule(o) except: # pragma: no cover #There is nothing we can do about this for now. msg.err("_safe_getmodule: {}".format(o), 2) pass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _safe_getattr(o): """Gets the attribute from the specified object, taking the acorn decoration into account. """
def getattribute(attr): # pragma: no cover if hasattr(o, "__acornext__") and o.__acornext__ is not None: return o.__acornext__.__getattribute__(attr) elif hasattr(o, "__acorn__") and o.__acorn__ is not None: #Some of the functions have the original function (when it was not #extended) in the __acorn__ attribute. return o.__acorn__.__getattribute__(attr) else: return getattr(o, attr) return getattribute
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _safe_hasattr(o, attr): """Returns True if `o` has the specified attribute. Takes edge cases into account where packages didn't intend to be used like acorn uses them. """
try: has = hasattr(o, attr) except: # pragma: no cover has = False msg.err("_safe_hasattr: {}.{}".format(o, attr), 2) pass return has
def _update_attrs(nobj, oobj, exceptions=None, acornext=False):
    """Updates the attributes on `nobj` to match those of old, excluding
    any attributes in the exceptions list.

    Returns True unless a ValueError was hit while copying an attribute.
    """
    success = True
    if (acornext and hasattr(oobj, "__acornext__")
            and oobj.__acornext__ is not None):  # pragma: no cover
        target = oobj.__acornext__
    else:
        target = oobj

    for a, v in _get_members(target):
        if hasattr(nobj, a):
            # We don't want to overwrite something that acorn has already done.
            continue
        if a in ["__class__", "__code__", "__closure__"]:  # pragma: no cover
            # These attributes are not writeable by design.
            continue
        if exceptions is None or a not in exceptions:
            try:
                setattr(nobj, a, v)
            except TypeError:  # pragma: no cover
                # Some of the built-in types have __class__ attributes (for
                # example) that we can't set on a function type. This catches
                # that case and any others.
                emsg = "_update_attrs (type): {}.{} => {}"
                msg.err(emsg.format(nobj, a, target), 2)
            except AttributeError:  # pragma: no cover
                # Probably a read-only attribute that we are trying to set.
                # Just ignore it.
                emsg = "_update_attrs (attr): {}.{} => {}"
                msg.err(emsg.format(nobj, a, target), 2)
            except ValueError:  # pragma: no cover
                emsg = "_update_attrs (value): {}.{} => {}"
                msg.err(emsg.format(nobj, a, target), 2)
                success = False
    return success
def _get_name_filter(package, context="decorate", reparse=False):
    """Makes sure that the name filters for the specified package have been
    loaded.

    Args:
        package (str): name of the package that this method belongs to.
        context (str): one of ['decorate', 'time', 'analyze']; specifies which
            section of the configuration settings to check.
        reparse (bool): when True, re-read the configuration even if cached.
    """
    global name_filters
    pkey = (package, context)
    if pkey in name_filters and not reparse:
        return name_filters[pkey]

    from acorn.config import settings
    spack = settings(package)

    # The acorn.* sections allow for global settings that affect every package
    # that ever gets wrapped.
    sections = {
        "decorate": ["tracking", "acorn.tracking"],
        "time": ["timing", "acorn.timing"],
        "analyze": ["analysis", "acorn.analysis"]
    }

    filters, rfilters = None, None
    import re
    if context in sections:
        # We are interested in the 'filter' and 'rfilter' options if they
        # exist.
        filters, rfilters = [], []
        ignores, rignores = [], []
        for section in sections[context]:
            if spack.has_section(section):
                options = spack.options(section)
                if "filter" in options:
                    filters.extend(re.split(r"\s*\$\s*",
                                            spack.get(section, "filter")))
                if "rfilter" in options:  # pragma: no cover
                    # Until now, the fnmatch filters have been the most
                    # useful. So I don't have any unit tests for regex
                    # filters.
                    pfilters = re.split(r"\s*\$\s*",
                                        spack.get(section, "rfilter"))
                    rfilters.extend([re.compile(p, re.I) for p in pfilters])
                if "ignore" in options:
                    ignores.extend(re.split(r"\s*\$\s*",
                                            spack.get(section, "ignore")))
                if "rignore" in options:  # pragma: no cover
                    pignores = re.split(r"\s*\$\s*",
                                        spack.get(section, "rignore"))
                    # fix: original compiled `pfilters` here (copy/paste
                    # slip), which compiled the wrong patterns and raised
                    # NameError when no "rfilter" option preceded it.
                    rignores.extend([re.compile(p, re.I) for p in pignores])

        name_filters[pkey] = {
            "filters": filters,
            "rfilters": rfilters,
            "ignores": ignores,
            "rignores": rignores
        }
    else:
        name_filters[pkey] = None

    return name_filters[pkey]
def _check_args(*argl, **argd):
    """Checks the specified argument lists for objects that are trackable.

    Positional arguments are collected under the "_" key; keyword arguments
    keep their own keys. Each value is the tracker string of the argument.
    """
    args = {"_": []}
    for item in argl:
        args["_"].append(_tracker_str(item))
    for key, item in argd.items():
        args[key] = _tracker_str(item)
    return args
def _reduced_stack(istart=3, iend=5, ipython=True):
    """Returns the reduced function call stack that includes only relevant
    function calls (i.e., ignores any that are not part of the specified
    package or acorn).

    NOTE(review): the `ipython` parameter is currently unused; kept for
    interface compatibility.
    """
    import inspect
    # keep only frames whose filename passes _decorated_path, sliced to the
    # requested frame-record fields
    return [i[istart:iend] for i in inspect.stack() if _decorated_path(i[1])]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _pre_create(cls, atdepth, stackdepth, *argl, **argd): """Checks whether the the logging should happen based on the specified parameters. If it should, an initialized entry is returned. """
from time import time if not atdepth: rstack = _reduced_stack() reduced = len(rstack) if msg.will_print(3): # pragma: no cover sstack = [' | '.join(map(str, r)) for r in rstack] msg.info("{} => stack ({}): {}".format(cls.__fqdn__, len(rstack), ', '.join(sstack)), 3) else: reduced = stackdepth + 10 if reduced <= stackdepth: args = _check_args(*argl, **argd) entry = { "m": "{}.__new__".format(cls.__fqdn__), "a": args, "s": time(), "r": None, "stack": reduced } else: atdepth = True entry = None return (entry, atdepth)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _post_create(atdepth, entry, result): """Finishes the entry logging if applicable. """
if not atdepth and entry is not None: if result is not None: #We need to get these results a UUID that will be saved so that any #instance methods applied to this object has a parent to refer to. retid = _tracker_str(result) entry["r"] = retid ekey = retid else: # pragma: no cover ekey = _tracker_str(cls) msg.info("{}: {}".format(ekey, entry), 1) record(ekey, entry)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def creationlog(base, package, stackdepth=_def_stackdepth): """Decorator for wrapping the creation of class instances that are being logged by acorn. Args: base: base class used to call __new__ for the construction. package (str): name of (global) package the class belongs to. stackdepth (int): if the calling stack is less than this depth, than include the entry in the log; otherwise ignore it. """
@staticmethod def wrapnew(cls, *argl, **argd): global _atdepth_new, _cstack_new, streamlining origstream = None if not (decorating or streamlining): entry, _atdepth_new = _pre_create(cls, _atdepth_new, stackdepth, *argl, **argd) _cstack_new.append(cls) #See if we need to enable streamlining for this constructor. fqdn = cls.__fqdn__ if fqdn in _streamlines and _streamlines[fqdn]: #We only use streamlining for the plotting routines at the #moment, so it doesn't get hit by the unit tests. msg.std("Streamlining {}.".format(fqdn), 2) origstream = streamlining streamlining = True try: if six.PY2: result = base.__old__(cls, *argl, **argd) else: # pragma: no cover #Python 3 changed the way that the constructors behave. In cases #where a class inherits only from object, and doesn't override #the __new__ method, the __old__ we replaced was just the one #belonging to object. if base.__old__ is object.__new__: result = base.__old__(cls) else: result = base.__old__(cls, *argl, **argd) except TypeError: # pragma: no cover #This is a crazy hack! We want this to be dynamic so that it can #work with any of the packages. If the error message suggests using #a different constructor, we go ahead and use it. import sys xcls, xerr = sys.exc_info()[0:2] referral = xerr.args[0].split()[-1] if ".__new__()" in referral: t = eval(referral.split('.')[0]) result = t.__new__(cls, *argl, **argd) else: raise result = None if result is not None and hasattr(cls, "__init__"): try: cls.__init__(result, *argl, **argd) except: # pragma: no cover print(cls, argl, argd) raise else: # pragma: no cover msg.err("Object initialize failed for {}.".format(base.__name__)) #If we don't disable streamlining for the original method that set #it, then the post call would never be reached. if origstream is not None: #We avoid another dict lookup by checking whether we set the #*local* origstream to something above. 
streamlining = origstream if not (decorating or streamlining): _cstack_new.pop() if len(_cstack_new) == 0: _atdepth_new = False _post_create(_atdepth_new, entry, result) return result return wrapnew
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _pre_call(atdepth, parent, fqdn, stackdepth, *argl, **argd): """Checks whether the logging should create an entry based on stackdepth. If so, the entry is created. """
from time import time if not atdepth: rstack = _reduced_stack() if "<module>" in rstack[-1]: # pragma: no cover code = rstack[-1][1] else: code = "" reduced = len(rstack) if msg.will_print(3): # pragma: no cover sstack = [' | '.join(map(str, r)) for r in rstack] msg.info("{} => stack ({}): {}".format(fqdn, len(rstack), ', '.join(sstack)), 3) else: reduced = stackdepth + 10 bound = False if reduced <= stackdepth: args = _check_args(*argl, **argd) # At this point, we should start the entry. If the method raises an # exception, we should keep track of that. If this is an instance # method, we should get its UUID, if not, then we can just store the # entry under the full method name. #There is yet another subtletly here: many packages have a base, static #method that gets set as an instance method for sub-classes of a certain #ABC. In that case, parent will be a super-class of the first argument, #though the types will *not* be identical. Check for overlap in the base #classes of the first argument. It would be nice if we could easily #check for bound methods using inspect, but it doesn't work for some of #the C-extension modules... if (len(argl) > 0 and parent is not None and inspect.isclass(parent)): ftype = type(argl[0]) if isinstance(argl[0], parent): bound = True elif (inspect.isclass(ftype) and hasattr(ftype, "__bases__") and inspect.isclass(parent) and hasattr(parent, "__bases__")): # pragma: no cover common = set(ftype.__bases__) & set(parent.__bases__) bound = len(common) > 0 if not bound: #For now, we use the fqdn; later, if the result is not None, we #will rather index this entry by the returned result, since we #can also access the fqdn in the entry details. ekey = fqdn else: # It must have the first argument be the instance. ekey = _tracker_str(argl[0]) #Check whether the logging has been overidden by a configuration option. 
if (fqdn not in _logging or _logging[fqdn]): entry = { "m": fqdn, "a": args, "s": time(), "r": None, "c": code, } else: entry = None else: entry = None atdepth = True ekey = None return (entry, atdepth, reduced, bound, ekey)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _post_call(atdepth, package, fqdn, result, entry, bound, ekey, argl, argd): """Finishes constructing the log and records it to the database. """
from time import time if not atdepth and entry is not None: ek = ekey if result is not None: retid = _tracker_str(result) if result is not None and not bound: ek = retid entry["r"] = None else: entry["r"] = retid name = fqdn.split('.')[-1] if filter_name(fqdn, package, "time"): entry["e"] = time() - entry["s"] if filter_name(fqdn, package, "analyze"): entry["z"] = analyze(fqdn, result, argl, argd) msg.info("{}: {}".format(ek, entry), 1) # Before we return the result, let's first save this call to the # database so we have a record of it. record(ek, entry) return (ek, entry) else: return (None, None)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def post(fqdn, package, result, entry, bound, ekey, *argl, **argd): """Adds logging for the post-call result of calling the method externally. Args: fqdn (str): fully-qualified domain name of the function being logged. package (str): name of the package we are logging for. Usually the first element of `fqdn.split('.')`. result: returned from calling the method we are logging. entry (dict): one of the values returned by :func:`pre`. bound (bool): true if the method is bound. ekey (str): key under which to store the entry in the database. """
global _atdepth_call, _cstack_call _cstack_call.pop() if len(_cstack_call) == 0: _atdepth_call = False r = _post_call(_atdepth_call, package, fqdn, result, entry, bound, ekey, argl, argd) return r
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pre(fqdn, parent, stackdepth, *argl, **argd): """Adds logging for a call to the specified function that is being handled by an external module. Args: fqdn (str): fully-qualified domain name of the function being logged. parent: *object* that the function belongs to. stackdepth (int): maximum stack depth before entries are ignored. argl (list): positional arguments passed to the function call. argd (dict): keyword arguments passed to the function call. """
global _atdepth_call, _cstack_call #We add +1 to stackdepth because this method had to be called in #addition to the wrapper method, so we would be off by 1. pcres = _pre_call(_atdepth_call, parent, fqdn, stackdepth+1, *argl, **argd) entry, _atdepth_call, reduced, bound, ekey = pcres _cstack_call.append(fqdn) return (entry, bound, ekey)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _create_extension(o, otype, fqdn, pmodule): """Creates an extension object to represent `o` that can have attributes set, but which behaves identically to the given object. Args: o: object to create an extension for; no checks are performed to see if extension is actually required. otype (str): object types; one of ["classes", "functions", "methods", "modules"]. fqdn (str): fully qualified name of the package that the object belongs to. pmodule: the parent module (or class) that `o` belongs to; used for setting the special __module__ attribute. """
import types xdict = {"__acornext__": o, "__doc__": o.__doc__} if otype == "classes": classname = o.__name__ try: if fqdn in _explicit_subclasses: xclass = eval(_explicit_subclasses[fqdn]) xclass.__acornext__ = o else: xclass = type(classname, (o, ), xdict) xclass.__module__ = o.__module__ return xclass except TypeError: #This happens when a class is final, meaning that it is not allowed #to be subclassed. _final_objs.append(id(o)) return o elif (otype in ["functions", "descriptors", "unknowns"] or (otype == "builtins" and (isinstance(o, types.BuiltinFunctionType) or isinstance(o, types.BuiltinMethodType)))): #The unknowns type is for objects that don't match any of the #inspect.is* function calls, but still have a __call__ method (such as #the numpy.ufunc class instances). These can still be wrapped by another #function. def xwrapper(*args, **kwargs): try: return o(*args, **kwargs) except: #see issue #4. targs = list(map(type, args)) kargs = list(kwargs.keys()) msg.err("xwrapper: {}({}, {})".format(o, targs, kargs), 2) pass #Set the docstring and original object attributes. for attr, val in xdict.items(): setattr(xwrapper, attr, val) #We want to get the members dictionary. For classes, using #:meth:`inspect.getmembers` produces stack overflow errors. Instead, we #reference the __dict__ directly. However, for built-in methods and #functions, there is no __dict__, so we use `inspect.getmembers`. failed = False setattr(xwrapper, "__getattribute__", _safe_getattr(xwrapper)) #We want the new function to be identical to the old except that #it's __call__ method, which we overrode above. failed = not _update_attrs(xwrapper, o, ["__call__"]) if otype in ["descriptors", "unknowns"] and inspect.ismodule(pmodule): if hasattr(o, "__objclass__"): # pragma: no cover setattr(xwrapper, "__module__", pmodule.__name__) elif hasattr(o, "__class__") and o.__class__ is not None: setattr(xwrapper, "__module__", pmodule.__name__) if not failed: return xwrapper
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _extend_object(parent, n, o, otype, fqdn): """Extends the specified object if it needs to be extended. The method attempts to add an attribute to the object; if it fails, a new object is created that inherits all of `o` attributes, but is now a regular object that can have attributes set. Args: parent: has `n` in its `__dict__` attribute. n (str): object name attribute. o (list): object instances to be extended. otype (str): object types; one of ["classes", "functions", "methods", "modules"]. fqdn (str): fully qualified name of the package that the object belongs to. """
from inspect import ismodule, isclass pmodule = parent if ismodule(parent) or isclass(parent) else None try: #The __acornext__ attribute references the original, unextended #object; if the object didn't need extended, then __acornext__ is #none. if otype == "methods": setattr(o.__func__, "__acornext__", None) else: setattr(o, "__acornext__", None) fqdn = _fqdn(o, recheck=True, pmodule=pmodule) return o except (TypeError, AttributeError): #We have a built-in or extension type. okey = id(o) if okey not in _extended_objs: #We need to generate an extension for this object and store it #in the extensions dict. xobj = _create_extension(o, otype, fqdn, pmodule) fqdn = _fqdn(xobj, recheck=True, pmodule=pmodule) if xobj is not None: _extended_objs[okey] = xobj #else: we can't handle this kind of object; it just won't be #logged... try: setattr(parent, n, _extended_objs[okey]) return _extended_objs[okey] except KeyError: # pragma: no cover msg.warn("Object extension failed: {} ({}).".format(o, otype))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _fqdn(o, oset=True, recheck=False, pmodule=None): """Returns the fully qualified name of the object. Args: o (type): instance of the object's type. oset (bool): when True, the fqdn will also be set on the object as attribute `__fqdn__`. recheck (bool): for sub-classes, sometimes the super class has already had its __fqdn__ attribute set; in that case, we want to recheck the object's name. This usually only gets used during object extension. """
if id(o) in _set_failures or o is None: return None if recheck or not _safe_hasattr(o, "__fqdn__"): import inspect if not hasattr(o, "__name__"): msg.warn("Skipped object {}: no __name__ attribute.".format(o), 3) return result = None if hasattr(o, "__acornext__") and o.__acornext__ is not None: otarget = o.__acornext__ else: otarget = o omod = _safe_getmodule(otarget) or pmodule if (omod is None and hasattr(otarget, "__objclass__") and otarget.__objclass__ is not None): # pragma: no cover omod = _safe_getmodule(otarget.__objclass__) parts = ("<unknown>" if omod is None else omod.__name__, otarget.__objclass__.__name__, otarget.__name__) #msg.std("FQDN: objclass => {}".format(parts), 4) result = "{}.{}.{}".format(*parts) elif (omod is None and hasattr(otarget, "__class__") and otarget.__class__ is not None): omod = _safe_getmodule(otarget.__class__) parts = ("<unknown>" if omod is None else omod.__name__, otarget.__class__.__name__, otarget.__name__) #msg.std("FQDN: class => {}".format(parts), 4) result = "{}.{}.{}".format(*parts) elif omod is not otarget: parts = (_fqdn(omod, False), otarget.__name__) #msg.std("FQDN: o => {}".format(parts), 4) result = "{}.{}".format(*parts) else: result = otarget.__name__ if oset: _safe_setattr(o, "__fqdn__", result) return result if _safe_hasattr(o, "__fqdn__"): return o.__fqdn__
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_stack_depth(package, fqdn, defdepth=_def_stackdepth): """Loads the stack depth settings from the config file for the specified package. Args: package (str): name of the package to get stack depth info for. fqdn (str): fully qualified domain name of the member in the package. defdepth (int): default depth when one has not been configured. """
global _stack_config if package not in _stack_config: from acorn.config import settings spack = settings(package) _stack_config[package] = {} secname = "logging.depth" if spack.has_section(secname): for ofqdn in spack.options(secname): _stack_config[package][ofqdn] = spack.getint(secname, ofqdn) usedef = True if fqdn in _stack_config[package]: result = _stack_config[package][fqdn] usedef = False elif "*" in _stack_config[package]: # pragma: no cover result = _stack_config[package]["*"] usedef = False else: result = defdepth if not usedef: msg.gen("Using {} for {} stack depth.".format(result, fqdn), 3) return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _load_subclasses(package): """Loads the subclass settings for the specified package so that we can decorate the classes correctly. """
global _explicit_subclasses from acorn.config import settings spack = settings(package) if spack is not None: if spack.has_section("subclass"): _explicit_subclasses.update(dict(spack.items("subclass")))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _load_callwraps(packname, package): """Loads the special call wrapping settings for functions in the specified package. This allows the result of the original method call to be cast as a different type, or passed to a different constructor before returning from the wrapped function. Args: packname (str): name of the package to get config settings for. package: actual package object. """
global _callwraps from acorn.config import settings from acorn.logging.descriptors import _obj_getattr spack = settings(packname) if spack is not None: if spack.has_section("callwrap"): wrappings = dict(spack.items("callwrap")) for fqdn, target in wrappings.items(): caller = _obj_getattr(package, target) _callwraps[fqdn] = caller
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def decorate(package): """Decorates all the methods in the specified package to have logging enabled according to the configuration for the package. """
from os import sep global _decor_count, _decorated_packs, _decorated_o, _pack_paths global decorating if "acorn" not in _decorated_packs: _decorated_packs.append("acorn") packpath = "acorn{}".format(sep) if packpath not in _pack_paths: #We initialize _pack_paths to include common packages that people #use without decorating. Otherwise those slow *way* down and the #notebook becomes unusable. _pack_paths.append(packpath) npack = package.__name__ #Since scipy includes numpy (for example), we don't want to run the numpy #decoration twice if the person also chooses to import numpy. In that case, #we just skip it; the memory references in scipy point to the same numpy #modules and libraries. if npack not in _decorated_packs: _decor_count[npack] = [0, 0, 0] _decorated_o[npack] = {} _load_subclasses(npack) packsplit = _split_object(package, package.__name__) origdecor = decorating decorating = True for ot, ol in packsplit.items(): for name, obj in ol: decorate_obj(package, name, obj, ot) #Now that we have actually decorated all the objects, we can load the #call wraps to point to the new decorated objects. _load_callwraps(npack, package) _load_streamlines(npack, package) _load_logging(npack, package) decorating = origdecor _decorated_packs.append(npack) _pack_paths.append("{}{}".format(npack, sep)) msg.info("{}: {} (Decor/Skip/NA)".format(npack, _decor_count[npack]))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def visit_named_arg(self, _, children): """Named argument of a filter. Arguments --------- _ (node) : parsimonious.nodes.Node. children : list - 0: name of the arg - 1: for ``WS`` (whitespace): ``None``. - 2: operator - 3: for ``WS`` (whitespace): ``None``. - 4: value of the named arg Returns ------- .resources.NamedArg Instance of ``.resources.NamedArg``. Example ------- foo=1 bar="BAZ" quz=None foo=True bar=False """
return self.NamedArg( arg=children[0], arg_type=children[2], value=children[4], )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def visit_filter(self, _, children): """A filter, with optional arguments. Arguments --------- _ (node) : parsimonious.nodes.Node. children : list - 0: string, name of the filter. - 1: list of instances of ``.resources.NamedArg`` Returns ------- .resources.Filter An instance of ``.resources.Filter`` with a name and a list of arguments. The list of arguments will be ``None`` if no parenthesis. Example ------- .foo .foo() .foo(1) .foo(1, bar="baz") """
return self.Filter( name=children[0], args=children[1], )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def setup_zmq(self): """Set up a PUSH and a PULL socket. The PUSH socket will push out requests to the workers. The PULL socket will receive responses from the workers and reply through the server socket."""
self.context = zmq.Context() self.push = self.context.socket(zmq.PUSH) self.push_port = self.push.bind_to_random_port("tcp://%s" % self.host) # start a listener for the pull socket eventlet.spawn(self.zmq_pull) eventlet.sleep(0)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def start(self, blocking=True): """Start the producer. This will eventually fire the ``server_start`` and ``running`` events in sequence, which signify that the incoming TCP request socket is running and the workers have been forked, respectively. If ``blocking`` is False, control ."""
self.setup_zmq() if blocking: self.serve() else: eventlet.spawn(self.serve) # ensure that self.serve runs now as calling code will # expect start() to have started the server even non-blk eventlet.sleep(0)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def status_icon(self): 'glyphicon for task status; requires bootstrap' icon = self.status_icon_map.get(self.status.lower(), self.unknown_icon) style = self.status_style.get(self.status.lower(), '') return mark_safe( '<span class="glyphicon %s %s" aria-hidden="true"></span>' % (icon, style))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_trailing_string(content, trailing): """ Strip trailing component `trailing` from `content` if it exists. Used when generating names from view classes. """
if content.endswith(trailing) and content != trailing: return content[:-len(trailing)] return content
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dedent(content): """ Remove leading indent from a block of text. Used when generating descriptions from docstrings. Note that python's `textwrap.dedent` doesn't quite cut it, as it fails to dedent multiline docstrings that include unindented text on the initial line. """
content = force_text(content) whitespace_counts = [len(line) - len(line.lstrip(' ')) for line in content.splitlines()[1:] if line.lstrip()] # unindent the content if needed if whitespace_counts: whitespace_pattern = '^' + (' ' * min(whitespace_counts)) content = re.sub(re.compile(whitespace_pattern, re.MULTILINE), '', content) return content.strip()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def camelcase_to_spaces(content): """ Translate 'CamelCaseNames' to 'Camel Case Names'. Used when generating names from view classes. """
camelcase_boundry = '(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))' content = re.sub(camelcase_boundry, ' \\1', content).strip() return ' '.join(content.split('_')).title()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def markup_description(description): """ Apply HTML markup to the given description. """
if apply_markdown: description = apply_markdown(description) else: description = escape(description).replace('\n', '<br />') description = '<p>' + description + '</p>' return mark_safe(description)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def import_class(class_uri): """ Import a class by string 'from.path.module.class' """
parts = class_uri.split('.') class_name = parts.pop() module_uri = '.'.join(parts) try: module = import_module(module_uri) except ImportError as e: # maybe we are still in a module, test going up one level try: module = import_class(module_uri) except Exception: # if failure raise the original exception raise e return getattr(module, class_name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def class_name_to_instant_name(name): """ This will convert from 'ParentName_ChildName' to 'parent_name.child_name' """
name = name.replace('/', '_') ret = name[0].lower() for i in range(1, len(name)): if name[i] == '_': ret += '.' elif '9' < name[i] < 'a' and name[i - 1] != '_': ret += '_' + name[i].lower() else: ret += name[i].lower() return ret
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def OR(*fns): """ Validate with any of the chainable valdator functions """
if len(fns) < 2: raise TypeError('At least two functions must be passed') @chainable def validator(v): for fn in fns: last = None try: return fn(v) except ValueError as err: last = err if last: raise last return validator
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def NOT(fn): """ Reverse the effect of a chainable validator function """
def NOT(fn):
    """Reverse the effect of a chainable validator function.

    The returned validator raises ValueError when ``fn`` accepts the
    value, and returns the value unchanged when ``fn`` rejects it.
    """
    @chainable
    def validator(v):
        rejected = False
        try:
            fn(v)
        except ValueError:
            rejected = True
        if not rejected:
            raise ValueError('invalid')
        return v
    return validator
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def spec_validator(spec, key=operator.itemgetter): """ Take a spec in dict form, and return a function that validates objects The spec maps each object's key to a chain of validator functions. The ``key`` argument can be used to customize the way a value matching a spec key is retrieved from the object. By default, it uses ``operator.itemgetter``. It should be assigned a function that takes a key value and returns a function that returns the value from an object that is passed to it. """
def spec_validator(spec, key=operator.itemgetter):
    """Compile a spec dict into an object-validating function.

    Each spec key maps to a chain of validator functions.  ``key``
    customizes how a value is extracted from the object for a given
    spec key (default: ``operator.itemgetter``).  The returned function
    yields a dict of {spec_key: ValueError} for every failing chain.
    """
    compiled = {k: (key(k), make_chain(v)) for k, v in spec.items()}

    def validator(obj):
        errors = {}
        for name, (getter, chain) in compiled.items():
            try:
                chain(getter(obj))
            except ValueError as err:
                errors[name] = err
        return errors
    return validator
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def auth(self, auth_type, auth_key, project_id=None): """Creates authenticated client. Parameters: * `auth_type` - Authentication type. Use `session` for auth by session key. Use `token` for auth by token. * `auth_key` - Authentication `session key` or `token`. * `project_id` - Project identifier. Must be provided for `token` authentication. Default is `None`. Returns: * Instance of [SessionAuthClient](#sessionauthclient) if `auth_type` is `session`. * Instance of [ProjectClient](#projectclient) if `auth_type` is `token` Raises: * ValueError: if `project_id` parameter was not provided Examples: For auth with `session` you should obtain session key by [Client.user.login](#clientuserlogin) providing your account's email and password: ```python client = Client(host='deform.io') session_client = client.auth( 'session', client.user.login( email='email@example.com', password='password' ), ) print session_client <pydeform.client.SessionAuthClient object at 0x10c585650> ``` Authentication with `token` example: ```python client = Client(host='deform.io') token_client = client.auth( 'token', auth_key='token-value', project_id='some-project', ) print token_client <pydeform.client.ProjectClient object at 0x11c585650> ``` """
def auth(self, auth_type, auth_key, project_id=None):
    """Create an authenticated client.

    ``'session'`` auth returns a SessionAuthClient; ``'token'`` auth
    returns a ProjectClient and requires ``project_id``.  Any other
    ``auth_type`` falls through and returns None.

    :raises ValueError: for token auth without a ``project_id``.
    """
    if auth_type == 'session':
        return SessionAuthClient(
            auth_header=get_session_http_auth_header(auth_key),
            host=self.host,
            port=self.port,
            secure=self.secure,
            requests_session=self.requests_session,
            request_defaults=self.request_defaults,
            api_base_path=self.api_base_path,
        )
    if auth_type == 'token':
        if not project_id:
            raise ValueError(
                'You should provide project_id for token authentication')
        base_uri = get_base_uri(
            project=project_id,
            host=self.host,
            port=self.port,
            secure=self.secure,
            api_base_path=self.api_base_path,
        )
        return ProjectClient(
            base_uri=base_uri,
            auth_header=get_token_http_auth_header(auth_key),
            requests_session=self.requests_session,
            request_defaults=self.request_defaults,
        )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getdoc(obj): """ Get object docstring :rtype: str """
inspect_got_doc = inspect.getdoc(obj) if inspect_got_doc in (object.__init__.__doc__, object.__doc__): return '' # We never want this builtin stuff return (inspect_got_doc or '').strip()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_callable(obj, of_class = None): """ Get callable for an object and its full name. Supports: * functions * classes (jumps to __init__()) * methods * @classmethod * @property :param obj: function|class :type obj: Callable :param of_class: Class that this method is a member of :type of_class: class|None :return: (qualname, Callable|None, Class|None). Callable is None for classes without __init__() :rtype: (str, Callable|None, Class|None) """
def _get_callable(obj, of_class=None):
    """Resolve ``obj`` to (qualified name, callable, owner class).

    Classes are mapped to their ``__init__`` (the class becomes the
    owner); everything else passes through.  The callable slot may stay
    the object itself for classes without an ``__init__`` attribute.

    :rtype: (str, Callable|None, Class|None)
    """
    target = obj
    if inspect.isclass(obj):
        try:
            target = obj.__init__
            of_class = obj
        except AttributeError:
            pass
    return qualname(obj), target, of_class
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _doc_parse(doc, module=None, qualname=None): """ Parse docstring into a dict :rtype: data.FDocstring """
# Build the rex known_tags = { 'param': 'arg', 'type': 'arg-type', 'return': 'ret', 'returns': 'ret', 'rtype': 'ret-type', 'exception': 'exc', 'except': 'exc', 'raise': 'exc', 'raises': 'exc', } tag_rex = re.compile(r'^\s*:(' + '|'.join(map(re.escape, known_tags)) + r')\s*(\S+)?\s*:', re.MULTILINE) # Match tags collect_args = {} collect_ret = {} doc_args = [] doc_exc = [] for m in reversed(list(tag_rex.finditer(doc))): # Fetch data tag, arg = m.groups() tag = known_tags[tag] # Normalized tag name # Fetch docstring part value = doc[m.end():].strip() # Copy text after the tag doc = doc[:m.start()].strip() # truncate the string # Handle tag: collect data if tag == 'exc': doc_exc.append(data.ExceptionDoc(arg, value)) elif tag in ('ret', 'ret-type'): # Collect fields 1 by 1 collect_ret[{'ret': 'doc', 'ret-type': 'type'}[tag]] = value elif tag in ('arg', 'arg-type'): # Init new collection if arg not in collect_args: doc_args.append(arg) # add name for now, then map() replace with classes collect_args[arg] = {} # Collect fields 1 by 1 collect_args[arg][{'arg': 'doc', 'arg-type': 'type'}[tag]] = value else: raise AssertionError('Unknown tag type: {}'.format(tag)) # Merge collected data doc_ret = data.ValueDoc(**collect_ret) if collect_ret else None doc_args = map(lambda name: data.ArgumentDoc(name=name, **collect_args[name]), collect_args) # Finish return data.FDocstring(module=module, qualname=qualname, doc=doc, args=doc_args, exc=doc_exc, ret=doc_ret)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _argspec(func): """ For a callable, get the full argument spec :type func: Callable :rtype: list[data.ArgumentSpec] """
def _argspec(func):
    """For a callable, get the full argument spec.

    :type func: Callable
    :rtype: list[data.ArgumentSpec]
    """
    # NOTE(review): collections.Callable moved to collections.abc and was
    # removed from the `collections` namespace in Python 3.10 — confirm
    # the supported Python versions.
    assert isinstance(func, collections.Callable), 'Argument must be a callable'
    try:
        sp = inspect.getargspec(func) if six.PY2 else inspect.getfullargspec(func)
    except TypeError:
        # inspect.getargspec() fails for built-in functions
        return []
    # Collect arguments with defaults.  Defaults align with the *tail*
    # of sp.args; everything before defaults_start has no default.
    ret = []
    defaults_start = len(sp.args) - len(sp.defaults) if sp.defaults else len(sp.args)
    for i, name in enumerate(sp.args):
        arg = data.ArgumentSpec(name)
        if i >= defaults_start:
            arg['default'] = sp.defaults[i - defaults_start]
        ret.append(arg)
    # *args, **kwargs
    if sp.varargs:
        ret.append(data.ArgumentSpec(sp.varargs, varargs=True))
    if six.PY2:
        if sp.keywords:
            ret.append(data.ArgumentSpec(sp.keywords, keywords=True))
    else:
        if sp.varkw:
            ret.append(data.ArgumentSpec(sp.varkw, keywords=True))
    # TODO: support Python 3: kwonlyargs, kwonlydefaults, annotations
    # Finish
    return ret
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def doc(obj, of_class=None): """ Get parsed documentation for an object as a dict. This includes arguments spec, as well as the parsed data from the docstring. ```python from exdoc import doc ``` The `doc()` function simply fetches documentation for an object, which can be * Module * Class * Function or method * Property The resulting dictionary includes argument specification, as well as parsed docstring: ```python def f(a, b=1, *args): ''' Simple function : param a: First : type a: int : param b: Second : type b: int : param args: More numbers : returns: nothing interesting : rtype: bool : raises ValueError: hopeless condition ''' from exdoc import doc doc(f) # -> { 'module': '__main__', 'name': 'f', 'qualname': 'f', # qualified name: e.g. <class>.<method> 'signature': 'f(a, b=1, *args)', 'qsignature': 'f(a, b=1, *args)', # qualified signature 'doc': 'Simple function', 'clsdoc': '', # doc from the class (used for constructors) # Exceptions 'exc': [ {'doc': 'hopeless condition', 'name': 'ValueError'} ], # Return value 'ret': {'doc': 'nothing interesting', 'type': 'bool'}, # Arguments 'args': [ {'doc': 'First', 'name': 'a', 'type': 'int'}, {'default': 1, 'doc': 'Second', 'name': 'b', 'type': 'int'}, {'doc': 'More numbers', 'name': '*args', 'type': None} ], } ``` Note: in Python 3, when documenting a method of a class, pass the class to the `doc()` function as the second argument: ```python doc(cls.method, cls) ``` This is necessary because in Python3 methods are not bound like they used to. Now, they are just functions. :type obj: ModuleType|type|Callable|property :param of_class: A class whose method is being documented. :type of_class: class|None :rtype: Docstring|FDocstring """
# Special care about properties if isinstance(obj, property): docstr = doc(obj.fget) # Some hacks for properties docstr.signature = docstr.qsignature= obj.fget.__name__ docstr.args = docstr.args[1:] return docstr # Module module = inspect.getmodule(obj) if module: module = module.__name__ # Not callable: e.g. modules if not callable(obj): if hasattr(obj, '__name__'): return data.Docstring(qualname=obj.__name__, doc=getdoc(obj)) else: return None # Callables qualname, fun, of_class = _get_callable(obj, of_class) docstr = _docspec(fun, module=module, qualname=qualname, of_class=of_class) # Class? Get doc if inspect.isclass(obj): # Get class doc clsdoc = getdoc(obj) # Parse docstring and merge into constructor doc if clsdoc: # Parse docstring clsdoc = _doc_parse(clsdoc, module=module, qualname=qualname) # Store clsdoc always docstr.clsdoc = clsdoc.doc # Merge exceptions list docstr.exc.extend(clsdoc.exc) # If constructor does not have it's own docstr -- copy it from the clsdoc if not docstr.doc: docstr.doc = docstr.clsdoc # Merge arguments: type, doc for a_class in clsdoc.args: for a_constructor in docstr.args: if a_class.name.lstrip('*') == a_constructor.name.lstrip('*'): a_constructor.type = a_class.type a_constructor.doc = a_class.doc # Finish return docstr
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def subclasses(cls, leaves=False): """ List all subclasses of the given class, including itself. If `leaves=True`, only returns classes which have no subclasses themselves. :type cls: type :param leaves: Only return leaf classes :type leaves: bool :rtype: list[type] """
def subclasses(cls, leaves=False):
    """List all subclasses of ``cls``, including ``cls`` itself.

    With ``leaves=True``, only classes that have no subclasses of their
    own are returned.

    :type cls: type
    :param leaves: Only return leaf classes
    :type leaves: bool
    :rtype: list[type]
    """
    pending = [cls]
    found = []
    while pending:
        current = pending.pop()
        children = current.__subclasses__()
        pending.extend(children)
        if leaves and children:
            continue  # has subclasses: not a leaf
        found.append(current)
    return found
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def generate(passphrase, trees=['primary']): """Generate a seed for the primary tree of a Gem wallet. You may choose to store the passphrase for a user so the user doesn't have to type it in every time. This is okay (although the security risks should be obvious) but Gem strongly discourages storing even the encrypted private seed, and storing both the passphrase and the private seed is completely insane. Don't do it. Args: passphrase (str): The passphrase that will be used to encrypt the seed before it's send to Gem. Key-stretching is done with PBDKF2 and encryption is done with nacl's SecretBox. trees (list of str): A list of names to generate trees for. For User Wallets this will be ['primary'], for Application Wallets it will be ['primary', 'backup']. Returns: A dict of dicts containing the serialized public master node, and a sub-dict with the encrypted private seed for each tree in `trees`. """
def generate(passphrase, trees=None):
    """Generate encrypted seeds for the trees of a Gem wallet.

    Key-stretching is done with PBKDF2 and encryption with nacl's
    SecretBox.  Gem strongly discourages storing the passphrase together
    with the encrypted private seed.

    :param passphrase: passphrase used to encrypt each private seed.
    :param trees: tree names to generate; defaults to ``['primary']``
        (User Wallets).  Application Wallets use ``['primary', 'backup']``.
    :return: dict mapping each tree name to a dict with ``private_seed``,
        ``public_seed`` and ``encrypted_seed``.
    """
    # Fix: the original used a mutable default argument (trees=['primary']),
    # which is shared across calls; use a None sentinel instead.
    if trees is None:
        trees = ['primary']
    seeds, multi_wallet = MultiWallet.generate(trees, entropy=True)
    result = {}
    for tree in trees:
        result[tree] = dict(
            private_seed=seeds[tree],
            public_seed=multi_wallet.public_wif(tree),
            encrypted_seed=PassphraseBox.encrypt(passphrase, seeds[tree]))
    return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create(self, name, passphrase=None, wallet_data=None): """Create a new Wallet object and add it to this Wallets collection. This is only available in this library for Application wallets. Users must add additional wallets in their User Console Args: name (str): wallet name passphrase (str, optional): A passphrase with which to encrypt a user wallet. If not supplied, wallet_data is mandatory. wallet_data (dict): Output from wallets.generate. For User Wallets, only the primary tree is used. For Application Wallets, the primary and backup trees are used. Returns: A tuple of the (backup_private_seed, round.Wallet). """
def create(self, name, passphrase=None, wallet_data=None):
    """Create a new Wallet and add it to this Wallets collection.

    Only available for Application wallets; Users add additional
    wallets in their User Console.

    :param name: wallet name.
    :param passphrase: passphrase with which to encrypt a new wallet.
        When supplied, fresh wallet data is generated — note this
        overrides any ``wallet_data`` argument.  Otherwise
        ``wallet_data`` is mandatory.
    :param wallet_data: output from ``wallets.generate``.
    :return: for Application wallets, a tuple of
        (backup_private_seed, round.Wallet); otherwise just the Wallet.
    :raises RoundError: when called on a non-Application account.
    :raises ValueError: when neither passphrase nor wallet_data is given.
    """
    if not self.application:
        raise RoundError("User accounts are limited to one wallet. Make an "
                         "account or shoot us an email <dev@gem.co> if you "
                         "have a compelling use case for more.")
    if not passphrase and not wallet_data:
        raise ValueError("Usage: wallets.create(name, passphrase [, "
                         "wallet_data])")
    elif passphrase:
        # Application wallets get a backup tree in addition to the primary.
        wallet_data = generate(passphrase, trees=(['primary', 'backup'] if (
            self.application) else ['primary']))
    wallet = dict(
        primary_private_seed=wallet_data['primary']['encrypted_seed'],
        primary_public_seed=wallet_data['primary']['public_seed'],
        name=name)
    if self.application:
        wallet['backup_public_seed'] = wallet_data['backup']['public_seed']
    resource = self.resource.create(wallet)
    wallet = self.wrap(resource)
    return (wallet_data['backup']['private_seed'], self.add(wallet)) if (
        self.application) else self.add(wallet)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unlock(self, passphrase, encrypted_seed=None): """Unlock the Wallet by decrypting the primary_private_seed with the supplied passphrase. Once unlocked, the private seed is accessible in memory and calls to `account.pay` will succeed. This is a necessary step for creating transactions. Args: passphrase (str): The passphrase the User used to encrypt this wallet. encrypted_seed (dict): A dictionary of the form {'ciphertext': longhexvalue, 'iterations': integer of pbkdf2 derivations, 'nonce': 24-byte hex value 'salt': 16-byte hex value} this dict represents an private seed (not a master key) encrypted with the `passphrase` using pbkdf2. You can obtain this value with wallet.generate. If this value is supplied, it overwrites (locally only) the encrypted primary_private_seed value, allowing you to load in a primary key that you didn't store with Gem. Note that the key MUST match the pubkey that this wallet was created with. Returns: self """
def unlock(self, passphrase, encrypted_seed=None):
    """Unlock the Wallet by decrypting the primary private seed.

    Once unlocked, the private seed is held in memory and signing
    (e.g. ``account.pay``) becomes possible.

    :param passphrase: the passphrase the seed was encrypted with.
    :param encrypted_seed: optional encrypted-seed dict (ciphertext,
        iterations, nonce, salt) that locally overrides the stored
        ``primary_private_seed``.  A non-empty ``nonce`` selects nacl
        decryption; otherwise PBKDF2/PassphraseBox is used.
    :return: self
    :raises InvalidPassphraseError: when decryption fails (or the seed
        payload is malformed).
    """
    wallet = self.resource
    if not encrypted_seed:
        encrypted_seed = wallet.primary_private_seed
    try:
        if encrypted_seed['nonce']:
            primary_seed = NaclPassphraseBox.decrypt(passphrase, encrypted_seed)
        else:
            primary_seed = PassphraseBox.decrypt(passphrase, encrypted_seed)
    except Exception:
        # Fix: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit propagate instead of masquerading as a bad passphrase.
        raise InvalidPassphraseError()
    self.multi_wallet = MultiWallet(
        private_seeds={'primary': primary_seed},
        public={'cosigner': wallet.cosigner_public_seed,
                'backup': wallet.backup_public_seed})
    return self
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_accounts(self, fetch=False): """Return this Wallet's accounts object, populating it if fetch is True."""
def get_accounts(self, fetch=False):
    """Return this Wallet's Accounts collection.

    :param fetch: populate the collection from the API when True.
    """
    accounts_resource = self.resource.accounts
    return Accounts(accounts_resource, self.client,
                    wallet=self, populate=fetch)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def account(self, key=None, address=None, name=None): """Query for an account by key, address, or name."""
def account(self, key=None, address=None, name=None):
    """Query for a single account by key, address, or name.

    :raises TypeError: when no lookup parameter is given.
    """
    if key:
        return self.client.account(key, wallet=self)
    if address:
        query = {'address': address}
    elif name:
        query = {'name': name}
    else:
        raise TypeError("Missing param: key, address, or name is required.")
    resource = self.resource.account_query(query).get()
    return Account(resource, self.client, wallet=self)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dump_addresses(self, network, filename=None): """Return a list of address dictionaries for each address in all of the accounts in this wallet of the network specified by `network` """
def dump_addresses(self, network, filename=None):
    """Return the address dicts of every address in this wallet's
    accounts on ``network``; optionally also dump them to ``filename``
    as JSON.
    """
    matching = []
    for account in self.accounts.values():
        if account.network != network:
            continue
        matching.extend(address.data for address in account.addresses)
    if filename:
        from json import dump
        with open(filename, 'w') as out:
            dump(matching, out)
    return matching
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_subscriptions(self, fetch=False): """Return this Wallet's subscriptions object, populating it if fetch is True."""
def get_subscriptions(self, fetch=False):
    """Return this Wallet's Subscriptions collection.

    :param fetch: populate the collection from the API when True.
    """
    subscriptions_resource = self.resource.subscriptions
    return Subscriptions(subscriptions_resource, self.client,
                         populate=fetch)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def signatures(self, transaction): """Sign a transaction. Args: transaction (coinop.Transaction) Returns: A list of signature dicts of the form [ {'primary': 'base58signaturestring'}, """
def signatures(self, transaction):
    """Sign ``transaction`` with the unlocked primary tree.

    :param transaction: a coinop.Transaction.
    :return: list of signature dicts, e.g. [{'primary': 'base58sig'}].
    :raises DecryptionError: if the wallet has not been unlocked.
    """
    # TODO: output.metadata['type']['change']
    if self.multi_wallet:
        return self.multi_wallet.signatures(transaction)
    raise DecryptionError("This wallet must be unlocked with "
                          "wallet.unlock(passphrase)")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def map_exception_codes():
    '''Helper function to initialise CODES_TO_EXCEPTIONS: map each HTTP
    status code to the werkzeug exception class that declares it.'''
    has_code = lambda cls: getattr(cls, 'code', None)
    return {cls.code: cls
            for _, cls in inspect.getmembers(exceptions, has_code)}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def extract_pathvars(callback):
    '''Extract the path variables from a Resource operation.

    Return {'mandatory': [<pnames>], 'optional': [<pnames>]}; a path
    parameter counts as optional when it declares a default value.
    '''
    mandatory = []
    optional = []
    # Iterate the signature rather than __annotations__ because parameter
    # order matters (signature.parameters is an OrderedDict).
    for pname, param in callback.signature.parameters.items():
        try:
            anno = callback.__annotations__[pname]
        except KeyError:
            continue  # unannotated params, like "cls" or "request"
        if anno[0] != Ptypes.path:
            continue  # only path variables are considered here
        bucket = mandatory if param.default == inspect._empty else optional
        bucket.append(pname)
    return {'mandatory': mandatory, 'optional': optional}
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def inject_extra_args(callback, request, kwargs):
    '''Inject header/body/form parameters from ``request`` into ``kwargs``.

    Path parameters are skipped (werkzeug already parsed them) and body
    parameters are JSON-decoded.  TODO: this is a temporary patch — the
    decoding should honour the mimetype in the request header.
    '''
    annots = dict(callback.__annotations__)
    del annots['return']
    for param_name, (param_type, _) in annots.items():
        if param_type == Ptypes.path:
            continue  # Already parsed by werkzeug
        source = getattr(request, PTYPE_TO_REQUEST_PROPERTY[param_type])
        if param_type == Ptypes.body:
            # TODO: The JSON conversion should depend on the request
            # header type, really...
            value = json.loads(source.decode('utf-8'))
        else:
            value = source.get(param_name, None)
        if value is not None:
            kwargs[param_name] = value
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def filter_annotations_by_ptype(function, ptype):
    '''Return the annotations of ``function`` whose parameter type equals
    ``ptype``, excluding the return annotation.'''
    filtered = {}
    for name, anno in function.__annotations__.items():
        if name == 'return':
            continue
        anno_ptype, _ = anno
        if anno_ptype == ptype:
            filtered[name] = anno
    return filtered
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def option2tuple(opt): """Return a tuple of option, taking possible presence of level into account"""
def option2tuple(opt):
    """Return an (option, arguments) tuple, skipping the optional
    leading level integer when present."""
    start = 1 if isinstance(opt[0], int) else 0
    return opt[start], opt[start + 1:]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def opt_func(options, check_mandatory=True): """ Restore argument checks for functions that takes options dicts as arguments Functions that take the option dictionary produced by :meth:`Options.parse` as `kwargs` loose the argument checking usually performed by the python interpretor. They also loose the ability to take default values for their keyword arguments. Such function is basically unusable without the full dictionary produced by the option parser. This is a decorator that restores argument checking and default values assignment on the basis of an :class:`Options` instance. options = Options([ (0, "-f", "input", str, 1, None, MULTI, "Input file"), (0, "-o", "output", str, 1, None, MA, "Output file"), (0, "-p", "topology", str, 1, None, 0, "Optional topology"), ]) @opt_func(options) def process_things(**arguments): # Do something return # The function can be called with the arguments # from the argument parser arguments = options.parse() process_things(**arguments) # It can be called with only part of the arguments, # the other arguments will be set to their default as defined by # the Options instance process_things(output='output.gro') # If the function is called with an unknown argument, the decorator # raises a TypeError process_things(unknown=None) # Likewise, if the function is called without the mandatory arguments, # the decorator raises a TypeError process_things(topology='topology.top') # The check for mandatory arguments can be deactivated @opt_func(options, check_mandatory=False) def process_things(**arguments): # Do things return Note that the decorator cannot be used on functions that accept Other arguments than the one defined in the :class:`Options` instance. Also, the arguments have to be given as keyword arguments. Positional arguments will cause the decorator to raise a `TypeError`. """
# A function `my_function` decorated with `opt_func` is replaced by # `opt_func(options)(my_function)`. This is equivalent to # `validate_arguments(my_function)` using the `options` argument provided # to the decorator. A call to `my_function` results in a call to # `opt_func(options)(my_function)(*args, **kwargs)` # ^^^^^^^^^^^^^^^^^^ validate_arguments # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ wrap def validate_arguments(func): @functools.wraps(func) def wrap(*args, **kwargs): if args: raise TypeError('{0.__name__}() takes 0 positional arguments ' 'but {1} was given'.format(func, len(args))) keys = set(kwargs.keys()) missing = options.mandatory_keys - keys if missing and check_mandatory: raise TypeError('{0.__name__}() is missing the following ' 'mandatory keyword arguments: {1}' .format(func, ', '.join(missing))) arguments = options._default_dict() unknown = keys - set(arguments.keys()) if unknown: raise TypeError('{0.__name__}() received the following ' 'unexpected arguments: {1}' .format(func, ', '.join(unknown))) arguments.update(**kwargs) return func(**arguments) return wrap return validate_arguments
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _default_dict(self): """Return a dictionary with the default for each option."""
options = {} for attr, _, _, default, multi, _ in self._optiondict.values(): if multi and default is None: options[attr] = [] else: options[attr] = default return options
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def help(self, args=None, userlevel=9): """Make a string from the option list"""
def help(self, args=None, userlevel=9):
    """Build the help string from the option list.

    :param args: argument list to parse for current values; when None,
        the option defaults are shown instead.
    :param userlevel: only options at or below this level are listed.
    :return: the formatted help text.
    """
    out = [main.__file__ + "\n"]
    if args is not None:
        parsed = self.parse(args, ignore_help=True)
    else:
        parsed = self._default_dict()
    for thing in self.options:
        # Plain strings in the option list act as section headers.
        # Fix: use isinstance() instead of `type(thing) == str` so str
        # subclasses are handled too.
        if isinstance(thing, str):
            out.append(" " + thing)
        elif thing[0] <= userlevel:
            out.append(" %10s %s ( %s )"
                       % (thing[1], thing[-1], str(parsed[thing[2]])))
    return "\n".join(out) + "\n"
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def page_list(cls, pages, base_url='/'): """transform a list of page titles in a list of html links"""
def page_list(cls, pages, base_url='/'):
    """Transform a list of page titles into a list of HTML links."""
    return [
        '<a href="{}{}">{}</a>'.format(base_url, cls.slugify(page), page)
        for page in pages
    ]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def slugify(cls, s): """Return the slug version of the string ``s``"""
def slugify(cls, s):
    """Return the slug version of the string ``s``.

    Runs of characters outside [0-9a-zA-Z-] become hyphens, consecutive
    hyphens are collapsed, and leading/trailing hyphens are stripped.
    """
    hyphenated = re.sub("[^0-9a-zA-Z-]", "-", s)
    collapsed = re.sub("-{2,}", "-", hyphenated)
    return collapsed.strip('-')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def is_whitelisted(self, addrinfo): """ Returns if a result of ``socket.getaddrinfo`` is in the socket address whitelist. """
def is_whitelisted(self, addrinfo):
    """Return whether a ``socket.getaddrinfo`` result entry refers to an
    address in the socket address whitelist.
    """
    # For details about the ``getaddrinfo`` struct, see the Python docs:
    # http://docs.python.org/library/socket.html#socket.getaddrinfo
    _family, _socktype, _proto, _canonname, sockaddr = addrinfo
    address, _port = sockaddr[:2]  # sockaddr may have extra IPv6 fields
    return address in self.socket_address_whitelist
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def report(self): """ Performs rollups, prints report of sockets opened. """
def report(self):
    """Perform rollups and print a report of non-whitelisted sockets
    opened, grouped per test, to ``self.stream``.

    Prints nothing when no socket warnings were recorded.
    """
    # Roll each test's list of socket warnings up into per-socket counts.
    aggregations = dict(
        (test, Counter().rollup(values))
        for test, values in self.socket_warnings.items()
    )
    total = sum(
        len(warnings) for warnings in self.socket_warnings.values()
    )

    def format_test_statistics(test, counter):
        # One header line per test, then one indented line per socket.
        return "%s:\n%s" % (
            test,
            '\n'.join(
                ' - %s: %s' % (socket, count)
                for socket, count in counter.items()
            )
        )

    def format_statistics(aggregations):
        return '\n'.join(
            format_test_statistics(test, counter)
            for test, counter in aggregations.items()
        )

    # Only print the report if there are actually things to report.
    if aggregations:
        print('=' * 70, file=self.stream)
        print(
            'NON-WHITELISTED SOCKETS OPENED: %s' % total,
            file=self.stream,
        )
        print('-' * 70, file=self.stream)
        print(format_statistics(aggregations), file=self.stream)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_all(self): """ Gets all captured counters. :return: a list with counters. """
def get_all(self):
    """Return a snapshot list of all captured counters.

    The cache lock is held while the snapshot is copied so concurrent
    writers cannot corrupt the result.
    """
    self._lock.acquire()
    try:
        snapshot = list(self._cache.values())
    finally:
        self._lock.release()
    return snapshot
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, name, typ): """ Gets a counter specified by its name. It counter does not exist or its type doesn't match the specified type it creates a new one. :param name: a counter name to retrieve. :param typ: a counter type. :return: an existing or newly created counter of the specified type. """
def get(self, name, typ):
    """Get (or lazily create) a counter by name.

    If no counter named ``name`` exists, or the existing one has a
    different type, a fresh ``Counter`` of ``typ`` replaces it.

    :param name: counter name to retrieve (must be non-empty).
    :param typ: the expected counter type.
    :return: an existing or newly created counter of the given type.
    :raises Exception: if ``name`` is empty or None.
    """
    # Fix: `name == None or len(name) == 0` → truthiness test, and
    # `counter == None` → `is None`; also a single dict lookup via .get().
    if not name:
        raise Exception("Counter name was not set")
    self._lock.acquire()
    try:
        counter = self._cache.get(name)
        if counter is None or counter.type != typ:
            counter = Counter(name, typ)
            self._cache[name] = counter
        return counter
    finally:
        self._lock.release()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def load(self, filename, **kwargs): """ Parse a file specified with the filename and return an numpy array Parameters filename : string A path of a file Returns ------- ndarray An instance of numpy array """
def load(self, filename, **kwargs):
    """Open the file at ``filename`` and parse it into a numpy array.

    :param filename: path of the file to read.
    :return: the ndarray produced by :meth:`parse`.
    """
    with open(filename, 'r') as stream:
        return self.parse(stream, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_thumbnail_filename(filename, append_text="-thumbnail"): """ Returns a thumbnail version of the file name. """
name, ext = os.path.splitext(filename) return ''.join([name, append_text, ext])