text_prompt
stringlengths
157
13.1k
code_prompt
stringlengths
7
19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def stdin_readable(): """Determine whether stdin has any data to read."""
def stdin_readable():
    """Determine whether stdin has any data to read."""
    if not WINDOWS:
        # select() with a zero timeout polls without blocking
        try:
            return bool(select([sys.stdin], [], [], 0)[0])
        except Exception:
            logger.log_exc()
    # fallback (and Windows path): a non-tty stdin is assumed readable
    try:
        return not sys.stdin.isatty()
    except Exception:
        logger.log_exc()
    return False
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_recursion_limit(limit): """Set the Python recursion limit."""
def set_recursion_limit(limit):
    """Set the Python recursion limit."""
    if limit < minimum_recursion_limit:
        raise CoconutException("--recursion-limit must be at least " + str(minimum_recursion_limit))
    sys.setrecursionlimit(limit)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def canparse(argparser, args): """Determines if argparser can parse args."""
def canparse(argparser, args):
    """Determines if argparser can parse args."""
    # temporarily swap the parser's error handler for one that raises,
    # so a parse failure surfaces as ValueError instead of SystemExit
    saved_error = argparser.error
    argparser.error = _raise_ValueError
    try:
        argparser.parse_args(args)
    except ValueError:
        return False
    else:
        return True
    finally:
        argparser.error = saved_error
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_style(self, style): """Set pygments syntax highlighting style."""
def set_style(self, style):
    """Set pygments syntax highlighting style."""
    if style == "none":
        self.style = None
    elif prompt_toolkit is None:
        raise CoconutException("syntax highlighting is not supported on this Python version")
    elif style == "list":
        # print the available styles and exit
        print("Coconut Styles: none, " + ", ".join(pygments.styles.get_all_styles()))
        sys.exit(0)
    elif style in pygments.styles.get_all_styles():
        self.style = style
    else:
        raise CoconutException("unrecognized pygments style", style, extra="use '--style list' to show all valid styles")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_history_file(self, path): """Set path to history file. "" produces no file."""
def set_history_file(self, path):
    """Set path to history file. "" produces no file."""
    if path:
        self.history = prompt_toolkit.history.FileHistory(fixpath(path))
    else:
        # empty path: keep history in memory only
        self.history = prompt_toolkit.history.InMemoryHistory()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def prompt(self, msg): """Get input using prompt_toolkit."""
def prompt(self, msg):
    """Get input using prompt_toolkit."""
    try:
        # prompt_toolkit v2 exposes PromptSession
        prompt_func = prompt_toolkit.PromptSession(history=self.history).prompt
    except AttributeError:
        # prompt_toolkit v1 fallback
        prompt_func = partial(prompt_toolkit.prompt, history=self.history)
    return prompt_func(
        msg,
        multiline=self.multiline,
        vi_mode=self.vi_mode,
        wrap_lines=self.wrap_lines,
        enable_history_search=self.history_search,
        lexer=PygmentsLexer(CoconutLexer),
        style=style_from_pygments_cls(
            pygments.styles.get_style_by_name(self.style),
        ),
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build_vars(path=None): """Build initial vars."""
def build_vars(path=None):
    """Build initial vars."""
    init_vars = {
        "__name__": "__main__",
        "__package__": None,
        "reload": reload,
    }
    if path is not None:
        init_vars["__file__"] = fixpath(path)
    # put reserved_vars in for auto-completion purposes
    for var in reserved_vars:
        init_vars[var] = None
    return init_vars
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fix_pickle(self): """Fix pickling of Coconut header objects."""
def fix_pickle(self):
    """Fix pickling of Coconut header objects."""
    from coconut import __coconut__  # this is expensive, so only do it here
    for var in self.vars:
        # replace non-dunder header vars with the canonical __coconut__ objects
        if not var.startswith("__") and var in dir(__coconut__):
            self.vars[var] = getattr(__coconut__, var)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def handling_errors(self, all_errors_exit=False): """Handle execution errors."""
def handling_errors(self, all_errors_exit=False):
    """Handle execution errors."""
    # generator body — presumably wrapped as a context manager elsewhere (TODO confirm)
    try:
        yield
    except SystemExit as err:
        self.exit(err.code)
    except BaseException:
        etype, value, tb = sys.exc_info()
        # strip the traceback layers added by this machinery itself
        for _ in range(num_added_tb_layers):
            if tb is None:
                break
            tb = tb.tb_next
        traceback.print_exception(etype, value, tb)
        if all_errors_exit:
            self.exit(1)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run(self, code, use_eval=None, path=None, all_errors_exit=False, store=True): """Execute Python code."""
def run(self, code, use_eval=None, path=None, all_errors_exit=False, store=True):
    """Execute Python code."""
    # pick the execution strategy: None -> interpret, True -> eval, else exec
    if use_eval is None:
        run_func = interpret
    elif use_eval is True:
        run_func = eval
    else:
        run_func = exec_func
    with self.handling_errors(all_errors_exit):
        if path is None:
            result = run_func(code, self.vars)
        else:
            use_vars = self.build_vars(path)
            try:
                result = run_func(code, use_vars)
            finally:
                # always merge file-scoped vars back, even on error
                self.vars.update(use_vars)
        if store:
            self.store(code)
        return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def run_file(self, path, all_errors_exit=True): """Execute a Python file."""
def run_file(self, path, all_errors_exit=True):
    """Execute a Python file."""
    path = fixpath(path)
    with self.handling_errors(all_errors_exit):
        module_vars = run_file(path)
        self.vars.update(module_vars)
        # record an equivalent import so the session transcript replays correctly
        self.store("from " + splitname(path)[1] + " import *")
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def was_run_code(self, get_all=True): """Get all the code that was run."""
def was_run_code(self, get_all=True):
    """Get all the code that was run."""
    if self.stored is None:
        return ""
    if get_all:
        # collapse everything stored so far into a single entry
        self.stored = ["\n".join(self.stored)]
    return self.stored[-1]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_reqs(which="main"): """Gets requirements from all_reqs with versions."""
def get_reqs(which="main"):
    """Gets requirements from all_reqs with versions."""
    reqs = []
    for req in all_reqs[which]:
        req_str = req + ">=" + ver_tuple_to_str(min_versions[req])
        if req in version_strictly:
            # pin below the next release of the last version component
            req_str += ",<" + ver_tuple_to_str(min_versions[req][:-1]) + "." + str(min_versions[req][-1] + 1)
        reqs.append(req_str)
    return reqs
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def uniqueify_all(init_reqs, *other_reqs): """Find the union of all the given requirements."""
def uniqueify_all(init_reqs, *other_reqs):
    """Find the union of all the given requirements."""
    combined = set(init_reqs)
    for req_group in other_reqs:
        combined.update(req_group)
    return list(combined)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def all_versions(req): """Get all versions of req from PyPI."""
def all_versions(req):
    """Get all versions of req from PyPI."""
    import requests  # deferred: only needed for this network query
    url = "https://pypi.python.org/pypi/" + req + "/json"
    return tuple(requests.get(url).json()["releases"].keys())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def newer(new_ver, old_ver, strict=False): """Determines if the first version tuple is newer than the second. True if newer, False if older, None if difference is after specified version parts."""
def newer(new_ver, old_ver, strict=False):
    """Determines if the first version tuple is newer than the second.
    True if newer, False if older, None if difference is after specified version parts."""
    # equal (allowing a trailing .0) is never newer
    if old_ver == new_ver or old_ver + (0,) == new_ver:
        return False
    for new_part, old_part in zip(new_ver, old_ver):
        if not isinstance(new_part, int):
            # non-numeric component: compare as strings
            old_part = str(old_part)
        if old_part < new_part:
            return True
        if old_part > new_part:
            return False
    # all shared components equal; difference is only in extra trailing parts
    return not strict
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def print_new_versions(strict=False): """Prints new requirement versions."""
def print_new_versions(strict=False):
    """Prints new requirement versions."""
    new_updates = []
    same_updates = []
    for req in everything_in(all_reqs):
        new_versions = []
        same_versions = []
        for ver_str in all_versions(req):
            if newer(ver_str_to_tuple(ver_str), min_versions[req], strict=True):
                new_versions.append(ver_str)
            elif not strict and newer(ver_str_to_tuple(ver_str), min_versions[req]):
                same_versions.append(ver_str)
        update_str = req + ": " + ver_tuple_to_str(min_versions[req]) + " -> " + ", ".join(
            new_versions + ["(" + v + ")" for v in same_versions],
        )
        if new_versions:
            new_updates.append(update_str)
        elif same_versions:
            same_updates.append(update_str)
    print("\n".join(new_updates + same_updates))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def format_error(err_type, err_value, err_trace=None): """Properly formats the specified error."""
def format_error(err_type, err_value, err_trace=None):
    """Properly formats the specified error."""
    if err_trace is not None:
        # full traceback available: delegate to the standard formatter
        return "".join(traceback.format_exception(err_type, err_value, err_trace)).strip()
    err_parts = "".join(traceback.format_exception_only(err_type, err_value)).strip().split(": ", 1)
    if len(err_parts) == 1:
        err_name, err_msg = err_parts[0], ""
    else:
        err_name, err_msg = err_parts
    # drop any module qualification from the exception name
    err_name = err_name.split(".")[-1]
    return err_name + ": " + err_msg
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def complain(error): """Raises in develop; warns in release."""
def complain(error):
    """Raises in develop; warns in release."""
    if callable(error):
        # lazily-built error: only construct it if we're going to raise
        if DEVELOP:
            raise error()
    elif DEVELOP:
        raise error
    else:
        logger.warn_err(error)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def copy_from(self, other): """Copy other onto self."""
def copy_from(self, other):
    """Copy other onto self."""
    self.verbose = other.verbose
    self.quiet = other.quiet
    self.path = other.path
    self.name = other.name
    self.tracing = other.tracing
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def display(self, messages, sig="", debug=False): """Prints an iterator of messages."""
def display(self, messages, sig="", debug=False):
    """Prints an iterator of messages."""
    joined = " ".join(str(msg) for msg in messages)
    # prefix every line (keeping line endings) with the signature
    full_message = "".join(sig + line for line in joined.splitlines(True))
    if not full_message:
        full_message = sig.rstrip()
    if debug:
        printerr(full_message)
    else:
        print(full_message)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_error(self): """Properly formats the current error."""
def get_error(self):
    """Properly formats the current error."""
    exc_info = sys.exc_info()
    if exc_info[0] is None:
        return None
    err_type, err_value, err_trace = exc_info[0], exc_info[1], None
    # include the traceback only in verbose mode
    if self.verbose and len(exc_info) > 2:
        err_trace = exc_info[2]
    return format_error(err_type, err_value, err_trace)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def in_path(self, new_path, old_path=None): """Temporarily enters a path."""
def in_path(self, new_path, old_path=None):
    """Temporarily enters a path."""
    # generator body — presumably wrapped as a context manager elsewhere (TODO confirm)
    self.path = new_path
    try:
        yield
    finally:
        # restore the caller-provided old path (defaults to None)
        self.path = old_path
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def display_exc(self): """Properly prints an exception in the exception context."""
def display_exc(self):
    """Properly prints an exception in the exception context."""
    errmsg = self.get_error()
    if errmsg is None:
        return
    if self.path is not None:
        # prefix with the file context and indent every non-empty line
        errmsg_lines = ["in " + self.path + ":"]
        for line in errmsg.splitlines():
            if line:
                line = " " * taberrfmt + line
            errmsg_lines.append(line)
        errmsg = "\n".join(errmsg_lines)
    printerr(errmsg)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def show_tabulated(self, begin, middle, end): """Shows a tabulated message."""
def show_tabulated(self, begin, middle, end):
    """Shows a tabulated message."""
    internal_assert(len(begin) < info_tabulation, "info message too long", begin)
    # pad begin out to the tabulation column, then append middle and end
    padding = " " * (info_tabulation - len(begin))
    self.show(begin + padding + middle + " " + end)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def log_tag(self, tag, code, multiline=False): """Logs a tagged message if tracing."""
def log_tag(self, tag, code, multiline=False):
    """Logs a tagged message if tracing."""
    if not self.tracing:
        return
    if callable(code):
        # lazily-evaluated code: only build it when actually tracing
        code = code()
    tagstr = "[" + str(tag) + "]"
    if multiline:
        printerr(tagstr + "\n" + displayable(code))
    else:
        printerr(tagstr, ascii(code))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def log_trace(self, tag, original, loc, tokens=None, extra=None): """Formats and displays a trace if tracing."""
def log_trace(self, tag, original, loc, tokens=None, extra=None):
    """Formats and displays a trace if tracing."""
    if not self.tracing:
        return
    tag, original, loc = displayable(tag), displayable(original), int(loc)
    if "{" in tag:
        # tags containing "{" are internal placeholders; skip them
        return
    out = ["[" + tag + "]"]
    add_line_col = True
    if tokens is not None:
        if isinstance(tokens, Exception):
            msg = displayable(str(tokens))
            if "{" in msg:
                # collapse any {...} payload in the exception message
                head, middle = msg.split("{", 1)
                middle, tail = middle.rsplit("}", 1)
                msg = head + "{...}" + tail
            out.append(msg)
            add_line_col = False
        elif len(tokens) == 1 and isinstance(tokens[0], str):
            out.append(ascii(tokens[0]))
        else:
            out.append(ascii(tokens))
    if add_line_col:
        out.append("(line:" + str(lineno(loc, original)) + ", col:" + str(col(loc, original)) + ")")
    if extra is not None:
        out.append("from " + ascii(extra))
    printerr(*out)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def patch_logging(self): """Patches built-in Python logging if necessary."""
def patch_logging(self):
    """Patches built-in Python logging if necessary."""
    if hasattr(logging, "getLogger"):
        return
    # provide a minimal getLogger backed by this Logger

    def getLogger(name=None):
        other = Logger(self)
        if name is not None:
            other.name = name
        return other

    logging.getLogger = getLogger
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def pylog(self, *args, **kwargs): """Display all available logging information."""
def pylog(self, *args, **kwargs):
    """Display all available logging information."""
    # dump name, raw args/kwargs, and the current traceback (if any)
    printerr(self.name, args, kwargs, traceback.format_exc())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def lenient_add_filter(self, *args, **kwargs): """Disables the raiseonerror filter."""
def lenient_add_filter(self, *args, **kwargs):
    """Disables the raiseonerror filter."""
    # drop the "raiseonerror" filter; forward everything else unchanged
    if args and args[0] != "raiseonerror":
        self.original_add_filter(*args, **kwargs)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def on_modified(self, event): """Handle a file modified event."""
def on_modified(self, event):
    """Handle a file modified event."""
    modified_path = event.src_path
    # only recompile each path once per watch session
    if modified_path not in self.saw:
        self.saw.add(modified_path)
        self.recompile(modified_path)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def split_function_call(tokens, loc): """Split into positional arguments and keyword arguments."""
def split_function_call(tokens, loc):
    """Split into positional arguments and keyword arguments."""
    pos_args, star_args, kwd_args, dubstar_args = [], [], [], []
    for arg in tokens:
        argstr = "".join(arg)
        if len(arg) == 1:
            # plain positional argument: must precede all other kinds
            if star_args or kwd_args or dubstar_args:
                raise CoconutDeferredSyntaxError("positional arguments must come first", loc)
            pos_args.append(argstr)
        elif len(arg) == 2:
            if arg[0] == "*":
                # *args unpacking: must precede keyword arguments
                if kwd_args or dubstar_args:
                    raise CoconutDeferredSyntaxError("star unpacking must come before keyword arguments", loc)
                star_args.append(argstr)
            elif arg[0] == "**":
                dubstar_args.append(argstr)
            else:
                kwd_args.append(argstr)
        else:
            raise CoconutInternalException("invalid function call argument", arg)
    return pos_args, star_args, kwd_args, dubstar_args
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_infix_items(tokens, callback=infix_error): """Perform infix token processing. Takes a callback that (takes infix tokens and returns a string) to handle inner infix calls. """
def get_infix_items(tokens, callback=infix_error):
    """Perform infix token processing.

    Takes a callback that (takes infix tokens and returns a string) to handle inner infix calls.
    """
    internal_assert(len(tokens) >= 3, "invalid infix tokens", tokens)
    (arg1, func, arg2), tokens = tokens[:3], tokens[3:]
    args = list(arg1) + list(arg2)
    while tokens:
        # fold what we have so far into a single inner infix call
        args = [callback([args, func, []])]
        (func, newarg), tokens = tokens[:2], tokens[2:]
        args += list(newarg)
    return func, args
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def comp_pipe_handle(loc, tokens): """Process pipe function composition."""
def comp_pipe_handle(loc, tokens):
    """Process pipe function composition."""
    internal_assert(len(tokens) >= 3 and len(tokens) % 2 == 1, "invalid composition pipe tokens", tokens)
    funcs = [tokens[0]]
    stars = []
    direction = None
    # tokens alternate: func, op, func, op, func, ...
    for i in range(1, len(tokens), 2):
        op, fn = tokens[i], tokens[i + 1]
        new_direction, star = comp_pipe_info(op)
        if direction is None:
            direction = new_direction
        elif new_direction != direction:
            raise CoconutDeferredSyntaxError("cannot mix function composition pipe operators with different directions", loc)
        funcs.append(fn)
        stars.append(star)
    if direction == "backwards":
        funcs.reverse()
        stars.reverse()
    func = funcs.pop(0)
    funcstars = zip(funcs, stars)
    return "_coconut_base_compose(" + func + ", " + ", ".join(
        "(%s, %s)" % (f, star) for f, star in funcstars
    ) + ")"
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def none_coalesce_handle(tokens): """Process the None-coalescing operator."""
def none_coalesce_handle(tokens):
    """Process the None-coalescing operator."""
    if len(tokens) == 1:
        return tokens[0]
    if tokens[0].isalnum():
        # simple name/literal: safe to evaluate twice
        return "({b} if {a} is None else {a})".format(
            a=tokens[0],
            b=none_coalesce_handle(tokens[1:]),
        )
    # complex expression: bind it once via a lambda parameter
    return "(lambda {x}: {b} if {x} is None else {x})({a})".format(
        x=none_coalesce_var,
        a=tokens[0],
        b=none_coalesce_handle(tokens[1:]),
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def attrgetter_atom_handle(loc, tokens): """Process attrgetter literals."""
def attrgetter_atom_handle(loc, tokens):
    """Process attrgetter literals."""
    name, args = attrgetter_atom_split(tokens)
    if args is None:
        # bare .attr -> attrgetter
        return '_coconut.operator.attrgetter("' + name + '")'
    if "." in name:
        raise CoconutDeferredSyntaxError("cannot have attribute access in implicit methodcaller partial", loc)
    if args == "":
        # .method() -> methodcaller with no arguments
        return '_coconut.operator.methodcaller("' + tokens[0] + '")'
    return '_coconut.operator.methodcaller("' + tokens[0] + '", ' + tokens[2] + ")"
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def lazy_list_handle(tokens): """Process lazy lists."""
def lazy_list_handle(tokens):
    """Process lazy lists."""
    if not tokens:
        return "_coconut.iter(())"
    # wrap each element in a thunk so evaluation is deferred
    thunks = "lambda: " + ", lambda: ".join(tokens)
    trailing_comma = "," if len(tokens) == 1 else ""
    return (
        "(%s() for %s in (" % (func_var, func_var)
        + thunks
        + trailing_comma
        + "))"
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def infix_handle(tokens): """Process infix calls."""
def infix_handle(tokens):
    """Process infix calls."""
    # recurse via get_infix_items so nested infix calls are also handled here
    func, args = get_infix_items(tokens, callback=infix_handle)
    return "(" + func + ")(" + ", ".join(args) + ")"
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def op_funcdef_handle(tokens): """Process infix defs."""
def op_funcdef_handle(tokens):
    """Process infix defs.

    Joins the infix pieces back into a normal function definition header,
    normalizing comma/space separation between the arguments.
    """
    func, base_args = get_infix_items(tokens)
    args = []
    for arg in base_args[:-1]:
        rstrip_arg = arg.rstrip()
        if not rstrip_arg.endswith(unwrapper):
            if not rstrip_arg.endswith(","):
                # ensure a comma separator between arguments
                arg += ", "
            elif arg.endswith(","):
                arg += " "
        args.append(arg)
    last_arg = base_args[-1]
    if last_arg.rstrip().endswith(","):
        # strip only the TRAILING comma; rsplit without maxsplit would
        # discard everything after the FIRST comma in the argument
        last_arg = last_arg.rsplit(",", 1)[0]
    args.append(last_arg)
    return func + "(" + "".join(args) + ")"
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def typedef_callable_handle(tokens): """Process -> to Callable inside type annotations."""
def typedef_callable_handle(tokens):
    """Process -> to Callable inside type annotations."""
    if len(tokens) == 1:
        # no argument type given: accept any arguments
        return '_coconut.typing.Callable[..., ' + tokens[0] + ']'
    if len(tokens) == 2:
        return '_coconut.typing.Callable[[' + tokens[0] + '], ' + tokens[1] + ']'
    raise CoconutInternalException("invalid Callable typedef tokens", tokens)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def math_funcdef_handle(tokens): """Process assignment function definition."""
def math_funcdef_handle(tokens):
    """Process assignment function definition."""
    internal_assert(len(tokens) == 2, "invalid assignment function definition tokens", tokens)
    # insert a space unless the body already starts on a new line
    separator = "" if tokens[1].startswith("\n") else " "
    return tokens[0] + separator + tokens[1]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def decorator_handle(tokens): """Process decorators."""
def decorator_handle(tokens):
    """Process decorators."""
    defs = []
    decorates = []
    for i, tok in enumerate(tokens):
        if "simple" in tok and len(tok) == 1:
            # simple decorator: usable directly with @
            decorates.append("@" + tok[0])
        elif "test" in tok and len(tok) == 1:
            # arbitrary expression: bind to a temp var first, then decorate with it
            varname = decorator_var + "_" + str(i)
            defs.append(varname + " = " + tok[0])
            decorates.append("@" + varname)
        else:
            raise CoconutInternalException("invalid decorator tokens", tok)
    return "\n".join(defs + decorates) + "\n"
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def match_handle(loc, tokens): """Process match blocks."""
def match_handle(loc, tokens):
    """Process match blocks."""
    if len(tokens) == 4:
        matches, match_type, item, stmts = tokens
        cond = None
    elif len(tokens) == 5:
        matches, match_type, item, cond, stmts = tokens
    else:
        raise CoconutInternalException("invalid match statement tokens", tokens)
    if match_type == "in":
        invert = False
    elif match_type == "not in":
        invert = True
    else:
        raise CoconutInternalException("invalid match type", match_type)
    matching = Matcher(loc, match_check_var)
    matching.match(matches, match_to_var)
    if cond:
        matching.add_guard(cond)
    # bind the subject to a temp var, then emit the generated match code
    return (
        match_to_var + " = " + item + "\n"
        + matching.build(stmts, invert=invert)
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def except_handle(tokens): """Process except statements."""
def except_handle(tokens):
    """Process except statements."""
    if len(tokens) == 1:
        errs, asname = tokens[0], None
    elif len(tokens) == 2:
        errs, asname = tokens
    else:
        raise CoconutInternalException("invalid except tokens", tokens)
    out = "except "
    # multiple exception types need a parenthesized tuple
    if "list" in tokens:
        out += "(" + errs + ")"
    else:
        out += errs
    if asname is not None:
        out += " as " + asname
    return out
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def subscriptgroup_handle(tokens): """Process subscriptgroups."""
def subscriptgroup_handle(tokens):
    """Process subscriptgroups."""
    internal_assert(0 < len(tokens) <= 3, "invalid slice args", tokens)
    args = []
    for arg in tokens:
        # empty slice components become None
        if not arg:
            arg = "None"
        args.append(arg)
    if len(args) == 1:
        return args[0]
    return "_coconut.slice(" + ", ".join(args) + ")"
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def itemgetter_handle(tokens): """Process implicit itemgetter partials."""
def itemgetter_handle(tokens):
    """Process implicit itemgetter partials."""
    internal_assert(len(tokens) == 2, "invalid implicit itemgetter args", tokens)
    op, args = tokens
    if op == "[":
        return "_coconut.operator.itemgetter(" + args + ")"
    if op == "$[":
        # iterator indexing goes through _coconut_igetitem
        return "_coconut.functools.partial(_coconut_igetitem, index=" + args + ")"
    raise CoconutInternalException("invalid implicit itemgetter type", op)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def namelist_handle(tokens): """Process inline nonlocal and global statements."""
def namelist_handle(tokens):
    """Process inline nonlocal and global statements."""
    if len(tokens) == 1:
        return tokens[0]
    if len(tokens) == 2:
        # declaration plus assignment: emit the declaration, then assign
        return tokens[0] + "\n" + tokens[0] + " = " + tokens[1]
    raise CoconutInternalException("invalid in-line nonlocal / global tokens", tokens)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def compose_item_handle(tokens): """Process function composition."""
def compose_item_handle(tokens):
    """Process function composition."""
    if len(tokens) < 1:
        raise CoconutInternalException("invalid function composition tokens", tokens)
    if len(tokens) == 1:
        return tokens[0]
    # composition applies right-to-left, so reverse into forward order
    return "_coconut_forward_compose(" + ", ".join(reversed(tokens)) + ")"
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def tco_return_handle(tokens): """Process tail-call-optimizable return statements."""
def tco_return_handle(tokens):
    """Process tail-call-optimizable return statements."""
    internal_assert(len(tokens) == 2, "invalid tail-call-optimizable return statement tokens", tokens)
    if tokens[1].startswith("()"):
        # no-argument call; tokens[1] contains \n
        return "return _coconut_tail_call(" + tokens[0] + ")" + tokens[1][2:]
    # drop the opening paren and splice the call args in after the function
    return "return _coconut_tail_call(" + tokens[0] + ", " + tokens[1][1:]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def split_func_name_args_params_handle(tokens): """Process splitting a function into name, params, and args."""
def split_func_name_args_params_handle(tokens):
    """Process splitting a function into name, params, and args."""
    internal_assert(len(tokens) == 2, "invalid function definition splitting tokens", tokens)
    func_name = tokens[0]
    func_args = []
    func_params = []
    for arg in tokens[1]:
        if len(arg) > 1 and arg[0] in ("*", "**"):
            # star arg: the name is the second element
            func_args.append(arg[1])
        elif arg[0] != "*":
            func_args.append(arg[0])
        func_params.append("".join(arg))
    return [
        func_name,
        ", ".join(func_args),
        "(" + ", ".join(func_params) + ")",
    ]
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def join_match_funcdef(tokens): """Join the pieces of a pattern-matching function together."""
def join_match_funcdef(tokens):
    """Join the pieces of a pattern-matching function together."""
    if len(tokens) == 2:
        (func, insert_after_docstring), body = tokens
        docstring = None
    elif len(tokens) == 3:
        (func, insert_after_docstring), docstring, body = tokens
    else:
        raise CoconutInternalException("invalid docstring insertion tokens", tokens)
    # insert_after_docstring and body are their own self-contained suites, but we
    # expect them to both be one suite, so we have to join them together
    insert_after_docstring, dedent = split_trailing_indent(insert_after_docstring)
    indent, body = split_leading_indent(body)
    indentation = collapse_indents(dedent + indent)
    return (
        func
        + (docstring if docstring is not None else "")
        + insert_after_docstring
        + indentation
        + body
    )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def where_stmt_handle(tokens): """Process a where statement."""
def where_stmt_handle(tokens):
    """Process a where statement."""
    internal_assert(len(tokens) == 2, "invalid where statement tokens", tokens)
    base_stmt, assignment_stmts = tokens
    # the assignments run first, then the base statement
    stmts = list(assignment_stmts) + [base_stmt]
    return "\n".join(stmts) + "\n"
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_grammar_names(): """Set names of grammar elements to their variable names."""
def set_grammar_names():
    """Set names of grammar elements to their variable names."""
    for varname, val in vars(Grammar).items():
        if isinstance(val, ParserElement):
            # give each parser element its attribute name for better errors/tracing
            setattr(Grammar, varname, val.setName(varname))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def version(which="num"): """Get the Coconut version."""
def version(which="num"):
    """Get the Coconut version."""
    try:
        return VERSIONS[which]
    except KeyError:
        raise CoconutException(
            "invalid version type " + ascii(which),
            extra="valid versions are " + ", ".join(VERSIONS),
        )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse(code="", mode="sys"): """Compile Coconut code."""
def parse(code="", mode="sys"):
    """Compile Coconut code."""
    # lazily initialize the shared compiler on first use
    if CLI.comp is None:
        setup()
    if mode not in PARSERS:
        raise CoconutException(
            "invalid parse mode " + ascii(mode),
            extra="valid modes are " + ", ".join(PARSERS),
        )
    return PARSERS[mode](CLI.comp)(code)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def auto_compilation(on=True): """Turn automatic compilation of Coconut files on or off."""
def auto_compilation(on=True):
    """Turn automatic compilation of Coconut files on or off."""
    if on:
        # install the importer at highest priority, but only once
        if coconut_importer not in sys.meta_path:
            sys.meta_path.insert(0, coconut_importer)
    else:
        try:
            sys.meta_path.remove(coconut_importer)
        except ValueError:
            # already removed; nothing to do
            pass
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def find_module(self, fullname, path=None): """Searches for a Coconut file of the given name and compiles it."""
def find_module(self, fullname, path=None):
    """Searches for a Coconut file of the given name and compiles it."""
    basepaths = [""] + list(sys.path)
    if fullname.startswith("."):
        if path is None:
            # we can't do a relative import if there's no package path
            return None
        fullname = fullname[1:]
        basepaths.insert(0, path)
    fullpath = os.path.join(*fullname.split("."))
    for head in basepaths:
        candidate = os.path.join(head, fullpath)
        filepath = candidate + self.ext
        dirpath = os.path.join(candidate, "__init__" + self.ext)
        if os.path.exists(filepath):
            self.run_compiler(filepath)
            # Coconut file was found and compiled, now let Python import it
            return None
        if os.path.exists(dirpath):
            self.run_compiler(candidate)
            # Coconut package was found and compiled, now let Python import it
            return None
    return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def report(self, morfs=None): """ Generate a part of json report for coveralls `morfs` is a list of modules or filenames. `outfile` is a file object to write the json to. """
def report(self, morfs=None):
    """
    Generate a part of json report for coveralls

    `morfs` is a list of modules or filenames.
    `outfile` is a file object to write the json to.
    """
    # resolve the file-reporter list across coverage.py API generations
    units = None
    if hasattr(self, 'find_code_units'):
        self.find_code_units(morfs)
    else:
        units = self.find_file_reporters(morfs)
    if units is None:
        if hasattr(self, 'code_units'):
            units = self.code_units
        else:
            units = self.file_reporters
    for cu in units:
        try:
            analyzed = self.coverage._analyze(cu)  # pylint: disable=W0212
            self.parse_file(cu, analyzed)
        except NoSource:
            if not self.config.ignore_errors:
                log.warning('No source for %s', cu.filename)
        except NotPython:
            # Only report errors for .py files, and only if we didn't
            # explicitly suppress those errors.
            if cu.should_be_python() and not self.config.ignore_errors:
                log.warning('Source file is not python %s', cu.filename)
        except KeyError:
            cov3x = __version__[0] < 4
            cov40 = __version__[0] == 4 and __version__[1] < 1
            if cov3x or cov40:
                raise CoverallsException(
                    'Old (<4.1) versions of coverage.py do not work '
                    'consistently on new versions of Python. Please '
                    'upgrade your coverage.py.'
                )
            raise
    return self.source_files
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_arcs(analysis): """ Hit stats for each branch. Returns a flat list where every four values represent a branch: 1. line-number 2. block-number (not used) 3. branch-number 4. hits (we only get 1/0 from coverage.py) """
def get_arcs(analysis):
    """
    Hit stats for each branch.

    Returns a flat list where every four values represent a branch:
    1. line-number
    2. block-number (not used)
    3. branch-number
    4. hits (we only get 1/0 from coverage.py)
    """
    if not analysis.has_arcs():
        return None
    branch_lines = analysis.branch_lines()
    branches = []
    # executed arcs get hits=1, missing arcs get hits=0
    for src, dst in analysis.arcs_executed():
        if src in branch_lines:
            branches.extend((src, 0, abs(dst), 1))
    for src, dst in analysis.arcs_missing():
        if src in branch_lines:
            branches.extend((src, 0, abs(dst), 0))
    return branches
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def parse_file(self, cu, analysis): """Generate data for single file"""
def parse_file(self, cu, analysis):
    """Generate data for single file"""
    if hasattr(analysis, 'parser'):
        # old coverage.py API (3.x)
        filename = cu.file_locator.relative_filename(cu.filename)
        source_lines = analysis.parser.lines
        with cu.source_file() as source_file:
            source = source_file.read()
        try:
            if sys.version_info < (3, 0):
                encoding = source_encoding(source)
                if encoding != 'utf-8':
                    source = source.decode(encoding).encode('utf-8')
        except UnicodeDecodeError:
            log.warning(
                'Source file %s can not be properly decoded, skipping. '
                'Please check if encoding declaration is ok',
                os.path.basename(cu.filename))
            return
    else:
        # newer coverage.py API
        if hasattr(cu, 'relative_filename'):
            filename = cu.relative_filename()
        else:
            filename = analysis.coverage.file_locator.relative_filename(
                cu.filename)
        token_lines = analysis.file_reporter.source_token_lines()
        source_lines = list(enumerate(token_lines))
        source = analysis.file_reporter.source()
    coverage_lines = [self.get_hits(i, analysis) for i in range(1, len(source_lines) + 1)]
    # ensure results are properly merged between platforms
    posix_filename = filename.replace(os.path.sep, '/')
    results = {
        'name': posix_filename,
        'source': source,
        'coverage': coverage_lines,
    }
    branches = self.get_arcs(analysis)
    if branches:
        results['branches'] = branches
    self.source_files.append(results)
def git_info():
    """Collect git metadata for the Coveralls payload.

    Returns a dict of the form ``{'git': {'branch': ..., 'head': {...},
    'remotes': [...]}}``, or ``{}`` when neither git nor the fallback
    environment variables provide complete head information.

    Example:
        "git": {
            "head": {
                "id": "5e837ce92220be64821128a70f6093f836dd2c05",
                "author_name": "Wil Gieseler",
                "author_email": "wil@example.com",
                "committer_name": "Wil Gieseler",
                "committer_email": "wil@example.com",
                "message": "depend on simplecov >= 0.7"
            },
            "branch": "master",
            "remotes": [{
                "name": "origin",
                "url": "https://github.com/lemurheavy/coveralls-ruby.git"
            }]
        }
    """
    try:
        # CI-provided branch variables take priority; fall back to asking
        # git itself.
        branch = (os.environ.get('APPVEYOR_REPO_BRANCH') or
                  os.environ.get('BUILDKITE_BRANCH') or
                  os.environ.get('CI_BRANCH') or
                  os.environ.get('CIRCLE_BRANCH') or
                  os.environ.get('GIT_BRANCH') or
                  os.environ.get('TRAVIS_BRANCH') or
                  os.environ.get('BRANCH_NAME') or
                  run_command('git', 'rev-parse', '--abbrev-ref', 'HEAD'))
        head = {
            'id': gitlog('%H'),
            'author_name': gitlog('%aN'),
            'author_email': gitlog('%ae'),
            'committer_name': gitlog('%cN'),
            'committer_email': gitlog('%ce'),
            'message': gitlog('%s'),
        }
        # Only fetch-remotes; `git remote -v` lists fetch and push rows.
        remotes = [{'name': line.split()[0], 'url': line.split()[1]}
                   for line in run_command('git', 'remote', '-v').splitlines()
                   if '(fetch)' in line]
    except (CoverallsException, EnvironmentError) as ex:
        # When git is not available, try env vars as per Coveralls docs:
        # https://docs.coveralls.io/mercurial-support
        # Additionally, these variables have been extended by GIT_URL and
        # GIT_REMOTE
        branch = os.environ.get('GIT_BRANCH')
        head = {
            'id': os.environ.get('GIT_ID'),
            'author_name': os.environ.get('GIT_AUTHOR_NAME'),
            'author_email': os.environ.get('GIT_AUTHOR_EMAIL'),
            'committer_name': os.environ.get('GIT_COMMITTER_NAME'),
            'committer_email': os.environ.get('GIT_COMMITTER_EMAIL'),
            'message': os.environ.get('GIT_MESSAGE'),
        }
        remotes = [{
            'name': os.environ.get('GIT_REMOTE'),
            'url': os.environ.get('GIT_URL'),
        }]
        # If even the env-var fallback is incomplete, give up on git data.
        if not all(head.values()):
            log.warning('Failed collecting git data. Are you running '
                        'coveralls inside a git repository? Is git installed?',
                        exc_info=ex)
            return {}
    return {
        'git': {
            'branch': branch,
            'head': head,
            'remotes': remotes,
        },
    }
def create_report(self):
    """Generate json dumped report for coveralls api."""
    payload = self.create_data()
    try:
        serialized = json.dumps(payload)
    except UnicodeDecodeError as exc:
        log.error('ERROR: While preparing JSON:', exc_info=exc)
        self.debug_bad_encoding(payload)
        raise

    # Never leak the repo token into the debug log.
    redacted = re.sub(r'"repo_token": "(.+?)"',
                      '"repo_token": "[secure]"', serialized)
    log.debug(redacted)

    files = payload['source_files']
    log.debug('==\nReporting %s files\n==\n', len(files))
    for entry in files:
        hit_count = sum(filter(None, entry['coverage']))
        log.debug('%s - %s/%s', entry['name'], hit_count,
                  len(entry['coverage']))
    return serialized
def save_report(self, file_path):
    """Write coveralls report to file."""
    try:
        content = self.create_report()
    except coverage.CoverageException as e:
        # Log and skip writing rather than leaving a half-written report.
        log.error('Failure to gather coverage:', exc_info=e)
    else:
        with open(file_path, 'w') as out:
            out.write(content)
def create_data(self, extra=None):
    r"""Generate the payload object for the coveralls api.

    The result is cached on self._data after the first call. When
    ``extra`` is given, its 'source_files' entries are merged into the
    payload.

    Example json:
        {
            "service_job_id": "1234567890",
            "service_name": "travis-ci",
            "source_files": [
                {"name": "example.py",
                 "source": "def four\n    4\nend",
                 "coverage": [null, 1, null]}
            ],
            "parallel": True
        }
    """
    if self._data:
        return self._data

    self._data = {'source_files': self.get_coverage()}
    self._data.update(git_info())
    self._data.update(self.config)
    if extra:
        try:
            self._data['source_files'].extend(extra['source_files'])
        except KeyError:
            log.warning('No data to be merged; does the json file contain '
                        '"source_files" data?')
    return self._data
def debug_bad_encoding(data):
    """Let's try to help user figure out what is at fault."""
    problem_files = set()
    # Probe every value per file: anything json can't serialize points at
    # the file whose content needs an encoding check.
    for entry in data['source_files']:
        for value in entry.values():
            try:
                json.dumps(value)
            except UnicodeDecodeError:
                problem_files.add(entry['name'])

    if problem_files:
        log.error('HINT: Following files cannot be decoded properly into '
                  'unicode. Check their content: %s',
                  ', '.join(problem_files))
def clean_params(params, drop_nones=True, recursive=True):
    """Clean up a dict of API parameters to be sent to the Coinbase API.

    Some endpoints require boolean options to be represented as integers.
    By default, will remove all keys whose value is None, so that they
    will not be sent to the API endpoint at all.

    Args:
        params: dict of request parameters.
        drop_nones: when True, keys whose value is None are omitted.
        recursive: when True, nested dicts are cleaned the same way.

    Returns:
        A new dict; the input is never mutated.
    """
    # `dict.items()` works identically on py2/py3 here, so there is no
    # need for the six.iteritems indirection the original used.
    cleaned = {}
    for key, value in params.items():
        if drop_nones and value is None:
            continue
        if recursive and isinstance(value, dict):
            value = clean_params(value, drop_nones, recursive)
        cleaned[key] = value
    return cleaned
def encode_params(params, **kwargs):
    """Clean and JSON-encode a dict of parameters."""
    # Cleaning options (drop_nones/recursive) are forwarded verbatim.
    return json.dumps(clean_params(params, **kwargs))
def check_uri_security(uri):
    """Warns if the URL is insecure."""
    # Anything other than https risks exposing credentials in transit.
    if urlparse(uri).scheme != 'https':
        message = (
            'WARNING: this client is sending a request to an insecure'
            ' API endpoint. Any API request you make may expose your API key and'
            ' secret to third parties. Consider using the default endpoint:\n\n'
            ' %s\n') % uri
        warnings.warn(message, UserWarning)
    return uri
def _build_session(self, auth_class, *args, **kwargs):
    """Internal helper for creating a requests `session` with the correct
    authentication handling.
    """
    session = requests.session()
    session.auth = auth_class(*args, **kwargs)
    # Defaults every Coinbase request needs: API version pin, JSON
    # content negotiation, and a client identifier.
    default_headers = {
        'CB-VERSION': self.API_VERSION,
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'User-Agent': 'coinbase/python/2.0',
    }
    session.headers.update(default_headers)
    return session
def _create_api_uri(self, *parts):
    """Internal helper for creating fully qualified endpoint URIs."""
    # Quote each path segment, then join them against the base URI.
    path = '/'.join(quote(part) for part in parts)
    return urljoin(self.BASE_API_URI, path)
def _request(self, method, *relative_path_parts, **kwargs):
    """Internal helper for creating HTTP requests to the Coinbase API.

    Raises an APIError if the response is not 20X. Otherwise, returns the
    response object. Not intended for direct use by API consumers.
    """
    uri = self._create_api_uri(*relative_path_parts)
    # JSON-encode a dict body; other body types pass through untouched.
    data = kwargs.get('data', None)
    if data and isinstance(data, dict):
        kwargs['data'] = encode_params(data)
    if self.VERIFY_SSL:
        kwargs.setdefault('verify', COINBASE_CRT_PATH)
    else:
        kwargs.setdefault('verify', False)
    # NOTE(review): this unconditionally overwrites the 'verify' value set
    # just above — the pinned COINBASE_CRT_PATH is replaced by a plain
    # True/False. Confirm whether certificate pinning was intended.
    kwargs.update(verify=self.VERIFY_SSL)
    # Dispatch via the session method named by `method` ('get', 'post', ...).
    response = getattr(self.session, method)(uri, **kwargs)
    return self._handle_response(response)
def _handle_response(self, response):
    """Internal helper for handling API responses from the Coinbase server.

    Raises the appropriate exceptions when necessary; otherwise, returns the
    response.
    """
    # Compare the numeric status code directly instead of the original
    # fragile string-prefix check (`str(code).startswith('2')`).
    if not 200 <= response.status_code < 300:
        raise build_api_error(response)
    return response
def _get(self, *args, **kwargs):
    """Get requests can be paginated, ensure we iterate through all the pages.

    Recursively follows `pagination.next_uri`, accumulating `data` entries
    across pages, and returns the final response with the merged data
    spliced into its content.
    """
    prev_data = kwargs.pop('prev_data', [])
    resp = self._request('get', *args, **kwargs)
    resp_content = resp._content
    if not resp_content:
        # No content so its obviously not paginated
        return resp

    # if resp._content is a bytes object, decode it so we can load it as json
    if isinstance(resp_content, bytes):
        resp_content = resp_content.decode('utf-8')

    content = json.loads(resp_content)
    if 'pagination' not in content:
        # Result is not paginated
        return resp

    page_info = content['pagination']
    if not page_info['next_uri']:
        # next_uri is None once the cursor has reached the last element;
        # merge everything accumulated so far into this final response.
        content['data'].extend(prev_data)
        # FIX: the original checked the already-decoded local (always str by
        # this point) and called `.decode()` on the json.dumps() result,
        # which raises AttributeError on Python 3. Check the untouched
        # resp._content and re-encode to keep its original bytes-ness.
        if isinstance(resp._content, bytes):
            resp._content = json.dumps(content).encode('utf-8')
        else:
            resp._content = json.dumps(content)
        return resp

    # More pages remain: remember this page's data and recurse with a
    # cursor positioned after the last item of the current page.
    prev_data.extend(content['data'])
    next_page_id = page_info['next_uri'].split('=')[-1]
    kwargs.update({
        'prev_data': prev_data,
        'params': {'starting_after': next_page_id},
    })
    return self._get(*args, **kwargs)
def _remove_non_methods():
    """Removes any object in dict that is not a registered method.

    Strips this module's namespace down to names that exist on
    PrettyTensor, so the module only re-exports registered methods.
    """
    cur_module = sys.modules[__name__]
    # Snapshot globals() first: deleting attributes while iterating the
    # live dict would raise RuntimeError.
    my_globals = dict(globals())
    # Import here so that it doesn't get added to the global namespace or deleted.
    # pylint: disable=g-import-not-at-top
    from prettytensor.pretty_tensor_class import PrettyTensor
    for name, _ in six.iteritems(my_globals):
        if not hasattr(PrettyTensor, name):
            delattr(cur_module, name)
    # Remove a couple of special ones....
    if hasattr(cur_module, 'bookkeeper'):
        delattr(cur_module, 'bookkeeper')
def regularizer(name, regularization_fn, name_filter='weights'):
    """Wraps a regularizer in a parameter-function.

    Args:
        name: The name scope for this regularizer.
        regularization_fn: A function with signature:
            fn(variable) -> loss `Tensor` or `None`.
        name_filter: A regex that will be used to filter variables by name.

    Returns:
        A parameter modification function that adds the loss to the
        REGULARIZATION_LOSSES graph key.
    """
    pattern = re.compile(name_filter)

    def fn(var_name, variable, phase):
        # Only regularize during training and only for matching names.
        if phase is pt.Phase.train and pattern.search(var_name):
            with tf.name_scope(None, name, [variable]):
                loss = regularization_fn(variable)
                if loss is not None:
                    tf.add_to_collection(
                        tf.GraphKeys.REGULARIZATION_LOSSES, loss)
        return variable

    return fn
def l2_regularizer(decay, name_filter='weights'):
    """Create an l2 regularizer."""
    def _scaled_l2(variable):
        # Standard weight decay: 0.5 * sum(x^2) scaled by `decay`.
        return tf.nn.l2_loss(variable) * decay

    return regularizer('l2_regularizer', _scaled_l2, name_filter=name_filter)
def l1_regularizer(decay, name_filter='weights'):
    """Create an l1 regularizer."""
    def _scaled_l1(variable):
        # Sum of absolute values scaled by `decay`.
        return tf.reduce_sum(tf.abs(variable)) * decay

    return regularizer('l1_regularizer', _scaled_l1, name_filter=name_filter)
def compose(*parameter_functions):
    """Composes multiple modification functions in order.

    Args:
        *parameter_functions: The functions to compose.

    Returns:
        A parameter modification function that consists of applying all
        the provided functions.
    """
    def composed_fn(var_name, variable, phase):
        # Thread the variable through each function left-to-right.
        result = variable
        for modifier in parameter_functions:
            result = modifier(var_name, result, phase)
        return result

    return composed_fn
def l1_regression_loss(y, target, name=None):
    """Calculates the sum of absolute errors between y and target.

    Args:
        y: the calculated values.
        target: the desired values.
        name: the name for this op, defaults to l1_regression.

    Returns:
        A tensorflow op.
    """
    with tf.name_scope(name, 'l1_regression', [y, target]) as scope:
        y = tf.convert_to_tensor(y, name='y')
        target = tf.convert_to_tensor(target, name='target')
        abs_errors = tf.abs(tf.subtract(y, target))
        return reduce_batch_sum(abs_errors, name=scope)
def l2_regression_sq_loss(y, target, name=None):
    """Calculates the sum of squared errors between y and target.

    Args:
        y: the calculated values.
        target: the desired values.
        name: the name for this op, defaults to l2_regression.

    Returns:
        A tensorflow op.
    """
    with tf.name_scope(name, 'l2_regression_sq', [y, target]) as scope:
        y = tf.convert_to_tensor(y, name='y')
        target = tf.convert_to_tensor(target, name='target')
        squared_errors = tf.square(tf.subtract(y, target))
        return reduce_batch_sum(squared_errors, name=scope)
def l2_regression_loss(y, target, name=None):
    """Calculates the square root of the SSE between y and target.

    Args:
        y: the calculated values.
        target: the desired values.
        name: the name for this op, defaults to l2_regression.

    Returns:
        A tensorflow op.
    """
    with tf.name_scope(name, 'l2_regression', [y, target]) as scope:
        y = tf.convert_to_tensor(y, name='y')
        target = tf.convert_to_tensor(target, name='target')
        sse = l2_regression_sq_loss(y, target, name=scope)
        return tf.sqrt(sse)
def cos_distance(t1, t2, epsilon=1e-12, name=None):
    """Cos distance between t1 and t2 and caps the gradient of the Square Root.

    Args:
        t1: A tensor.
        t2: A tensor that can be multiplied by t1.
        epsilon: A lower bound value for the distance (applied under the
            square root to avoid exploding gradients near zero).
        name: Optional name for this op.

    Returns:
        The cos distance between t1 and t2.
    """
    with tf.name_scope(name, 'cos_distance', [t1, t2]) as scope:
        t1 = tf.convert_to_tensor(t1, name='t1')
        t2 = tf.convert_to_tensor(t2, name='t2')
        # 1 / (|t1| * |t2|), bounded below by sqrt(epsilon).
        norm_product = length_squared(t1) * length_squared(t2)
        inv_norm = tf.rsqrt(tf.maximum(norm_product, epsilon))
        return tf.subtract(1.0, dot_product(t1, t2) * inv_norm, name=scope)
def dot_distance(t1, t2, name=None):
    """dot "distance" between t1 and t2.

    Args:
        t1: A tensor.
        t2: A tensor that is the same size as t1.
        name: Optional name for this op.

    Returns:
        The dot distance between t1 and t2.
    """
    with tf.name_scope(name, 'dot_distance', [t1, t2]) as scope:
        # Negated similarity: larger dot product means smaller "distance".
        similarity = dot_product(t1, t2, name=scope)
        return -similarity
def l2_distance_sq(t1, t2, name=None):
    """Square of l2 distance between t1 and t2.

    Args:
        t1: A tensor.
        t2: A tensor that is the same size as t1.
        name: Optional name for this op.

    Returns:
        The squared l2 distance between t1 and t2.
    """
    with tf.name_scope(name, 'l2_distance_sq', [t1, t2]) as scope:
        t1 = tf.convert_to_tensor(t1, name='t1')
        t2 = tf.convert_to_tensor(t2, name='t2')
        difference = tf.subtract(t1, t2)
        return length_squared(difference, name=scope)
def l2_distance(t1, t2, epsilon=1e-12, name=None):
    """l2 distance between t1 and t2 and caps the gradient of the Square Root.

    Args:
        t1: A tensor.
        t2: A tensor that is the same size as t1.
        epsilon: A lower bound for distance, useful to avoid sqrt of very
            small values that can blow up gradients.
        name: Optional name for this op.

    Returns:
        The l2 distance between t1 and t2.
    """
    with tf.name_scope(name, 'l2_distance', [t1, t2]) as scope:
        t1 = tf.convert_to_tensor(t1, name='t1')
        t2 = tf.convert_to_tensor(t2, name='t2')
        dist_sq = l2_distance_sq(t1, t2, scope)
        return tf.sqrt(tf.maximum(dist_sq, epsilon))
def l1_distance(t1, t2, name=None):
    """l1 distance between t1 and t2.

    Args:
        t1: A tensor.
        t2: A tensor that is the same size as t1.
        name: Optional name for this op.

    Returns:
        The l1 distance between t1 and t2.
    """
    with tf.name_scope(name, 'l1_distance', [t1, t2]) as scope:
        t1 = tf.convert_to_tensor(t1, name='t1')
        t2 = tf.convert_to_tensor(t2, name='t2')
        abs_diff = tf.abs(tf.subtract(t1, t2))
        # Sum over the last dimension (defaulting to 1 if rank is unknown).
        reduction_dim = _last_index(abs_diff, 1)
        return tf.reduce_sum(abs_diff, reduction_dim, name=scope)
def leaky_relu(x, name=None):
    """Creates a leaky_relu.

    This is an alternate non-linearity to relu. The leaky part of the relu
    may prevent dead Neurons in a model since the gradient doesn't go
    completely to 0.

    Args:
        x: The input tensor.
        name: Optional name for this op.

    Returns:
        x if x > 0 otherwise 0.01 * x.
    """
    with tf.name_scope(name, 'leaky_relu', [x]) as scope:
        x = tf.convert_to_tensor(x, name='x')
        leaked = 0.01 * x
        return tf.where(tf.less(x, 0.0), leaked, x, name=scope)
def softplus(x, scale=1.0, name=None):
    """Computes softplus with a scale factor to sharpen the hinge.

    This is an alternate non-linearity to relu. It has a similar shape, but
    it has a smooth transition from the linear part to 0.

    Args:
        x: A tensor.
        scale: A float that sharpens the curve.
        name: Optional name.

    Returns:
        y = log(1 + exp(scale * x)) / scale
    """
    # Fast path: unit scale is exactly the stock softplus.
    if scale == 1:
        return tf.nn.softplus(x)
    with tf.name_scope(name, 'softplus', [x]):
        scale = tf.convert_to_tensor(scale, dtype=x.dtype.base_dtype)
        return tf.nn.softplus(x * scale) / scale
def l1_normalize(x, dim, epsilon=1e-12, name=None):
    """l1 normalizes x.

    Args:
        x: The tensor to normalize.
        dim: The dimension to normalize along.
        epsilon: Lower bound on the norm, used to avoid exploding gradients
            as the norm approaches 0.
        name: Optional name for this op.

    Returns:
        x normalized along dim.
    """
    with tf.name_scope(name, 'l1_normalize', [x]) as scope:
        x = tf.convert_to_tensor(x, name='x')
        x = tf.verify_tensor_all_finite(x, 'Error at input %s' % scope)
        abs_sum = tf.reduce_sum(tf.abs(x), [dim], keep_dims=True)
        # Clamp the norm away from zero before dividing.
        x_norm = tf.maximum(abs_sum, epsilon)
        return tf.div(x, x_norm, name=scope)
def every_other(x, name=None):
    """Drops every other value from the tensor and returns a 1D tensor.

    This is useful if you are running multiple inputs through a model tower
    before splitting them and you want to line it up with some other data.

    Args:
        x: the target tensor.
        name: the name for this op, defaults to every_other.

    Returns:
        A tensorflow op.
    """
    with tf.name_scope(name, 'every_other', [x]) as scope:
        x = tf.convert_to_tensor(x, name='x')
        # Pair up consecutive elements, keep the first of each pair,
        # then flatten back to 1D.
        pairs = tf.reshape(x, [-1, 2])
        firsts = tf.slice(pairs, [0, 0], [-1, 1])
        return tf.reshape(firsts, [-1], name=scope)
def dot_product(t1, t2, keep_dims=False, name=None, reduction_dim=None):
    """Computes the dot product of t1 and t2.

    Args:
        t1: A rank 2 tensor.
        t2: A tensor that is the same size as t1.
        keep_dims: If true, reduction does not change the rank of the input.
        name: Optional name for this op.
        reduction_dim: The dimension to reduce, by default choose the last
            one and if no shape is specified guess 1.

    Returns:
        The dot product.
    """
    with tf.name_scope(name, 'dot', [t1, t2]) as scope:
        t1 = tf.convert_to_tensor(t1, name='t1')
        t2 = tf.convert_to_tensor(t2, name='t2')
        mul = tf.multiply(t1, t2)
        # FIX: test against None, not truthiness — the original `if not
        # reduction_dim` silently ignored an explicit reduction_dim=0.
        if reduction_dim is None:
            reduction_dim = _last_index(mul, 1)
        return tf.reduce_sum(mul, reduction_dim, name=scope,
                             keep_dims=keep_dims)
def length_squared(x, keep_dims=False, name=None, reduction_dim=None):
    """Computes the squared length of x.

    Args:
        x: A tensor.
        keep_dims: If true, reduction does not change the rank of the input.
        name: Optional name for this op.
        reduction_dim: The dimension to reduce, by default choose the last
            one and if no shape is specified guess 1.

    Returns:
        The squared length of x.
    """
    with tf.name_scope(name, 'length_squared', [x]) as scope:
        x = tf.convert_to_tensor(x, name='x')
        # FIX: test against None, not truthiness — the original `if not
        # reduction_dim` silently ignored an explicit reduction_dim=0.
        if reduction_dim is None:
            reduction_dim = _last_index(x, 1)
        return tf.reduce_sum(
            tf.square(x), reduction_dim, keep_dims=keep_dims, name=scope)
def unzip(x, split_dim, current_length, num_splits=2, name=None):
    """Splits a tensor by unzipping along the split_dim.

    For example the following array split into 2 would be:
        [1, 2, 3, 4, 5, 6] -> [1, 3, 5], [2, 4, 6]
    and by 3:
        [1, 2, 3, 4] -> [1, 4], [2], [3]

    Args:
        x: The tensor to split.
        split_dim: The dimension to split along.
        current_length: Current length along the split_dim.
        num_splits: The number of splits.
        name: Optional name for this op.

    Returns:
        A length num_splits sequence.
    """
    with tf.name_scope(name, 'unzip', [x]) as scope:
        x = tf.convert_to_tensor(x, name='x')
        # There is probably a more efficient way to do this.
        all_splits = tf.split(
            value=x, num_or_size_splits=current_length, axis=split_dim,
            name=scope)
        # Deal the slices round-robin into num_splits buckets.
        buckets = [[] for _ in xrange(num_splits)]
        for i, piece in enumerate(all_splits):
            buckets[i % num_splits].append(piece)
        return [tf.concat(bucket, split_dim) for bucket in buckets]
def _last_index(x, default_dim):
    """Returns the last dimension's index or default_dim if x has no shape."""
    ndims = x.get_shape().ndims
    if ndims is None:
        # Rank unknown at graph-construction time; fall back to the guess.
        return default_dim
    return ndims - 1
def _all_dims(x, default_dims=None):
    """Returns a list of dims in x or default_dims if the rank is unknown.

    Args:
        x: A tensor-like object exposing get_shape().
        default_dims: Value to return when the rank is unknown.

    Returns:
        list(range(rank)) when the rank is known, otherwise default_dims.
    """
    ndims = x.get_shape().ndims
    if ndims is None:
        return default_dims
    # `range` behaves identically to the original six.moves xrange here and
    # keeps the function runnable on Python 3 without the six shim.
    return list(range(ndims))
def he_init(n_inputs, n_outputs, activation_fn, uniform=True):
    """Sets the parameter initialization using the method described.

    This method is designed to keep the scale of the gradients roughly the
    same in all layers with ReLU activations.

    He et al. (2015): Delving deep into rectifiers: surpassing human-level
    performance on imageNet classification. International Conference on
    Computer Vision.

    For activations other than ReLU and ReLU6, this method uses Xavier
    initialization as in xavier_init().

    Args:
        n_inputs: The number of input nodes into each output.
        n_outputs: The number of output nodes for each input.
        activation_fn: Activation function used in this layer.
        uniform: If uniform distribution will be used for Xavier
            initialization. Normal distribution will be used if False.

    Returns:
        An initializer.
    """
    # FIX: collections.Sequence was removed in Python 3.10; resolve the ABC
    # through collections.abc when available (falls back for Python 2).
    sequence_type = getattr(collections, 'abc', collections).Sequence

    def in_relu_family(fn):
        # An (activation, args...) sequence may be passed; the function is
        # its first element.
        if isinstance(fn, sequence_type):
            fn = fn[0]
        return fn in (tf.nn.relu, tf.nn.relu6)

    if in_relu_family(activation_fn):
        stddev = math.sqrt(2.0 / n_inputs)
        # TODO(): Evaluates truncated_normal_initializer.
        return tf.random_normal_initializer(stddev=stddev)
    return xavier_init(n_inputs, n_outputs, uniform)
def xavier_init(n_inputs, n_outputs, uniform=True):
    """Set the parameter initialization using the method described.

    This method is designed to keep the scale of the gradients roughly the
    same in all layers.

    Xavier Glorot and Yoshua Bengio (2010): Understanding the difficulty of
    training deep feedforward neural networks. International conference on
    artificial intelligence and statistics.

    Args:
        n_inputs: The number of input nodes into each output.
        n_outputs: The number of output nodes for each input.
        uniform: If true use a uniform distribution, otherwise use a normal.

    Returns:
        An initializer.
    """
    fan_sum = n_inputs + n_outputs
    if uniform:
        # 6 was used in the paper.
        limit = math.sqrt(6.0 / fan_sum)
        return tf.random_uniform_initializer(-limit, limit)
    # 3 gives us approximately the same limits as above since this repicks
    # values greater than 2 standard deviations from the mean.
    stddev = math.sqrt(3.0 / fan_sum)
    return tf.truncated_normal_initializer(stddev=stddev)
def spatial_slice_zeros(x):
    """Experimental summary that shows how many planes are unused for a batch."""
    # A plane is "unused" when every value across batch/height/width is <= 0.
    all_nonpositive = tf.reduce_all(tf.less_equal(x, 0.0), [0, 1, 2])
    return tf.cast(all_nonpositive, tf.float32)
def _pool(input_layer, pool_fn, kernel, stride, edges, name):
    """Applies a pooling function.

    Args:
        input_layer: The rank-4 (NHWC) layer to pool.
        pool_fn: The tf pooling op to apply (e.g. tf.nn.avg_pool).
        kernel: Patch size spec (scalar or sequence; expanded by _kernel).
        stride: Stride spec (scalar or sequence; expanded by _stride).
        edges: Padding mode passed through to the pooling op.
        name: The name for the resulting op.

    Returns:
        The input_layer wrapped around the pooled tensor.
    """
    input_layer.get_shape().assert_has_rank(4)
    # NOTE(review): assert_has_rank(4) already raises for any known rank
    # other than 4 and passes for unknown rank, so this check appears
    # unreachable — confirm before removing.
    if input_layer.get_shape().ndims not in (None, 4):
        raise ValueError('Pooling requires a rank 4 tensor: %s' %
                         input_layer.get_shape())
    # Expand shorthand kernel/stride specs into [batch, h, w, depth] form.
    kernel = _kernel(kernel)
    stride = _stride(stride)
    size = [1, kernel[0], kernel[1], 1]
    new_head = pool_fn(input_layer.tensor, size, stride, edges, name=name)
    return input_layer.with_tensor(new_head)
def average_pool(input_layer, kernel, stride, edges=PAD_SAME, name=PROVIDED):
    """Performs average pooling.

    `kernel` is the patch that will be pooled and it describes the pooling
    along each of the 4 dimensions. `stride` is how big to take each step.

    Because more often than not, pooling is only done on the width and
    height of the image, the following shorthands are supported:

    * scalar (e.g. 3): Square pooling on the image
        (`[b, c, r, d] = [1, 3, 3, 1]`).
    * singleton list (e.g. [3]): Square pooling on the image
        (`[b, c, r, d] = [1, 3, 3, 1]`).
    * list of length 2 (e.g. [3, 2]): Square pooling on the image
        (`[b, c, r, d] = [1, 3, 2, 1]`).

    Args:
        input_layer: The chainable object, supplied.
        kernel: The size of the patch for the pool, either an int or a
            length 1 or 2 sequence (if length 1 or int, it is expanded).
        stride: The strides as a length 1, 2 or 4 sequence or an integer.
            If an int, length 1 or 2, the stride in the first and last
            dimensions are 1.
        edges: Either `pt.PAD_SAME` or `pt.PAD_VALID` to control the
            padding.
        name: The name for this operation is also used to create/find the
            parameter variables.

    Returns:
        Handle to this layer.
    """
    # Thin wrapper: all shorthand expansion happens in _pool.
    return _pool(input_layer, tf.nn.avg_pool, kernel, stride, edges, name)