id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
224,300
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._create_breadcrumbs
def _create_breadcrumbs(self, relpath): """Create filesystem browsing breadcrumb navigation. That is, make each path segment into a clickable element that takes you to that dir. """ if relpath == '.': breadcrumbs = [] else: path_parts = [os.path.basename(self._root)] + relpath.split(os.path.sep) path_links = ['/'.join(path_parts[1:i + 1]) for i, name in enumerate(path_parts)] breadcrumbs = [{'link_path': link_path, 'name': name} for link_path, name in zip(path_links, path_parts)] return breadcrumbs
python
def _create_breadcrumbs(self, relpath): if relpath == '.': breadcrumbs = [] else: path_parts = [os.path.basename(self._root)] + relpath.split(os.path.sep) path_links = ['/'.join(path_parts[1:i + 1]) for i, name in enumerate(path_parts)] breadcrumbs = [{'link_path': link_path, 'name': name} for link_path, name in zip(path_links, path_parts)] return breadcrumbs
[ "def", "_create_breadcrumbs", "(", "self", ",", "relpath", ")", ":", "if", "relpath", "==", "'.'", ":", "breadcrumbs", "=", "[", "]", "else", ":", "path_parts", "=", "[", "os", ".", "path", ".", "basename", "(", "self", ".", "_root", ")", "]", "+", ...
Create filesystem browsing breadcrumb navigation. That is, make each path segment into a clickable element that takes you to that dir.
[ "Create", "filesystem", "browsing", "breadcrumb", "navigation", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L341-L353
224,301
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._default_template_args
def _default_template_args(self, content_template): """Initialize template args.""" def include(text, args): template_name = pystache.render(text, args) return self._renderer.render_name(template_name, args) # Our base template calls include on the content_template. ret = {'content_template': content_template} ret['include'] = lambda text: include(text, ret) return ret
python
def _default_template_args(self, content_template): def include(text, args): template_name = pystache.render(text, args) return self._renderer.render_name(template_name, args) # Our base template calls include on the content_template. ret = {'content_template': content_template} ret['include'] = lambda text: include(text, ret) return ret
[ "def", "_default_template_args", "(", "self", ",", "content_template", ")", ":", "def", "include", "(", "text", ",", "args", ")", ":", "template_name", "=", "pystache", ".", "render", "(", "text", ",", "args", ")", "return", "self", ".", "_renderer", ".", ...
Initialize template args.
[ "Initialize", "template", "args", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L355-L363
224,302
pantsbuild/pants
src/python/pants/option/arg_splitter.py
ArgSplitter._consume_flags
def _consume_flags(self): """Read flags until we encounter the first token that isn't a flag.""" flags = [] while self._at_flag(): flag = self._unconsumed_args.pop() if not self._check_for_help_request(flag): flags.append(flag) return flags
python
def _consume_flags(self): flags = [] while self._at_flag(): flag = self._unconsumed_args.pop() if not self._check_for_help_request(flag): flags.append(flag) return flags
[ "def", "_consume_flags", "(", "self", ")", ":", "flags", "=", "[", "]", "while", "self", ".", "_at_flag", "(", ")", ":", "flag", "=", "self", ".", "_unconsumed_args", ".", "pop", "(", ")", "if", "not", "self", ".", "_check_for_help_request", "(", "flag...
Read flags until we encounter the first token that isn't a flag.
[ "Read", "flags", "until", "we", "encounter", "the", "first", "token", "that", "isn", "t", "a", "flag", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/option/arg_splitter.py#L232-L239
224,303
pantsbuild/pants
src/python/pants/option/arg_splitter.py
ArgSplitter._descope_flag
def _descope_flag(self, flag, default_scope): """If the flag is prefixed by its scope, in the old style, extract the scope. Otherwise assume it belongs to default_scope. returns a pair (scope, flag). """ for scope_prefix, scope_info in self._known_scoping_prefixes: for flag_prefix in ['--', '--no-']: prefix = flag_prefix + scope_prefix if flag.startswith(prefix): scope = scope_info.scope if scope_info.category == ScopeInfo.SUBSYSTEM and default_scope != GLOBAL_SCOPE: # We allow goal.task --subsystem-foo to refer to the task-level subsystem instance, # i.e., as if qualified by --subsystem-goal-task-foo. # Note that this means that we can't set a task option on the cmd-line if its # name happens to start with a subsystem scope. # TODO: Either fix this or at least detect such options and warn. task_subsystem_scope = '{}.{}'.format(scope_info.scope, default_scope) if task_subsystem_scope in self._known_scopes: # Such a task subsystem actually exists. scope = task_subsystem_scope return scope, flag_prefix + flag[len(prefix):] return default_scope, flag
python
def _descope_flag(self, flag, default_scope): for scope_prefix, scope_info in self._known_scoping_prefixes: for flag_prefix in ['--', '--no-']: prefix = flag_prefix + scope_prefix if flag.startswith(prefix): scope = scope_info.scope if scope_info.category == ScopeInfo.SUBSYSTEM and default_scope != GLOBAL_SCOPE: # We allow goal.task --subsystem-foo to refer to the task-level subsystem instance, # i.e., as if qualified by --subsystem-goal-task-foo. # Note that this means that we can't set a task option on the cmd-line if its # name happens to start with a subsystem scope. # TODO: Either fix this or at least detect such options and warn. task_subsystem_scope = '{}.{}'.format(scope_info.scope, default_scope) if task_subsystem_scope in self._known_scopes: # Such a task subsystem actually exists. scope = task_subsystem_scope return scope, flag_prefix + flag[len(prefix):] return default_scope, flag
[ "def", "_descope_flag", "(", "self", ",", "flag", ",", "default_scope", ")", ":", "for", "scope_prefix", ",", "scope_info", "in", "self", ".", "_known_scoping_prefixes", ":", "for", "flag_prefix", "in", "[", "'--'", ",", "'--no-'", "]", ":", "prefix", "=", ...
If the flag is prefixed by its scope, in the old style, extract the scope. Otherwise assume it belongs to default_scope. returns a pair (scope, flag).
[ "If", "the", "flag", "is", "prefixed", "by", "its", "scope", "in", "the", "old", "style", "extract", "the", "scope", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/option/arg_splitter.py#L241-L263
224,304
pantsbuild/pants
src/python/pants/scm/git.py
Git.detect_worktree
def detect_worktree(cls, binary='git', subdir=None): """Detect the git working tree above cwd and return it; else, return None. :param string binary: The path to the git binary to use, 'git' by default. :param string subdir: The path to start searching for a git repo. :returns: path to the directory where the git working tree is rooted. :rtype: string """ # TODO(John Sirois): This is only used as a factory for a Git instance in # pants.base.build_environment.get_scm, encapsulate in a true factory method. cmd = [binary, 'rev-parse', '--show-toplevel'] try: if subdir: with pushd(subdir): process, out = cls._invoke(cmd) else: process, out = cls._invoke(cmd) cls._check_result(cmd, process.returncode, raise_type=Scm.ScmException) except Scm.ScmException: return None return cls._cleanse(out)
python
def detect_worktree(cls, binary='git', subdir=None): # TODO(John Sirois): This is only used as a factory for a Git instance in # pants.base.build_environment.get_scm, encapsulate in a true factory method. cmd = [binary, 'rev-parse', '--show-toplevel'] try: if subdir: with pushd(subdir): process, out = cls._invoke(cmd) else: process, out = cls._invoke(cmd) cls._check_result(cmd, process.returncode, raise_type=Scm.ScmException) except Scm.ScmException: return None return cls._cleanse(out)
[ "def", "detect_worktree", "(", "cls", ",", "binary", "=", "'git'", ",", "subdir", "=", "None", ")", ":", "# TODO(John Sirois): This is only used as a factory for a Git instance in", "# pants.base.build_environment.get_scm, encapsulate in a true factory method.", "cmd", "=", "[", ...
Detect the git working tree above cwd and return it; else, return None. :param string binary: The path to the git binary to use, 'git' by default. :param string subdir: The path to start searching for a git repo. :returns: path to the directory where the git working tree is rooted. :rtype: string
[ "Detect", "the", "git", "working", "tree", "above", "cwd", "and", "return", "it", ";", "else", "return", "None", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/scm/git.py#L40-L60
224,305
pantsbuild/pants
src/python/pants/scm/git.py
Git.clone
def clone(cls, repo_url, dest, binary='git'): """Clone the repo at repo_url into dest. :param string binary: The path to the git binary to use, 'git' by default. :returns: an instance of this class representing the cloned repo. :rtype: Git """ cmd = [binary, 'clone', repo_url, dest] process, out = cls._invoke(cmd) cls._check_result(cmd, process.returncode) return cls(binary=binary, worktree=dest)
python
def clone(cls, repo_url, dest, binary='git'): cmd = [binary, 'clone', repo_url, dest] process, out = cls._invoke(cmd) cls._check_result(cmd, process.returncode) return cls(binary=binary, worktree=dest)
[ "def", "clone", "(", "cls", ",", "repo_url", ",", "dest", ",", "binary", "=", "'git'", ")", ":", "cmd", "=", "[", "binary", ",", "'clone'", ",", "repo_url", ",", "dest", "]", "process", ",", "out", "=", "cls", ".", "_invoke", "(", "cmd", ")", "cl...
Clone the repo at repo_url into dest. :param string binary: The path to the git binary to use, 'git' by default. :returns: an instance of this class representing the cloned repo. :rtype: Git
[ "Clone", "the", "repo", "at", "repo_url", "into", "dest", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/scm/git.py#L63-L73
224,306
pantsbuild/pants
src/python/pants/scm/git.py
Git._invoke
def _invoke(cls, cmd): """Invoke the given command, and return a tuple of process and raw binary output. stderr flows to wherever its currently mapped for the parent process - generally to the terminal where the user can see the error. :param list cmd: The command in the form of a list of strings :returns: The completed process object and its standard output. :raises: Scm.LocalException if there was a problem exec'ing the command at all. """ try: process = subprocess.Popen(cmd, stdout=subprocess.PIPE) except OSError as e: # Binary DNE or is not executable raise cls.LocalException('Failed to execute command {}: {}'.format(' '.join(cmd), e)) out, _ = process.communicate() return process, out
python
def _invoke(cls, cmd): try: process = subprocess.Popen(cmd, stdout=subprocess.PIPE) except OSError as e: # Binary DNE or is not executable raise cls.LocalException('Failed to execute command {}: {}'.format(' '.join(cmd), e)) out, _ = process.communicate() return process, out
[ "def", "_invoke", "(", "cls", ",", "cmd", ")", ":", "try", ":", "process", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "except", "OSError", "as", "e", ":", "# Binary DNE or is not executable", "raise", ...
Invoke the given command, and return a tuple of process and raw binary output. stderr flows to wherever its currently mapped for the parent process - generally to the terminal where the user can see the error. :param list cmd: The command in the form of a list of strings :returns: The completed process object and its standard output. :raises: Scm.LocalException if there was a problem exec'ing the command at all.
[ "Invoke", "the", "given", "command", "and", "return", "a", "tuple", "of", "process", "and", "raw", "binary", "output", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/scm/git.py#L76-L92
224,307
pantsbuild/pants
src/python/pants/scm/git.py
Git._get_upstream
def _get_upstream(self): """Return the remote and remote merge branch for the current branch""" if not self._remote or not self._branch: branch = self.branch_name if not branch: raise Scm.LocalException('Failed to determine local branch') def get_local_config(key): value = self._check_output(['config', '--local', '--get', key], raise_type=Scm.LocalException) return value.strip() self._remote = self._remote or get_local_config('branch.{}.remote'.format(branch)) self._branch = self._branch or get_local_config('branch.{}.merge'.format(branch)) return self._remote, self._branch
python
def _get_upstream(self): if not self._remote or not self._branch: branch = self.branch_name if not branch: raise Scm.LocalException('Failed to determine local branch') def get_local_config(key): value = self._check_output(['config', '--local', '--get', key], raise_type=Scm.LocalException) return value.strip() self._remote = self._remote or get_local_config('branch.{}.remote'.format(branch)) self._branch = self._branch or get_local_config('branch.{}.merge'.format(branch)) return self._remote, self._branch
[ "def", "_get_upstream", "(", "self", ")", ":", "if", "not", "self", ".", "_remote", "or", "not", "self", ".", "_branch", ":", "branch", "=", "self", ".", "branch_name", "if", "not", "branch", ":", "raise", "Scm", ".", "LocalException", "(", "'Failed to d...
Return the remote and remote merge branch for the current branch
[ "Return", "the", "remote", "and", "remote", "merge", "branch", "for", "the", "current", "branch" ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/scm/git.py#L261-L275
224,308
pantsbuild/pants
src/python/pants/scm/git.py
GitRepositoryReader.listdir
def listdir(self, relpath): """Like os.listdir, but reads from the git repository. :returns: a list of relative filenames """ path = self._realpath(relpath) if not path.endswith('/'): raise self.NotADirException(self.rev, relpath) if path[0] == '/' or path.startswith('../'): return os.listdir(path) tree = self._read_tree(path[:-1]) return list(tree.keys())
python
def listdir(self, relpath): path = self._realpath(relpath) if not path.endswith('/'): raise self.NotADirException(self.rev, relpath) if path[0] == '/' or path.startswith('../'): return os.listdir(path) tree = self._read_tree(path[:-1]) return list(tree.keys())
[ "def", "listdir", "(", "self", ",", "relpath", ")", ":", "path", "=", "self", ".", "_realpath", "(", "relpath", ")", "if", "not", "path", ".", "endswith", "(", "'/'", ")", ":", "raise", "self", ".", "NotADirException", "(", "self", ".", "rev", ",", ...
Like os.listdir, but reads from the git repository. :returns: a list of relative filenames
[ "Like", "os", ".", "listdir", "but", "reads", "from", "the", "git", "repository", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/scm/git.py#L438-L452
224,309
pantsbuild/pants
src/python/pants/scm/git.py
GitRepositoryReader.open
def open(self, relpath): """Read a file out of the repository at a certain revision. This is complicated because, unlike vanilla git cat-file, this follows symlinks in the repo. If a symlink points outside repo, the file is read from the filesystem; that's because presumably whoever put that symlink there knew what they were doing. """ path = self._realpath(relpath) if path.endswith('/'): raise self.IsDirException(self.rev, relpath) if path.startswith('../') or path[0] == '/': yield open(path, 'rb') return object_type, data = self._read_object_from_repo(rev=self.rev, relpath=path) if object_type == b'tree': raise self.IsDirException(self.rev, relpath) assert object_type == b'blob' yield io.BytesIO(data)
python
def open(self, relpath): path = self._realpath(relpath) if path.endswith('/'): raise self.IsDirException(self.rev, relpath) if path.startswith('../') or path[0] == '/': yield open(path, 'rb') return object_type, data = self._read_object_from_repo(rev=self.rev, relpath=path) if object_type == b'tree': raise self.IsDirException(self.rev, relpath) assert object_type == b'blob' yield io.BytesIO(data)
[ "def", "open", "(", "self", ",", "relpath", ")", ":", "path", "=", "self", ".", "_realpath", "(", "relpath", ")", "if", "path", ".", "endswith", "(", "'/'", ")", ":", "raise", "self", ".", "IsDirException", "(", "self", ".", "rev", ",", "relpath", ...
Read a file out of the repository at a certain revision. This is complicated because, unlike vanilla git cat-file, this follows symlinks in the repo. If a symlink points outside repo, the file is read from the filesystem; that's because presumably whoever put that symlink there knew what they were doing.
[ "Read", "a", "file", "out", "of", "the", "repository", "at", "a", "certain", "revision", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/scm/git.py#L455-L475
224,310
pantsbuild/pants
src/python/pants/scm/git.py
GitRepositoryReader._realpath
def _realpath(self, relpath): """Follow symlinks to find the real path to a file or directory in the repo. :returns: if the expanded path points to a file, the relative path to that file; if a directory, the relative path + '/'; if a symlink outside the repo, a path starting with / or ../. """ obj, path_so_far = self._read_object(relpath, MAX_SYMLINKS_IN_REALPATH) if isinstance(obj, self.Symlink): raise self.SymlinkLoopException(self.rev, relpath) return path_so_far
python
def _realpath(self, relpath): obj, path_so_far = self._read_object(relpath, MAX_SYMLINKS_IN_REALPATH) if isinstance(obj, self.Symlink): raise self.SymlinkLoopException(self.rev, relpath) return path_so_far
[ "def", "_realpath", "(", "self", ",", "relpath", ")", ":", "obj", ",", "path_so_far", "=", "self", ".", "_read_object", "(", "relpath", ",", "MAX_SYMLINKS_IN_REALPATH", ")", "if", "isinstance", "(", "obj", ",", "self", ".", "Symlink", ")", ":", "raise", ...
Follow symlinks to find the real path to a file or directory in the repo. :returns: if the expanded path points to a file, the relative path to that file; if a directory, the relative path + '/'; if a symlink outside the repo, a path starting with / or ../.
[ "Follow", "symlinks", "to", "find", "the", "real", "path", "to", "a", "file", "or", "directory", "in", "the", "repo", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/scm/git.py#L478-L488
224,311
pantsbuild/pants
src/python/pants/scm/git.py
GitRepositoryReader._read_tree
def _read_tree(self, path): """Given a revision and path, parse the tree data out of git cat-file output. :returns: a dict from filename -> [list of Symlink, Dir, and File objects] """ path = self._fixup_dot_relative(path) tree = self._trees.get(path) if tree: return tree tree = {} object_type, tree_data = self._read_object_from_repo(rev=self.rev, relpath=path) assert object_type == b'tree' # The tree data here is (mode ' ' filename \0 20-byte-sha)* # It's transformed to a list of byte chars to allow iteration. # See http://python-future.org/compatible_idioms.html#byte-string-literals. tree_data = [bytes([b]) for b in tree_data] i = 0 while i < len(tree_data): start = i while tree_data[i] != b' ': i += 1 mode = b''.join(tree_data[start:i]) i += 1 # skip space start = i while tree_data[i] != NUL: i += 1 name = b''.join(tree_data[start:i]) sha = b''.join(tree_data[i + 1:i + 1 + GIT_HASH_LENGTH]) sha_hex = binascii.hexlify(sha) i += 1 + GIT_HASH_LENGTH if mode == b'120000': tree[name] = self.Symlink(name, sha_hex) elif mode == b'40000': tree[name] = self.Dir(name, sha_hex) else: tree[name] = self.File(name, sha_hex) self._trees[path] = tree return tree
python
def _read_tree(self, path): path = self._fixup_dot_relative(path) tree = self._trees.get(path) if tree: return tree tree = {} object_type, tree_data = self._read_object_from_repo(rev=self.rev, relpath=path) assert object_type == b'tree' # The tree data here is (mode ' ' filename \0 20-byte-sha)* # It's transformed to a list of byte chars to allow iteration. # See http://python-future.org/compatible_idioms.html#byte-string-literals. tree_data = [bytes([b]) for b in tree_data] i = 0 while i < len(tree_data): start = i while tree_data[i] != b' ': i += 1 mode = b''.join(tree_data[start:i]) i += 1 # skip space start = i while tree_data[i] != NUL: i += 1 name = b''.join(tree_data[start:i]) sha = b''.join(tree_data[i + 1:i + 1 + GIT_HASH_LENGTH]) sha_hex = binascii.hexlify(sha) i += 1 + GIT_HASH_LENGTH if mode == b'120000': tree[name] = self.Symlink(name, sha_hex) elif mode == b'40000': tree[name] = self.Dir(name, sha_hex) else: tree[name] = self.File(name, sha_hex) self._trees[path] = tree return tree
[ "def", "_read_tree", "(", "self", ",", "path", ")", ":", "path", "=", "self", ".", "_fixup_dot_relative", "(", "path", ")", "tree", "=", "self", ".", "_trees", ".", "get", "(", "path", ")", "if", "tree", ":", "return", "tree", "tree", "=", "{", "}"...
Given a revision and path, parse the tree data out of git cat-file output. :returns: a dict from filename -> [list of Symlink, Dir, and File objects]
[ "Given", "a", "revision", "and", "path", "parse", "the", "tree", "data", "out", "of", "git", "cat", "-", "file", "output", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/scm/git.py#L558-L597
224,312
pantsbuild/pants
src/python/pants/build_graph/bundle_mixin.py
BundleMixin.register_options
def register_options(cls, register): """Register options common to all bundle tasks.""" super(BundleMixin, cls).register_options(register) register('--archive', choices=list(archive.TYPE_NAMES), fingerprint=True, help='Create an archive of this type from the bundle. ' 'This option is also defined in app target. ' 'Precedence is CLI option > target option > pants.ini option.') # `target.id` ensures global uniqueness, this flag is provided primarily for # backward compatibility. register('--use-basename-prefix', advanced=True, type=bool, help='Use target basename to prefix bundle folder or archive; otherwise a unique ' 'identifier derived from target will be used.')
python
def register_options(cls, register): super(BundleMixin, cls).register_options(register) register('--archive', choices=list(archive.TYPE_NAMES), fingerprint=True, help='Create an archive of this type from the bundle. ' 'This option is also defined in app target. ' 'Precedence is CLI option > target option > pants.ini option.') # `target.id` ensures global uniqueness, this flag is provided primarily for # backward compatibility. register('--use-basename-prefix', advanced=True, type=bool, help='Use target basename to prefix bundle folder or archive; otherwise a unique ' 'identifier derived from target will be used.')
[ "def", "register_options", "(", "cls", ",", "register", ")", ":", "super", "(", "BundleMixin", ",", "cls", ")", ".", "register_options", "(", "register", ")", "register", "(", "'--archive'", ",", "choices", "=", "list", "(", "archive", ".", "TYPE_NAMES", "...
Register options common to all bundle tasks.
[ "Register", "options", "common", "to", "all", "bundle", "tasks", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/bundle_mixin.py#L21-L33
224,313
pantsbuild/pants
src/python/pants/build_graph/bundle_mixin.py
BundleMixin.resolved_option
def resolved_option(options, target, key): """Get value for option "key". Resolution precedence is CLI option > target option > pants.ini option. :param options: Options returned by `task.get_option()` :param target: Target :param key: Key to get using the resolution precedence """ option_value = options.get(key) if not isinstance(target, AppBase) or options.is_flagged(key): return option_value v = target.payload.get_field_value(key, None) return option_value if v is None else v
python
def resolved_option(options, target, key): option_value = options.get(key) if not isinstance(target, AppBase) or options.is_flagged(key): return option_value v = target.payload.get_field_value(key, None) return option_value if v is None else v
[ "def", "resolved_option", "(", "options", ",", "target", ",", "key", ")", ":", "option_value", "=", "options", ".", "get", "(", "key", ")", "if", "not", "isinstance", "(", "target", ",", "AppBase", ")", "or", "options", ".", "is_flagged", "(", "key", "...
Get value for option "key". Resolution precedence is CLI option > target option > pants.ini option. :param options: Options returned by `task.get_option()` :param target: Target :param key: Key to get using the resolution precedence
[ "Get", "value", "for", "option", "key", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/bundle_mixin.py#L43-L56
224,314
pantsbuild/pants
src/python/pants/build_graph/bundle_mixin.py
BundleMixin.symlink_bundles
def symlink_bundles(self, app, bundle_dir): """For each bundle in the given app, symlinks relevant matched paths. Validates that at least one path was matched by a bundle. """ for bundle_counter, bundle in enumerate(app.bundles): count = 0 for path, relpath in bundle.filemap.items(): bundle_path = os.path.join(bundle_dir, relpath) count += 1 if os.path.exists(bundle_path): continue if os.path.isfile(path): safe_mkdir(os.path.dirname(bundle_path)) os.symlink(path, bundle_path) elif os.path.isdir(path): safe_mkdir(bundle_path) if count == 0: raise TargetDefinitionException(app.target, 'Bundle index {} of "bundles" field ' 'does not match any files.'.format(bundle_counter))
python
def symlink_bundles(self, app, bundle_dir): for bundle_counter, bundle in enumerate(app.bundles): count = 0 for path, relpath in bundle.filemap.items(): bundle_path = os.path.join(bundle_dir, relpath) count += 1 if os.path.exists(bundle_path): continue if os.path.isfile(path): safe_mkdir(os.path.dirname(bundle_path)) os.symlink(path, bundle_path) elif os.path.isdir(path): safe_mkdir(bundle_path) if count == 0: raise TargetDefinitionException(app.target, 'Bundle index {} of "bundles" field ' 'does not match any files.'.format(bundle_counter))
[ "def", "symlink_bundles", "(", "self", ",", "app", ",", "bundle_dir", ")", ":", "for", "bundle_counter", ",", "bundle", "in", "enumerate", "(", "app", ".", "bundles", ")", ":", "count", "=", "0", "for", "path", ",", "relpath", "in", "bundle", ".", "fil...
For each bundle in the given app, symlinks relevant matched paths. Validates that at least one path was matched by a bundle.
[ "For", "each", "bundle", "in", "the", "given", "app", "symlinks", "relevant", "matched", "paths", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/bundle_mixin.py#L58-L80
224,315
pantsbuild/pants
src/python/pants/build_graph/bundle_mixin.py
BundleMixin.publish_results
def publish_results(self, dist_dir, use_basename_prefix, vt, bundle_dir, archivepath, id, archive_ext): """Publish a copy of the bundle and archive from the results dir in dist.""" # TODO (from mateor) move distdir management somewhere more general purpose. name = vt.target.basename if use_basename_prefix else id bundle_copy = os.path.join(dist_dir, '{}-bundle'.format(name)) absolute_symlink(bundle_dir, bundle_copy) self.context.log.info( 'created bundle copy {}'.format(os.path.relpath(bundle_copy, get_buildroot()))) if archivepath: ext = archive.archive_extensions.get(archive_ext, archive_ext) archive_copy = os.path.join(dist_dir,'{}.{}'.format(name, ext)) safe_mkdir_for(archive_copy) # Ensure parent dir exists atomic_copy(archivepath, archive_copy) self.context.log.info( 'created archive copy {}'.format(os.path.relpath(archive_copy, get_buildroot())))
python
def publish_results(self, dist_dir, use_basename_prefix, vt, bundle_dir, archivepath, id, archive_ext): # TODO (from mateor) move distdir management somewhere more general purpose. name = vt.target.basename if use_basename_prefix else id bundle_copy = os.path.join(dist_dir, '{}-bundle'.format(name)) absolute_symlink(bundle_dir, bundle_copy) self.context.log.info( 'created bundle copy {}'.format(os.path.relpath(bundle_copy, get_buildroot()))) if archivepath: ext = archive.archive_extensions.get(archive_ext, archive_ext) archive_copy = os.path.join(dist_dir,'{}.{}'.format(name, ext)) safe_mkdir_for(archive_copy) # Ensure parent dir exists atomic_copy(archivepath, archive_copy) self.context.log.info( 'created archive copy {}'.format(os.path.relpath(archive_copy, get_buildroot())))
[ "def", "publish_results", "(", "self", ",", "dist_dir", ",", "use_basename_prefix", ",", "vt", ",", "bundle_dir", ",", "archivepath", ",", "id", ",", "archive_ext", ")", ":", "# TODO (from mateor) move distdir management somewhere more general purpose.", "name", "=", "v...
Publish a copy of the bundle and archive from the results dir in dist.
[ "Publish", "a", "copy", "of", "the", "bundle", "and", "archive", "from", "the", "results", "dir", "in", "dist", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/bundle_mixin.py#L82-L97
224,316
pantsbuild/pants
contrib/node/src/python/pants/contrib/node/tasks/node_resolve.py
NodeResolve.prepare
def prepare(cls, options, round_manager): """Allow each resolver to declare additional product requirements.""" super(NodeResolve, cls).prepare(options, round_manager) for resolver in cls._resolver_by_type.values(): resolver.prepare(options, round_manager)
python
def prepare(cls, options, round_manager): super(NodeResolve, cls).prepare(options, round_manager) for resolver in cls._resolver_by_type.values(): resolver.prepare(options, round_manager)
[ "def", "prepare", "(", "cls", ",", "options", ",", "round_manager", ")", ":", "super", "(", "NodeResolve", ",", "cls", ")", ".", "prepare", "(", "options", ",", "round_manager", ")", "for", "resolver", "in", "cls", ".", "_resolver_by_type", ".", "values", ...
Allow each resolver to declare additional product requirements.
[ "Allow", "each", "resolver", "to", "declare", "additional", "product", "requirements", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/contrib/node/src/python/pants/contrib/node/tasks/node_resolve.py#L78-L82
224,317
pantsbuild/pants
contrib/node/src/python/pants/contrib/node/tasks/node_resolve.py
NodeResolve._topological_sort
def _topological_sort(self, targets): """Topologically order a list of targets""" target_set = set(targets) return [t for t in reversed(sort_targets(targets)) if t in target_set]
python
def _topological_sort(self, targets): target_set = set(targets) return [t for t in reversed(sort_targets(targets)) if t in target_set]
[ "def", "_topological_sort", "(", "self", ",", "targets", ")", ":", "target_set", "=", "set", "(", "targets", ")", "return", "[", "t", "for", "t", "in", "reversed", "(", "sort_targets", "(", "targets", ")", ")", "if", "t", "in", "target_set", "]" ]
Topologically order a list of targets
[ "Topologically", "order", "a", "list", "of", "targets" ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/contrib/node/src/python/pants/contrib/node/tasks/node_resolve.py#L125-L129
224,318
pantsbuild/pants
src/python/pants/util/osutil.py
safe_kill
def safe_kill(pid, signum): """Kill a process with the specified signal, catching nonfatal errors.""" assert(isinstance(pid, IntegerForPid)) assert(isinstance(signum, int)) try: os.kill(pid, signum) except (IOError, OSError) as e: if e.errno in [errno.ESRCH, errno.EPERM]: pass elif e.errno == errno.EINVAL: raise ValueError("Invalid signal number {}: {}" .format(signum, e), e) else: raise
python
def safe_kill(pid, signum): assert(isinstance(pid, IntegerForPid)) assert(isinstance(signum, int)) try: os.kill(pid, signum) except (IOError, OSError) as e: if e.errno in [errno.ESRCH, errno.EPERM]: pass elif e.errno == errno.EINVAL: raise ValueError("Invalid signal number {}: {}" .format(signum, e), e) else: raise
[ "def", "safe_kill", "(", "pid", ",", "signum", ")", ":", "assert", "(", "isinstance", "(", "pid", ",", "IntegerForPid", ")", ")", "assert", "(", "isinstance", "(", "signum", ",", "int", ")", ")", "try", ":", "os", ".", "kill", "(", "pid", ",", "sig...
Kill a process with the specified signal, catching nonfatal errors.
[ "Kill", "a", "process", "with", "the", "specified", "signal", "catching", "nonfatal", "errors", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/util/osutil.py#L73-L87
224,319
pantsbuild/pants
src/python/pants/backend/jvm/ivy_utils.py
IvyResolveResult.all_linked_artifacts_exist
def all_linked_artifacts_exist(self): """All of the artifact paths for this resolve point to existing files.""" if not self.has_resolved_artifacts: return False for path in self.resolved_artifact_paths: if not os.path.isfile(path): return False else: return True
python
def all_linked_artifacts_exist(self): if not self.has_resolved_artifacts: return False for path in self.resolved_artifact_paths: if not os.path.isfile(path): return False else: return True
[ "def", "all_linked_artifacts_exist", "(", "self", ")", ":", "if", "not", "self", ".", "has_resolved_artifacts", ":", "return", "False", "for", "path", "in", "self", ".", "resolved_artifact_paths", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "pat...
All of the artifact paths for this resolve point to existing files.
[ "All", "of", "the", "artifact", "paths", "for", "this", "resolve", "point", "to", "existing", "files", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/ivy_utils.py#L397-L405
224,320
pantsbuild/pants
src/python/pants/backend/jvm/ivy_utils.py
IvyResolveResult.resolved_jars_for_each_target
def resolved_jars_for_each_target(self, conf, targets): """Yields the resolved jars for each passed JarLibrary. If there is no report for the requested conf, yields nothing. :param conf: The ivy conf to load jars for. :param targets: The collection of JarLibrary targets to find resolved jars for. :yield: target, resolved_jars :raises IvyTaskMixin.UnresolvedJarError """ ivy_info = self._ivy_info_for(conf) if not ivy_info: return jar_library_targets = [t for t in targets if isinstance(t, JarLibrary)] ivy_jar_memo = {} for target in jar_library_targets: # Add the artifacts from each dependency module. resolved_jars = self._resolved_jars_with_hardlinks(conf, ivy_info, ivy_jar_memo, self._jar_dependencies_for_target(conf, target), target) yield target, resolved_jars
python
def resolved_jars_for_each_target(self, conf, targets): ivy_info = self._ivy_info_for(conf) if not ivy_info: return jar_library_targets = [t for t in targets if isinstance(t, JarLibrary)] ivy_jar_memo = {} for target in jar_library_targets: # Add the artifacts from each dependency module. resolved_jars = self._resolved_jars_with_hardlinks(conf, ivy_info, ivy_jar_memo, self._jar_dependencies_for_target(conf, target), target) yield target, resolved_jars
[ "def", "resolved_jars_for_each_target", "(", "self", ",", "conf", ",", "targets", ")", ":", "ivy_info", "=", "self", ".", "_ivy_info_for", "(", "conf", ")", "if", "not", "ivy_info", ":", "return", "jar_library_targets", "=", "[", "t", "for", "t", "in", "ta...
Yields the resolved jars for each passed JarLibrary. If there is no report for the requested conf, yields nothing. :param conf: The ivy conf to load jars for. :param targets: The collection of JarLibrary targets to find resolved jars for. :yield: target, resolved_jars :raises IvyTaskMixin.UnresolvedJarError
[ "Yields", "the", "resolved", "jars", "for", "each", "passed", "JarLibrary", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/ivy_utils.py#L423-L446
224,321
pantsbuild/pants
src/python/pants/backend/jvm/ivy_utils.py
IvyInfo.traverse_dependency_graph
def traverse_dependency_graph(self, ref, collector, memo=None): """Traverses module graph, starting with ref, collecting values for each ref into the sets created by the collector function. :param ref an IvyModuleRef to start traversing the ivy dependency graph :param collector a function that takes a ref and returns a new set of values to collect for that ref, which will also be updated with all the dependencies accumulated values :param memo is a dict of ref -> set that memoizes the results of each node in the graph. If provided, allows for retaining cache across calls. :returns the accumulated set for ref """ resolved_ref = self.refs_by_unversioned_refs.get(ref.unversioned) if resolved_ref: ref = resolved_ref if memo is None: memo = dict() visited = set() return self._do_traverse_dependency_graph(ref, collector, memo, visited)
python
def traverse_dependency_graph(self, ref, collector, memo=None): resolved_ref = self.refs_by_unversioned_refs.get(ref.unversioned) if resolved_ref: ref = resolved_ref if memo is None: memo = dict() visited = set() return self._do_traverse_dependency_graph(ref, collector, memo, visited)
[ "def", "traverse_dependency_graph", "(", "self", ",", "ref", ",", "collector", ",", "memo", "=", "None", ")", ":", "resolved_ref", "=", "self", ".", "refs_by_unversioned_refs", ".", "get", "(", "ref", ".", "unversioned", ")", "if", "resolved_ref", ":", "ref"...
Traverses module graph, starting with ref, collecting values for each ref into the sets created by the collector function. :param ref an IvyModuleRef to start traversing the ivy dependency graph :param collector a function that takes a ref and returns a new set of values to collect for that ref, which will also be updated with all the dependencies accumulated values :param memo is a dict of ref -> set that memoizes the results of each node in the graph. If provided, allows for retaining cache across calls. :returns the accumulated set for ref
[ "Traverses", "module", "graph", "starting", "with", "ref", "collecting", "values", "for", "each", "ref", "into", "the", "sets", "created", "by", "the", "collector", "function", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/ivy_utils.py#L632-L650
224,322
pantsbuild/pants
src/python/pants/backend/jvm/ivy_utils.py
IvyInfo.get_resolved_jars_for_coordinates
def get_resolved_jars_for_coordinates(self, coordinates, memo=None): """Collects jars for the passed coordinates. Because artifacts are only fetched for the "winning" version of a module, the artifacts will not always represent the version originally declared by the library. This method is transitive within the passed coordinates dependencies. :param coordinates collections.Iterable: Collection of coordinates to collect transitive resolved jars for. :param memo: See `traverse_dependency_graph`. :returns: All the artifacts for all of the jars for the provided coordinates, including transitive dependencies. :rtype: list of :class:`pants.java.jar.ResolvedJar` """ def to_resolved_jar(jar_ref, jar_path): return ResolvedJar(coordinate=M2Coordinate(org=jar_ref.org, name=jar_ref.name, rev=jar_ref.rev, classifier=jar_ref.classifier, ext=jar_ref.ext), cache_path=jar_path) resolved_jars = OrderedSet() def create_collection(dep): return OrderedSet([dep]) for jar in coordinates: classifier = jar.classifier if self._conf == 'default' else self._conf jar_module_ref = IvyModuleRef(jar.org, jar.name, jar.rev, classifier, jar.ext) for module_ref in self.traverse_dependency_graph(jar_module_ref, create_collection, memo): for artifact_path in self._artifacts_by_ref[module_ref.unversioned]: resolved_jars.add(to_resolved_jar(module_ref, artifact_path)) return resolved_jars
python
def get_resolved_jars_for_coordinates(self, coordinates, memo=None): def to_resolved_jar(jar_ref, jar_path): return ResolvedJar(coordinate=M2Coordinate(org=jar_ref.org, name=jar_ref.name, rev=jar_ref.rev, classifier=jar_ref.classifier, ext=jar_ref.ext), cache_path=jar_path) resolved_jars = OrderedSet() def create_collection(dep): return OrderedSet([dep]) for jar in coordinates: classifier = jar.classifier if self._conf == 'default' else self._conf jar_module_ref = IvyModuleRef(jar.org, jar.name, jar.rev, classifier, jar.ext) for module_ref in self.traverse_dependency_graph(jar_module_ref, create_collection, memo): for artifact_path in self._artifacts_by_ref[module_ref.unversioned]: resolved_jars.add(to_resolved_jar(module_ref, artifact_path)) return resolved_jars
[ "def", "get_resolved_jars_for_coordinates", "(", "self", ",", "coordinates", ",", "memo", "=", "None", ")", ":", "def", "to_resolved_jar", "(", "jar_ref", ",", "jar_path", ")", ":", "return", "ResolvedJar", "(", "coordinate", "=", "M2Coordinate", "(", "org", "...
Collects jars for the passed coordinates. Because artifacts are only fetched for the "winning" version of a module, the artifacts will not always represent the version originally declared by the library. This method is transitive within the passed coordinates dependencies. :param coordinates collections.Iterable: Collection of coordinates to collect transitive resolved jars for. :param memo: See `traverse_dependency_graph`. :returns: All the artifacts for all of the jars for the provided coordinates, including transitive dependencies. :rtype: list of :class:`pants.java.jar.ResolvedJar`
[ "Collects", "jars", "for", "the", "passed", "coordinates", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/ivy_utils.py#L652-L683
224,323
pantsbuild/pants
src/python/pants/backend/jvm/ivy_utils.py
IvyUtils.do_resolve
def do_resolve(cls, executor, extra_args, ivyxml, jvm_options, workdir_report_paths_by_conf, confs, ivy_resolution_cache_dir, ivy_cache_classpath_filename, resolve_hash_name, workunit_factory, workunit_name): """Execute Ivy with the given ivy.xml and copies all relevant files into the workdir. This method does an Ivy resolve, which may be either a Pants resolve or a Pants fetch depending on whether there is an existing frozen resolution. After it is run, the Ivy reports are copied into the workdir at the paths specified by workdir_report_paths_by_conf along with a file containing a list of all the requested artifacts and their transitive dependencies. :param executor: A JVM executor to use to invoke ivy. :param extra_args: Extra arguments to pass to ivy. :param ivyxml: The input ivy.xml containing the dependencies to resolve. :param jvm_options: A list of jvm option strings to use for the ivy invoke, or None. :param workdir_report_paths_by_conf: A dict mapping confs to report paths in the workdir. :param confs: The confs used in the resolve. :param resolve_hash_name: The hash to use as the module name for finding the ivy report file. :param workunit_factory: A workunit factory for the ivy invoke, or None. :param workunit_name: A workunit name for the ivy invoke, or None. 
""" ivy = Bootstrapper.default_ivy(bootstrap_workunit_factory=workunit_factory) with safe_concurrent_creation(ivy_cache_classpath_filename) as raw_target_classpath_file_tmp: extra_args = extra_args or [] args = ['-cachepath', raw_target_classpath_file_tmp] + extra_args with cls._ivy_lock: cls._exec_ivy(ivy, confs, ivyxml, args, jvm_options=jvm_options, executor=executor, workunit_name=workunit_name, workunit_factory=workunit_factory) if not os.path.exists(raw_target_classpath_file_tmp): raise cls.IvyError('Ivy failed to create classpath file at {}' .format(raw_target_classpath_file_tmp)) cls._copy_ivy_reports(workdir_report_paths_by_conf, confs, ivy_resolution_cache_dir, resolve_hash_name) logger.debug('Moved ivy classfile file to {dest}' .format(dest=ivy_cache_classpath_filename))
python
def do_resolve(cls, executor, extra_args, ivyxml, jvm_options, workdir_report_paths_by_conf, confs, ivy_resolution_cache_dir, ivy_cache_classpath_filename, resolve_hash_name, workunit_factory, workunit_name): ivy = Bootstrapper.default_ivy(bootstrap_workunit_factory=workunit_factory) with safe_concurrent_creation(ivy_cache_classpath_filename) as raw_target_classpath_file_tmp: extra_args = extra_args or [] args = ['-cachepath', raw_target_classpath_file_tmp] + extra_args with cls._ivy_lock: cls._exec_ivy(ivy, confs, ivyxml, args, jvm_options=jvm_options, executor=executor, workunit_name=workunit_name, workunit_factory=workunit_factory) if not os.path.exists(raw_target_classpath_file_tmp): raise cls.IvyError('Ivy failed to create classpath file at {}' .format(raw_target_classpath_file_tmp)) cls._copy_ivy_reports(workdir_report_paths_by_conf, confs, ivy_resolution_cache_dir, resolve_hash_name) logger.debug('Moved ivy classfile file to {dest}' .format(dest=ivy_cache_classpath_filename))
[ "def", "do_resolve", "(", "cls", ",", "executor", ",", "extra_args", ",", "ivyxml", ",", "jvm_options", ",", "workdir_report_paths_by_conf", ",", "confs", ",", "ivy_resolution_cache_dir", ",", "ivy_cache_classpath_filename", ",", "resolve_hash_name", ",", "workunit_fact...
Execute Ivy with the given ivy.xml and copies all relevant files into the workdir. This method does an Ivy resolve, which may be either a Pants resolve or a Pants fetch depending on whether there is an existing frozen resolution. After it is run, the Ivy reports are copied into the workdir at the paths specified by workdir_report_paths_by_conf along with a file containing a list of all the requested artifacts and their transitive dependencies. :param executor: A JVM executor to use to invoke ivy. :param extra_args: Extra arguments to pass to ivy. :param ivyxml: The input ivy.xml containing the dependencies to resolve. :param jvm_options: A list of jvm option strings to use for the ivy invoke, or None. :param workdir_report_paths_by_conf: A dict mapping confs to report paths in the workdir. :param confs: The confs used in the resolve. :param resolve_hash_name: The hash to use as the module name for finding the ivy report file. :param workunit_factory: A workunit factory for the ivy invoke, or None. :param workunit_name: A workunit name for the ivy invoke, or None.
[ "Execute", "Ivy", "with", "the", "given", "ivy", ".", "xml", "and", "copies", "all", "relevant", "files", "into", "the", "workdir", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/ivy_utils.py#L732-L774
224,324
pantsbuild/pants
src/python/pants/backend/jvm/ivy_utils.py
IvyUtils._hardlink_cachepath
def _hardlink_cachepath(cls, ivy_repository_cache_dir, inpath, hardlink_dir, outpath): """hardlinks all paths listed in inpath that are under ivy_repository_cache_dir into hardlink_dir. If there is an existing hardlink for a file under inpath, it is used rather than creating a new hardlink. Preserves all other paths. Writes the resulting paths to outpath. Returns a map of path -> hardlink to that path. """ safe_mkdir(hardlink_dir) # The ivy_repository_cache_dir might itself be a hardlink. In this case, ivy may return paths that # reference the realpath of the .jar file after it is resolved in the cache dir. To handle # this case, add both the hardlink'ed path and the realpath to the jar to the hardlink map. real_ivy_cache_dir = os.path.realpath(ivy_repository_cache_dir) hardlink_map = OrderedDict() inpaths = cls._load_classpath_from_cachepath(inpath) paths = OrderedSet([os.path.realpath(path) for path in inpaths]) for path in paths: if path.startswith(real_ivy_cache_dir): hardlink_map[path] = os.path.join(hardlink_dir, os.path.relpath(path, real_ivy_cache_dir)) else: # This path is outside the cache. We won't hardlink it. hardlink_map[path] = path # Create hardlinks for paths in the ivy cache dir. for path, hardlink in six.iteritems(hardlink_map): if path == hardlink: # Skip paths that aren't going to be hardlinked. continue safe_mkdir(os.path.dirname(hardlink)) safe_hardlink_or_copy(path, hardlink) # (re)create the classpath with all of the paths with safe_open(outpath, 'w') as outfile: outfile.write(':'.join(OrderedSet(hardlink_map.values()))) return dict(hardlink_map)
python
def _hardlink_cachepath(cls, ivy_repository_cache_dir, inpath, hardlink_dir, outpath): safe_mkdir(hardlink_dir) # The ivy_repository_cache_dir might itself be a hardlink. In this case, ivy may return paths that # reference the realpath of the .jar file after it is resolved in the cache dir. To handle # this case, add both the hardlink'ed path and the realpath to the jar to the hardlink map. real_ivy_cache_dir = os.path.realpath(ivy_repository_cache_dir) hardlink_map = OrderedDict() inpaths = cls._load_classpath_from_cachepath(inpath) paths = OrderedSet([os.path.realpath(path) for path in inpaths]) for path in paths: if path.startswith(real_ivy_cache_dir): hardlink_map[path] = os.path.join(hardlink_dir, os.path.relpath(path, real_ivy_cache_dir)) else: # This path is outside the cache. We won't hardlink it. hardlink_map[path] = path # Create hardlinks for paths in the ivy cache dir. for path, hardlink in six.iteritems(hardlink_map): if path == hardlink: # Skip paths that aren't going to be hardlinked. continue safe_mkdir(os.path.dirname(hardlink)) safe_hardlink_or_copy(path, hardlink) # (re)create the classpath with all of the paths with safe_open(outpath, 'w') as outfile: outfile.write(':'.join(OrderedSet(hardlink_map.values()))) return dict(hardlink_map)
[ "def", "_hardlink_cachepath", "(", "cls", ",", "ivy_repository_cache_dir", ",", "inpath", ",", "hardlink_dir", ",", "outpath", ")", ":", "safe_mkdir", "(", "hardlink_dir", ")", "# The ivy_repository_cache_dir might itself be a hardlink. In this case, ivy may return paths that", ...
hardlinks all paths listed in inpath that are under ivy_repository_cache_dir into hardlink_dir. If there is an existing hardlink for a file under inpath, it is used rather than creating a new hardlink. Preserves all other paths. Writes the resulting paths to outpath. Returns a map of path -> hardlink to that path.
[ "hardlinks", "all", "paths", "listed", "in", "inpath", "that", "are", "under", "ivy_repository_cache_dir", "into", "hardlink_dir", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/ivy_utils.py#L833-L869
224,325
pantsbuild/pants
src/python/pants/backend/jvm/ivy_utils.py
IvyUtils.xml_report_path
def xml_report_path(cls, resolution_cache_dir, resolve_hash_name, conf): """The path to the xml report ivy creates after a retrieve. :API: public :param string cache_dir: The path of the ivy cache dir used for resolves. :param string resolve_hash_name: Hash from the Cache key from the VersionedTargetSet used for resolution. :param string conf: The ivy conf name (e.g. "default"). :returns: The report path. :rtype: string """ return os.path.join(resolution_cache_dir, '{}-{}-{}.xml'.format(IvyUtils.INTERNAL_ORG_NAME, resolve_hash_name, conf))
python
def xml_report_path(cls, resolution_cache_dir, resolve_hash_name, conf): return os.path.join(resolution_cache_dir, '{}-{}-{}.xml'.format(IvyUtils.INTERNAL_ORG_NAME, resolve_hash_name, conf))
[ "def", "xml_report_path", "(", "cls", ",", "resolution_cache_dir", ",", "resolve_hash_name", ",", "conf", ")", ":", "return", "os", ".", "path", ".", "join", "(", "resolution_cache_dir", ",", "'{}-{}-{}.xml'", ".", "format", "(", "IvyUtils", ".", "INTERNAL_ORG_N...
The path to the xml report ivy creates after a retrieve. :API: public :param string cache_dir: The path of the ivy cache dir used for resolves. :param string resolve_hash_name: Hash from the Cache key from the VersionedTargetSet used for resolution. :param string conf: The ivy conf name (e.g. "default"). :returns: The report path. :rtype: string
[ "The", "path", "to", "the", "xml", "report", "ivy", "creates", "after", "a", "retrieve", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/ivy_utils.py#L872-L885
224,326
pantsbuild/pants
src/python/pants/backend/jvm/ivy_utils.py
IvyUtils.parse_xml_report
def parse_xml_report(cls, conf, path): """Parse the ivy xml report corresponding to the name passed to ivy. :API: public :param string conf: the ivy conf name (e.g. "default") :param string path: The path to the ivy report file. :returns: The info in the xml report. :rtype: :class:`IvyInfo` :raises: :class:`IvyResolveMappingError` if no report exists. """ if not os.path.exists(path): raise cls.IvyResolveReportError('Missing expected ivy output file {}'.format(path)) logger.debug("Parsing ivy report {}".format(path)) ret = IvyInfo(conf) etree = ET.parse(path) doc = etree.getroot() for module in doc.findall('dependencies/module'): org = module.get('organisation') name = module.get('name') for revision in module.findall('revision'): rev = revision.get('name') callers = [] for caller in revision.findall('caller'): callers.append(IvyModuleRef(caller.get('organisation'), caller.get('name'), caller.get('callerrev'))) for artifact in revision.findall('artifacts/artifact'): classifier = artifact.get('extra-classifier') ext = artifact.get('ext') ivy_module_ref = IvyModuleRef(org=org, name=name, rev=rev, classifier=classifier, ext=ext) artifact_cache_path = artifact.get('location') ivy_module = IvyModule(ivy_module_ref, artifact_cache_path, tuple(callers)) ret.add_module(ivy_module) return ret
python
def parse_xml_report(cls, conf, path): if not os.path.exists(path): raise cls.IvyResolveReportError('Missing expected ivy output file {}'.format(path)) logger.debug("Parsing ivy report {}".format(path)) ret = IvyInfo(conf) etree = ET.parse(path) doc = etree.getroot() for module in doc.findall('dependencies/module'): org = module.get('organisation') name = module.get('name') for revision in module.findall('revision'): rev = revision.get('name') callers = [] for caller in revision.findall('caller'): callers.append(IvyModuleRef(caller.get('organisation'), caller.get('name'), caller.get('callerrev'))) for artifact in revision.findall('artifacts/artifact'): classifier = artifact.get('extra-classifier') ext = artifact.get('ext') ivy_module_ref = IvyModuleRef(org=org, name=name, rev=rev, classifier=classifier, ext=ext) artifact_cache_path = artifact.get('location') ivy_module = IvyModule(ivy_module_ref, artifact_cache_path, tuple(callers)) ret.add_module(ivy_module) return ret
[ "def", "parse_xml_report", "(", "cls", ",", "conf", ",", "path", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "raise", "cls", ".", "IvyResolveReportError", "(", "'Missing expected ivy output file {}'", ".", "format", "(", ...
Parse the ivy xml report corresponding to the name passed to ivy. :API: public :param string conf: the ivy conf name (e.g. "default") :param string path: The path to the ivy report file. :returns: The info in the xml report. :rtype: :class:`IvyInfo` :raises: :class:`IvyResolveMappingError` if no report exists.
[ "Parse", "the", "ivy", "xml", "report", "corresponding", "to", "the", "name", "passed", "to", "ivy", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/ivy_utils.py#L888-L927
224,327
pantsbuild/pants
src/python/pants/backend/jvm/ivy_utils.py
IvyUtils.generate_fetch_ivy
def generate_fetch_ivy(cls, jars, ivyxml, confs, resolve_hash_name): """Generates an ivy xml with all jars marked as intransitive using the all conflict manager.""" org = IvyUtils.INTERNAL_ORG_NAME name = resolve_hash_name extra_configurations = [conf for conf in confs if conf and conf != 'default'] # Use org name _and_ rev so that we can have dependencies with different versions. This will # allow for batching fetching if we want to do that. jars_by_key = OrderedDict() for jar in jars: jars_by_key.setdefault((jar.org, jar.name, jar.rev), []).append(jar) dependencies = [cls._generate_fetch_jar_template(_jars) for _jars in jars_by_key.values()] template_data = TemplateData(org=org, module=name, extra_configurations=extra_configurations, dependencies=dependencies) template_relpath = os.path.join('templates', 'ivy_utils', 'ivy_fetch.xml.mustache') cls._write_ivy_xml_file(ivyxml, template_data, template_relpath)
python
def generate_fetch_ivy(cls, jars, ivyxml, confs, resolve_hash_name): org = IvyUtils.INTERNAL_ORG_NAME name = resolve_hash_name extra_configurations = [conf for conf in confs if conf and conf != 'default'] # Use org name _and_ rev so that we can have dependencies with different versions. This will # allow for batching fetching if we want to do that. jars_by_key = OrderedDict() for jar in jars: jars_by_key.setdefault((jar.org, jar.name, jar.rev), []).append(jar) dependencies = [cls._generate_fetch_jar_template(_jars) for _jars in jars_by_key.values()] template_data = TemplateData(org=org, module=name, extra_configurations=extra_configurations, dependencies=dependencies) template_relpath = os.path.join('templates', 'ivy_utils', 'ivy_fetch.xml.mustache') cls._write_ivy_xml_file(ivyxml, template_data, template_relpath)
[ "def", "generate_fetch_ivy", "(", "cls", ",", "jars", ",", "ivyxml", ",", "confs", ",", "resolve_hash_name", ")", ":", "org", "=", "IvyUtils", ".", "INTERNAL_ORG_NAME", "name", "=", "resolve_hash_name", "extra_configurations", "=", "[", "conf", "for", "conf", ...
Generates an ivy xml with all jars marked as intransitive using the all conflict manager.
[ "Generates", "an", "ivy", "xml", "with", "all", "jars", "marked", "as", "intransitive", "using", "the", "all", "conflict", "manager", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/ivy_utils.py#L995-L1017
224,328
pantsbuild/pants
src/python/pants/backend/jvm/ivy_utils.py
IvyUtils.calculate_classpath
def calculate_classpath(cls, targets): """Creates a consistent classpath and list of excludes for the passed targets. It also modifies the JarDependency objects' excludes to contain all the jars excluded by provides. :param iterable targets: List of targets to collect JarDependencies and excludes from. :returns: A pair of a list of JarDependencies, and a set of excludes to apply globally. """ jars = OrderedDict() global_excludes = set() provide_excludes = set() targets_processed = set() # Support the ivy force concept when we sanely can for internal dep conflicts. # TODO(John Sirois): Consider supporting / implementing the configured ivy revision picking # strategy generally. def add_jar(jar): # TODO(John Sirois): Maven allows for depending on an artifact at one rev and one of its # attachments (classified artifacts) at another. Ivy does not, allow this, the dependency # can carry only 1 rev and that hosts multiple artifacts for that rev. This conflict # resolution happens at the classifier level, allowing skew in a # multi-artifact/multi-classifier dependency. We only find out about the skew later in # `_generate_jar_template` below which will blow up with a conflict. Move this logic closer # together to get a more clear validate, then emit ivy.xml then resolve flow instead of the # spread-out validations happening here. 
# See: https://github.com/pantsbuild/pants/issues/2239 coordinate = (jar.org, jar.name, jar.classifier) existing = jars.get(coordinate) jars[coordinate] = jar if not existing else cls._resolve_conflict(existing=existing, proposed=jar) def collect_jars(target): if isinstance(target, JarLibrary): for jar in target.jar_dependencies: add_jar(jar) def collect_excludes(target): target_excludes = target.payload.get_field_value('excludes') if target_excludes: global_excludes.update(target_excludes) def collect_provide_excludes(target): if not (isinstance(target, ExportableJvmLibrary) and target.provides): return logger.debug('Automatically excluding jar {}.{}, which is provided by {}'.format( target.provides.org, target.provides.name, target)) provide_excludes.add(Exclude(org=target.provides.org, name=target.provides.name)) def collect_elements(target): targets_processed.add(target) collect_jars(target) collect_excludes(target) collect_provide_excludes(target) for target in targets: target.walk(collect_elements, predicate=lambda target: target not in targets_processed) # If a source dep is exported (ie, has a provides clause), it should always override # remote/binary versions of itself, ie "round trip" dependencies. # TODO: Move back to applying provides excludes as target-level excludes when they are no # longer global. if provide_excludes: additional_excludes = tuple(provide_excludes) new_jars = OrderedDict() for coordinate, jar in jars.items(): new_jars[coordinate] = jar.copy(excludes=jar.excludes + additional_excludes) jars = new_jars return list(jars.values()), global_excludes
python
def calculate_classpath(cls, targets): jars = OrderedDict() global_excludes = set() provide_excludes = set() targets_processed = set() # Support the ivy force concept when we sanely can for internal dep conflicts. # TODO(John Sirois): Consider supporting / implementing the configured ivy revision picking # strategy generally. def add_jar(jar): # TODO(John Sirois): Maven allows for depending on an artifact at one rev and one of its # attachments (classified artifacts) at another. Ivy does not, allow this, the dependency # can carry only 1 rev and that hosts multiple artifacts for that rev. This conflict # resolution happens at the classifier level, allowing skew in a # multi-artifact/multi-classifier dependency. We only find out about the skew later in # `_generate_jar_template` below which will blow up with a conflict. Move this logic closer # together to get a more clear validate, then emit ivy.xml then resolve flow instead of the # spread-out validations happening here. # See: https://github.com/pantsbuild/pants/issues/2239 coordinate = (jar.org, jar.name, jar.classifier) existing = jars.get(coordinate) jars[coordinate] = jar if not existing else cls._resolve_conflict(existing=existing, proposed=jar) def collect_jars(target): if isinstance(target, JarLibrary): for jar in target.jar_dependencies: add_jar(jar) def collect_excludes(target): target_excludes = target.payload.get_field_value('excludes') if target_excludes: global_excludes.update(target_excludes) def collect_provide_excludes(target): if not (isinstance(target, ExportableJvmLibrary) and target.provides): return logger.debug('Automatically excluding jar {}.{}, which is provided by {}'.format( target.provides.org, target.provides.name, target)) provide_excludes.add(Exclude(org=target.provides.org, name=target.provides.name)) def collect_elements(target): targets_processed.add(target) collect_jars(target) collect_excludes(target) collect_provide_excludes(target) for target in targets: 
target.walk(collect_elements, predicate=lambda target: target not in targets_processed) # If a source dep is exported (ie, has a provides clause), it should always override # remote/binary versions of itself, ie "round trip" dependencies. # TODO: Move back to applying provides excludes as target-level excludes when they are no # longer global. if provide_excludes: additional_excludes = tuple(provide_excludes) new_jars = OrderedDict() for coordinate, jar in jars.items(): new_jars[coordinate] = jar.copy(excludes=jar.excludes + additional_excludes) jars = new_jars return list(jars.values()), global_excludes
[ "def", "calculate_classpath", "(", "cls", ",", "targets", ")", ":", "jars", "=", "OrderedDict", "(", ")", "global_excludes", "=", "set", "(", ")", "provide_excludes", "=", "set", "(", ")", "targets_processed", "=", "set", "(", ")", "# Support the ivy force con...
Creates a consistent classpath and list of excludes for the passed targets. It also modifies the JarDependency objects' excludes to contain all the jars excluded by provides. :param iterable targets: List of targets to collect JarDependencies and excludes from. :returns: A pair of a list of JarDependencies, and a set of excludes to apply globally.
[ "Creates", "a", "consistent", "classpath", "and", "list", "of", "excludes", "for", "the", "passed", "targets", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/ivy_utils.py#L1027-L1097
224,329
pantsbuild/pants
src/python/pants/build_graph/mirrored_target_option_mixin.py
MirroredTargetOptionMixin.get_scalar_mirrored_target_option
def get_scalar_mirrored_target_option(self, option_name, target): """Get the attribute `field_name` from `target` if set, else from this subsystem's options.""" mirrored_option_declaration = self._mirrored_option_declarations[option_name] return mirrored_option_declaration.get_mirrored_scalar_option_value(target)
python
def get_scalar_mirrored_target_option(self, option_name, target): mirrored_option_declaration = self._mirrored_option_declarations[option_name] return mirrored_option_declaration.get_mirrored_scalar_option_value(target)
[ "def", "get_scalar_mirrored_target_option", "(", "self", ",", "option_name", ",", "target", ")", ":", "mirrored_option_declaration", "=", "self", ".", "_mirrored_option_declarations", "[", "option_name", "]", "return", "mirrored_option_declaration", ".", "get_mirrored_scala...
Get the attribute `field_name` from `target` if set, else from this subsystem's options.
[ "Get", "the", "attribute", "field_name", "from", "target", "if", "set", "else", "from", "this", "subsystem", "s", "options", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/mirrored_target_option_mixin.py#L100-L103
224,330
pantsbuild/pants
src/python/pants/subsystem/subsystem.py
Subsystem.scoped
def scoped(cls, optionable, removal_version=None, removal_hint=None): """Returns a dependency on this subsystem, scoped to `optionable`. :param removal_version: An optional deprecation version for this scoped Subsystem dependency. :param removal_hint: An optional hint to accompany a deprecation removal_version. Return value is suitable for use in SubsystemClientMixin.subsystem_dependencies(). """ return SubsystemDependency(cls, optionable.options_scope, removal_version, removal_hint)
python
def scoped(cls, optionable, removal_version=None, removal_hint=None): return SubsystemDependency(cls, optionable.options_scope, removal_version, removal_hint)
[ "def", "scoped", "(", "cls", ",", "optionable", ",", "removal_version", "=", "None", ",", "removal_hint", "=", "None", ")", ":", "return", "SubsystemDependency", "(", "cls", ",", "optionable", ".", "options_scope", ",", "removal_version", ",", "removal_hint", ...
Returns a dependency on this subsystem, scoped to `optionable`. :param removal_version: An optional deprecation version for this scoped Subsystem dependency. :param removal_hint: An optional hint to accompany a deprecation removal_version. Return value is suitable for use in SubsystemClientMixin.subsystem_dependencies().
[ "Returns", "a", "dependency", "on", "this", "subsystem", "scoped", "to", "optionable", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/subsystem/subsystem.py#L53-L61
224,331
pantsbuild/pants
src/python/pants/subsystem/subsystem.py
Subsystem.scoped_instance
def scoped_instance(cls, optionable): """Returns an instance of this subsystem for exclusive use by the given `optionable`. :API: public :param optionable: An optionable type or instance to scope this subsystem under. :type: :class:`pants.option.optionable.Optionable` :returns: The scoped subsystem instance. :rtype: :class:`pants.subsystem.subsystem.Subsystem` """ if not isinstance(optionable, Optionable) and not issubclass(optionable, Optionable): raise TypeError('Can only scope an instance against an Optionable, given {} of type {}.' .format(optionable, type(optionable))) return cls._instance_for_scope(cls.subscope(optionable.options_scope))
python
def scoped_instance(cls, optionable): if not isinstance(optionable, Optionable) and not issubclass(optionable, Optionable): raise TypeError('Can only scope an instance against an Optionable, given {} of type {}.' .format(optionable, type(optionable))) return cls._instance_for_scope(cls.subscope(optionable.options_scope))
[ "def", "scoped_instance", "(", "cls", ",", "optionable", ")", ":", "if", "not", "isinstance", "(", "optionable", ",", "Optionable", ")", "and", "not", "issubclass", "(", "optionable", ",", "Optionable", ")", ":", "raise", "TypeError", "(", "'Can only scope an ...
Returns an instance of this subsystem for exclusive use by the given `optionable`. :API: public :param optionable: An optionable type or instance to scope this subsystem under. :type: :class:`pants.option.optionable.Optionable` :returns: The scoped subsystem instance. :rtype: :class:`pants.subsystem.subsystem.Subsystem`
[ "Returns", "an", "instance", "of", "this", "subsystem", "for", "exclusive", "use", "by", "the", "given", "optionable", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/subsystem/subsystem.py#L98-L111
224,332
pantsbuild/pants
src/python/pants/pantsd/service/pants_service.py
_ServiceState.await_paused
def await_paused(self, timeout=None): """Blocks until the service is in the Paused state, then returns True. If a timeout is specified, the method may return False to indicate a timeout: with no timeout it will always (eventually) return True. Raises if the service is not currently in the Pausing state. """ deadline = time.time() + timeout if timeout else None with self._lock: # Wait until the service transitions out of Pausing. while self._state != self._PAUSED: if self._state != self._PAUSING: raise AssertionError('Cannot wait for {} to reach `{}` while it is in `{}`.'.format(self, self._PAUSED, self._state)) timeout = deadline - time.time() if deadline else None if timeout and timeout <= 0: return False self._condition.wait(timeout=timeout) return True
python
def await_paused(self, timeout=None): deadline = time.time() + timeout if timeout else None with self._lock: # Wait until the service transitions out of Pausing. while self._state != self._PAUSED: if self._state != self._PAUSING: raise AssertionError('Cannot wait for {} to reach `{}` while it is in `{}`.'.format(self, self._PAUSED, self._state)) timeout = deadline - time.time() if deadline else None if timeout and timeout <= 0: return False self._condition.wait(timeout=timeout) return True
[ "def", "await_paused", "(", "self", ",", "timeout", "=", "None", ")", ":", "deadline", "=", "time", ".", "time", "(", ")", "+", "timeout", "if", "timeout", "else", "None", "with", "self", ".", "_lock", ":", "# Wait until the service transitions out of Pausing....
Blocks until the service is in the Paused state, then returns True. If a timeout is specified, the method may return False to indicate a timeout: with no timeout it will always (eventually) return True. Raises if the service is not currently in the Pausing state.
[ "Blocks", "until", "the", "service", "is", "in", "the", "Paused", "state", "then", "returns", "True", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/pantsd/service/pants_service.py#L137-L155
224,333
pantsbuild/pants
src/python/pants/pantsd/service/pants_service.py
_ServiceState.maybe_pause
def maybe_pause(self, timeout=None): """Called by the service to indicate that it is pausable. If the service calls this method while the state is `Pausing`, the state will transition to `Paused`, and the service will block here until it is marked `Running` or `Terminating`. If the state is not currently `Pausing`, and a timeout is not passed, this method returns immediately. If a timeout is passed, this method blocks up to that number of seconds to wait to transition to `Pausing`. """ deadline = time.time() + timeout if timeout else None with self._lock: while self._state != self._PAUSING: # If we've been terminated, or the deadline has passed, return. timeout = deadline - time.time() if deadline else None if self._state == self._TERMINATING or not timeout or timeout <= 0: return # Otherwise, wait for the state to change. self._condition.wait(timeout=timeout) # Set Paused, and then wait until we are no longer Paused. self._set_state(self._PAUSED, self._PAUSING) while self._state == self._PAUSED: self._condition.wait()
python
def maybe_pause(self, timeout=None): deadline = time.time() + timeout if timeout else None with self._lock: while self._state != self._PAUSING: # If we've been terminated, or the deadline has passed, return. timeout = deadline - time.time() if deadline else None if self._state == self._TERMINATING or not timeout or timeout <= 0: return # Otherwise, wait for the state to change. self._condition.wait(timeout=timeout) # Set Paused, and then wait until we are no longer Paused. self._set_state(self._PAUSED, self._PAUSING) while self._state == self._PAUSED: self._condition.wait()
[ "def", "maybe_pause", "(", "self", ",", "timeout", "=", "None", ")", ":", "deadline", "=", "time", ".", "time", "(", ")", "+", "timeout", "if", "timeout", "else", "None", "with", "self", ".", "_lock", ":", "while", "self", ".", "_state", "!=", "self"...
Called by the service to indicate that it is pausable. If the service calls this method while the state is `Pausing`, the state will transition to `Paused`, and the service will block here until it is marked `Running` or `Terminating`. If the state is not currently `Pausing`, and a timeout is not passed, this method returns immediately. If a timeout is passed, this method blocks up to that number of seconds to wait to transition to `Pausing`.
[ "Called", "by", "the", "service", "to", "indicate", "that", "it", "is", "pausable", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/pantsd/service/pants_service.py#L157-L180
224,334
pantsbuild/pants
src/python/pants/pantsd/service/pants_service.py
_ServiceState.mark_pausing
def mark_pausing(self): """Requests that the service move to the Paused state, without waiting for it to do so. Raises if the service is not currently in the Running state. """ with self._lock: self._set_state(self._PAUSING, self._RUNNING)
python
def mark_pausing(self): with self._lock: self._set_state(self._PAUSING, self._RUNNING)
[ "def", "mark_pausing", "(", "self", ")", ":", "with", "self", ".", "_lock", ":", "self", ".", "_set_state", "(", "self", ".", "_PAUSING", ",", "self", ".", "_RUNNING", ")" ]
Requests that the service move to the Paused state, without waiting for it to do so. Raises if the service is not currently in the Running state.
[ "Requests", "that", "the", "service", "move", "to", "the", "Paused", "state", "without", "waiting", "for", "it", "to", "do", "so", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/pantsd/service/pants_service.py#L182-L188
224,335
pantsbuild/pants
src/python/pants/pantsd/service/pants_service.py
_ServiceState.mark_running
def mark_running(self): """Moves the service to the Running state. Raises if the service is not currently in the Paused state. """ with self._lock: self._set_state(self._RUNNING, self._PAUSED)
python
def mark_running(self): with self._lock: self._set_state(self._RUNNING, self._PAUSED)
[ "def", "mark_running", "(", "self", ")", ":", "with", "self", ".", "_lock", ":", "self", ".", "_set_state", "(", "self", ".", "_RUNNING", ",", "self", ".", "_PAUSED", ")" ]
Moves the service to the Running state. Raises if the service is not currently in the Paused state.
[ "Moves", "the", "service", "to", "the", "Running", "state", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/pantsd/service/pants_service.py#L190-L196
224,336
pantsbuild/pants
src/python/pants/engine/struct.py
Struct.kwargs
def kwargs(self): """Returns a dict of the kwargs for this Struct which were not interpreted by the baseclass. This excludes fields like `extends`, `merges`, and `abstract`, which are consumed by SerializableFactory.create and Validatable.validate. """ return {k: v for k, v in self._kwargs.items() if k not in self._INTERNAL_FIELDS}
python
def kwargs(self): return {k: v for k, v in self._kwargs.items() if k not in self._INTERNAL_FIELDS}
[ "def", "kwargs", "(", "self", ")", ":", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "self", ".", "_kwargs", ".", "items", "(", ")", "if", "k", "not", "in", "self", ".", "_INTERNAL_FIELDS", "}" ]
Returns a dict of the kwargs for this Struct which were not interpreted by the baseclass. This excludes fields like `extends`, `merges`, and `abstract`, which are consumed by SerializableFactory.create and Validatable.validate.
[ "Returns", "a", "dict", "of", "the", "kwargs", "for", "this", "Struct", "which", "were", "not", "interpreted", "by", "the", "baseclass", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/engine/struct.py#L95-L101
224,337
pantsbuild/pants
src/python/pants/engine/struct.py
Struct.type_alias
def type_alias(self): """Return the type alias this target was constructed via. For a target read from a BUILD file, this will be target alias, like 'java_library'. For a target constructed in memory, this will be the simple class name, like 'JavaLibrary'. The end result is that the type alias should be the most natural way to refer to this target's type to the author of the target instance. :rtype: string """ type_alias = self._kwargs.get(self._TYPE_ALIAS_FIELD, None) return type_alias if type_alias is not None else type(self).__name__
python
def type_alias(self): type_alias = self._kwargs.get(self._TYPE_ALIAS_FIELD, None) return type_alias if type_alias is not None else type(self).__name__
[ "def", "type_alias", "(", "self", ")", ":", "type_alias", "=", "self", ".", "_kwargs", ".", "get", "(", "self", ".", "_TYPE_ALIAS_FIELD", ",", "None", ")", "return", "type_alias", "if", "type_alias", "is", "not", "None", "else", "type", "(", "self", ")",...
Return the type alias this target was constructed via. For a target read from a BUILD file, this will be target alias, like 'java_library'. For a target constructed in memory, this will be the simple class name, like 'JavaLibrary'. The end result is that the type alias should be the most natural way to refer to this target's type to the author of the target instance. :rtype: string
[ "Return", "the", "type", "alias", "this", "target", "was", "constructed", "via", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/engine/struct.py#L129-L141
224,338
google/tangent
tangent/compile.py
compile_file
def compile_file(source, globals_=None): """Compile by saving to file and importing that. Compiling the AST/source code this way ensures that the source code is readable by e.g. `pdb` or `inspect`. Args: source: The code to compile, either as a string or as an AST. globals_: A dictionary of variables that should be available as globals in the compiled module. They will be monkey patched after importing the module. Returns: A module object containing the compiled source code. """ if isinstance(source, gast.AST): source = quoting.to_source(source) # Write source to temporary file tempdir = tempfile.mkdtemp() uuid = str(uuid4().hex[:4]) tmpname = os.path.join(tempdir, 'tangent_%s.py' % uuid) with open(tmpname, 'w') as f: f.write(source) # Load the temporary file as a module module_name = 'tangent_%s' % uuid if six.PY3: spec = util.spec_from_file_location(module_name, tmpname) m = util.module_from_spec(spec) spec.loader.exec_module(m) else: m = imp.load_source(module_name, tmpname) # Update the modules namespace if globals_: m.__dict__.update(globals_) return m
python
def compile_file(source, globals_=None): if isinstance(source, gast.AST): source = quoting.to_source(source) # Write source to temporary file tempdir = tempfile.mkdtemp() uuid = str(uuid4().hex[:4]) tmpname = os.path.join(tempdir, 'tangent_%s.py' % uuid) with open(tmpname, 'w') as f: f.write(source) # Load the temporary file as a module module_name = 'tangent_%s' % uuid if six.PY3: spec = util.spec_from_file_location(module_name, tmpname) m = util.module_from_spec(spec) spec.loader.exec_module(m) else: m = imp.load_source(module_name, tmpname) # Update the modules namespace if globals_: m.__dict__.update(globals_) return m
[ "def", "compile_file", "(", "source", ",", "globals_", "=", "None", ")", ":", "if", "isinstance", "(", "source", ",", "gast", ".", "AST", ")", ":", "source", "=", "quoting", ".", "to_source", "(", "source", ")", "# Write source to temporary file", "tempdir",...
Compile by saving to file and importing that. Compiling the AST/source code this way ensures that the source code is readable by e.g. `pdb` or `inspect`. Args: source: The code to compile, either as a string or as an AST. globals_: A dictionary of variables that should be available as globals in the compiled module. They will be monkey patched after importing the module. Returns: A module object containing the compiled source code.
[ "Compile", "by", "saving", "to", "file", "and", "importing", "that", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/compile.py#L30-L67
224,339
google/tangent
tangent/compile.py
compile_function
def compile_function(node, globals_=None): """Convert an AST or string into a function with inspectable source. This function uses `compile_file` internally, but instead of returning the entire module it will return the function only. Args: node: A `FunctionDef` node or a `Module` node which contains at least one `FunctionDef` node. If a module contains multiple functions, a handle to the first one will be returned. globals_: See `compile_file` Returns: A handle to the compiled function. Raises: TypeError: If the input is not a string or AST. ValueError: If no function can be found. """ if not isinstance(node, gast.AST): if not isinstance(node, six.string_types): raise TypeError node = gast.parse(node) if isinstance(node, gast.Module): for succ in node.body: if isinstance(succ, gast.FunctionDef): name = succ.name break else: raise ValueError('no function found') elif isinstance(node, gast.FunctionDef): name = node.name else: raise TypeError module = compile_file(node, globals_) return getattr(module, name)
python
def compile_function(node, globals_=None): if not isinstance(node, gast.AST): if not isinstance(node, six.string_types): raise TypeError node = gast.parse(node) if isinstance(node, gast.Module): for succ in node.body: if isinstance(succ, gast.FunctionDef): name = succ.name break else: raise ValueError('no function found') elif isinstance(node, gast.FunctionDef): name = node.name else: raise TypeError module = compile_file(node, globals_) return getattr(module, name)
[ "def", "compile_function", "(", "node", ",", "globals_", "=", "None", ")", ":", "if", "not", "isinstance", "(", "node", ",", "gast", ".", "AST", ")", ":", "if", "not", "isinstance", "(", "node", ",", "six", ".", "string_types", ")", ":", "raise", "Ty...
Convert an AST or string into a function with inspectable source. This function uses `compile_file` internally, but instead of returning the entire module it will return the function only. Args: node: A `FunctionDef` node or a `Module` node which contains at least one `FunctionDef` node. If a module contains multiple functions, a handle to the first one will be returned. globals_: See `compile_file` Returns: A handle to the compiled function. Raises: TypeError: If the input is not a string or AST. ValueError: If no function can be found.
[ "Convert", "an", "AST", "or", "string", "into", "a", "function", "with", "inspectable", "source", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/compile.py#L70-L105
224,340
google/tangent
tangent/grad_util.py
autodiff_ast
def autodiff_ast(func, wrt, motion, mode, preserve_result, check_dims, verbose): """Perform AD on a single function and return the AST. Args: See `grad`. Returns: node: The AST of a module containing the adjoint and primal function definitions. required: A list of non-built in functions that this function called, and of which the primals and adjoints need to be made available in order for the returned function to run. """ node = annotate.resolve_calls(func) node = desugar.explicit_loop_indexes(node) fence.validate(node, inspect.getsource(func)) node = anf_.anf(node) if verbose >= 2: print('ANF') print(quoting.to_source(node)) if mode == 'reverse': node, required, stack = reverse_ad.reverse_ad(node.body[0], wrt, preserve_result, check_dims) if verbose >= 2: print('RAW') print(quoting.to_source(node)) if motion == 'split': node = reverse_ad.split(node, stack) else: node = reverse_ad.joint(node) if verbose >= 2: print('MOTION') print(quoting.to_source(node)) elif mode == 'forward': node, required = forward_ad.forward_ad(node.body[0], wrt, preserve_result, check_dims) return node, required
python
def autodiff_ast(func, wrt, motion, mode, preserve_result, check_dims, verbose): node = annotate.resolve_calls(func) node = desugar.explicit_loop_indexes(node) fence.validate(node, inspect.getsource(func)) node = anf_.anf(node) if verbose >= 2: print('ANF') print(quoting.to_source(node)) if mode == 'reverse': node, required, stack = reverse_ad.reverse_ad(node.body[0], wrt, preserve_result, check_dims) if verbose >= 2: print('RAW') print(quoting.to_source(node)) if motion == 'split': node = reverse_ad.split(node, stack) else: node = reverse_ad.joint(node) if verbose >= 2: print('MOTION') print(quoting.to_source(node)) elif mode == 'forward': node, required = forward_ad.forward_ad(node.body[0], wrt, preserve_result, check_dims) return node, required
[ "def", "autodiff_ast", "(", "func", ",", "wrt", ",", "motion", ",", "mode", ",", "preserve_result", ",", "check_dims", ",", "verbose", ")", ":", "node", "=", "annotate", ".", "resolve_calls", "(", "func", ")", "node", "=", "desugar", ".", "explicit_loop_in...
Perform AD on a single function and return the AST. Args: See `grad`. Returns: node: The AST of a module containing the adjoint and primal function definitions. required: A list of non-built in functions that this function called, and of which the primals and adjoints need to be made available in order for the returned function to run.
[ "Perform", "AD", "on", "a", "single", "function", "and", "return", "the", "AST", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/grad_util.py#L77-L113
224,341
google/tangent
tangent/grad_util.py
autodiff_tree
def autodiff_tree(func, wrt, motion, mode, preserve_result, check_dims, verbose): """Perform AD on all functions in a call tree. This function walks the call tree and differentiates each function in it. It also ensures that the global namespaces that each function in the call tree was in are merged. The `tangent` and `numpy` packages are added to the namespace here, so that the gradient templates can assume that they are present. Args: See `grad`. Returns: final: A single module which contains the primals and adjoints of all the functions in the call tree. namespace: A merged dictionary with all the variables in the global namespaces of each function. The primals and adjoints need access to these in order to execute. """ # Imported here to avoid circular imports import tangent namespace = {'tangent': tangent, 'numpy': numpy} done = set() final = gast.Module(body=[]) namespace.update(six.get_function_globals(func)) node, required = autodiff_ast(func, wrt, motion, mode, preserve_result, check_dims, verbose) final.body.extend(node.body) to_do = set(required) if motion == 'split' and mode == 'reverse': done.add((func, wrt)) to_do -= done while to_do: func, wrt = to_do.pop() namespace.update(six.get_function_globals(func)) node, required = autodiff_ast( func=func, wrt=wrt, motion='split', mode=mode, preserve_result=True, check_dims=False, verbose=verbose) final.body.extend(node.body) done.add((func, wrt)) to_do.update(required) to_do -= done return final, namespace
python
def autodiff_tree(func, wrt, motion, mode, preserve_result, check_dims, verbose): # Imported here to avoid circular imports import tangent namespace = {'tangent': tangent, 'numpy': numpy} done = set() final = gast.Module(body=[]) namespace.update(six.get_function_globals(func)) node, required = autodiff_ast(func, wrt, motion, mode, preserve_result, check_dims, verbose) final.body.extend(node.body) to_do = set(required) if motion == 'split' and mode == 'reverse': done.add((func, wrt)) to_do -= done while to_do: func, wrt = to_do.pop() namespace.update(six.get_function_globals(func)) node, required = autodiff_ast( func=func, wrt=wrt, motion='split', mode=mode, preserve_result=True, check_dims=False, verbose=verbose) final.body.extend(node.body) done.add((func, wrt)) to_do.update(required) to_do -= done return final, namespace
[ "def", "autodiff_tree", "(", "func", ",", "wrt", ",", "motion", ",", "mode", ",", "preserve_result", ",", "check_dims", ",", "verbose", ")", ":", "# Imported here to avoid circular imports", "import", "tangent", "namespace", "=", "{", "'tangent'", ":", "tangent", ...
Perform AD on all functions in a call tree. This function walks the call tree and differentiates each function in it. It also ensures that the global namespaces that each function in the call tree was in are merged. The `tangent` and `numpy` packages are added to the namespace here, so that the gradient templates can assume that they are present. Args: See `grad`. Returns: final: A single module which contains the primals and adjoints of all the functions in the call tree. namespace: A merged dictionary with all the variables in the global namespaces of each function. The primals and adjoints need access to these in order to execute.
[ "Perform", "AD", "on", "all", "functions", "in", "a", "call", "tree", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/grad_util.py#L116-L172
224,342
google/tangent
tangent/grad_util.py
vjp
def vjp(func, wrt=(0,), optimized=True, check_dims=True, preserve_result=False, verbose=0): """Convenience function to produce vector-Jacobian products. See `autodiff` for function arguments. Uses reverse-mode joint-motion autodiff to produce the VJP. """ return autodiff( func, wrt=wrt, motion='joint', mode='reverse', optimized=optimized, preserve_result=preserve_result, input_derivative=INPUT_DERIVATIVE.Required, check_dims=check_dims, verbose=verbose)
python
def vjp(func, wrt=(0,), optimized=True, check_dims=True, preserve_result=False, verbose=0): return autodiff( func, wrt=wrt, motion='joint', mode='reverse', optimized=optimized, preserve_result=preserve_result, input_derivative=INPUT_DERIVATIVE.Required, check_dims=check_dims, verbose=verbose)
[ "def", "vjp", "(", "func", ",", "wrt", "=", "(", "0", ",", ")", ",", "optimized", "=", "True", ",", "check_dims", "=", "True", ",", "preserve_result", "=", "False", ",", "verbose", "=", "0", ")", ":", "return", "autodiff", "(", "func", ",", "wrt", ...
Convenience function to produce vector-Jacobian products. See `autodiff` for function arguments. Uses reverse-mode joint-motion autodiff to produce the VJP.
[ "Convenience", "function", "to", "produce", "vector", "-", "Jacobian", "products", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/grad_util.py#L175-L195
224,343
google/tangent
tangent/grad_util.py
autodiff
def autodiff(func, wrt=(0,), optimized=True, motion='joint', mode='reverse', preserve_result=False, check_dims=True, input_derivative=INPUT_DERIVATIVE.Required, verbose=0): """Build the vector-Jacobian or Jacobian-vector product of a function `func`. For a vector-Jacobian product (reverse-mode autodiff): This function proceeds by finding the primals and adjoints of all the functions in the call tree. For a Jacobian-vector product (forward-mode autodiff): We first find the primals and tangents of all functions in the call tree. It then wraps the top level function (i.e. the one passed as `func`) in a slightly more user-friendly interface. It then compiles the function and attaches to it the global namespace it needs to run. Args: func: The function to take the gradient of. wrt: A tuple of argument indices to differentiate with respect to. By default the derivative is taken with respect to the first argument. optimized: Whether to optimize the gradient function (`True` by default). motion: Either 'split' (separate functions for forward and backward pass) or 'joint' motion (a single combined function). Joint mode is the default. mode: Either 'forward' or 'reverse' mode. Forward mode is more efficient when the input dimensionality is lower than the output dimensionality, whereas it is the opposite for reverse mode. input_derivative: An enum indicating whether the user must supply an input derivative, and if not, what the default value is. See the possible values of INPUT_DERIVATIVE in this file. preserve_result: A boolean indicating whether or not the generated gradient function should also return the output of the original function. 
If False, the return signature of the input and output functions will be > val = func(*args) > df = grad(func,preserve_result=False) > gradval = df(*args) If True, > val = func(*args) > df = grad(func,preserve_result=True) > gradval, val = df(*args) Note that if taking gradients with respect to multiple arguments, the primal value will be appended to the return signature. Ex: > val = func(x,y) > df = grad(func,wrt=(0,1),preserve_result=True) > dx,dy,val = df(x,y) verbose: If 1 the source code of the generated functions will be output to stdout at various stages of the process for debugging purposes. If > 1, all intermediate code generation steps will print. Returns: df: A function that calculates a derivative (see file-level documentation above for the kinds of derivatives available) with respect to arguments specified in `wrt`, using forward or reverse mode according to `mode`. If using reverse mode, the gradient is calculated in either split or joint motion according to the value passed in `motion`. If `preserve_result` is True, the function will also return the original result of `func`. 
""" # If the function had the with insert_grad_of statements removed, retrieve them func = getattr(func, 'tangent', func) # Generate the derivative node, namespace = autodiff_tree(func, wrt, motion, mode, preserve_result, check_dims, verbose) if mode == 'reverse' and motion == 'joint': # Pull the stack definition and initial gradient into the function body # TODO: Use first FunctionDef instead of first element node.body[0] = _create_joint(node.body[0], func, wrt, input_derivative) if verbose >= 2: print('INLINED') print(quoting.to_source(node)) if mode == 'forward': node = _create_forward(node) if optimized: # Optimize the resulting functions node = optimization.optimize(node) node = comments.remove_repeated_comments(node) if verbose >= 1: print(quoting.to_source(node)) # Compile and return module = compile_.compile_file(node, namespace) if mode == 'forward' or motion == 'joint': return getattr(module, node.body[0].name) else: # Compiling the top-level function in split mode makes no sense, but we use # it for testing; hence we don't care about the source being readable forward = getattr(module, node.body[0].name) backward = getattr(module, node.body[1].name) # Imported here to avoid circular imports import tangent def df(*args, **kwargs): _stack = tangent.Stack() init_grad = kwargs.pop('init_grad', 1.0) forward(_stack, *args, **kwargs) dx = backward(_stack, init_grad, *args, **kwargs) if len(dx) == 1: dx, = dx return dx return df
python
def autodiff(func, wrt=(0,), optimized=True, motion='joint', mode='reverse', preserve_result=False, check_dims=True, input_derivative=INPUT_DERIVATIVE.Required, verbose=0): # If the function had the with insert_grad_of statements removed, retrieve them func = getattr(func, 'tangent', func) # Generate the derivative node, namespace = autodiff_tree(func, wrt, motion, mode, preserve_result, check_dims, verbose) if mode == 'reverse' and motion == 'joint': # Pull the stack definition and initial gradient into the function body # TODO: Use first FunctionDef instead of first element node.body[0] = _create_joint(node.body[0], func, wrt, input_derivative) if verbose >= 2: print('INLINED') print(quoting.to_source(node)) if mode == 'forward': node = _create_forward(node) if optimized: # Optimize the resulting functions node = optimization.optimize(node) node = comments.remove_repeated_comments(node) if verbose >= 1: print(quoting.to_source(node)) # Compile and return module = compile_.compile_file(node, namespace) if mode == 'forward' or motion == 'joint': return getattr(module, node.body[0].name) else: # Compiling the top-level function in split mode makes no sense, but we use # it for testing; hence we don't care about the source being readable forward = getattr(module, node.body[0].name) backward = getattr(module, node.body[1].name) # Imported here to avoid circular imports import tangent def df(*args, **kwargs): _stack = tangent.Stack() init_grad = kwargs.pop('init_grad', 1.0) forward(_stack, *args, **kwargs) dx = backward(_stack, init_grad, *args, **kwargs) if len(dx) == 1: dx, = dx return dx return df
[ "def", "autodiff", "(", "func", ",", "wrt", "=", "(", "0", ",", ")", ",", "optimized", "=", "True", ",", "motion", "=", "'joint'", ",", "mode", "=", "'reverse'", ",", "preserve_result", "=", "False", ",", "check_dims", "=", "True", ",", "input_derivati...
Build the vector-Jacobian or Jacobian-vector product of a function `func`. For a vector-Jacobian product (reverse-mode autodiff): This function proceeds by finding the primals and adjoints of all the functions in the call tree. For a Jacobian-vector product (forward-mode autodiff): We first find the primals and tangents of all functions in the call tree. It then wraps the top level function (i.e. the one passed as `func`) in a slightly more user-friendly interface. It then compiles the function and attaches to it the global namespace it needs to run. Args: func: The function to take the gradient of. wrt: A tuple of argument indices to differentiate with respect to. By default the derivative is taken with respect to the first argument. optimized: Whether to optimize the gradient function (`True` by default). motion: Either 'split' (separate functions for forward and backward pass) or 'joint' motion (a single combined function). Joint mode is the default. mode: Either 'forward' or 'reverse' mode. Forward mode is more efficient when the input dimensionality is lower than the output dimensionality, whereas it is the opposite for reverse mode. input_derivative: An enum indicating whether the user must supply an input derivative, and if not, what the default value is. See the possible values of INPUT_DERIVATIVE in this file. preserve_result: A boolean indicating whether or not the generated gradient function should also return the output of the original function. If False, the return signature of the input and output functions will be > val = func(*args) > df = grad(func,preserve_result=False) > gradval = df(*args) If True, > val = func(*args) > df = grad(func,preserve_result=True) > gradval, val = df(*args) Note that if taking gradients with respect to multiple arguments, the primal value will be appended to the return signature. 
Ex: > val = func(x,y) > df = grad(func,wrt=(0,1),preserve_result=True) > dx,dy,val = df(x,y) verbose: If 1 the source code of the generated functions will be output to stdout at various stages of the process for debugging purposes. If > 1, all intermediate code generation steps will print. Returns: df: A function that calculates a derivative (see file-level documentation above for the kinds of derivatives available) with respect to arguments specified in `wrt`, using forward or reverse mode according to `mode`. If using reverse mode, the gradient is calculated in either split or joint motion according to the value passed in `motion`. If `preserve_result` is True, the function will also return the original result of `func`.
[ "Build", "the", "vector", "-", "Jacobian", "or", "Jacobian", "-", "vector", "product", "of", "a", "function", "func", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/grad_util.py#L220-L332
224,344
google/tangent
tangent/grad_util.py
_create_joint
def _create_joint(fwdbwd, func, wrt, input_derivative): """Create a user-friendly gradient function. By default, gradient functions expect the stack to be passed to them explicitly. This function modifies the function so that the stack doesn't need to be passed and gets initialized in the function body instead. For consistency, gradient functions always return a tuple, even if the gradient of only one input was required. We unpack the tuple if it is of length one. Args: fwdbwd: An AST. The function definition of the joint primal and adjoint. func: A function handle. The original function that was differentiated. wrt: A tuple of integers. The arguments with respect to which we differentiated. Returns: The function definition of the new function. """ # Correct return to be a non-tuple if there's only one element retval = fwdbwd.body[-1] if len(retval.value.elts) == 1: retval.value = retval.value.elts[0] # Make a stack init statement init_stack = quoting.quote('%s = tangent.Stack()' % fwdbwd.args.args[0].id) init_stack = comments.add_comment(init_stack, 'Initialize the tape') # Prepend the stack init to the top of the function fwdbwd.body = [init_stack] + fwdbwd.body # Replace the function arguments with the original ones grad_name = fwdbwd.args.args[1].id fwdbwd.args = quoting.parse_function(func).body[0].args # Give the function a nice name fwdbwd.name = naming.joint_name(func, wrt) # Allow the initial gradient to be passed as a keyword argument fwdbwd = ast_.append_args(fwdbwd, [grad_name]) if input_derivative == INPUT_DERIVATIVE.DefaultOne: fwdbwd.args.defaults.append(quoting.quote('1.0')) return fwdbwd
python
def _create_joint(fwdbwd, func, wrt, input_derivative): # Correct return to be a non-tuple if there's only one element retval = fwdbwd.body[-1] if len(retval.value.elts) == 1: retval.value = retval.value.elts[0] # Make a stack init statement init_stack = quoting.quote('%s = tangent.Stack()' % fwdbwd.args.args[0].id) init_stack = comments.add_comment(init_stack, 'Initialize the tape') # Prepend the stack init to the top of the function fwdbwd.body = [init_stack] + fwdbwd.body # Replace the function arguments with the original ones grad_name = fwdbwd.args.args[1].id fwdbwd.args = quoting.parse_function(func).body[0].args # Give the function a nice name fwdbwd.name = naming.joint_name(func, wrt) # Allow the initial gradient to be passed as a keyword argument fwdbwd = ast_.append_args(fwdbwd, [grad_name]) if input_derivative == INPUT_DERIVATIVE.DefaultOne: fwdbwd.args.defaults.append(quoting.quote('1.0')) return fwdbwd
[ "def", "_create_joint", "(", "fwdbwd", ",", "func", ",", "wrt", ",", "input_derivative", ")", ":", "# Correct return to be a non-tuple if there's only one element", "retval", "=", "fwdbwd", ".", "body", "[", "-", "1", "]", "if", "len", "(", "retval", ".", "value...
Create a user-friendly gradient function. By default, gradient functions expect the stack to be passed to them explicitly. This function modifies the function so that the stack doesn't need to be passed and gets initialized in the function body instead. For consistency, gradient functions always return a tuple, even if the gradient of only one input was required. We unpack the tuple if it is of length one. Args: fwdbwd: An AST. The function definition of the joint primal and adjoint. func: A function handle. The original function that was differentiated. wrt: A tuple of integers. The arguments with respect to which we differentiated. Returns: The function definition of the new function.
[ "Create", "a", "user", "-", "friendly", "gradient", "function", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/grad_util.py#L393-L435
224,345
google/tangent
tangent/grad_util.py
_create_forward
def _create_forward(out_node): """Create a user-friendly forward function. Ensures that a single value instead of a tuple is returned if the user asked for the gradient with respect to only one input. Args: out_node: The function definition AST. Returns: The function definition with potentially changed return statement. """ retval = out_node.body[0].body[-1] if len(retval.value.elts) == 1: retval.value = retval.value.elts[0] return out_node
python
def _create_forward(out_node): retval = out_node.body[0].body[-1] if len(retval.value.elts) == 1: retval.value = retval.value.elts[0] return out_node
[ "def", "_create_forward", "(", "out_node", ")", ":", "retval", "=", "out_node", ".", "body", "[", "0", "]", ".", "body", "[", "-", "1", "]", "if", "len", "(", "retval", ".", "value", ".", "elts", ")", "==", "1", ":", "retval", ".", "value", "=", ...
Create a user-friendly forward function. Ensures that a single value instead of a tuple is returned if the user asked for the gradient with respect to only one input. Args: out_node: The function definition AST. Returns: The function definition with potentially changed return statement.
[ "Create", "a", "user", "-", "friendly", "forward", "function", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/grad_util.py#L438-L453
224,346
google/tangent
tangent/__init__.py
tangent
def tangent(f): """A decorator which removes the `with insert_grad_of` statement. This allows the function to be called as usual. Args: f: A function Returns: A function with any `with insert_grad_of` context managers removed. """ node = annotate.resolve_calls(f) RemoveWith().visit(node) wrapped = functools.wraps(f)(compile_.compile_function(node)) wrapped.tangent = f return wrapped
python
def tangent(f): node = annotate.resolve_calls(f) RemoveWith().visit(node) wrapped = functools.wraps(f)(compile_.compile_function(node)) wrapped.tangent = f return wrapped
[ "def", "tangent", "(", "f", ")", ":", "node", "=", "annotate", ".", "resolve_calls", "(", "f", ")", "RemoveWith", "(", ")", ".", "visit", "(", "node", ")", "wrapped", "=", "functools", ".", "wraps", "(", "f", ")", "(", "compile_", ".", "compile_funct...
A decorator which removes the `with insert_grad_of` statement. This allows the function to be called as usual. Args: f: A function Returns: A function with any `with insert_grad_of` context managers removed.
[ "A", "decorator", "which", "removes", "the", "with", "insert_grad_of", "statement", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/__init__.py#L62-L77
224,347
google/tangent
tangent/cfg.py
forward
def forward(node, analysis): """Perform a given analysis on all functions within an AST.""" if not isinstance(analysis, Forward): raise TypeError('not a valid forward analysis object') for succ in gast.walk(node): if isinstance(succ, gast.FunctionDef): cfg_obj = CFG.build_cfg(succ) analysis.visit(cfg_obj.entry) return node
python
def forward(node, analysis): if not isinstance(analysis, Forward): raise TypeError('not a valid forward analysis object') for succ in gast.walk(node): if isinstance(succ, gast.FunctionDef): cfg_obj = CFG.build_cfg(succ) analysis.visit(cfg_obj.entry) return node
[ "def", "forward", "(", "node", ",", "analysis", ")", ":", "if", "not", "isinstance", "(", "analysis", ",", "Forward", ")", ":", "raise", "TypeError", "(", "'not a valid forward analysis object'", ")", "for", "succ", "in", "gast", ".", "walk", "(", "node", ...
Perform a given analysis on all functions within an AST.
[ "Perform", "a", "given", "analysis", "on", "all", "functions", "within", "an", "AST", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/cfg.py#L236-L244
224,348
google/tangent
tangent/cfg.py
CFG.backlink
def backlink(node): """Given a CFG with outgoing links, create incoming links.""" seen = set() to_see = [node] while to_see: node = to_see.pop() seen.add(node) for succ in node.next: succ.prev.add(node) if succ not in seen: to_see.append(succ)
python
def backlink(node): seen = set() to_see = [node] while to_see: node = to_see.pop() seen.add(node) for succ in node.next: succ.prev.add(node) if succ not in seen: to_see.append(succ)
[ "def", "backlink", "(", "node", ")", ":", "seen", "=", "set", "(", ")", "to_see", "=", "[", "node", "]", "while", "to_see", ":", "node", "=", "to_see", ".", "pop", "(", ")", "seen", ".", "add", "(", "node", ")", "for", "succ", "in", "node", "."...
Given a CFG with outgoing links, create incoming links.
[ "Given", "a", "CFG", "with", "outgoing", "links", "create", "incoming", "links", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/cfg.py#L67-L77
224,349
google/tangent
tangent/cfg.py
CFG.set_head
def set_head(self, node): """Link this node to the current leaves.""" for head in self.head: head.next.add(node) self.head[:] = [] self.head.append(node)
python
def set_head(self, node): for head in self.head: head.next.add(node) self.head[:] = [] self.head.append(node)
[ "def", "set_head", "(", "self", ",", "node", ")", ":", "for", "head", "in", "self", ".", "head", ":", "head", ".", "next", ".", "add", "(", "node", ")", "self", ".", "head", "[", ":", "]", "=", "[", "]", "self", ".", "head", ".", "append", "(...
Link this node to the current leaves.
[ "Link", "this", "node", "to", "the", "current", "leaves", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/cfg.py#L79-L84
224,350
google/tangent
tangent/cfg.py
CFG.build_cfg
def build_cfg(cls, node): """Build a CFG for a function. Args: node: A function definition the body of which to analyze. Returns: A CFG object. Raises: TypeError: If the input is not a function definition. """ if not isinstance(node, gast.FunctionDef): raise TypeError('input must be a function definition') cfg = cls() cfg.entry = Node(node.args) cfg.head = [cfg.entry] cfg.visit_statements(node.body) cfg.exit = Node(None) cfg.set_head(cfg.exit) cfg.backlink(cfg.entry) return cfg
python
def build_cfg(cls, node): if not isinstance(node, gast.FunctionDef): raise TypeError('input must be a function definition') cfg = cls() cfg.entry = Node(node.args) cfg.head = [cfg.entry] cfg.visit_statements(node.body) cfg.exit = Node(None) cfg.set_head(cfg.exit) cfg.backlink(cfg.entry) return cfg
[ "def", "build_cfg", "(", "cls", ",", "node", ")", ":", "if", "not", "isinstance", "(", "node", ",", "gast", ".", "FunctionDef", ")", ":", "raise", "TypeError", "(", "'input must be a function definition'", ")", "cfg", "=", "cls", "(", ")", "cfg", ".", "e...
Build a CFG for a function. Args: node: A function definition the body of which to analyze. Returns: A CFG object. Raises: TypeError: If the input is not a function definition.
[ "Build", "a", "CFG", "for", "a", "function", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/cfg.py#L87-L108
224,351
google/tangent
tangent/optimization.py
optimize
def optimize(node): """Perform a series of optimization passes. This function performs a series of optimizations (dead code elimination, constant folding, variable folding) on the given AST. It optimizes the code repeatedly until reaching a fixed point. The fixed point is determine roughly by checking whether the number of lines of generated source code changed after the latest pass. Args: node: The AST to optimize. Returns: The optimized AST. """ node = dead_code_elimination(node) node = constant_folding(node) node = assignment_propagation(node) return node
python
def optimize(node): node = dead_code_elimination(node) node = constant_folding(node) node = assignment_propagation(node) return node
[ "def", "optimize", "(", "node", ")", ":", "node", "=", "dead_code_elimination", "(", "node", ")", "node", "=", "constant_folding", "(", "node", ")", "node", "=", "assignment_propagation", "(", "node", ")", "return", "node" ]
Perform a series of optimization passes. This function performs a series of optimizations (dead code elimination, constant folding, variable folding) on the given AST. It optimizes the code repeatedly until reaching a fixed point. The fixed point is determine roughly by checking whether the number of lines of generated source code changed after the latest pass. Args: node: The AST to optimize. Returns: The optimized AST.
[ "Perform", "a", "series", "of", "optimization", "passes", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/optimization.py#L41-L58
224,352
google/tangent
tangent/optimization.py
dead_code_elimination
def dead_code_elimination(node): """Perform a simple form of dead code elimination on a Python AST. This method performs reaching definitions analysis on all function definitions. It then looks for the definition of variables that are not used elsewhere and removes those definitions. This function takes into consideration push and pop statements; if a pop statement is removed, it will also try to remove the accompanying push statement. Note that this *requires dead code elimination to be performed on the primal and adjoint simultaneously*. Args: node: The AST to optimize. Returns: The optimized AST. """ to_remove = set(def_[1] for def_ in annotate.unused(node) if not isinstance(def_[1], (gast.arguments, gast.For))) for n in list(to_remove): for succ in gast.walk(n): if anno.getanno(succ, 'push', False): to_remove.add(anno.getanno(succ, 'push')) transformers.Remove(to_remove).visit(node) anno.clearanno(node) return node
python
def dead_code_elimination(node): to_remove = set(def_[1] for def_ in annotate.unused(node) if not isinstance(def_[1], (gast.arguments, gast.For))) for n in list(to_remove): for succ in gast.walk(n): if anno.getanno(succ, 'push', False): to_remove.add(anno.getanno(succ, 'push')) transformers.Remove(to_remove).visit(node) anno.clearanno(node) return node
[ "def", "dead_code_elimination", "(", "node", ")", ":", "to_remove", "=", "set", "(", "def_", "[", "1", "]", "for", "def_", "in", "annotate", ".", "unused", "(", "node", ")", "if", "not", "isinstance", "(", "def_", "[", "1", "]", ",", "(", "gast", "...
Perform a simple form of dead code elimination on a Python AST. This method performs reaching definitions analysis on all function definitions. It then looks for the definition of variables that are not used elsewhere and removes those definitions. This function takes into consideration push and pop statements; if a pop statement is removed, it will also try to remove the accompanying push statement. Note that this *requires dead code elimination to be performed on the primal and adjoint simultaneously*. Args: node: The AST to optimize. Returns: The optimized AST.
[ "Perform", "a", "simple", "form", "of", "dead", "code", "elimination", "on", "a", "Python", "AST", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/optimization.py#L62-L88
224,353
google/tangent
tangent/optimization.py
read_counts
def read_counts(node): """Check how many times a variable definition was used. Args: node: An AST to analyze. Returns: A dictionary from assignment nodes to the number of times the assigned to variable was used. """ cfg.forward(node, cfg.ReachingDefinitions()) rc = ReadCounts() rc.visit(node) return rc.n_read
python
def read_counts(node): cfg.forward(node, cfg.ReachingDefinitions()) rc = ReadCounts() rc.visit(node) return rc.n_read
[ "def", "read_counts", "(", "node", ")", ":", "cfg", ".", "forward", "(", "node", ",", "cfg", ".", "ReachingDefinitions", "(", ")", ")", "rc", "=", "ReadCounts", "(", ")", "rc", ".", "visit", "(", "node", ")", "return", "rc", ".", "n_read" ]
Check how many times a variable definition was used. Args: node: An AST to analyze. Returns: A dictionary from assignment nodes to the number of times the assigned to variable was used.
[ "Check", "how", "many", "times", "a", "variable", "definition", "was", "used", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/optimization.py#L114-L128
224,354
google/tangent
tangent/optimization.py
assignment_propagation
def assignment_propagation(node): """Perform assignment propagation. Assignment propagation is not a compiler optimization as much as a readability optimization. If a variable name is used only once, it gets renamed when possible e.g. `y = x; z = y` will become `z = x`. Args: node: The AST to optimize. Returns: The optimized AST. """ n_reads = read_counts(node) to_remove = [] for succ in gast.walk(node): # We found an assignment of the form a = b # - Left-hand side is a Name, right-hand side is a Name. if (isinstance(succ, gast.Assign) and isinstance(succ.value, gast.Name) and len(succ.targets) == 1 and isinstance(succ.targets[0], gast.Name)): rhs_name = succ.value.id # We now find all the places that b was defined rhs_defs = [def_[1] for def_ in anno.getanno(succ, 'definitions_in') if def_[0] == rhs_name] # If b was defined in only one place (not an argument), and wasn't used # anywhere else but in a == b, and was defined as b = x, then we can fold # the statements if (len(rhs_defs) == 1 and isinstance(rhs_defs[0], gast.Assign) and n_reads[rhs_defs[0]] == 1 and isinstance(rhs_defs[0].value, gast.Name) and isinstance(rhs_defs[0].targets[0], gast.Name)): # Mark rhs_def for deletion to_remove.append(rhs_defs[0]) # Propagate the definition succ.value = rhs_defs[0].value # Remove the definitions we folded transformers.Remove(to_remove).visit(node) anno.clearanno(node) return node
python
def assignment_propagation(node): n_reads = read_counts(node) to_remove = [] for succ in gast.walk(node): # We found an assignment of the form a = b # - Left-hand side is a Name, right-hand side is a Name. if (isinstance(succ, gast.Assign) and isinstance(succ.value, gast.Name) and len(succ.targets) == 1 and isinstance(succ.targets[0], gast.Name)): rhs_name = succ.value.id # We now find all the places that b was defined rhs_defs = [def_[1] for def_ in anno.getanno(succ, 'definitions_in') if def_[0] == rhs_name] # If b was defined in only one place (not an argument), and wasn't used # anywhere else but in a == b, and was defined as b = x, then we can fold # the statements if (len(rhs_defs) == 1 and isinstance(rhs_defs[0], gast.Assign) and n_reads[rhs_defs[0]] == 1 and isinstance(rhs_defs[0].value, gast.Name) and isinstance(rhs_defs[0].targets[0], gast.Name)): # Mark rhs_def for deletion to_remove.append(rhs_defs[0]) # Propagate the definition succ.value = rhs_defs[0].value # Remove the definitions we folded transformers.Remove(to_remove).visit(node) anno.clearanno(node) return node
[ "def", "assignment_propagation", "(", "node", ")", ":", "n_reads", "=", "read_counts", "(", "node", ")", "to_remove", "=", "[", "]", "for", "succ", "in", "gast", ".", "walk", "(", "node", ")", ":", "# We found an assignment of the form a = b", "# - Left-hand sid...
Perform assignment propagation. Assignment propagation is not a compiler optimization as much as a readability optimization. If a variable name is used only once, it gets renamed when possible e.g. `y = x; z = y` will become `z = x`. Args: node: The AST to optimize. Returns: The optimized AST.
[ "Perform", "assignment", "propagation", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/optimization.py#L132-L172
224,355
google/tangent
tangent/tf_extensions.py
matmul_adjoint_x
def matmul_adjoint_x(dz, x, y, transpose_a, transpose_b): """Implementation of dtfmatmul wrt x, separate for readability.""" if not transpose_a and not transpose_b: return tf.matmul(dz, y, transpose_b=True) elif not transpose_a and transpose_b: return tf.matmul(dz, y) elif transpose_a and not transpose_b: return tf.matmul(y, dz, transpose_b=True) else: # transpose_a and transpose_b return tf.matmul(y, dz, transpose_a=True, transpose_b=True)
python
def matmul_adjoint_x(dz, x, y, transpose_a, transpose_b): if not transpose_a and not transpose_b: return tf.matmul(dz, y, transpose_b=True) elif not transpose_a and transpose_b: return tf.matmul(dz, y) elif transpose_a and not transpose_b: return tf.matmul(y, dz, transpose_b=True) else: # transpose_a and transpose_b return tf.matmul(y, dz, transpose_a=True, transpose_b=True)
[ "def", "matmul_adjoint_x", "(", "dz", ",", "x", ",", "y", ",", "transpose_a", ",", "transpose_b", ")", ":", "if", "not", "transpose_a", "and", "not", "transpose_b", ":", "return", "tf", ".", "matmul", "(", "dz", ",", "y", ",", "transpose_b", "=", "True...
Implementation of dtfmatmul wrt x, separate for readability.
[ "Implementation", "of", "dtfmatmul", "wrt", "x", "separate", "for", "readability", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/tf_extensions.py#L146-L155
224,356
google/tangent
tangent/tf_extensions.py
matmul_adjoint_y
def matmul_adjoint_y(dz, x, y, transpose_a, transpose_b): """Implementation of dtfmatmul, separate for readability.""" if not transpose_a and not transpose_b: return tf.matmul(x, dz, transpose_a=True) elif not transpose_a and transpose_b: return tf.matmul(dz, x, transpose_a=True) elif transpose_a and not transpose_b: return tf.matmul(x, dz) else: # transpose_a and transpose_b return tf.matmul(dz, x, transpose_a=True, transpose_b=True)
python
def matmul_adjoint_y(dz, x, y, transpose_a, transpose_b): if not transpose_a and not transpose_b: return tf.matmul(x, dz, transpose_a=True) elif not transpose_a and transpose_b: return tf.matmul(dz, x, transpose_a=True) elif transpose_a and not transpose_b: return tf.matmul(x, dz) else: # transpose_a and transpose_b return tf.matmul(dz, x, transpose_a=True, transpose_b=True)
[ "def", "matmul_adjoint_y", "(", "dz", ",", "x", ",", "y", ",", "transpose_a", ",", "transpose_b", ")", ":", "if", "not", "transpose_a", "and", "not", "transpose_b", ":", "return", "tf", ".", "matmul", "(", "x", ",", "dz", ",", "transpose_a", "=", "True...
Implementation of dtfmatmul, separate for readability.
[ "Implementation", "of", "dtfmatmul", "separate", "for", "readability", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/tf_extensions.py#L158-L167
224,357
google/tangent
tangent/naming.py
primal_name
def primal_name(func, wrt): """Name for the primal of a function.""" if not isinstance(func, types.FunctionType): raise TypeError(func) varnames = six.get_function_code(func).co_varnames return PRIMAL_NAME.format(func.__name__, ''.join(varnames[i] for i in wrt))
python
def primal_name(func, wrt): if not isinstance(func, types.FunctionType): raise TypeError(func) varnames = six.get_function_code(func).co_varnames return PRIMAL_NAME.format(func.__name__, ''.join(varnames[i] for i in wrt))
[ "def", "primal_name", "(", "func", ",", "wrt", ")", ":", "if", "not", "isinstance", "(", "func", ",", "types", ".", "FunctionType", ")", ":", "raise", "TypeError", "(", "func", ")", "varnames", "=", "six", ".", "get_function_code", "(", "func", ")", "....
Name for the primal of a function.
[ "Name", "for", "the", "primal", "of", "a", "function", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/naming.py#L31-L36
224,358
google/tangent
tangent/naming.py
Namer.build
def build(cls, node): """Construct a namer object for a given function scope.""" if not isinstance(node, gast.FunctionDef): raise ValueError namer = cls() namer.names.update(get_names(node)) return namer
python
def build(cls, node): if not isinstance(node, gast.FunctionDef): raise ValueError namer = cls() namer.names.update(get_names(node)) return namer
[ "def", "build", "(", "cls", ",", "node", ")", ":", "if", "not", "isinstance", "(", "node", ",", "gast", ".", "FunctionDef", ")", ":", "raise", "ValueError", "namer", "=", "cls", "(", ")", "namer", ".", "names", ".", "update", "(", "get_names", "(", ...
Construct a namer object for a given function scope.
[ "Construct", "a", "namer", "object", "for", "a", "given", "function", "scope", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/naming.py#L135-L141
224,359
google/tangent
tangent/naming.py
Namer.valid
def valid(self, name): """Ensure a variable name is valid. Note: Assumes variable names are ASCII, which isn't necessarily true in Python 3. Args: name: A proposed variable name. Returns: A valid version of the name. """ name = re.sub('[^0-9a-zA-Z_]', '', name) if re.match('[0-9]', name): name = '_' + name return name
python
def valid(self, name): name = re.sub('[^0-9a-zA-Z_]', '', name) if re.match('[0-9]', name): name = '_' + name return name
[ "def", "valid", "(", "self", ",", "name", ")", ":", "name", "=", "re", ".", "sub", "(", "'[^0-9a-zA-Z_]'", ",", "''", ",", "name", ")", "if", "re", ".", "match", "(", "'[0-9]'", ",", "name", ")", ":", "name", "=", "'_'", "+", "name", "return", ...
Ensure a variable name is valid. Note: Assumes variable names are ASCII, which isn't necessarily true in Python 3. Args: name: A proposed variable name. Returns: A valid version of the name.
[ "Ensure", "a", "variable", "name", "is", "valid", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/naming.py#L143-L158
224,360
google/tangent
tangent/naming.py
Namer.trim
def trim(self, name): """When the name is too long, use the LHS or a random string instead.""" if len(name) > self.MAX_LENGTH and self.target: name = self.TEMP_VAR.format(self._name(self.target)) if len(name) > self.MAX_LENGTH: while True: name = '_{:04x}'.format(random.randint(0, 16 ** 4 - 1)) if name not in self.names: break return name
python
def trim(self, name): if len(name) > self.MAX_LENGTH and self.target: name = self.TEMP_VAR.format(self._name(self.target)) if len(name) > self.MAX_LENGTH: while True: name = '_{:04x}'.format(random.randint(0, 16 ** 4 - 1)) if name not in self.names: break return name
[ "def", "trim", "(", "self", ",", "name", ")", ":", "if", "len", "(", "name", ")", ">", "self", ".", "MAX_LENGTH", "and", "self", ".", "target", ":", "name", "=", "self", ".", "TEMP_VAR", ".", "format", "(", "self", ".", "_name", "(", "self", ".",...
When the name is too long, use the LHS or a random string instead.
[ "When", "the", "name", "is", "too", "long", "use", "the", "LHS", "or", "a", "random", "string", "instead", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/naming.py#L160-L169
224,361
google/tangent
tangent/naming.py
Namer.unique
def unique(self, name): """Make a variable name unique by appending a number if needed.""" # Make sure the name is valid name = self.valid(name) # Make sure it's not too long name = self.trim(name) # Now make sure it's unique unique_name = name i = 2 while unique_name in self.names: unique_name = name + str(i) i += 1 self.names.add(unique_name) return unique_name
python
def unique(self, name): # Make sure the name is valid name = self.valid(name) # Make sure it's not too long name = self.trim(name) # Now make sure it's unique unique_name = name i = 2 while unique_name in self.names: unique_name = name + str(i) i += 1 self.names.add(unique_name) return unique_name
[ "def", "unique", "(", "self", ",", "name", ")", ":", "# Make sure the name is valid", "name", "=", "self", ".", "valid", "(", "name", ")", "# Make sure it's not too long", "name", "=", "self", ".", "trim", "(", "name", ")", "# Now make sure it's unique", "unique...
Make a variable name unique by appending a number if needed.
[ "Make", "a", "variable", "name", "unique", "by", "appending", "a", "number", "if", "needed", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/naming.py#L171-L184
224,362
google/tangent
tangent/utils.py
array_size
def array_size(x, axis): """Calculate the size of `x` along `axis` dimensions only.""" axis_shape = x.shape if axis is None else tuple(x.shape[a] for a in axis) return max(numpy.prod(axis_shape), 1)
python
def array_size(x, axis): axis_shape = x.shape if axis is None else tuple(x.shape[a] for a in axis) return max(numpy.prod(axis_shape), 1)
[ "def", "array_size", "(", "x", ",", "axis", ")", ":", "axis_shape", "=", "x", ".", "shape", "if", "axis", "is", "None", "else", "tuple", "(", "x", ".", "shape", "[", "a", "]", "for", "a", "in", "axis", ")", "return", "max", "(", "numpy", ".", "...
Calculate the size of `x` along `axis` dimensions only.
[ "Calculate", "the", "size", "of", "x", "along", "axis", "dimensions", "only", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/utils.py#L41-L44
224,363
google/tangent
tangent/utils.py
create_unbroadcast_axis
def create_unbroadcast_axis(shape, broadcast_shape): """Creates the reduction axis for unbroadcasting. Args: shape: A list. The shape after the broadcast operation. broadcast_shape: A list. The original shape the array being unbroadcast had. Returns: A list. The axes along which the array needs to be reduced. These axes will be distributed evenly into the original shape. """ return tuple( -(1 + i) for i in range(len(broadcast_shape)) if i >= len(shape) or broadcast_shape[-(1 + i)] > shape[-(1 + i)])
python
def create_unbroadcast_axis(shape, broadcast_shape): return tuple( -(1 + i) for i in range(len(broadcast_shape)) if i >= len(shape) or broadcast_shape[-(1 + i)] > shape[-(1 + i)])
[ "def", "create_unbroadcast_axis", "(", "shape", ",", "broadcast_shape", ")", ":", "return", "tuple", "(", "-", "(", "1", "+", "i", ")", "for", "i", "in", "range", "(", "len", "(", "broadcast_shape", ")", ")", "if", "i", ">=", "len", "(", "shape", ")"...
Creates the reduction axis for unbroadcasting. Args: shape: A list. The shape after the broadcast operation. broadcast_shape: A list. The original shape the array being unbroadcast had. Returns: A list. The axes along which the array needs to be reduced. These axes will be distributed evenly into the original shape.
[ "Creates", "the", "reduction", "axis", "for", "unbroadcasting", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/utils.py#L120-L134
224,364
google/tangent
tangent/utils.py
unreduce_array
def unreduce_array(array, shape, axis, keepdims): """Reverse summing over a dimension, NumPy implementation. Args: array: The array that was reduced. shape: The original shape of the array before reduction. axis: The axis or axes that were summed. keepdims: Whether these axes were kept as singleton axes. Returns: An array with axes broadcast to match the shape of the original array. """ # NumPy uses a special default value for keepdims, which is equivalent to # False. if axis is not None and (not keepdims or keepdims is numpy._NoValue): # pylint: disable=protected-access if isinstance(axis, int): axis = axis, for ax in sorted(axis): array = numpy.expand_dims(array, ax) return numpy.broadcast_to(array, shape)
python
def unreduce_array(array, shape, axis, keepdims): # NumPy uses a special default value for keepdims, which is equivalent to # False. if axis is not None and (not keepdims or keepdims is numpy._NoValue): # pylint: disable=protected-access if isinstance(axis, int): axis = axis, for ax in sorted(axis): array = numpy.expand_dims(array, ax) return numpy.broadcast_to(array, shape)
[ "def", "unreduce_array", "(", "array", ",", "shape", ",", "axis", ",", "keepdims", ")", ":", "# NumPy uses a special default value for keepdims, which is equivalent to", "# False.", "if", "axis", "is", "not", "None", "and", "(", "not", "keepdims", "or", "keepdims", ...
Reverse summing over a dimension, NumPy implementation. Args: array: The array that was reduced. shape: The original shape of the array before reduction. axis: The axis or axes that were summed. keepdims: Whether these axes were kept as singleton axes. Returns: An array with axes broadcast to match the shape of the original array.
[ "Reverse", "summing", "over", "a", "dimension", "NumPy", "implementation", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/utils.py#L185-L204
224,365
google/tangent
tangent/utils.py
astype
def astype(array, y): """A functional form of the `astype` method. Args: array: The array or number to cast. y: An array or number, as the input, whose type should be that of array. Returns: An array or number with the same dtype as `y`. """ if isinstance(y, autograd.core.Node): return array.astype(numpy.array(y.value).dtype) return array.astype(numpy.array(y).dtype)
python
def astype(array, y): if isinstance(y, autograd.core.Node): return array.astype(numpy.array(y.value).dtype) return array.astype(numpy.array(y).dtype)
[ "def", "astype", "(", "array", ",", "y", ")", ":", "if", "isinstance", "(", "y", ",", "autograd", ".", "core", ".", "Node", ")", ":", "return", "array", ".", "astype", "(", "numpy", ".", "array", "(", "y", ".", "value", ")", ".", "dtype", ")", ...
A functional form of the `astype` method. Args: array: The array or number to cast. y: An array or number, as the input, whose type should be that of array. Returns: An array or number with the same dtype as `y`.
[ "A", "functional", "form", "of", "the", "astype", "method", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/utils.py#L261-L273
224,366
google/tangent
tangent/utils.py
init_grad
def init_grad(obj, allow_lazy_initializer=False): """Initialize the gradient for an object. Args: obj: The object to initialize the gradient for, can be either a number, array, tuple, list, or dictionary. allow_lazy_initializer: Whether to allow using the ZeroGradient wrapper, for efficiency. Returns: An object of the same type, shape, etc. but with all numeric values set to zero. If the type is unknown, a zero is returned. """ if obj is None: # TODO: fixes.py appears to pass None value and expect 0.0 back. Bug? return 0.0 initializer, supports_lazy_initializer = grad_initializers[type(obj)] if supports_lazy_initializer: if isinstance(obj, ZeroGradient): if allow_lazy_initializer: return ZeroGradient(obj.like) else: # TODO: Not sure this should normally be hit. In forward-over-reverse? return obj.instantiate() else: if allow_lazy_initializer: return ZeroGradient(obj) else: assert not isinstance(obj, ZeroGradient) return initializer(obj)
python
def init_grad(obj, allow_lazy_initializer=False): if obj is None: # TODO: fixes.py appears to pass None value and expect 0.0 back. Bug? return 0.0 initializer, supports_lazy_initializer = grad_initializers[type(obj)] if supports_lazy_initializer: if isinstance(obj, ZeroGradient): if allow_lazy_initializer: return ZeroGradient(obj.like) else: # TODO: Not sure this should normally be hit. In forward-over-reverse? return obj.instantiate() else: if allow_lazy_initializer: return ZeroGradient(obj) else: assert not isinstance(obj, ZeroGradient) return initializer(obj)
[ "def", "init_grad", "(", "obj", ",", "allow_lazy_initializer", "=", "False", ")", ":", "if", "obj", "is", "None", ":", "# TODO: fixes.py appears to pass None value and expect 0.0 back. Bug?", "return", "0.0", "initializer", ",", "supports_lazy_initializer", "=", "grad_ini...
Initialize the gradient for an object. Args: obj: The object to initialize the gradient for, can be either a number, array, tuple, list, or dictionary. allow_lazy_initializer: Whether to allow using the ZeroGradient wrapper, for efficiency. Returns: An object of the same type, shape, etc. but with all numeric values set to zero. If the type is unknown, a zero is returned.
[ "Initialize", "the", "gradient", "for", "an", "object", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/utils.py#L384-L414
224,367
google/tangent
tangent/utils.py
register_add_grad
def register_add_grad(left_type, right_type, add_grad_function): """Register a new gradient adder supporting the given types. Gradient adders are used to add (in the sense of arithmetic addition) intermediate adjoint and tangent variables. TODO: Link to the document explaining the overall terminology and mechanics. Args: left_type: A Python type object. The data type of the left operand supported by the adder. right_type: A Python type object. The data type of the right operand supported by the adder. add_grad_function: A binary function that takes two arguments, left and right, of the types left_type and right_type respectively, and returns their sum. For example, the gradient adder for Numpy objects is np.add. Raises: ValueError: If the given type pair was already registered. """ key = (left_type, right_type) if key in grad_adders: raise ValueError('Types %s already mapped to %s' % (key, grad_adders[key])) grad_adders[key] = add_grad_function
python
def register_add_grad(left_type, right_type, add_grad_function): key = (left_type, right_type) if key in grad_adders: raise ValueError('Types %s already mapped to %s' % (key, grad_adders[key])) grad_adders[key] = add_grad_function
[ "def", "register_add_grad", "(", "left_type", ",", "right_type", ",", "add_grad_function", ")", ":", "key", "=", "(", "left_type", ",", "right_type", ")", "if", "key", "in", "grad_adders", ":", "raise", "ValueError", "(", "'Types %s already mapped to %s'", "%", ...
Register a new gradient adder supporting the given types. Gradient adders are used to add (in the sense of arithmetic addition) intermediate adjoint and tangent variables. TODO: Link to the document explaining the overall terminology and mechanics. Args: left_type: A Python type object. The data type of the left operand supported by the adder. right_type: A Python type object. The data type of the right operand supported by the adder. add_grad_function: A binary function that takes two arguments, left and right, of the types left_type and right_type respectively, and returns their sum. For example, the gradient adder for Numpy objects is np.add. Raises: ValueError: If the given type pair was already registered.
[ "Register", "a", "new", "gradient", "adder", "supporting", "the", "given", "types", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/utils.py#L456-L478
224,368
google/tangent
tangent/utils.py
add_grad
def add_grad(left, right): """Recursively add the gradient of two objects. Args: left: The left value to add. Can be either an array, a number, list or dictionary. right: The right value. Must be of the same type (recursively) as the left. Returns: The sum of the two gradients, which will of the same type. """ # We assume that initial gradients are always identity WRT add_grad. # We also assume that only init_grad could have created None values. assert left is not None and right is not None left_type = type(left) right_type = type(right) if left_type is ZeroGradient: return right if right_type is ZeroGradient: return left return grad_adders[(left_type, right_type)](left, right)
python
def add_grad(left, right): # We assume that initial gradients are always identity WRT add_grad. # We also assume that only init_grad could have created None values. assert left is not None and right is not None left_type = type(left) right_type = type(right) if left_type is ZeroGradient: return right if right_type is ZeroGradient: return left return grad_adders[(left_type, right_type)](left, right)
[ "def", "add_grad", "(", "left", ",", "right", ")", ":", "# We assume that initial gradients are always identity WRT add_grad.", "# We also assume that only init_grad could have created None values.", "assert", "left", "is", "not", "None", "and", "right", "is", "not", "None", ...
Recursively add the gradient of two objects. Args: left: The left value to add. Can be either an array, a number, list or dictionary. right: The right value. Must be of the same type (recursively) as the left. Returns: The sum of the two gradients, which will of the same type.
[ "Recursively", "add", "the", "gradient", "of", "two", "objects", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/utils.py#L521-L541
224,369
google/tangent
tangent/utils.py
register_shape_checker
def register_shape_checker(left_type, right_type, shape_checker_function): """Register a new shape checking function supporting given types. Shape checkers are primarily used to make sure that the seed derivatives passed into generated autodiff functions match their corresponding primal values. Args: left_type: A Python type object. The data type of the left operand supported by the adder. right_type: A Python type object. The data type of the right operand supported by the adder. shape_checker_function: A binary function that takes two arguments, left and right, of the types left_type and right_type respectively, and returns a boolean indicating whether or not they match. Raises: ValueError: If the given type pair was already registered. """ key = (left_type, right_type) if key in shape_checkers: raise ValueError('Types %s already mapped to %s' % (key, shape_checkers[key])) shape_checkers[key] = shape_checker_function
python
def register_shape_checker(left_type, right_type, shape_checker_function): key = (left_type, right_type) if key in shape_checkers: raise ValueError('Types %s already mapped to %s' % (key, shape_checkers[key])) shape_checkers[key] = shape_checker_function
[ "def", "register_shape_checker", "(", "left_type", ",", "right_type", ",", "shape_checker_function", ")", ":", "key", "=", "(", "left_type", ",", "right_type", ")", "if", "key", "in", "shape_checkers", ":", "raise", "ValueError", "(", "'Types %s already mapped to %s...
Register a new shape checking function supporting given types. Shape checkers are primarily used to make sure that the seed derivatives passed into generated autodiff functions match their corresponding primal values. Args: left_type: A Python type object. The data type of the left operand supported by the adder. right_type: A Python type object. The data type of the right operand supported by the adder. shape_checker_function: A binary function that takes two arguments, left and right, of the types left_type and right_type respectively, and returns a boolean indicating whether or not they match. Raises: ValueError: If the given type pair was already registered.
[ "Register", "a", "new", "shape", "checking", "function", "supporting", "given", "types", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/utils.py#L551-L574
224,370
google/tangent
tangent/utils.py
shapes_match
def shapes_match(a, b): """Recursively check if shapes of object `a` and `b` match. Will walk lists, tuples and dicts. Args: a: object of type (numpy.ndarray,tf.Tensor,list,tuple,dict) to check for matching shapes against `b`. b: object to check for matching shape against `a`. Returns: A boolean indicating whether the shapes of `a` and `b` match. """ if isinstance(a, (tuple, list)) and isinstance(b, (tuple, list)): if len(a) != len(b): return False return all([shapes_match(ia, ib) for ia, ib in zip(a, b)]) elif isinstance(a, dict) and isinstance(b, dict): if len(a) != len(b): return False match = True for (ak, av), (bk, bv) in zip(a.items(), b.items()): match = match and all([ak == bk and shapes_match(av, bv)]) return match else: shape_checker = shape_checkers[(type(a), type(b))] return shape_checker(a, b)
python
def shapes_match(a, b): if isinstance(a, (tuple, list)) and isinstance(b, (tuple, list)): if len(a) != len(b): return False return all([shapes_match(ia, ib) for ia, ib in zip(a, b)]) elif isinstance(a, dict) and isinstance(b, dict): if len(a) != len(b): return False match = True for (ak, av), (bk, bv) in zip(a.items(), b.items()): match = match and all([ak == bk and shapes_match(av, bv)]) return match else: shape_checker = shape_checkers[(type(a), type(b))] return shape_checker(a, b)
[ "def", "shapes_match", "(", "a", ",", "b", ")", ":", "if", "isinstance", "(", "a", ",", "(", "tuple", ",", "list", ")", ")", "and", "isinstance", "(", "b", ",", "(", "tuple", ",", "list", ")", ")", ":", "if", "len", "(", "a", ")", "!=", "len"...
Recursively check if shapes of object `a` and `b` match. Will walk lists, tuples and dicts. Args: a: object of type (numpy.ndarray,tf.Tensor,list,tuple,dict) to check for matching shapes against `b`. b: object to check for matching shape against `a`. Returns: A boolean indicating whether the shapes of `a` and `b` match.
[ "Recursively", "check", "if", "shapes", "of", "object", "a", "and", "b", "match", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/utils.py#L604-L630
224,371
google/tangent
tangent/utils.py
pop_stack
def pop_stack(stack, op_id): """Proxy of pop, where we know we're popping a stack off of a stack. We know that we don't need to differentiate through this. See pop() for more. Args: stack: The stack to pop from. op_id: A unique variable that is also passed into the matching push. Allows optimization passes to track pairs of pushes and pops. Returns: The last value. """ if __debug__: pushed_stack, pushed_op_id = stack.pop() assert pushed_op_id == op_id, 'Wanted %s, got %s' % (op_id, pushed_op_id) else: pushed_stack = stack.pop() return pushed_stack
python
def pop_stack(stack, op_id): if __debug__: pushed_stack, pushed_op_id = stack.pop() assert pushed_op_id == op_id, 'Wanted %s, got %s' % (op_id, pushed_op_id) else: pushed_stack = stack.pop() return pushed_stack
[ "def", "pop_stack", "(", "stack", ",", "op_id", ")", ":", "if", "__debug__", ":", "pushed_stack", ",", "pushed_op_id", "=", "stack", ".", "pop", "(", ")", "assert", "pushed_op_id", "==", "op_id", ",", "'Wanted %s, got %s'", "%", "(", "op_id", ",", "pushed_...
Proxy of pop, where we know we're popping a stack off of a stack. We know that we don't need to differentiate through this. See pop() for more. Args: stack: The stack to pop from. op_id: A unique variable that is also passed into the matching push. Allows optimization passes to track pairs of pushes and pops. Returns: The last value.
[ "Proxy", "of", "pop", "where", "we", "know", "we", "re", "popping", "a", "stack", "off", "of", "a", "stack", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/utils.py#L678-L697
224,372
google/tangent
tangent/utils.py
push_stack
def push_stack(stack, substack, op_id): """Proxy of push, where we know we're pushing a stack onto a stack. Used when differentiating call trees,where sub-functions get their own stack. See push() for more. Args: stack: The stack object, which must support appending values. substack: The stack to append. op_id: A unique variable that is also passed into the corresponding pop. Allows optimization passes to track pairs of pushes and pops. Raises: ValueError: If a non-stack value for `substack` is passed. """ if substack is not None and not isinstance(substack, Stack): raise ValueError( 'Substack should be type tangent.Stack or None, instead found %s' % type(substack)) if __debug__: stack.append((substack, op_id)) else: stack.append(substack)
python
def push_stack(stack, substack, op_id): if substack is not None and not isinstance(substack, Stack): raise ValueError( 'Substack should be type tangent.Stack or None, instead found %s' % type(substack)) if __debug__: stack.append((substack, op_id)) else: stack.append(substack)
[ "def", "push_stack", "(", "stack", ",", "substack", ",", "op_id", ")", ":", "if", "substack", "is", "not", "None", "and", "not", "isinstance", "(", "substack", ",", "Stack", ")", ":", "raise", "ValueError", "(", "'Substack should be type tangent.Stack or None, i...
Proxy of push, where we know we're pushing a stack onto a stack. Used when differentiating call trees,where sub-functions get their own stack. See push() for more. Args: stack: The stack object, which must support appending values. substack: The stack to append. op_id: A unique variable that is also passed into the corresponding pop. Allows optimization passes to track pairs of pushes and pops. Raises: ValueError: If a non-stack value for `substack` is passed.
[ "Proxy", "of", "push", "where", "we", "know", "we", "re", "pushing", "a", "stack", "onto", "a", "stack", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/utils.py#L700-L722
224,373
google/tangent
tangent/utils.py
grad_dot
def grad_dot(dy, x1, x2): """Gradient of NumPy dot product w.r.t. to the left hand side. Args: dy: The gradient with respect to the output. x1: The left hand side of the `numpy.dot` function. x2: The right hand side Returns: The gradient with respect to `x1` i.e. `x2.dot(dy.T)` with all the broadcasting involved. """ if len(numpy.shape(x1)) == 1: dy = numpy.atleast_2d(dy) elif len(numpy.shape(x2)) == 1: dy = numpy.transpose(numpy.atleast_2d(dy)) x2 = numpy.transpose(numpy.atleast_2d(x2)) x2_t = numpy.transpose(numpy.atleast_2d( numpy.sum(x2, axis=tuple(numpy.arange(numpy.ndim(x2) - 2))))) dy_x2 = numpy.sum(dy, axis=tuple(-numpy.arange(numpy.ndim(x2) - 2) - 2)) return numpy.reshape(numpy.dot(dy_x2, x2_t), numpy.shape(x1))
python
def grad_dot(dy, x1, x2): if len(numpy.shape(x1)) == 1: dy = numpy.atleast_2d(dy) elif len(numpy.shape(x2)) == 1: dy = numpy.transpose(numpy.atleast_2d(dy)) x2 = numpy.transpose(numpy.atleast_2d(x2)) x2_t = numpy.transpose(numpy.atleast_2d( numpy.sum(x2, axis=tuple(numpy.arange(numpy.ndim(x2) - 2))))) dy_x2 = numpy.sum(dy, axis=tuple(-numpy.arange(numpy.ndim(x2) - 2) - 2)) return numpy.reshape(numpy.dot(dy_x2, x2_t), numpy.shape(x1))
[ "def", "grad_dot", "(", "dy", ",", "x1", ",", "x2", ")", ":", "if", "len", "(", "numpy", ".", "shape", "(", "x1", ")", ")", "==", "1", ":", "dy", "=", "numpy", ".", "atleast_2d", "(", "dy", ")", "elif", "len", "(", "numpy", ".", "shape", "(",...
Gradient of NumPy dot product w.r.t. to the left hand side. Args: dy: The gradient with respect to the output. x1: The left hand side of the `numpy.dot` function. x2: The right hand side Returns: The gradient with respect to `x1` i.e. `x2.dot(dy.T)` with all the broadcasting involved.
[ "Gradient", "of", "NumPy", "dot", "product", "w", ".", "r", ".", "t", ".", "to", "the", "left", "hand", "side", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/utils.py#L755-L775
224,374
google/tangent
tangent/tracing.py
trace_grad
def trace_grad(fn, args): """Trace a function, and return a VJP and the function's output.""" from tensorflow.python.eager.backprop import make_vjp result, vjp = make_vjp(fn)(*args) return result, vjp
python
def trace_grad(fn, args): from tensorflow.python.eager.backprop import make_vjp result, vjp = make_vjp(fn)(*args) return result, vjp
[ "def", "trace_grad", "(", "fn", ",", "args", ")", ":", "from", "tensorflow", ".", "python", ".", "eager", ".", "backprop", "import", "make_vjp", "result", ",", "vjp", "=", "make_vjp", "(", "fn", ")", "(", "*", "args", ")", "return", "result", ",", "v...
Trace a function, and return a VJP and the function's output.
[ "Trace", "a", "function", "and", "return", "a", "VJP", "and", "the", "function", "s", "output", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/tracing.py#L22-L26
224,375
google/tangent
tangent/grads.py
get_module_functions
def get_module_functions(modules): """Finds functions that do not have implemented derivatives. Args: modules: A list of Python modules. Functions contained in these modules will be checked for membership in 'implemented', and if not found, will be added to an 'unimplemented' set implemented: A Python object containing implemented derivatives. A function should be checkable for membership using the `fn in implemented` syntax. Returns: module_fns: A set of functions, builtins or ufuncs in `modules`. """ module_fns = set() for module in modules: for key in dir(module): attr = getattr(module, key) if isinstance( attr, (types.BuiltinFunctionType, types.FunctionType, numpy.ufunc)): module_fns.add(attr) return module_fns
python
def get_module_functions(modules): module_fns = set() for module in modules: for key in dir(module): attr = getattr(module, key) if isinstance( attr, (types.BuiltinFunctionType, types.FunctionType, numpy.ufunc)): module_fns.add(attr) return module_fns
[ "def", "get_module_functions", "(", "modules", ")", ":", "module_fns", "=", "set", "(", ")", "for", "module", "in", "modules", ":", "for", "key", "in", "dir", "(", "module", ")", ":", "attr", "=", "getattr", "(", "module", ",", "key", ")", "if", "isi...
Finds functions that do not have implemented derivatives. Args: modules: A list of Python modules. Functions contained in these modules will be checked for membership in 'implemented', and if not found, will be added to an 'unimplemented' set implemented: A Python object containing implemented derivatives. A function should be checkable for membership using the `fn in implemented` syntax. Returns: module_fns: A set of functions, builtins or ufuncs in `modules`.
[ "Finds", "functions", "that", "do", "not", "have", "implemented", "derivatives", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/grads.py#L56-L76
224,376
google/tangent
tangent/fence.py
validate
def validate(node, source): """Call this function to validate an AST.""" # TODO: leaving strict checking off to support insert_grad_of lf = LanguageFence(source, strict=False) lf.visit(node) return node
python
def validate(node, source): # TODO: leaving strict checking off to support insert_grad_of lf = LanguageFence(source, strict=False) lf.visit(node) return node
[ "def", "validate", "(", "node", ",", "source", ")", ":", "# TODO: leaving strict checking off to support insert_grad_of", "lf", "=", "LanguageFence", "(", "source", ",", "strict", "=", "False", ")", "lf", ".", "visit", "(", "node", ")", "return", "node" ]
Call this function to validate an AST.
[ "Call", "this", "function", "to", "validate", "an", "AST", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/fence.py#L30-L35
224,377
google/tangent
tangent/ast.py
get_name
def get_name(node): """Get the name of a variable. Args: node: A `Name`, `Subscript` or `Attribute` node. Returns: The name of the variable e.g. `'x'` for `x`, `x.i` and `x[i]`. """ if isinstance(node, gast.Name): return node.id elif isinstance(node, (gast.Subscript, gast.Attribute)): return get_name(node.value) else: raise TypeError
python
def get_name(node): if isinstance(node, gast.Name): return node.id elif isinstance(node, (gast.Subscript, gast.Attribute)): return get_name(node.value) else: raise TypeError
[ "def", "get_name", "(", "node", ")", ":", "if", "isinstance", "(", "node", ",", "gast", ".", "Name", ")", ":", "return", "node", ".", "id", "elif", "isinstance", "(", "node", ",", "(", "gast", ".", "Subscript", ",", "gast", ".", "Attribute", ")", "...
Get the name of a variable. Args: node: A `Name`, `Subscript` or `Attribute` node. Returns: The name of the variable e.g. `'x'` for `x`, `x.i` and `x[i]`.
[ "Get", "the", "name", "of", "a", "variable", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/ast.py#L25-L39
224,378
google/tangent
tangent/ast.py
get_updated
def get_updated(node): """Return the variable names created or mutated by this statement. This function considers assign statements, augmented assign statements, and the targets of for loops, as well as function arguments. For example, `x[0] = 2` will return `x`, `x, y = 3, 4` will return `x` and `y`, `for i in range(x)` will return `i`, etc. Args: node: An AST node Returns: A set of variable names (strings) of all the variables created or mutated. """ if isinstance(node, gast.Assign): return set.union(*(_get_target(target) for target in node.targets)) elif isinstance(node, (gast.For, gast.AugAssign)): return _get_target(node.target) elif isinstance(node, gast.arguments): targets = set(arg.id for arg in node.args + node.kwonlyargs) if node.vararg: targets.add(node.vararg.id) if node.kwarg: targets.add(node.kwarg.id) return targets else: return set()
python
def get_updated(node): if isinstance(node, gast.Assign): return set.union(*(_get_target(target) for target in node.targets)) elif isinstance(node, (gast.For, gast.AugAssign)): return _get_target(node.target) elif isinstance(node, gast.arguments): targets = set(arg.id for arg in node.args + node.kwonlyargs) if node.vararg: targets.add(node.vararg.id) if node.kwarg: targets.add(node.kwarg.id) return targets else: return set()
[ "def", "get_updated", "(", "node", ")", ":", "if", "isinstance", "(", "node", ",", "gast", ".", "Assign", ")", ":", "return", "set", ".", "union", "(", "*", "(", "_get_target", "(", "target", ")", "for", "target", "in", "node", ".", "targets", ")", ...
Return the variable names created or mutated by this statement. This function considers assign statements, augmented assign statements, and the targets of for loops, as well as function arguments. For example, `x[0] = 2` will return `x`, `x, y = 3, 4` will return `x` and `y`, `for i in range(x)` will return `i`, etc. Args: node: An AST node Returns: A set of variable names (strings) of all the variables created or mutated.
[ "Return", "the", "variable", "names", "created", "or", "mutated", "by", "this", "statement", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/ast.py#L52-L80
224,379
google/tangent
tangent/ast.py
copy_node
def copy_node(node): """Copy a node but keep its annotations intact.""" if not isinstance(node, gast.AST): return [copy_node(n) for n in node] new_node = copy.deepcopy(node) setattr(new_node, anno.ANNOTATION_FIELD, getattr(node, anno.ANNOTATION_FIELD, {}).copy()) return new_node
python
def copy_node(node): if not isinstance(node, gast.AST): return [copy_node(n) for n in node] new_node = copy.deepcopy(node) setattr(new_node, anno.ANNOTATION_FIELD, getattr(node, anno.ANNOTATION_FIELD, {}).copy()) return new_node
[ "def", "copy_node", "(", "node", ")", ":", "if", "not", "isinstance", "(", "node", ",", "gast", ".", "AST", ")", ":", "return", "[", "copy_node", "(", "n", ")", "for", "n", "in", "node", "]", "new_node", "=", "copy", ".", "deepcopy", "(", "node", ...
Copy a node but keep its annotations intact.
[ "Copy", "a", "node", "but", "keep", "its", "annotations", "intact", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/ast.py#L83-L90
224,380
google/tangent
tangent/ast.py
is_insert_grad_of_statement
def is_insert_grad_of_statement(node): """Check whether a context manager calls `insert_grad_of`. Args: node: The context manager node. Returns: Whether or not this node contains `insert_grad_of` calls. Raises: ValueError: If the `insert_grad_of` calls are mixed with other calls. """ tangent_calls = [anno.getanno(item.context_expr, 'func', None) is utils.insert_grad_of for item in node.items] if all(tangent_calls): return True elif any(tangent_calls): raise ValueError else: return False
python
def is_insert_grad_of_statement(node): tangent_calls = [anno.getanno(item.context_expr, 'func', None) is utils.insert_grad_of for item in node.items] if all(tangent_calls): return True elif any(tangent_calls): raise ValueError else: return False
[ "def", "is_insert_grad_of_statement", "(", "node", ")", ":", "tangent_calls", "=", "[", "anno", ".", "getanno", "(", "item", ".", "context_expr", ",", "'func'", ",", "None", ")", "is", "utils", ".", "insert_grad_of", "for", "item", "in", "node", ".", "item...
Check whether a context manager calls `insert_grad_of`. Args: node: The context manager node. Returns: Whether or not this node contains `insert_grad_of` calls. Raises: ValueError: If the `insert_grad_of` calls are mixed with other calls.
[ "Check", "whether", "a", "context", "manager", "calls", "insert_grad_of", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/ast.py#L115-L134
224,381
google/tangent
tangent/comments.py
add_comment
def add_comment(node, text, location='above'): """Add a comment to the given node. If the `SourceWithCommentGenerator` class is used these comments will be output as part of the source code. Note that a node can only contain one comment. Subsequent calls to `add_comment` will ovverride the existing comments. Args: node: The AST node whose containing statement will be commented. text: A comment string. location: Where the comment should appear. Valid values are 'above', 'below' and 'right' Returns: The node with the comment stored as an annotation. """ anno.setanno(node, 'comment', dict(location=location, text=text), safe=False) return node
python
def add_comment(node, text, location='above'): anno.setanno(node, 'comment', dict(location=location, text=text), safe=False) return node
[ "def", "add_comment", "(", "node", ",", "text", ",", "location", "=", "'above'", ")", ":", "anno", ".", "setanno", "(", "node", ",", "'comment'", ",", "dict", "(", "location", "=", "location", ",", "text", "=", "text", ")", ",", "safe", "=", "False",...
Add a comment to the given node. If the `SourceWithCommentGenerator` class is used these comments will be output as part of the source code. Note that a node can only contain one comment. Subsequent calls to `add_comment` will ovverride the existing comments. Args: node: The AST node whose containing statement will be commented. text: A comment string. location: Where the comment should appear. Valid values are 'above', 'below' and 'right' Returns: The node with the comment stored as an annotation.
[ "Add", "a", "comment", "to", "the", "given", "node", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/comments.py#L27-L46
224,382
google/tangent
tangent/comments.py
remove_repeated_comments
def remove_repeated_comments(node): """Remove comments that repeat themselves. Multiple statements might be annotated with the same comment. This way if one of the statements is deleted during optimization passes, the comment won't be lost. This pass removes sequences of identical comments, leaving only the first one. Args: node: An AST Returns: An AST where comments are not repeated in sequence. """ last_comment = {'text': None} for _node in gast.walk(node): if anno.hasanno(_node, 'comment'): comment = anno.getanno(_node, 'comment') if comment['text'] == last_comment['text']: anno.delanno(_node, 'comment') last_comment = comment return node
python
def remove_repeated_comments(node): last_comment = {'text': None} for _node in gast.walk(node): if anno.hasanno(_node, 'comment'): comment = anno.getanno(_node, 'comment') if comment['text'] == last_comment['text']: anno.delanno(_node, 'comment') last_comment = comment return node
[ "def", "remove_repeated_comments", "(", "node", ")", ":", "last_comment", "=", "{", "'text'", ":", "None", "}", "for", "_node", "in", "gast", ".", "walk", "(", "node", ")", ":", "if", "anno", ".", "hasanno", "(", "_node", ",", "'comment'", ")", ":", ...
Remove comments that repeat themselves. Multiple statements might be annotated with the same comment. This way if one of the statements is deleted during optimization passes, the comment won't be lost. This pass removes sequences of identical comments, leaving only the first one. Args: node: An AST Returns: An AST where comments are not repeated in sequence.
[ "Remove", "comments", "that", "repeat", "themselves", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/comments.py#L49-L71
224,383
google/tangent
tangent/create.py
create_grad
def create_grad(node, namer, tangent=False): """Given a variable, create a variable for the gradient. Args: node: A node to create a gradient for, can be a normal variable (`x`) or a subscript (`x[i]`). namer: The namer object which will determine the name to use for the gradient. tangent: Whether a tangent (instead of adjoint) is created. Returns: node: A node representing the gradient with the correct name e.g. the gradient of `x[i]` is `dx[i]`. Note that this returns an invalid node, with the `ctx` attribute missing. It is assumed that this attribute is filled in later. Node has an `adjoint_var` annotation referring to the node it is an adjoint of. """ if not isinstance(node, (gast.Subscript, gast.Name, gast.Str)): raise TypeError if anno.hasanno(node, 'temp_var'): return create_grad(anno.getanno(node, 'temp_var'), namer, tangent) def _name_grad(node): if not isinstance(node, gast.Name): raise TypeError varname = node.id name = namer.grad(varname, tangent) grad_node = gast.Name( id=name, ctx=None, annotation=None) anno.setanno(grad_node, 'adjoint_var', node) return grad_node if isinstance(node, gast.Subscript): grad_node = create_grad(node.value, namer, tangent=tangent) grad_node.ctx = gast.Load() return gast.Subscript(value=grad_node, slice=node.slice, ctx=None) elif isinstance(node, gast.Str): grad_node = create_grad( gast.Name(id=node.s, ctx=None, annotation=None), namer, tangent=tangent) return gast.Str(grad_node.id) else: return _name_grad(node)
python
def create_grad(node, namer, tangent=False): if not isinstance(node, (gast.Subscript, gast.Name, gast.Str)): raise TypeError if anno.hasanno(node, 'temp_var'): return create_grad(anno.getanno(node, 'temp_var'), namer, tangent) def _name_grad(node): if not isinstance(node, gast.Name): raise TypeError varname = node.id name = namer.grad(varname, tangent) grad_node = gast.Name( id=name, ctx=None, annotation=None) anno.setanno(grad_node, 'adjoint_var', node) return grad_node if isinstance(node, gast.Subscript): grad_node = create_grad(node.value, namer, tangent=tangent) grad_node.ctx = gast.Load() return gast.Subscript(value=grad_node, slice=node.slice, ctx=None) elif isinstance(node, gast.Str): grad_node = create_grad( gast.Name(id=node.s, ctx=None, annotation=None), namer, tangent=tangent) return gast.Str(grad_node.id) else: return _name_grad(node)
[ "def", "create_grad", "(", "node", ",", "namer", ",", "tangent", "=", "False", ")", ":", "if", "not", "isinstance", "(", "node", ",", "(", "gast", ".", "Subscript", ",", "gast", ".", "Name", ",", "gast", ".", "Str", ")", ")", ":", "raise", "TypeErr...
Given a variable, create a variable for the gradient. Args: node: A node to create a gradient for, can be a normal variable (`x`) or a subscript (`x[i]`). namer: The namer object which will determine the name to use for the gradient. tangent: Whether a tangent (instead of adjoint) is created. Returns: node: A node representing the gradient with the correct name e.g. the gradient of `x[i]` is `dx[i]`. Note that this returns an invalid node, with the `ctx` attribute missing. It is assumed that this attribute is filled in later. Node has an `adjoint_var` annotation referring to the node it is an adjoint of.
[ "Given", "a", "variable", "create", "a", "variable", "for", "the", "gradient", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/create.py#L23-L67
224,384
google/tangent
tangent/create.py
create_temp_grad
def create_temp_grad(node, namer, tangent=False): """Create a variable to store partial gradients. Args: node: See `create_grad`. namer: See `create_grad`. tangent: See `create_grad`. Returns: node: See `create_grad`. Returns a node representing the partial gradient. Note that this is always a simple variable e.g. the temporary partial of `x[i]` can be something like `_dxi`. Nodes are given an annotation `temp_adjoint_var`. """ if not isinstance(node, (gast.Subscript, gast.Name)): raise TypeError def _name_temp_grad(node): name = namer.temp_grad(node.id, tangent) temp_node = gast.Name(id=name, annotation=None, ctx=None) return temp_node if isinstance(node, gast.Subscript): temp_node = _name_temp_grad(node.value) else: temp_node = _name_temp_grad(node) anno.setanno(temp_node, 'temp_adjoint_var', node) return temp_node
python
def create_temp_grad(node, namer, tangent=False): if not isinstance(node, (gast.Subscript, gast.Name)): raise TypeError def _name_temp_grad(node): name = namer.temp_grad(node.id, tangent) temp_node = gast.Name(id=name, annotation=None, ctx=None) return temp_node if isinstance(node, gast.Subscript): temp_node = _name_temp_grad(node.value) else: temp_node = _name_temp_grad(node) anno.setanno(temp_node, 'temp_adjoint_var', node) return temp_node
[ "def", "create_temp_grad", "(", "node", ",", "namer", ",", "tangent", "=", "False", ")", ":", "if", "not", "isinstance", "(", "node", ",", "(", "gast", ".", "Subscript", ",", "gast", ".", "Name", ")", ")", ":", "raise", "TypeError", "def", "_name_temp_...
Create a variable to store partial gradients. Args: node: See `create_grad`. namer: See `create_grad`. tangent: See `create_grad`. Returns: node: See `create_grad`. Returns a node representing the partial gradient. Note that this is always a simple variable e.g. the temporary partial of `x[i]` can be something like `_dxi`. Nodes are given an annotation `temp_adjoint_var`.
[ "Create", "a", "variable", "to", "store", "partial", "gradients", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/create.py#L70-L97
224,385
google/tangent
tangent/create.py
create_temp
def create_temp(node, namer): """Create a temporary variable. Args: node: Create a temporary variable to store this variable in. namer: A naming object that guarantees the names are unique. Returns: node: See `create_grad`. Returns a temporary variable, which is always a simple variable annotated with `temp_var`. """ if isinstance(node, gast.Name): name = node.id elif isinstance(node, (gast.Attribute, gast.Subscript)): name = node.value.id else: raise TypeError temp_node = gast.Name(id=namer.temp(name), annotation=None, ctx=None) anno.setanno(temp_node, 'temp_var', node) return temp_node
python
def create_temp(node, namer): if isinstance(node, gast.Name): name = node.id elif isinstance(node, (gast.Attribute, gast.Subscript)): name = node.value.id else: raise TypeError temp_node = gast.Name(id=namer.temp(name), annotation=None, ctx=None) anno.setanno(temp_node, 'temp_var', node) return temp_node
[ "def", "create_temp", "(", "node", ",", "namer", ")", ":", "if", "isinstance", "(", "node", ",", "gast", ".", "Name", ")", ":", "name", "=", "node", ".", "id", "elif", "isinstance", "(", "node", ",", "(", "gast", ".", "Attribute", ",", "gast", ".",...
Create a temporary variable. Args: node: Create a temporary variable to store this variable in. namer: A naming object that guarantees the names are unique. Returns: node: See `create_grad`. Returns a temporary variable, which is always a simple variable annotated with `temp_var`.
[ "Create", "a", "temporary", "variable", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/create.py#L100-L119
224,386
google/tangent
tangent/forward_ad.py
forward_ad
def forward_ad(node, wrt, preserve_result=False, check_dims=True): """Perform forward-mode AD on an AST. This function analyses the AST to determine which variables are active and proceeds by taking the naive derivative. Before returning the primal and adjoint it annotates push and pop statements as such. Args: node: A `FunctionDef` AST node. wrt: A tuple of argument indices with respect to which we take the derivative. preserve_result: A boolean indicating whether the original non-differentiated function value should be returned check_dims: A boolean indicating whether the provided derivatives should have the same shape as their corresponding arguments. Returns: mod: A `Module` node containing the naive primal and adjoint of the function which can be fed to the `split` and `joint` functions. required: A list of tuples of functions and argument indices. These functions were called by the function but did not have an adjoint. """ if not isinstance(node, gast.FunctionDef): raise TypeError # Activity analysis cfg_obj = cfg.CFG.build_cfg(node) cfg.Active(range(len(node.args.args))).visit(cfg_obj.entry) # Build forward mode function fad = ForwardAD(wrt, preserve_result, check_dims) node = fad.visit(node) # Annotate stacks node = annotate.find_stacks(node) # Clean up naive forward-mode fcode node = gast.Module([node]) anno.clearanno(node) return node, fad.required
python
def forward_ad(node, wrt, preserve_result=False, check_dims=True): if not isinstance(node, gast.FunctionDef): raise TypeError # Activity analysis cfg_obj = cfg.CFG.build_cfg(node) cfg.Active(range(len(node.args.args))).visit(cfg_obj.entry) # Build forward mode function fad = ForwardAD(wrt, preserve_result, check_dims) node = fad.visit(node) # Annotate stacks node = annotate.find_stacks(node) # Clean up naive forward-mode fcode node = gast.Module([node]) anno.clearanno(node) return node, fad.required
[ "def", "forward_ad", "(", "node", ",", "wrt", ",", "preserve_result", "=", "False", ",", "check_dims", "=", "True", ")", ":", "if", "not", "isinstance", "(", "node", ",", "gast", ".", "FunctionDef", ")", ":", "raise", "TypeError", "# Activity analysis", "c...
Perform forward-mode AD on an AST. This function analyses the AST to determine which variables are active and proceeds by taking the naive derivative. Before returning the primal and adjoint it annotates push and pop statements as such. Args: node: A `FunctionDef` AST node. wrt: A tuple of argument indices with respect to which we take the derivative. preserve_result: A boolean indicating whether the original non-differentiated function value should be returned check_dims: A boolean indicating whether the provided derivatives should have the same shape as their corresponding arguments. Returns: mod: A `Module` node containing the naive primal and adjoint of the function which can be fed to the `split` and `joint` functions. required: A list of tuples of functions and argument indices. These functions were called by the function but did not have an adjoint.
[ "Perform", "forward", "-", "mode", "AD", "on", "an", "AST", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/forward_ad.py#L556-L596
224,387
google/tangent
tangent/quoting.py
to_source
def to_source(node, indentation=' ' * 4): """Return source code of a given AST.""" if isinstance(node, gast.AST): node = gast.gast_to_ast(node) generator = SourceWithCommentGenerator(indentation, False, astor.string_repr.pretty_string) generator.visit(node) generator.result.append('\n') return astor.source_repr.pretty_source(generator.result).lstrip()
python
def to_source(node, indentation=' ' * 4): if isinstance(node, gast.AST): node = gast.gast_to_ast(node) generator = SourceWithCommentGenerator(indentation, False, astor.string_repr.pretty_string) generator.visit(node) generator.result.append('\n') return astor.source_repr.pretty_source(generator.result).lstrip()
[ "def", "to_source", "(", "node", ",", "indentation", "=", "' '", "*", "4", ")", ":", "if", "isinstance", "(", "node", ",", "gast", ".", "AST", ")", ":", "node", "=", "gast", ".", "gast_to_ast", "(", "node", ")", "generator", "=", "SourceWithCommentGene...
Return source code of a given AST.
[ "Return", "source", "code", "of", "a", "given", "AST", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/quoting.py#L70-L78
224,388
google/tangent
tangent/quoting.py
parse_function
def parse_function(fn): """Get the source of a function and return its AST.""" try: return parse_string(inspect.getsource(fn)) except (IOError, OSError) as e: raise ValueError( 'Cannot differentiate function: %s. Tangent must be able to access the ' 'source code of the function. Functions defined in a Python ' 'interpreter and functions backed by C extension modules do not ' 'have accessible source code.' % e)
python
def parse_function(fn): try: return parse_string(inspect.getsource(fn)) except (IOError, OSError) as e: raise ValueError( 'Cannot differentiate function: %s. Tangent must be able to access the ' 'source code of the function. Functions defined in a Python ' 'interpreter and functions backed by C extension modules do not ' 'have accessible source code.' % e)
[ "def", "parse_function", "(", "fn", ")", ":", "try", ":", "return", "parse_string", "(", "inspect", ".", "getsource", "(", "fn", ")", ")", "except", "(", "IOError", ",", "OSError", ")", "as", "e", ":", "raise", "ValueError", "(", "'Cannot differentiate fun...
Get the source of a function and return its AST.
[ "Get", "the", "source", "of", "a", "function", "and", "return", "its", "AST", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/quoting.py#L81-L90
224,389
google/tangent
tangent/quoting.py
quote
def quote(src_string, return_expr=False): """Go from source code to AST nodes. This function returns a tree without enclosing `Module` or `Expr` nodes. Args: src_string: The source code to parse. return_expr: Whether or not to return a containing expression. This can be set to `True` if the result is to be part of a series of statements. Returns: An AST of the given source code. """ node = parse_string(src_string) body = node.body if len(body) == 1: if isinstance(body[0], gast.Expr) and not return_expr: out = body[0].value else: out = body[0] else: out = node return out
python
def quote(src_string, return_expr=False): node = parse_string(src_string) body = node.body if len(body) == 1: if isinstance(body[0], gast.Expr) and not return_expr: out = body[0].value else: out = body[0] else: out = node return out
[ "def", "quote", "(", "src_string", ",", "return_expr", "=", "False", ")", ":", "node", "=", "parse_string", "(", "src_string", ")", "body", "=", "node", ".", "body", "if", "len", "(", "body", ")", "==", "1", ":", "if", "isinstance", "(", "body", "[",...
Go from source code to AST nodes. This function returns a tree without enclosing `Module` or `Expr` nodes. Args: src_string: The source code to parse. return_expr: Whether or not to return a containing expression. This can be set to `True` if the result is to be part of a series of statements. Returns: An AST of the given source code.
[ "Go", "from", "source", "code", "to", "AST", "nodes", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/quoting.py#L98-L121
224,390
google/tangent
tangent/reverse_ad.py
get_push_pop
def get_push_pop(): """Create pop and push nodes that are linked. Returns: A push and pop node which have `push_func` and `pop_func` annotations respectively, identifying them as such. They also have a `pop` and `push` annotation respectively, which links the push node to the pop node and vice versa. """ push = copy.deepcopy(PUSH) pop = copy.deepcopy(POP) anno.setanno(push, 'pop', pop) anno.setanno(push, 'gen_push', True) anno.setanno(pop, 'push', push) op_id = _generate_op_id() return push, pop, op_id
python
def get_push_pop(): push = copy.deepcopy(PUSH) pop = copy.deepcopy(POP) anno.setanno(push, 'pop', pop) anno.setanno(push, 'gen_push', True) anno.setanno(pop, 'push', push) op_id = _generate_op_id() return push, pop, op_id
[ "def", "get_push_pop", "(", ")", ":", "push", "=", "copy", ".", "deepcopy", "(", "PUSH", ")", "pop", "=", "copy", ".", "deepcopy", "(", "POP", ")", "anno", ".", "setanno", "(", "push", ",", "'pop'", ",", "pop", ")", "anno", ".", "setanno", "(", "...
Create pop and push nodes that are linked. Returns: A push and pop node which have `push_func` and `pop_func` annotations respectively, identifying them as such. They also have a `pop` and `push` annotation respectively, which links the push node to the pop node and vice versa.
[ "Create", "pop", "and", "push", "nodes", "that", "are", "linked", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/reverse_ad.py#L64-L79
224,391
google/tangent
tangent/reverse_ad.py
get_push_pop_stack
def get_push_pop_stack(): """Create pop and push nodes for substacks that are linked. Returns: A push and pop node which have `push_func` and `pop_func` annotations respectively, identifying them as such. They also have a `pop` and `push` annotation respectively, which links the push node to the pop node and vice versa. """ push = copy.deepcopy(PUSH_STACK) pop = copy.deepcopy(POP_STACK) anno.setanno(push, 'pop', pop) anno.setanno(push, 'gen_push', True) anno.setanno(pop, 'push', push) op_id = _generate_op_id() return push, pop, op_id
python
def get_push_pop_stack(): push = copy.deepcopy(PUSH_STACK) pop = copy.deepcopy(POP_STACK) anno.setanno(push, 'pop', pop) anno.setanno(push, 'gen_push', True) anno.setanno(pop, 'push', push) op_id = _generate_op_id() return push, pop, op_id
[ "def", "get_push_pop_stack", "(", ")", ":", "push", "=", "copy", ".", "deepcopy", "(", "PUSH_STACK", ")", "pop", "=", "copy", ".", "deepcopy", "(", "POP_STACK", ")", "anno", ".", "setanno", "(", "push", ",", "'pop'", ",", "pop", ")", "anno", ".", "se...
Create pop and push nodes for substacks that are linked. Returns: A push and pop node which have `push_func` and `pop_func` annotations respectively, identifying them as such. They also have a `pop` and `push` annotation respectively, which links the push node to the pop node and vice versa.
[ "Create", "pop", "and", "push", "nodes", "for", "substacks", "that", "are", "linked", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/reverse_ad.py#L82-L97
224,392
google/tangent
tangent/reverse_ad.py
reverse_ad
def reverse_ad(node, wrt, preserve_result, check_dims): """Perform reverse-mode AD on an AST. This function analyses the AST to determine which variables are active and proceeds by taking the naive derivative. Before returning the primal and adjoint it annotates push and pop statements as such. Args: node: A `FunctionDef` AST node. wrt: A tuple of argument indices with respect to which we take the derivative. preserve_result: A boolean indicating whether the generated derivative function should also return the original return value. check_dims: A boolean indicating whether the seed derivatives should have their dimensions checked to match their primal counterpart. Returns: mod: A `Module` node containing the naive primal and adjoint of the function which can be fed to the `split` and `joint` functions. required: A list of tuples of functions and argument indices. These functions were called by the function but did not have an adjoint. """ if not isinstance(node, gast.FunctionDef): raise TypeError # Activity analysis cfg.forward(node, cfg.Active(wrt)) ad = ReverseAD(wrt, preserve_result, check_dims) pri, adj = ad.visit(node) mod = gast.Module(body=[pri, adj]) mod = annotate.find_stacks(mod) return mod, ad.required, ad.stack
python
def reverse_ad(node, wrt, preserve_result, check_dims): if not isinstance(node, gast.FunctionDef): raise TypeError # Activity analysis cfg.forward(node, cfg.Active(wrt)) ad = ReverseAD(wrt, preserve_result, check_dims) pri, adj = ad.visit(node) mod = gast.Module(body=[pri, adj]) mod = annotate.find_stacks(mod) return mod, ad.required, ad.stack
[ "def", "reverse_ad", "(", "node", ",", "wrt", ",", "preserve_result", ",", "check_dims", ")", ":", "if", "not", "isinstance", "(", "node", ",", "gast", ".", "FunctionDef", ")", ":", "raise", "TypeError", "# Activity analysis", "cfg", ".", "forward", "(", "...
Perform reverse-mode AD on an AST. This function analyses the AST to determine which variables are active and proceeds by taking the naive derivative. Before returning the primal and adjoint it annotates push and pop statements as such. Args: node: A `FunctionDef` AST node. wrt: A tuple of argument indices with respect to which we take the derivative. preserve_result: A boolean indicating whether the generated derivative function should also return the original return value. check_dims: A boolean indicating whether the seed derivatives should have their dimensions checked to match their primal counterpart. Returns: mod: A `Module` node containing the naive primal and adjoint of the function which can be fed to the `split` and `joint` functions. required: A list of tuples of functions and argument indices. These functions were called by the function but did not have an adjoint.
[ "Perform", "reverse", "-", "mode", "AD", "on", "an", "AST", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/reverse_ad.py#L818-L850
224,393
google/tangent
tangent/reverse_ad.py
store_state
def store_state(node, reaching, defined, stack):
    """Push the primal's final state onto the stack for the adjoint.

    All variables whose definitions reach the end of the primal are pushed
    onto the stack, and the adjoint pops them off in reverse order.
    Because Python scoping lets a variable be undefined on some
    control-flow paths, any variable not guaranteed to be defined is
    explicitly initialized to `None` at the top of the primal so the
    pushes are always valid.

    Args:
        node: A module with the primal and adjoint function definitions
            as returned by `reverse_ad`.
        reaching: The variable definitions that reach the end of the primal.
        defined: The variables defined at the end of the primal.
        stack: The stack node to use for storing and restoring state.

    Returns:
        The module with the requisite pushes and pops added so that state
        is transferred between primal and adjoint split-motion calls.
    """
    non_arg_defs = [d for d in reaching
                    if not isinstance(d[1], gast.arguments)]
    if not non_arg_defs:
        return node
    names, original_defs = zip(*non_arg_defs)

    # Explicitly initialize variables that may be undefined on some paths.
    assignments = [quoting.quote('{} = None'.format(name))
                   for name in set(names) - defined]

    # Build matching push (end of primal) and pop (start of adjoint) pairs.
    store, load = [], []
    for name, def_ in zip(names, original_defs):
        # A value originally initialized as a stack must be saved with
        # push_stack/pop_stack; this avoids add_grad calls on the stack,
        # which cause type errors in unoptimized mode (they are usually
        # elided after dead_code_elimination).
        is_stack_init = (isinstance(def_, gast.Assign) and
                         'tangent.Stack()' in quoting.unquote(def_.value))
        if is_stack_init:
            push, pop, op_id = get_push_pop_stack()
        else:
            push, pop, op_id = get_push_pop()
        store.append(template.replace(
            'push(_stack, val, op_id)',
            push=push, val=name, _stack=stack, op_id=op_id))
        load.append(template.replace(
            'val = pop(_stack, op_id)',
            pop=pop, val=name, _stack=stack, op_id=op_id))

    body, return_ = node.body[0].body[:-1], node.body[0].body[-1]
    node.body[0].body = assignments + body + store + [return_]
    node.body[1].body = load[::-1] + node.body[1].body
    return node
python
def store_state(node, reaching, defined, stack):
    # Transfer end-of-primal state to the adjoint via stack push/pop pairs.
    usable = [entry for entry in reaching
              if not isinstance(entry[1], gast.arguments)]
    if not usable:
        return node
    var_names, var_defs = zip(*usable)

    # Variables that are not defined on every control-flow path get an
    # explicit `None` initialization so pushing them is always valid.
    init_stmts = []
    for var in set(var_names) - defined:
        init_stmts.append(quoting.quote('{} = None'.format(var)))

    pushes = []
    pops = []
    for var, definition in zip(var_names, var_defs):
        # Stack-valued variables need push_stack/pop_stack so that no
        # add_grad ends up being applied to a stack object (a type error
        # in unoptimized mode).
        if (isinstance(definition, gast.Assign) and
                'tangent.Stack()' in quoting.unquote(definition.value)):
            push, pop, op_id = get_push_pop_stack()
        else:
            push, pop, op_id = get_push_pop()
        pushes.append(template.replace(
            'push(_stack, val, op_id)',
            push=push, val=var, _stack=stack, op_id=op_id))
        pops.append(template.replace(
            'val = pop(_stack, op_id)',
            pop=pop, val=var, _stack=stack, op_id=op_id))

    primal_fn, adjoint_fn = node.body[0], node.body[1]
    *body, return_stmt = primal_fn.body
    primal_fn.body = init_stmts + body + pushes + [return_stmt]
    adjoint_fn.body = pops[::-1] + adjoint_fn.body
    return node
[ "def", "store_state", "(", "node", ",", "reaching", ",", "defined", ",", "stack", ")", ":", "defs", "=", "[", "def_", "for", "def_", "in", "reaching", "if", "not", "isinstance", "(", "def_", "[", "1", "]", ",", "gast", ".", "arguments", ")", "]", "...
Push the final state of the primal onto the stack for the adjoint. Python's scoping rules make it possible for variables to not be defined in certain blocks based on the control flow path taken at runtime. In order to make sure we don't try to push non-existing variables onto the stack, we defined these variables explicitly (by assigning `None` to them) at the beginning of the function. All the variables that reach the return statement are pushed onto the stack, and in the adjoint they are popped off in reverse order. Args: node: A module with the primal and adjoint function definitions as returned by `reverse_ad`. reaching: The variable definitions that reach the end of the primal. defined: The variables defined at the end of the primal. stack: The stack node to use for storing and restoring state. Returns: node: A node with the requisite pushes and pops added to make sure that state is transferred between primal and adjoint split motion calls.
[ "Push", "the", "final", "state", "of", "the", "primal", "onto", "the", "stack", "for", "the", "adjoint", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/reverse_ad.py#L853-L920
224,394
google/tangent
tangent/reverse_ad.py
split
def split(node, stack):
    """Carry over the state from the primal to the adjoint.

    Args:
        node: A module with the primal and adjoint function definitions
            as returned by `reverse_ad`.
        stack: The stack node to use for storing and restoring state.

    Returns:
        A `Module` node with the two function definitions (primal and
        adjoint) wired together via stack pushes and pops, with analysis
        annotations cleared.
    """
    node, defined, reaching = _fix(node)
    # Insert the pushes/pops that move state across the split.
    node = store_state(node, reaching, defined, stack)
    anno.clearanno(node)
    return node
python
def split(node, stack):
    # Split motion: fix the naive construction, then insert the stack
    # pushes/pops that carry primal state into the adjoint, and finally
    # strip analysis annotations.
    fixed, defined, reaching = _fix(node)
    fixed = store_state(fixed, reaching, defined, stack)
    anno.clearanno(fixed)
    return fixed
[ "def", "split", "(", "node", ",", "stack", ")", ":", "node", ",", "defined", ",", "reaching", "=", "_fix", "(", "node", ")", "# Store and restore the state", "node", "=", "store_state", "(", "node", ",", "reaching", ",", "defined", ",", "stack", ")", "# ...
Carry over the state from the primal to the adjoint. Args: node: A module with the primal and adjoint function definitions as returned by `reverse_ad`. stack: The stack node to use for storing and restoring state. Returns: func: A `Module` node with two function definitions containing the primal and adjoint respectively.
[ "Carry", "over", "the", "state", "from", "the", "primal", "to", "the", "adjoint", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/reverse_ad.py#L923-L942
224,395
google/tangent
tangent/reverse_ad.py
joint
def joint(node):
    """Merge the bodies of primal and adjoint into a single function.

    Args:
        node: A module with the primal and adjoint function definitions
            as returned by `reverse_ad`.

    Returns:
        A `Module` node with a single function definition containing the
        combined primal (minus its return statement) and adjoint.
    """
    node, _, _ = _fix(node)
    primal, adjoint = node.body
    merged = gast.FunctionDef(
        name=primal.name,
        args=adjoint.args,
        body=primal.body[:-1] + adjoint.body,
        decorator_list=[],
        returns=None)
    func = gast.Module(body=[merged])
    # Strip analysis annotations before returning.
    anno.clearanno(func)
    return func
python
def joint(node):
    # Fuse primal and adjoint into one function: the primal body without
    # its trailing return statement, followed by the full adjoint body.
    node, _, _ = _fix(node)
    combined_body = node.body[0].body[:-1] + node.body[1].body
    joint_fn = gast.FunctionDef(
        name=node.body[0].name,
        args=node.body[1].args,
        body=combined_body,
        decorator_list=[],
        returns=None)
    result = gast.Module(body=[joint_fn])
    anno.clearanno(result)
    return result
[ "def", "joint", "(", "node", ")", ":", "node", ",", "_", ",", "_", "=", "_fix", "(", "node", ")", "body", "=", "node", ".", "body", "[", "0", "]", ".", "body", "[", ":", "-", "1", "]", "+", "node", ".", "body", "[", "1", "]", ".", "body",...
Merge the bodies of primal and adjoint into a single function. Args: node: A module with the primal and adjoint function definitions as returned by `reverse_ad`. Returns: func: A `Module` node with a single function definition containing the combined primal and adjoint.
[ "Merge", "the", "bodies", "of", "primal", "and", "adjoint", "into", "a", "single", "function", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/reverse_ad.py#L945-L963
224,396
google/tangent
tangent/reverse_ad.py
_fix
def _fix(node):
    """Fix the naive construction of the adjoint (see `fixes.py`).

    Also runs reaching-definitions and definedness analyses on the primal
    so that `split` mode can use the results to carry state from primal
    to adjoint.

    Args:
        node: A module with the primal and adjoint function definitions
            as returned by `reverse_ad`.

    Returns:
        A tuple of (the fixed module, the variables defined at the end of
        the primal, the variable definitions reaching the end of the
        primal).
    """
    primal, adjoint = node.body[0], node.body[1]
    # Reaching-definitions / definedness analysis on the primal...
    pri_cfg = cfg.CFG.build_cfg(primal)
    defined = cfg.Defined()
    defined.visit(pri_cfg.entry)
    reaching = cfg.ReachingDefinitions()
    reaching.visit(pri_cfg.entry)
    # ...and on the adjoint.
    cfg.forward(adjoint, cfg.Defined())
    cfg.forward(adjoint, cfg.ReachingDefinitions())
    # Remove pushes of variables that were never defined.
    fixes.CleanStack().visit(node)
    fixes.FixStack().visit(primal)
    # Turn gradient accumulation into plain definition where possible.
    fixes.CleanGrad().visit(adjoint)
    # Define gradients that might or might not be set at runtime.
    fixes.FixGrad().visit(adjoint)
    return node, defined.exit, reaching.exit
python
def _fix(node):
    # Run definedness and reaching-definitions analyses over the primal
    # (via an explicit CFG) and the adjoint, then apply the AST repairs
    # from fixes.py. The analyses are returned for use by split motion.
    entry = cfg.CFG.build_cfg(node.body[0]).entry
    defined_analysis = cfg.Defined()
    defined_analysis.visit(entry)
    reaching_analysis = cfg.ReachingDefinitions()
    reaching_analysis.visit(entry)
    cfg.forward(node.body[1], cfg.Defined())
    cfg.forward(node.body[1], cfg.ReachingDefinitions())
    # Remove pushes of never-defined variables.
    fixes.CleanStack().visit(node)
    fixes.FixStack().visit(node.body[0])
    # Prefer definition over accumulation where valid.
    fixes.CleanGrad().visit(node.body[1])
    # Ensure possibly-undefined gradients exist.
    fixes.FixGrad().visit(node.body[1])
    return node, defined_analysis.exit, reaching_analysis.exit
[ "def", "_fix", "(", "node", ")", ":", "# Do reaching definitions analysis on primal and adjoint", "pri_cfg", "=", "cfg", ".", "CFG", ".", "build_cfg", "(", "node", ".", "body", "[", "0", "]", ")", "defined", "=", "cfg", ".", "Defined", "(", ")", "defined", ...
Fix the naive construction of the adjoint. See `fixes.py` for details. This function also returns the result of reaching definitions analysis so that `split` mode can use this to carry over the state from primal to adjoint. Args: node: A module with the primal and adjoint function definitions as returned by `reverse_ad`. Returns: node: A module with the primal and adjoint function with additional variable definitions and such added so that pushes onto the stack and gradient accumulations are all valid. defined: The variables defined at the end of the primal. reaching: The variable definitions that reach the end of the primal.
[ "Fix", "the", "naive", "construction", "of", "the", "adjoint", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/reverse_ad.py#L966-L1004
224,397
google/tangent
tangent/reverse_ad.py
ReverseAD.is_active
def is_active(self, node):
    """Check whether an assignment statement is active.

    An assignment is active when its right-hand side reads any active
    variable. A stack pop on the right-hand side is treated as active
    unconditionally, so it is always processed.

    Args:
        node: An instance of `gast.Assign`.

    Returns:
        Whether the statement is active.
    """
    rhs = node.value
    # Special case: pops are always handled, regardless of activity.
    if (isinstance(rhs, gast.Call) and
            anno.getanno(rhs, 'func', False) == utils.pop):
        return True
    return any(
        isinstance(sub, gast.Name) and isinstance(sub.ctx, gast.Load)
        and sub.id in self.active_variables
        for sub in gast.walk(rhs))
python
def is_active(self, node):
    # An assignment is active if its RHS reads an active variable; a
    # stack pop on the RHS is treated as active unconditionally.
    value = node.value
    if isinstance(value, gast.Call):
        if anno.getanno(value, 'func', False) == utils.pop:
            return True
    for descendant in gast.walk(value):
        if not isinstance(descendant, gast.Name):
            continue
        if (isinstance(descendant.ctx, gast.Load) and
                descendant.id in self.active_variables):
            return True
    return False
[ "def", "is_active", "(", "self", ",", "node", ")", ":", "# Special case: If the right hand side is a pop statement, we want to", "# process it", "if", "(", "isinstance", "(", "node", ".", "value", ",", "gast", ".", "Call", ")", "and", "anno", ".", "getanno", "(", ...
Checks whether a statement is active. An assignment is active when its right hand side contains active variables. Args: node: an instance of gast.Assign Returns: Whether the statement is active.
[ "Checks", "whether", "a", "statement", "is", "active", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/reverse_ad.py#L179-L200
224,398
google/tangent
tangent/reverse_ad.py
ReverseAD.visit_statements
def visit_statements(self, nodes):
    """Generate primal and adjoint versions of a series of statements.

    Primal statements are collected in order; adjoint statement groups
    are collected in reverse order, while the statements inside each
    group keep their own order. `None` results mark statements to drop.
    """
    primals = []
    adjoints = collections.deque()
    for node in nodes:
        primal, adjoint = self.visit(node)
        primal = primal if isinstance(primal, list) else [primal]
        adjoint = adjoint if isinstance(adjoint, list) else [adjoint]
        primals.extend(stmt for stmt in primal if stmt)
        adjoints.extendleft(stmt for stmt in reversed(adjoint) if stmt)
    return primals, list(adjoints)
python
def visit_statements(self, nodes):
    # Visit each statement, collecting primal statements in order and
    # adjoint statement groups in reverse order (statement order within
    # each group is preserved). None entries mark removed statements.
    forward = []
    backward = collections.deque()
    for stmt in nodes:
        pri, adj = self.visit(stmt)
        if not isinstance(pri, list):
            pri = [pri]
        if not isinstance(adj, list):
            adj = [adj]
        forward.extend(filter(None, pri))
        backward.extendleft(filter(None, adj[::-1]))
    return forward, list(backward)
[ "def", "visit_statements", "(", "self", ",", "nodes", ")", ":", "primals", ",", "adjoints", "=", "[", "]", ",", "collections", ".", "deque", "(", ")", "for", "node", "in", "nodes", ":", "primal", ",", "adjoint", "=", "self", ".", "visit", "(", "node"...
Generate the adjoint of a series of statements.
[ "Generate", "the", "adjoint", "of", "a", "series", "of", "statements", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/reverse_ad.py#L283-L297
224,399
google/tangent
tangent/reverse_ad.py
ReverseAD.primal_and_adjoint_for_tracing
def primal_and_adjoint_for_tracing(self, node):
    """Build the primal and adjoint of a traceable function call.

    Args:
        node: `gast.Call` node of a function we wish to trace, instead
            of transform.

    Returns:
        A pair (primal, adjoint): the new assignment replacing the
        original primal call, and the assignment that applies the VJP
        generated in the primal to calculate the adjoint.
    """
    primal_template = grads.primals[tracing.Traceable]
    adjoint_template = grads.adjoints[tracing.Traceable]

    # Pack the call arguments and prepare naming.
    packed_args = node.args
    target = ast_.copy_node(self.orig_target)
    vjp = quoting.quote(self.namer.unique('%s_grad' % node.func.id))
    tmp = create.create_temp(quoting.quote('tmp'), self.namer)
    assert len(node.keywords) == 0

    # Full replacement of the primal call.
    # TODO: do we need to set 'pri_call' on this?
    primal = template.replace(
        primal_template,
        namer=self.namer,
        result=target,
        fn=node.func,
        tmp=tmp,
        vjp=vjp,
        args=gast.Tuple(elts=packed_args, ctx=gast.Load()))

    # Apply the VJP from the primal to get the argument gradients.
    grad_targets = gast.Tuple(
        elts=[create.create_temp_grad(arg, self.namer)
              for arg in packed_args],
        ctx=gast.Store())
    adjoint = template.replace(
        adjoint_template,
        namer=self.namer,
        result=target,
        vjp=vjp,
        dargs=grad_targets)
    return primal, adjoint
python
def primal_and_adjoint_for_tracing(self, node):
    # Replace a call to a traceable function: the primal records a VJP,
    # and the adjoint applies it to produce the argument gradients.
    pri_tpl = grads.primals[tracing.Traceable]
    adj_tpl = grads.adjoints[tracing.Traceable]

    args = node.args
    result = ast_.copy_node(self.orig_target)
    vjp = quoting.quote(self.namer.unique('%s_grad' % node.func.id))
    tmp = create.create_temp(quoting.quote('tmp'), self.namer)
    assert len(node.keywords) == 0

    # TODO: do we need to set 'pri_call' on this?
    primal = template.replace(
        pri_tpl,
        namer=self.namer,
        result=result,
        fn=node.func,
        tmp=tmp,
        vjp=vjp,
        args=gast.Tuple(elts=args, ctx=gast.Load()))

    darg_tuple = gast.Tuple(
        elts=[create.create_temp_grad(a, self.namer) for a in args],
        ctx=gast.Store())
    adjoint = template.replace(
        adj_tpl,
        namer=self.namer,
        result=result,
        vjp=vjp,
        dargs=darg_tuple)
    return primal, adjoint
[ "def", "primal_and_adjoint_for_tracing", "(", "self", ",", "node", ")", ":", "primal_template", "=", "grads", ".", "primals", "[", "tracing", ".", "Traceable", "]", "adjoint_template", "=", "grads", ".", "adjoints", "[", "tracing", ".", "Traceable", "]", "# Pr...
Build the primal and adjoint of a traceable function. Args: node: ast.Call node of a function we wish to trace, instead of transform Returns: primal: new ast.Assign node to replace the original primal call adjoint: new ast.Assign node using the VJP generated in primal to calculate the adjoint.
[ "Build", "the", "primal", "and", "adjoint", "of", "a", "traceable", "function", "." ]
6533e83af09de7345d1b438512679992f080dcc9
https://github.com/google/tangent/blob/6533e83af09de7345d1b438512679992f080dcc9/tangent/reverse_ad.py#L638-L682